diff --git a/cmd/tools/bench/wyhash.v b/cmd/tools/bench/wyhash.v
index 5104434ef1..bf9831c016 100644
--- a/cmd/tools/bench/wyhash.v
+++ b/cmd/tools/bench/wyhash.v
@@ -10,7 +10,7 @@ fn main() {
sample_size := 10000000
min_str_len := 20
max_str_len := 40
- println('Generating $sample_size strings between $min_str_len - $max_str_len chars long...')
+ println('Generating ${sample_size} strings between ${min_str_len} - ${max_str_len} chars long...')
mut checksum := u64(0)
mut start_pos := 0
mut bgenerating := benchmark.start()
diff --git a/cmd/tools/check_os_api_parity.v b/cmd/tools/check_os_api_parity.v
index 78e951a962..9565c6ef22 100644
--- a/cmd/tools/check_os_api_parity.v
+++ b/cmd/tools/check_os_api_parity.v
@@ -52,7 +52,7 @@ fn main() {
}
for mname in app.modules {
if !app.is_verbose {
- eprintln('Checking module: $mname ...')
+ eprintln('Checking module: ${mname} ...')
}
api_base := app.gen_api_for_module_in_os(mname, base_os)
for oname in os_names {
@@ -65,9 +65,9 @@ fn main() {
}
howmany := app.api_differences.len
if howmany > 0 {
- eprintln(term.header('Found $howmany modules with different APIs', '='))
+ eprintln(term.header('Found ${howmany} modules with different APIs', '='))
for m in app.api_differences.keys() {
- eprintln('Module: $m')
+ eprintln('Module: ${m}')
}
exit(1)
}
@@ -108,7 +108,7 @@ fn (app App) gen_api_for_module_in_os(mod_name string, os_name string) string {
fn_signature := s.stringify(b.table, mod_name, map[string]string{})
fn_mod := s.modname()
if fn_mod == mod_name {
- fline := '$fn_mod: $fn_signature'
+ fline := '${fn_mod}: ${fn_signature}'
res << fline
}
}
@@ -122,7 +122,7 @@ fn (app App) gen_api_for_module_in_os(mod_name string, os_name string) string {
fn (mut app App) compare_api(api_base string, api_os string, mod_name string, os_base string, os_target string) {
res := diff.color_compare_strings(app.diff_cmd, rand.ulid(), api_base, api_os)
if res.len > 0 {
- summary := 'Different APIs found for module: `$mod_name`, between OS base: `$os_base` and OS: `$os_target`'
+ summary := 'Different APIs found for module: `${mod_name}`, between OS base: `${os_base}` and OS: `${os_target}`'
eprintln(term.header(summary, '-'))
eprintln(res)
eprintln(term.h_divider('-'))
diff --git a/cmd/tools/fast/fast.v b/cmd/tools/fast/fast.v
index baede39d3a..1c8b800994 100644
--- a/cmd/tools/fast/fast.v
+++ b/cmd/tools/fast/fast.v
@@ -18,7 +18,7 @@ const fast_dir = os.dir(@FILE)
const vdir = os.dir(os.dir(os.dir(fast_dir)))
fn elog(msg string) {
- eprintln('$time.now().format_ss_micro() $msg')
+ eprintln('${time.now().format_ss_micro()} ${msg}')
}
fn main() {
@@ -35,10 +35,10 @@ fn main() {
if os.args.contains('-clang') {
ccompiler_path = 'clang'
}
- elog('fast_dir: $fast_dir | vdir: $vdir | compiler: $ccompiler_path')
+ elog('fast_dir: ${fast_dir} | vdir: ${vdir} | compiler: ${ccompiler_path}')
os.chdir(fast_dir)!
- if !os.exists('$vdir/v') && !os.is_dir('$vdir/vlib') {
+ if !os.exists('${vdir}/v') && !os.is_dir('${vdir}/vlib') {
elog('fast.html generator needs to be located in `v/cmd/tools/fast`')
exit(1)
}
@@ -48,9 +48,9 @@ fn main() {
if !os.args.contains('-noupdate') {
elog('Fetching updates...')
- ret := os.system('$vdir/v up')
+ ret := os.system('${vdir}/v up')
if ret != 0 {
- elog('failed to update V, exit_code: $ret')
+ elog('failed to update V, exit_code: ${ret}')
return
}
}
@@ -59,8 +59,8 @@ fn main() {
commit := exec('git rev-parse HEAD')[..8]
if os.exists('website/index.html') {
uploaded_index := os.read_file('website/index.html')!
- if uploaded_index.contains('>$commit<') {
- elog('NOTE: commit $commit had been benchmarked already.')
+ if uploaded_index.contains('>${commit}<') {
+ elog('NOTE: commit ${commit} had been benchmarked already.')
if !os.args.contains('-force') {
elog('nothing more to do')
return
@@ -69,16 +69,16 @@ fn main() {
}
os.chdir(vdir)!
- message := exec('git log --pretty=format:"%s" -n1 $commit')
- commit_date := exec('git log -n1 --pretty="format:%at" $commit')
+ message := exec('git log --pretty=format:"%s" -n1 ${commit}')
+ commit_date := exec('git log -n1 --pretty="format:%at" ${commit}')
date := time.unix(commit_date.i64())
- elog('Benchmarking commit $commit , with commit message: "$message", commit_date: $commit_date, date: $date')
+ elog('Benchmarking commit ${commit} , with commit message: "${message}", commit_date: ${commit_date}, date: ${date}')
// build an optimized V
if os.args.contains('-do-not-rebuild-vprod') {
if !os.exists('vprod') {
- elog('Exiting, since if you use `-do-not-rebuild-vprod`, you should already have a `$vdir/vprod` executable, but it is missing!')
+ elog('Exiting, since if you use `-do-not-rebuild-vprod`, you should already have a `${vdir}/vprod` executable, but it is missing!')
return
}
} else {
@@ -93,15 +93,15 @@ fn main() {
if !os.args.contains('-do-not-rebuild-caches') {
elog('clearing caches...')
// cache vlib modules
- exec('$vdir/v wipe-cache')
- exec('$vdir/v -o vwarm_caches -cc $ccompiler_path cmd/v')
+ exec('${vdir}/v wipe-cache')
+ exec('${vdir}/v -o vwarm_caches -cc ${ccompiler_path} cmd/v')
}
// measure
- diff1 := measure('$vdir/vprod $voptions -o v.c cmd/v', 'v.c')
- diff2 := measure('$vdir/vprod $voptions -cc $ccompiler_path -o v2 cmd/v', 'v2')
+ diff1 := measure('${vdir}/vprod ${voptions} -o v.c cmd/v', 'v.c')
+ diff2 := measure('${vdir}/vprod ${voptions} -cc ${ccompiler_path} -o v2 cmd/v', 'v2')
diff3 := 0 // measure('$vdir/vprod -native $vdir/cmd/tools/1mil.v', 'native 1mil')
- diff4 := measure('$vdir/vprod $voptions -cc $ccompiler_path examples/hello_world.v',
+ diff4 := measure('${vdir}/vprod ${voptions} -cc ${ccompiler_path} examples/hello_world.v',
'hello.v')
vc_size := os.file_size('v.c') / 1000
scan, parse, check, cgen, vlines := measure_steps_minimal(vdir)!
@@ -113,19 +113,19 @@ fn main() {
table := os.read_file('table.html')!
new_table :=
'
- $date.format() |
- $commit |
- $html_message |
+ ${date.format()} |
+ ${commit} |
+ ${html_message} |
${diff1}ms |
${diff2}ms |
${diff3}ms |
${diff4}ms |
- $vc_size KB |
+ ${vc_size} KB |
${parse}ms |
${check}ms |
${cgen}ms |
${scan}ms |
- $vlines |
+ ${vlines} |
${int(f64(vlines) / f64(diff1) * 1000.0)} |
\n' +
table.trim_space() + '\n'
@@ -159,7 +159,7 @@ fn exec(s string) string {
// measure returns milliseconds
fn measure(cmd string, description string) int {
- elog(' Measuring $description, warmups: $warmup_samples, samples: $max_samples, discard: $discard_highest_samples, with cmd: `$cmd`')
+ elog(' Measuring ${description}, warmups: ${warmup_samples}, samples: ${max_samples}, discard: ${discard_highest_samples}, with cmd: `${cmd}`')
for _ in 0 .. warmup_samples {
exec(cmd)
}
@@ -170,23 +170,23 @@ fn measure(cmd string, description string) int {
exec(cmd)
sample := int(sw.elapsed().milliseconds())
runs << sample
- println('$sample ms')
+ println('${sample} ms')
flush_stdout()
}
runs.sort()
- elog(' runs before discarding: $runs, avg: ${f64(arrays.sum(runs) or { 0 }) / runs.len:5.2f}')
+ elog(' runs before discarding: ${runs}, avg: ${f64(arrays.sum(runs) or { 0 }) / runs.len:5.2f}')
// Discard the highest times, since on AWS, they are caused by random load spikes,
// that are unpredictable, add noise and skew the statistics, without adding useful
// insights:
for _ in 0 .. discard_highest_samples {
runs.pop()
}
- elog(' runs after discarding: $runs, avg: ${f64(arrays.sum(runs) or { 0 }) / runs.len:5.2f}')
+ elog(' runs after discarding: ${runs}, avg: ${f64(arrays.sum(runs) or { 0 }) / runs.len:5.2f}')
return int(f64(arrays.sum(runs) or { 0 }) / runs.len)
}
fn measure_steps_minimal(vdir string) !(int, int, int, int, int) {
- elog('measure_steps_minimal $vdir, samples: $max_samples')
+ elog('measure_steps_minimal ${vdir}, samples: ${max_samples}')
mut scans, mut parses, mut checks, mut cgens, mut vliness := []int{}, []int{}, []int{}, []int{}, []int{}
for i in 0 .. max_samples {
scan, parse, check, cgen, vlines := measure_steps_one_sample(vdir)
@@ -195,15 +195,15 @@ fn measure_steps_minimal(vdir string) !(int, int, int, int, int) {
checks << check
cgens << cgen
vliness << vlines
- elog(' [${i:2}/${max_samples:2}] scan: $scan ms, min parse: $parse ms, min check: $check ms, min cgen: $cgen ms, min vlines: $vlines ms')
+ elog(' [${i:2}/${max_samples:2}] scan: ${scan} ms, min parse: ${parse} ms, min check: ${check} ms, min cgen: ${cgen} ms, min vlines: ${vlines} ms')
}
scan, parse, check, cgen, vlines := arrays.min(scans)!, arrays.min(parses)!, arrays.min(checks)!, arrays.min(cgens)!, arrays.min(vliness)!
- elog('measure_steps_minimal => min scan: $scan ms, min parse: $parse ms, min check: $check ms, min cgen: $cgen ms, min vlines: $vlines ms')
+ elog('measure_steps_minimal => min scan: ${scan} ms, min parse: ${parse} ms, min check: ${check} ms, min cgen: ${cgen} ms, min vlines: ${vlines} ms')
return scan, parse, check, cgen, vlines
}
fn measure_steps_one_sample(vdir string) (int, int, int, int, int) {
- resp := os.execute_or_exit('$vdir/vprod $voptions -o v.c cmd/v')
+ resp := os.execute_or_exit('${vdir}/vprod ${voptions} -o v.c cmd/v')
mut scan, mut parse, mut check, mut cgen, mut vlines := 0, 0, 0, 0, 0
lines := resp.output.split_into_lines()
diff --git a/cmd/tools/fast/fast_job.v b/cmd/tools/fast/fast_job.v
index df6ecbf9c9..08f2d4b358 100644
--- a/cmd/tools/fast/fast_job.v
+++ b/cmd/tools/fast/fast_job.v
@@ -13,11 +13,11 @@ const vexe = os.join_path(vdir, 'v')
const sleep_period = 120
fn elog(msg string) {
- eprintln('$time.now().format_ss_micro() $msg')
+ eprintln('${time.now().format_ss_micro()} ${msg}')
}
fn delay() {
- elog('Sleeping for $sleep_period seconds...')
+ elog('Sleeping for ${sleep_period} seconds...')
time.sleep(sleep_period * time.second)
}
@@ -25,11 +25,11 @@ fn delay() {
// runs fast.v, pushes the HTML result to the fast.vlang.io GH pages repo.
fn main() {
os.setenv('LANG', 'C', true)
- elog('fast_job fast_dir: $fast_dir | vdir: $vdir | vexe: $vexe')
+ elog('fast_job fast_dir: ${fast_dir} | vdir: ${vdir} | vexe: ${vexe}')
os.chdir(fast_dir)!
- elog('fast_job start in os.getwd(): $os.getwd()')
+ elog('fast_job start in os.getwd(): ${os.getwd()}')
defer {
elog('fast_job end')
}
@@ -41,7 +41,7 @@ fn main() {
for {
elog('------------------- Checking for updates ... -------------------')
res_pull := os.execute('git pull --rebase')
- elog('> res_pull.output: $res_pull.output')
+ elog('> res_pull.output: ${res_pull.output}')
if res_pull.exit_code != 0 {
elog('Git pull failed. You may have uncommitted changes?')
delay()
@@ -72,7 +72,7 @@ fn main() {
elog('running ./fast -upload')
fast_exit_code := os.system('./fast -upload')
if fast_exit_code != 0 {
- println('fast_exit_code = $fast_exit_code, != 0')
+ println('fast_exit_code = ${fast_exit_code}, != 0')
}
delay()
diff --git a/cmd/tools/fuzz/map_fuzz.v b/cmd/tools/fuzz/map_fuzz.v
index 19d845d33a..a9e77e4aa3 100644
--- a/cmd/tools/fuzz/map_fuzz.v
+++ b/cmd/tools/fuzz/map_fuzz.v
@@ -133,7 +133,7 @@ fn fuzz6() {
fn main() {
seed := u32(time.ticks())
- println('seed: $seed.hex()')
+ println('seed: ${seed.hex()}')
rand.seed([seed, seed])
fuzz1()
fuzz2()
diff --git a/cmd/tools/gen1m.v b/cmd/tools/gen1m.v
index 68b552764a..6dc9c9bdc8 100644
--- a/cmd/tools/gen1m.v
+++ b/cmd/tools/gen1m.v
@@ -2,7 +2,7 @@ fn main() {
for i in 0 .. 100000 {
println('
fn foo${i}() {
- x := $i
+ x := ${i}
mut a := 1 + x
a += 2
print(a)
diff --git a/cmd/tools/gen_vc.v b/cmd/tools/gen_vc.v
index 7428bb433d..33d694421d 100644
--- a/cmd/tools/gen_vc.v
+++ b/cmd/tools/gen_vc.v
@@ -64,7 +64,7 @@ const (
// server port
server_port = 7171
// log file
- log_file = '$work_dir/log.txt'
+ log_file = '${work_dir}/log.txt'
// log_to is either 'file' or 'terminal'
log_to = 'terminal'
)
@@ -209,7 +209,7 @@ fn (mut gen_vc GenVC) generate() {
os.mkdir(gen_vc.options.work_dir) or { panic(err) }
// still dosen't exist... we have a problem
if !os.is_dir(gen_vc.options.work_dir) {
- gen_vc.logger.error('error creating directory: $gen_vc.options.work_dir')
+ gen_vc.logger.error('error creating directory: ${gen_vc.options.work_dir}')
gen_vc.gen_error = true
return
}
@@ -221,10 +221,10 @@ fn (mut gen_vc GenVC) generate() {
// first check to see if the local v repo is behind master
// if it isn't behind theres no point continuing further
if !gen_vc.options.serve && os.is_dir(git_repo_dir_v) {
- gen_vc.cmd_exec('git -C $git_repo_dir_v checkout master')
+ gen_vc.cmd_exec('git -C ${git_repo_dir_v} checkout master')
// fetch the remote repo just in case there are newer commits there
- gen_vc.cmd_exec('git -C $git_repo_dir_v fetch')
- git_status := gen_vc.cmd_exec('git -C $git_repo_dir_v status')
+ gen_vc.cmd_exec('git -C ${git_repo_dir_v} fetch')
+ git_status := gen_vc.cmd_exec('git -C ${git_repo_dir_v} status')
if !git_status.contains('behind') && !gen_vc.options.force {
gen_vc.logger.warn('v repository is already up to date.')
return
@@ -233,11 +233,11 @@ fn (mut gen_vc GenVC) generate() {
// delete repos
gen_vc.purge_repos()
// clone repos
- gen_vc.cmd_exec('git clone --depth 1 https://$git_repo_v $git_repo_dir_v')
- gen_vc.cmd_exec('git clone --depth 1 https://$git_repo_vc $git_repo_dir_vc')
+ gen_vc.cmd_exec('git clone --depth 1 https://${git_repo_v} ${git_repo_dir_v}')
+ gen_vc.cmd_exec('git clone --depth 1 https://${git_repo_vc} ${git_repo_dir_vc}')
// get output of git log -1 (last commit)
- git_log_v := gen_vc.cmd_exec('git -C $git_repo_dir_v log -1 --format="commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
- git_log_vc := gen_vc.cmd_exec('git -C $git_repo_dir_vc log -1 --format="Commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
+ git_log_v := gen_vc.cmd_exec('git -C ${git_repo_dir_v} log -1 --format="commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
+ git_log_vc := gen_vc.cmd_exec('git -C ${git_repo_dir_vc} log -1 --format="Commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
// date of last commit in each repo
ts_v := git_log_v.find_between('Date:', '\n').trim_space()
ts_vc := git_log_vc.find_between('Date:', '\n').trim_space()
@@ -255,45 +255,45 @@ fn (mut gen_vc GenVC) generate() {
last_commit_subject := git_log_v.find_between('Subject:', '\n').trim_space().replace("'",
'"')
// log some info
- gen_vc.logger.debug('last commit time ($git_repo_v): ' + last_commit_time_v.format_ss())
- gen_vc.logger.debug('last commit time ($git_repo_vc): ' + last_commit_time_vc.format_ss())
- gen_vc.logger.debug('last commit hash ($git_repo_v): $last_commit_hash_v')
- gen_vc.logger.debug('last commit subject ($git_repo_v): $last_commit_subject')
+ gen_vc.logger.debug('last commit time (${git_repo_v}): ' + last_commit_time_v.format_ss())
+ gen_vc.logger.debug('last commit time (${git_repo_vc}): ' + last_commit_time_vc.format_ss())
+ gen_vc.logger.debug('last commit hash (${git_repo_v}): ${last_commit_hash_v}')
+ gen_vc.logger.debug('last commit subject (${git_repo_v}): ${last_commit_subject}')
// if vc repo already has a newer commit than the v repo, assume it's up to date
if t_unix_vc >= t_unix_v && !gen_vc.options.force {
gen_vc.logger.warn('vc repository is already up to date.')
return
}
// try build v for current os (linux in this case)
- gen_vc.cmd_exec('make -C $git_repo_dir_v')
- v_exec := '$git_repo_dir_v/v'
+ gen_vc.cmd_exec('make -C ${git_repo_dir_v}')
+ v_exec := '${git_repo_dir_v}/v'
// check if make was successful
gen_vc.assert_file_exists_and_is_not_too_short(v_exec, err_msg_make)
// build v.c for each os
for os_name in vc_build_oses {
c_file := if os_name == 'nix' { 'v.c' } else { 'v_win.c' }
- v_flags := if os_name == 'nix' { '-os cross' } else { '-os $os_name' }
+ v_flags := if os_name == 'nix' { '-os cross' } else { '-os ${os_name}' }
// try generate .c file
- gen_vc.cmd_exec('$v_exec $v_flags -o $c_file $git_repo_dir_v/cmd/v')
+ gen_vc.cmd_exec('${v_exec} ${v_flags} -o ${c_file} ${git_repo_dir_v}/cmd/v')
// check if the c file seems ok
gen_vc.assert_file_exists_and_is_not_too_short(c_file, err_msg_gen_c)
// embed the latest v commit hash into the c file
- gen_vc.cmd_exec('sed -i \'1s/^/#define V_COMMIT_HASH "$last_commit_hash_v_short"\\n/\' $c_file')
+ gen_vc.cmd_exec('sed -i \'1s/^/#define V_COMMIT_HASH "${last_commit_hash_v_short}"\\n/\' ${c_file}')
// move to vc repo
- gen_vc.cmd_exec('mv $c_file $git_repo_dir_vc/$c_file')
+ gen_vc.cmd_exec('mv ${c_file} ${git_repo_dir_vc}/${c_file}')
// add new .c file to local vc repo
- gen_vc.cmd_exec('git -C $git_repo_dir_vc add $c_file')
+ gen_vc.cmd_exec('git -C ${git_repo_dir_vc} add ${c_file}')
}
// check if the vc repo actually changed
- git_status := gen_vc.cmd_exec('git -C $git_repo_dir_vc status')
+ git_status := gen_vc.cmd_exec('git -C ${git_repo_dir_vc} status')
if git_status.contains('nothing to commit') {
gen_vc.logger.error('no changes to vc repo: something went wrong.')
gen_vc.gen_error = true
}
// commit changes to local vc repo
- gen_vc.cmd_exec_safe("git -C $git_repo_dir_vc commit -m '[v:master] $last_commit_hash_v_short - $last_commit_subject'")
+ gen_vc.cmd_exec_safe("git -C ${git_repo_dir_vc} commit -m '[v:master] ${last_commit_hash_v_short} - ${last_commit_subject}'")
// push changes to remote vc repo
- gen_vc.cmd_exec_safe('git -C $git_repo_dir_vc push https://${urllib.query_escape(git_username)}:${urllib.query_escape(git_password)}@$git_repo_vc master')
+ gen_vc.cmd_exec_safe('git -C ${git_repo_dir_vc} push https://${urllib.query_escape(git_username)}:${urllib.query_escape(git_password)}@${git_repo_vc} master')
}
// only execute when dry_run option is false, otherwise just log
@@ -312,10 +312,10 @@ fn (mut gen_vc GenVC) command_execute(cmd string, dry bool) string {
if dry {
return gen_vc.command_execute_dry(cmd)
}
- gen_vc.logger.info('cmd: $cmd')
+ gen_vc.logger.info('cmd: ${cmd}')
r := os.execute(cmd)
if r.exit_code < 0 {
- gen_vc.logger.error('$err_msg_cmd_x: "$cmd" could not start.')
+ gen_vc.logger.error('${err_msg_cmd_x}: "${cmd}" could not start.')
gen_vc.logger.error(r.output)
// something went wrong, better start fresh next time
gen_vc.purge_repos()
@@ -323,7 +323,7 @@ fn (mut gen_vc GenVC) command_execute(cmd string, dry bool) string {
return ''
}
if r.exit_code != 0 {
- gen_vc.logger.error('$err_msg_cmd_x: "$cmd" failed.')
+ gen_vc.logger.error('${err_msg_cmd_x}: "${cmd}" failed.')
gen_vc.logger.error(r.output)
// something went wrong, better start fresh next time
gen_vc.purge_repos()
@@ -335,35 +335,35 @@ fn (mut gen_vc GenVC) command_execute(cmd string, dry bool) string {
// just log cmd, dont execute
fn (mut gen_vc GenVC) command_execute_dry(cmd string) string {
- gen_vc.logger.info('cmd (dry): "$cmd"')
+ gen_vc.logger.info('cmd (dry): "${cmd}"')
return ''
}
// delete repo directories
fn (mut gen_vc GenVC) purge_repos() {
// delete old repos (better to be fully explicit here, since these are destructive operations)
- mut repo_dir := '$gen_vc.options.work_dir/$git_repo_dir_v'
+ mut repo_dir := '${gen_vc.options.work_dir}/${git_repo_dir_v}'
if os.is_dir(repo_dir) {
- gen_vc.logger.info('purging local repo: "$repo_dir"')
- gen_vc.cmd_exec('rm -rf $repo_dir')
+ gen_vc.logger.info('purging local repo: "${repo_dir}"')
+ gen_vc.cmd_exec('rm -rf ${repo_dir}')
}
- repo_dir = '$gen_vc.options.work_dir/$git_repo_dir_vc'
+ repo_dir = '${gen_vc.options.work_dir}/${git_repo_dir_vc}'
if os.is_dir(repo_dir) {
- gen_vc.logger.info('purging local repo: "$repo_dir"')
- gen_vc.cmd_exec('rm -rf $repo_dir')
+ gen_vc.logger.info('purging local repo: "${repo_dir}"')
+ gen_vc.cmd_exec('rm -rf ${repo_dir}')
}
}
// check if file size is too short
fn (mut gen_vc GenVC) assert_file_exists_and_is_not_too_short(f string, emsg string) {
if !os.exists(f) {
- gen_vc.logger.error('$err_msg_build: $emsg .')
+ gen_vc.logger.error('${err_msg_build}: ${emsg} .')
gen_vc.gen_error = true
return
}
fsize := os.file_size(f)
if fsize < too_short_file_limit {
- gen_vc.logger.error('$err_msg_build: $f exists, but is too short: only $fsize bytes.')
+ gen_vc.logger.error('${err_msg_build}: ${f} exists, but is too short: only ${fsize} bytes.')
gen_vc.gen_error = true
return
}
diff --git a/cmd/tools/git_pre_commit_hook.vsh b/cmd/tools/git_pre_commit_hook.vsh
index 42dffb5a60..4a461d20f2 100755
--- a/cmd/tools/git_pre_commit_hook.vsh
+++ b/cmd/tools/git_pre_commit_hook.vsh
@@ -39,12 +39,14 @@ fn main() {
}
exit(verify_result.exit_code)
} else {
- eprintln('The V pre commit hook will format $vfiles.len V file(s):')
+ eprintln('The V pre commit hook will format ${vfiles.len} V file(s):')
+ // vfmt off
for vfile in vfiles {
eprintln(' ${term.bold('$vfile')}')
}
+ // vfmt on
all_vfiles_on_a_line := vfiles.map(os.quoted_path(it)).join(' ')
- os.system('v fmt -w $all_vfiles_on_a_line')
- os.system('git add $all_vfiles_on_a_line')
+ os.system('v fmt -w ${all_vfiles_on_a_line}')
+ os.system('git add ${all_vfiles_on_a_line}')
}
}
diff --git a/cmd/tools/measure/parser_speed.v b/cmd/tools/measure/parser_speed.v
index 0b82a5d5e7..93aa7436e9 100644
--- a/cmd/tools/measure/parser_speed.v
+++ b/cmd/tools/measure/parser_speed.v
@@ -44,7 +44,7 @@ fn process_files(files []string) ! {
total_us += f_us
total_bytes += p.scanner.text.len
total_tokens += p.scanner.all_tokens.len
- println('${f_us:10}us ${p.scanner.all_tokens.len:10} ${p.scanner.text.len:10} ${(f64(p.scanner.text.len) / p.scanner.all_tokens.len):7.3} ${p.errors.len:4} $f')
+ println('${f_us:10}us ${p.scanner.all_tokens.len:10} ${p.scanner.text.len:10} ${(f64(p.scanner.text.len) / p.scanner.all_tokens.len):7.3} ${p.errors.len:4} ${f}')
}
println('${total_us:10}us ${total_tokens:10} ${total_bytes:10} ${(f64(total_tokens) / total_bytes):7.3} | speed: ${(f64(total_bytes) / total_us):2.5f} MB/s')
}
diff --git a/cmd/tools/measure/scanner_speed.v b/cmd/tools/measure/scanner_speed.v
index 224f11704d..c690a3398f 100644
--- a/cmd/tools/measure/scanner_speed.v
+++ b/cmd/tools/measure/scanner_speed.v
@@ -36,7 +36,7 @@ fn process_files(files []string) ! {
total_us += f_us
total_bytes += s.text.len
total_tokens += s.all_tokens.len
- println('${f_us:10}us ${s.all_tokens.len:10} ${s.text.len:10} ${(f64(s.text.len) / s.all_tokens.len):7.3f} $f')
+ println('${f_us:10}us ${s.all_tokens.len:10} ${s.text.len:10} ${(f64(s.text.len) / s.all_tokens.len):7.3f} ${f}')
}
println('${total_us:10}us ${total_tokens:10} ${total_bytes:10} ${(f64(total_tokens) / total_bytes):7.3f} | speed: ${(f64(total_bytes) / total_us):2.5f} MB/s')
}
diff --git a/cmd/tools/modules/scripting/scripting.v b/cmd/tools/modules/scripting/scripting.v
index ce936e7827..017a4eea54 100644
--- a/cmd/tools/modules/scripting/scripting.v
+++ b/cmd/tools/modules/scripting/scripting.v
@@ -50,19 +50,19 @@ pub fn cprintln_strong(omessage string) {
pub fn verbose_trace(label string, message string) {
if os.getenv('VERBOSE').len > 0 {
- slabel := '$time.now().format_ss_milli() $label'
- cprintln('# ${slabel:-43s} : $message')
+ slabel := '${time.now().format_ss_milli()} ${label}'
+ cprintln('# ${slabel:-43s} : ${message}')
}
}
pub fn verbose_trace_strong(label string, omessage string) {
if os.getenv('VERBOSE').len > 0 {
- slabel := '$time.now().format_ss_milli() $label'
+ slabel := '${time.now().format_ss_milli()} ${label}'
mut message := omessage
if scripting.term_colors {
message = term.bright_green(message)
}
- cprintln('# ${slabel:-43s} : $message')
+ cprintln('# ${slabel:-43s} : ${message}')
}
}
@@ -76,7 +76,7 @@ pub fn verbose_trace_exec_result(x os.Result) {
if scripting.term_colors {
line = term.bright_green(line)
}
- cprintln('# ${lnum:3d}: $line')
+ cprintln('# ${lnum:3d}: ${line}')
lnum++
}
cprintln('# ----------------------------------------------------------------------')
@@ -84,11 +84,11 @@ pub fn verbose_trace_exec_result(x os.Result) {
}
fn modfn(mname string, fname string) string {
- return '${mname}.$fname'
+ return '${mname}.${fname}'
}
pub fn chdir(path string) {
- verbose_trace_strong(modfn(@MOD, @FN), 'cd $path')
+ verbose_trace_strong(modfn(@MOD, @FN), 'cd ${path}')
os.chdir(path) or {
verbose_trace(modfn(@MOD, @FN), '## failed.')
return
@@ -96,7 +96,7 @@ pub fn chdir(path string) {
}
pub fn mkdir(path string) ? {
- verbose_trace_strong(modfn(@MOD, @FN), 'mkdir $path')
+ verbose_trace_strong(modfn(@MOD, @FN), 'mkdir ${path}')
os.mkdir(path) or {
verbose_trace(modfn(@MOD, @FN), '## failed.')
return err
@@ -104,7 +104,7 @@ pub fn mkdir(path string) ? {
}
pub fn mkdir_all(path string) ? {
- verbose_trace_strong(modfn(@MOD, @FN), 'mkdir -p $path')
+ verbose_trace_strong(modfn(@MOD, @FN), 'mkdir -p ${path}')
os.mkdir_all(path) or {
verbose_trace(modfn(@MOD, @FN), '## failed.')
return err
@@ -112,7 +112,7 @@ pub fn mkdir_all(path string) ? {
}
pub fn rmrf(path string) {
- verbose_trace_strong(modfn(@MOD, @FN), 'rm -rf $path')
+ verbose_trace_strong(modfn(@MOD, @FN), 'rm -rf ${path}')
if os.exists(path) {
if os.is_dir(path) {
os.rmdir_all(path) or { panic(err) }
@@ -165,10 +165,10 @@ pub fn exit_0_status(cmd string) bool {
pub fn tool_must_exist(toolcmd string) {
verbose_trace(modfn(@MOD, @FN), toolcmd)
- if exit_0_status('type $toolcmd') {
+ if exit_0_status('type ${toolcmd}') {
return
}
- eprintln('Missing tool: $toolcmd')
+ eprintln('Missing tool: ${toolcmd}')
eprintln('Please try again after you install it.')
exit(1)
}
@@ -182,6 +182,6 @@ pub fn used_tools_must_exist(tools []string) {
pub fn show_sizes_of_files(files []string) {
for f in files {
size := os.file_size(f)
- println('$size $f') // println('${size:10d} $f')
+ println('${size} ${f}') // println('${size:10d} $f')
}
}
diff --git a/cmd/tools/modules/testing/common.v b/cmd/tools/modules/testing/common.v
index daa5e93e14..0c7d674bad 100644
--- a/cmd/tools/modules/testing/common.v
+++ b/cmd/tools/modules/testing/common.v
@@ -80,7 +80,7 @@ pub fn (mut ts TestSession) add_failed_cmd(cmd string) {
pub fn (mut ts TestSession) show_list_of_failed_tests() {
for i, cmd in ts.failed_cmds {
- eprintln(term.failed('Failed command ${i + 1}:') + ' $cmd')
+ eprintln(term.failed('Failed command ${i + 1}:') + ' ${cmd}')
}
}
@@ -132,12 +132,12 @@ pub fn (mut ts TestSession) print_messages() {
if ts.progress_mode {
// progress mode, the last line is rewritten many times:
if is_ok && !ts.silent_mode {
- print('\r$empty\r$msg')
+ print('\r${empty}\r${msg}')
flush_stdout()
} else {
// the last \n is needed, so SKIP/FAIL messages
// will not get overwritten by the OK ones
- eprint('\r$empty\r$msg\n')
+ eprint('\r${empty}\r${msg}\n')
}
continue
}
@@ -393,10 +393,10 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
mut status := os.system(cmd)
if status != 0 {
details := get_test_details(file)
- os.setenv('VTEST_RETRY_MAX', '$details.retry', true)
+ os.setenv('VTEST_RETRY_MAX', '${details.retry}', true)
for retry := 1; retry <= details.retry; retry++ {
- ts.append_message(.info, ' [stats] retrying $retry/$details.retry of $relative_file ; known flaky: $details.flaky ...')
- os.setenv('VTEST_RETRY', '$retry', true)
+ ts.append_message(.info, ' [stats] retrying ${retry}/${details.retry} of ${relative_file} ; known flaky: ${details.flaky} ...')
+ os.setenv('VTEST_RETRY', '${retry}', true)
status = os.system(cmd)
if status == 0 {
unsafe {
@@ -406,7 +406,7 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
time.sleep(500 * time.millisecond)
}
if details.flaky && !testing.fail_flaky {
- ts.append_message(.info, ' *FAILURE* of the known flaky test file $relative_file is ignored, since VTEST_FAIL_FLAKY is 0 . Retry count: $details.retry .')
+ ts.append_message(.info, ' *FAILURE* of the known flaky test file ${relative_file} is ignored, since VTEST_FAIL_FLAKY is 0 . Retry count: ${details.retry} .')
unsafe {
goto test_passed_system
}
@@ -422,7 +422,7 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
}
} else {
if testing.show_start {
- ts.append_message(.info, ' starting $relative_file ...')
+ ts.append_message(.info, ' starting ${relative_file} ...')
}
mut r := os.execute(cmd)
if r.exit_code < 0 {
@@ -434,10 +434,10 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
}
if r.exit_code != 0 {
details := get_test_details(file)
- os.setenv('VTEST_RETRY_MAX', '$details.retry', true)
+ os.setenv('VTEST_RETRY_MAX', '${details.retry}', true)
for retry := 1; retry <= details.retry; retry++ {
- ts.append_message(.info, ' retrying $retry/$details.retry of $relative_file ; known flaky: $details.flaky ...')
- os.setenv('VTEST_RETRY', '$retry', true)
+ ts.append_message(.info, ' retrying ${retry}/${details.retry} of ${relative_file} ; known flaky: ${details.flaky} ...')
+ os.setenv('VTEST_RETRY', '${retry}', true)
r = os.execute(cmd)
if r.exit_code == 0 {
unsafe {
@@ -446,7 +446,7 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
}
}
if details.flaky && !testing.fail_flaky {
- ts.append_message(.info, ' *FAILURE* of the known flaky test file $relative_file is ignored, since VTEST_FAIL_FLAKY is 0 . Retry count: $details.retry .')
+ ts.append_message(.info, ' *FAILURE* of the known flaky test file ${relative_file} is ignored, since VTEST_FAIL_FLAKY is 0 . Retry count: ${details.retry} .')
unsafe {
goto test_passed_execute
}
@@ -454,7 +454,7 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
ts.benchmark.fail()
tls_bench.fail()
ending_newline := if r.output.ends_with('\n') { '\n' } else { '' }
- ts.append_message(.fail, tls_bench.step_message_fail('$normalised_relative_file\n$r.output.trim_space()$ending_newline'))
+ ts.append_message(.fail, tls_bench.step_message_fail('${normalised_relative_file}\n${r.output.trim_space()}${ending_newline}'))
ts.add_failed_cmd(cmd)
} else {
test_passed_execute:
@@ -474,7 +474,7 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
pub fn vlib_should_be_present(parent_dir string) {
vlib_dir := os.join_path_single(parent_dir, 'vlib')
if !os.is_dir(vlib_dir) {
- eprintln('$vlib_dir is missing, it must be next to the V executable')
+ eprintln('${vlib_dir} is missing, it must be next to the V executable')
exit(1)
}
}
@@ -486,7 +486,7 @@ pub fn prepare_test_session(zargs string, folder string, oskipped []string, main
vargs := zargs.replace(vexe, '')
eheader(main_label)
if vargs.len > 0 {
- eprintln('v compiler args: "$vargs"')
+ eprintln('v compiler args: "${vargs}"')
}
mut session := new_test_session(vargs, true)
files := os.walk_ext(os.join_path_single(parent_dir, folder), '.v')
@@ -532,8 +532,8 @@ pub fn prepare_test_session(zargs string, folder string, oskipped []string, main
pub type FnTestSetupCb = fn (mut session TestSession)
pub fn v_build_failing_skipped(zargs string, folder string, oskipped []string, cb FnTestSetupCb) bool {
- main_label := 'Building $folder ...'
- finish_label := 'building $folder'
+ main_label := 'Building ${folder} ...'
+ finish_label := 'building ${folder}'
mut session := prepare_test_session(zargs, folder, oskipped, main_label)
cb(mut session)
session.test()
@@ -562,22 +562,22 @@ pub fn building_any_v_binaries_failed() bool {
vlib_should_be_present(parent_dir)
os.chdir(parent_dir) or { panic(err) }
mut failed := false
- v_build_commands := ['$vexe -o v_g -g cmd/v', '$vexe -o v_prod_g -prod -g cmd/v',
- '$vexe -o v_cg -cg cmd/v', '$vexe -o v_prod_cg -prod -cg cmd/v',
- '$vexe -o v_prod -prod cmd/v']
+ v_build_commands := ['${vexe} -o v_g -g cmd/v',
+ '${vexe} -o v_prod_g -prod -g cmd/v', '${vexe} -o v_cg -cg cmd/v',
+ '${vexe} -o v_prod_cg -prod -cg cmd/v', '${vexe} -o v_prod -prod cmd/v']
mut bmark := benchmark.new_benchmark()
for cmd in v_build_commands {
bmark.step()
if build_v_cmd_failed(cmd) {
bmark.fail()
failed = true
- eprintln(bmark.step_message_fail('command: $cmd . See details above ^^^^^^^'))
+ eprintln(bmark.step_message_fail('command: ${cmd} . See details above ^^^^^^^'))
eprintln('')
continue
}
bmark.ok()
if !testing.hide_oks {
- eprintln(bmark.step_message_ok('command: $cmd'))
+ eprintln(bmark.step_message_ok('command: ${cmd}'))
}
}
bmark.stop()
@@ -600,7 +600,7 @@ pub fn header(msg string) {
// The new nested folder, and its contents, will get removed after all tests/programs succeed.
pub fn setup_new_vtmp_folder() string {
now := time.sys_mono_now()
- new_vtmp_dir := os.join_path(os.vtmp_dir(), 'tsession_${sync.thread_id().hex()}_$now')
+ new_vtmp_dir := os.join_path(os.vtmp_dir(), 'tsession_${sync.thread_id().hex()}_${now}')
os.mkdir_all(new_vtmp_dir) or { panic(err) }
os.setenv('VTMP', new_vtmp_dir, true)
return new_vtmp_dir
@@ -632,5 +632,5 @@ pub fn find_started_process(pname string) ?string {
return line
}
}
- return error('could not find process matching $pname')
+ return error('could not find process matching ${pname}')
}
diff --git a/cmd/tools/modules/vgit/vgit.v b/cmd/tools/modules/vgit/vgit.v
index e85c743d3d..389da4851f 100644
--- a/cmd/tools/modules/vgit/vgit.v
+++ b/cmd/tools/modules/vgit/vgit.v
@@ -22,9 +22,9 @@ pub fn validate_commit_exists(commit string) {
if commit.len == 0 {
return
}
- cmd := "git cat-file -t '$commit' "
+ cmd := "git cat-file -t '${commit}' "
if !scripting.exit_0_status(cmd) {
- eprintln('Commit: "$commit" does not exist in the current repository.')
+ eprintln('Commit: "${commit}" does not exist in the current repository.')
exit(3)
}
}
@@ -50,25 +50,25 @@ pub fn prepare_vc_source(vcdir string, cdir string, commit string) (string, stri
// Building a historic v with the latest vc is not always possible ...
// It is more likely, that the vc *at the time of the v commit*,
// or slightly before that time will be able to build the historic v:
- vline := scripting.run('git rev-list -n1 --timestamp "$commit" ')
+ vline := scripting.run('git rev-list -n1 --timestamp "${commit}" ')
v_timestamp, v_commithash := line_to_timestamp_and_commit(vline)
- scripting.verbose_trace(@FN, 'v_timestamp: $v_timestamp | v_commithash: $v_commithash')
+ scripting.verbose_trace(@FN, 'v_timestamp: ${v_timestamp} | v_commithash: ${v_commithash}')
check_v_commit_timestamp_before_self_rebuilding(v_timestamp)
scripting.chdir(vcdir)
scripting.run('git checkout --quiet master')
//
mut vccommit := ''
vcbefore_subject_match := scripting.run('git rev-list HEAD -n1 --timestamp --grep=${v_commithash[0..7]} ')
- scripting.verbose_trace(@FN, 'vcbefore_subject_match: $vcbefore_subject_match')
+ scripting.verbose_trace(@FN, 'vcbefore_subject_match: ${vcbefore_subject_match}')
if vcbefore_subject_match.len > 3 {
_, vccommit = line_to_timestamp_and_commit(vcbefore_subject_match)
} else {
scripting.verbose_trace(@FN, 'the v commit did not match anything in the vc log; try --timestamp instead.')
- vcbefore := scripting.run('git rev-list HEAD -n1 --timestamp --before=$v_timestamp ')
+ vcbefore := scripting.run('git rev-list HEAD -n1 --timestamp --before=${v_timestamp} ')
_, vccommit = line_to_timestamp_and_commit(vcbefore)
}
- scripting.verbose_trace(@FN, 'vccommit: $vccommit')
- scripting.run('git checkout --quiet "$vccommit" ')
+ scripting.verbose_trace(@FN, 'vccommit: ${vccommit}')
+ scripting.run('git checkout --quiet "${vccommit}" ')
scripting.run('wc *.c')
scripting.chdir(cdir)
return v_commithash, vccommit
@@ -78,11 +78,11 @@ pub fn clone_or_pull(remote_git_url string, local_worktree_path string) {
// Note: after clone_or_pull, the current repo branch is === HEAD === master
if os.is_dir(local_worktree_path) && os.is_dir(os.join_path_single(local_worktree_path, '.git')) {
// Already existing ... Just pulling in this case is faster usually.
- scripting.run('git -C "$local_worktree_path" checkout --quiet master')
- scripting.run('git -C "$local_worktree_path" pull --quiet ')
+ scripting.run('git -C "${local_worktree_path}" checkout --quiet master')
+ scripting.run('git -C "${local_worktree_path}" pull --quiet ')
} else {
// Clone a fresh
- scripting.run('git clone --quiet "$remote_git_url" "$local_worktree_path" ')
+ scripting.run('git clone --quiet "${remote_git_url}" "${local_worktree_path}" ')
}
}
@@ -111,17 +111,17 @@ pub fn (mut vgit_context VGitContext) compile_oldv_if_needed() {
mut command_for_building_v_from_c_source := ''
mut command_for_selfbuilding := ''
if 'windows' == os.user_os() {
- command_for_building_v_from_c_source = '$vgit_context.cc -std=c99 -I ./thirdparty/stdatomic/win -municode -w -o cv.exe "$vgit_context.path_vc/v_win.c" '
- command_for_selfbuilding = './cv.exe -o $vgit_context.vexename {SOURCE}'
+ command_for_building_v_from_c_source = '${vgit_context.cc} -std=c99 -I ./thirdparty/stdatomic/win -municode -w -o cv.exe "${vgit_context.path_vc}/v_win.c" '
+ command_for_selfbuilding = './cv.exe -o ${vgit_context.vexename} {SOURCE}'
} else {
- command_for_building_v_from_c_source = '$vgit_context.cc -std=gnu11 -I ./thirdparty/stdatomic/nix -w -o cv "$vgit_context.path_vc/v.c" -lm -lpthread'
- command_for_selfbuilding = './cv -o $vgit_context.vexename {SOURCE}'
+ command_for_building_v_from_c_source = '${vgit_context.cc} -std=gnu11 -I ./thirdparty/stdatomic/nix -w -o cv "${vgit_context.path_vc}/v.c" -lm -lpthread'
+ command_for_selfbuilding = './cv -o ${vgit_context.vexename} {SOURCE}'
}
scripting.chdir(vgit_context.workdir)
clone_or_pull(vgit_context.v_repo_url, vgit_context.path_v)
clone_or_pull(vgit_context.vc_repo_url, vgit_context.path_vc)
scripting.chdir(vgit_context.path_v)
- scripting.run('git checkout --quiet $vgit_context.commit_v')
+ scripting.run('git checkout --quiet ${vgit_context.commit_v}')
if os.is_dir(vgit_context.path_v) && os.exists(vgit_context.vexepath) {
// already compiled, so no need to compile v again
vgit_context.commit_v__hash = get_current_folder_commit_hash()
@@ -163,7 +163,7 @@ pub mut:
pub fn add_common_tool_options(mut context VGitOptions, mut fp flag.FlagParser) []string {
tdir := os.temp_dir()
- context.workdir = os.real_path(fp.string('workdir', `w`, context.workdir, 'A writable base folder. Default: $tdir'))
+ context.workdir = os.real_path(fp.string('workdir', `w`, context.workdir, 'A writable base folder. Default: ${tdir}'))
context.v_repo_url = fp.string('vrepo', 0, context.v_repo_url, 'The url of the V repository. You can clone it locally too. See also --vcrepo below.')
context.vc_repo_url = fp.string('vcrepo', 0, context.vc_repo_url, 'The url of the vc repository. You can clone it
${flag.space}beforehand, and then just give the local folder
@@ -187,7 +187,7 @@ ${flag.space}to script it/run it in a restrictive vps/docker.
context.vc_repo_url = os.real_path(context.vc_repo_url)
}
commits := fp.finalize() or {
- eprintln('Error: $err')
+ eprintln('Error: ${err}')
exit(1)
}
for commit in commits {
diff --git a/cmd/tools/modules/vhelp/vhelp.v b/cmd/tools/modules/vhelp/vhelp.v
index 347ba75821..d49b30edd1 100644
--- a/cmd/tools/modules/vhelp/vhelp.v
+++ b/cmd/tools/modules/vhelp/vhelp.v
@@ -7,7 +7,7 @@ pub fn show_topic(topic string) {
vroot := os.dir(vexe)
target_topic := os.join_path(vroot, 'cmd', 'v', 'help', '${topic}.txt')
content := os.read_file(target_topic) or {
- eprintln('Unknown topic: $topic')
+ eprintln('Unknown topic: ${topic}')
exit(1)
}
println(content)
diff --git a/cmd/tools/oldv.v b/cmd/tools/oldv.v
index 5eaef980a4..72ac4fb866 100644
--- a/cmd/tools/oldv.v
+++ b/cmd/tools/oldv.v
@@ -72,7 +72,7 @@ const cache_oldv_folder_vc = os.join_path(cache_oldv_folder, 'vc')
fn sync_cache() {
scripting.verbose_trace(@FN, 'start')
if !os.exists(cache_oldv_folder) {
- scripting.verbose_trace(@FN, 'creating $cache_oldv_folder')
+ scripting.verbose_trace(@FN, 'creating ${cache_oldv_folder}')
scripting.mkdir_all(cache_oldv_folder) or {
scripting.verbose_trace(@FN, '## failed.')
exit(1)
@@ -82,16 +82,16 @@ fn sync_cache() {
for reponame in ['v', 'vc'] {
repofolder := os.join_path(cache_oldv_folder, reponame)
if !os.exists(repofolder) {
- scripting.verbose_trace(@FN, 'cloning to $repofolder')
- scripting.exec('git clone --quiet https://github.com/vlang/$reponame $repofolder') or {
- scripting.verbose_trace(@FN, '## error during clone: $err')
+ scripting.verbose_trace(@FN, 'cloning to ${repofolder}')
+ scripting.exec('git clone --quiet https://github.com/vlang/${reponame} ${repofolder}') or {
+ scripting.verbose_trace(@FN, '## error during clone: ${err}')
exit(1)
}
}
scripting.chdir(repofolder)
scripting.exec('git pull --quiet') or {
- scripting.verbose_trace(@FN, 'pulling to $repofolder')
- scripting.verbose_trace(@FN, '## error during pull: $err')
+ scripting.verbose_trace(@FN, 'pulling to ${repofolder}')
+ scripting.verbose_trace(@FN, '## error during pull: ${err}')
exit(1)
}
}
@@ -150,11 +150,11 @@ fn main() {
} else {
context.commit_v = scripting.run('git rev-list -n1 HEAD')
}
- scripting.cprintln('################# context.commit_v: $context.commit_v #####################')
+ scripting.cprintln('################# context.commit_v: ${context.commit_v} #####################')
context.path_v = vgit.normalized_workpath_for_commit(context.vgo.workdir, context.commit_v)
context.path_vc = vgit.normalized_workpath_for_commit(context.vgo.workdir, 'vc')
if !os.is_dir(context.vgo.workdir) {
- eprintln('Work folder: $context.vgo.workdir , does not exist.')
+ eprintln('Work folder: ${context.vgo.workdir} , does not exist.')
exit(2)
}
ecc := os.getenv('CC')
@@ -168,7 +168,7 @@ fn main() {
context.compile_oldv_if_needed()
scripting.chdir(context.path_v)
shorter_hash := context.commit_v_hash[0..10]
- scripting.cprintln('# v commit hash: $shorter_hash | folder: $context.path_v')
+ scripting.cprintln('# v commit hash: ${shorter_hash} | folder: ${context.path_v}')
if context.cmd_to_run.len > 0 {
scripting.cprintln_strong('# command: ${context.cmd_to_run:-34s}')
cmdres := os.execute_or_exit(context.cmd_to_run)
diff --git a/cmd/tools/performance_compare.v b/cmd/tools/performance_compare.v
index 183b8a3c2f..cec4f78b40 100644
--- a/cmd/tools/performance_compare.v
+++ b/cmd/tools/performance_compare.v
@@ -37,10 +37,10 @@ fn (c Context) compare_versions() {
// Input is validated at this point...
// Cleanup artifacts from previous runs of this tool:
scripting.chdir(c.vgo.workdir)
- scripting.run('rm -rf "$c.a" "$c.b" "$c.vc" ')
+ scripting.run('rm -rf "${c.a}" "${c.b}" "${c.vc}" ')
// clone the VC source *just once per comparison*, and reuse it:
- scripting.run('git clone --quiet "$c.vgo.vc_repo_url" "$c.vc" ')
- println('Comparing V performance of commit $c.commit_before (before) vs commit $c.commit_after (after) ...')
+ scripting.run('git clone --quiet "${c.vgo.vc_repo_url}" "${c.vc}" ')
+ println('Comparing V performance of commit ${c.commit_before} (before) vs commit ${c.commit_after} (after) ...')
c.prepare_v(c.b, c.commit_before)
c.prepare_v(c.a, c.commit_after)
scripting.chdir(c.vgo.workdir)
@@ -72,7 +72,7 @@ fn (c Context) compare_versions() {
])
println('All performance files:')
for f in perf_files {
- println(' $f')
+ println(' ${f}')
}
}
@@ -92,11 +92,11 @@ fn (c &Context) prepare_v(cdir string, commit string) {
}
vgit_context.compile_oldv_if_needed()
scripting.chdir(cdir)
- println('Making a v compiler in $cdir')
- scripting.run('./v -cc $cc -o v $vgit_context.vvlocation')
- println('Making a vprod compiler in $cdir')
- scripting.run('./v -cc $cc -prod -o vprod $vgit_context.vvlocation')
- println('Stripping and compressing cv v and vprod binaries in $cdir')
+ println('Making a v compiler in ${cdir}')
+ scripting.run('./v -cc ${cc} -o v ${vgit_context.vvlocation}')
+ println('Making a vprod compiler in ${cdir}')
+ scripting.run('./v -cc ${cc} -prod -o vprod ${vgit_context.vvlocation}')
+ println('Stripping and compressing cv v and vprod binaries in ${cdir}')
scripting.run('cp cv cv_stripped')
scripting.run('cp v v_stripped')
scripting.run('cp vprod vprod_stripped')
@@ -107,13 +107,13 @@ fn (c &Context) prepare_v(cdir string, commit string) {
scripting.run('upx -qqq --lzma cv_stripped_upxed')
scripting.run('upx -qqq --lzma v_stripped_upxed')
scripting.run('upx -qqq --lzma vprod_stripped_upxed')
- scripting.show_sizes_of_files(['$cdir/cv', '$cdir/cv_stripped', '$cdir/cv_stripped_upxed'])
- scripting.show_sizes_of_files(['$cdir/v', '$cdir/v_stripped', '$cdir/v_stripped_upxed'])
- scripting.show_sizes_of_files(['$cdir/vprod', '$cdir/vprod_stripped',
- '$cdir/vprod_stripped_upxed'])
- vversion := scripting.run('$cdir/v -version')
+ scripting.show_sizes_of_files(['${cdir}/cv', '${cdir}/cv_stripped', '${cdir}/cv_stripped_upxed'])
+ scripting.show_sizes_of_files(['${cdir}/v', '${cdir}/v_stripped', '${cdir}/v_stripped_upxed'])
+ scripting.show_sizes_of_files(['${cdir}/vprod', '${cdir}/vprod_stripped',
+ '${cdir}/vprod_stripped_upxed'])
+ vversion := scripting.run('${cdir}/v -version')
vcommit := scripting.run('git rev-parse --short --verify HEAD')
- println('V version is: $vversion , local source commit: $vcommit')
+ println('V version is: ${vversion} , local source commit: ${vcommit}')
if vgit_context.vvlocation == 'cmd/v' {
if os.exists('vlib/v/ast/ast.v') {
println('Source lines of the compiler: ' +
@@ -132,21 +132,21 @@ fn (c &Context) prepare_v(cdir string, commit string) {
fn (c Context) compare_v_performance(label string, commands []string) string {
println('---------------------------------------------------------------------------------')
- println('Compare v performance when doing the following commands ($label):')
+ println('Compare v performance when doing the following commands (${label}):')
mut source_location_a := ''
mut source_location_b := ''
- if os.exists('$c.a/cmd/v') {
+ if os.exists('${c.a}/cmd/v') {
source_location_a = 'cmd/v'
} else {
- source_location_a = if os.exists('$c.a/v.v') { 'v.v ' } else { 'compiler/ ' }
+ source_location_a = if os.exists('${c.a}/v.v') { 'v.v ' } else { 'compiler/ ' }
}
- if os.exists('$c.b/cmd/v') {
+ if os.exists('${c.b}/cmd/v') {
source_location_b = 'cmd/v'
} else {
- source_location_b = if os.exists('$c.b/v.v') { 'v.v ' } else { 'compiler/ ' }
+ source_location_b = if os.exists('${c.b}/v.v') { 'v.v ' } else { 'compiler/ ' }
}
- timestamp_a, _ := vgit.line_to_timestamp_and_commit(scripting.run('cd $c.a/ ; git rev-list -n1 --timestamp HEAD'))
- timestamp_b, _ := vgit.line_to_timestamp_and_commit(scripting.run('cd $c.b/ ; git rev-list -n1 --timestamp HEAD'))
+ timestamp_a, _ := vgit.line_to_timestamp_and_commit(scripting.run('cd ${c.a}/ ; git rev-list -n1 --timestamp HEAD'))
+ timestamp_b, _ := vgit.line_to_timestamp_and_commit(scripting.run('cd ${c.b}/ ; git rev-list -n1 --timestamp HEAD'))
debug_option_a := if timestamp_a > 1570877641 { '-cg ' } else { '-debug ' }
debug_option_b := if timestamp_b > 1570877641 { '-cg ' } else { '-debug ' }
mut hyperfine_commands_arguments := []string{}
@@ -154,7 +154,7 @@ fn (c Context) compare_v_performance(label string, commands []string) string {
println(cmd)
}
for cmd in commands {
- hyperfine_commands_arguments << ' \'cd ${c.b:-34s} ; ./$cmd \' '.replace_each([
+ hyperfine_commands_arguments << ' \'cd ${c.b:-34s} ; ./${cmd} \' '.replace_each([
'@COMPILER@',
source_location_b,
'@DEBUG@',
@@ -162,7 +162,7 @@ fn (c Context) compare_v_performance(label string, commands []string) string {
])
}
for cmd in commands {
- hyperfine_commands_arguments << ' \'cd ${c.a:-34s} ; ./$cmd \' '.replace_each([
+ hyperfine_commands_arguments << ' \'cd ${c.a:-34s} ; ./${cmd} \' '.replace_each([
'@COMPILER@',
source_location_a,
'@DEBUG@',
@@ -171,15 +171,15 @@ fn (c Context) compare_v_performance(label string, commands []string) string {
}
// /////////////////////////////////////////////////////////////////////////////
cmd_stats_file := os.real_path([c.vgo.workdir, 'v_performance_stats_${label}.json'].join(os.path_separator))
- comparison_cmd := 'hyperfine $c.hyperfineopts ' + '--export-json $cmd_stats_file ' +
- '--time-unit millisecond ' + '--style full --warmup $c.warmups ' +
+ comparison_cmd := 'hyperfine ${c.hyperfineopts} ' + '--export-json ${cmd_stats_file} ' +
+ '--time-unit millisecond ' + '--style full --warmup ${c.warmups} ' +
hyperfine_commands_arguments.join(' ')
// /////////////////////////////////////////////////////////////////////////////
if c.vgo.verbose {
println(comparison_cmd)
}
os.system(comparison_cmd)
- println('The detailed performance comparison report was saved to: $cmd_stats_file .')
+ println('The detailed performance comparison report was saved to: ${cmd_stats_file} .')
println('')
return cmd_stats_file
}
@@ -198,7 +198,7 @@ fn main() {
context.vflags = fp.string('vflags', 0, '', 'Additional options to pass to the v commands, for example "-cc tcc"')
context.hyperfineopts = fp.string('hyperfine_options', 0, '', 'Additional options passed to hyperfine.
${flag.space}For example on linux, you may want to pass:
-$flag.space--hyperfine_options "--prepare \'sync; echo 3 | sudo tee /proc/sys/vm/drop_caches\'"
+${flag.space}--hyperfine_options "--prepare \'sync; echo 3 | sudo tee /proc/sys/vm/drop_caches\'"
')
commits := vgit.add_common_tool_options(mut context.vgo, mut fp)
context.commit_before = commits[0]
diff --git a/cmd/tools/regress.v b/cmd/tools/regress.v
index 8c2babf2c7..0726fadaba 100644
--- a/cmd/tools/regress.v
+++ b/cmd/tools/regress.v
@@ -56,24 +56,24 @@ fn main() {
}
if !os.exists(oldvexe) {
if 0 != execute('${os.quoted_path(vexe)} -o ${os.quoted_path(oldvexe)} ${os.quoted_path(oldv_source)}') {
- panic('can not compile $oldvexe')
+ panic('can not compile ${oldvexe}')
}
}
os.execute('git checkout master')
os.execute('git bisect reset')
- os.execute('git checkout $context.new_commit')
+ os.execute('git checkout ${context.new_commit}')
os.execute('git bisect start')
os.execute('git bisect new')
- os.execute('git checkout $context.old_commit')
+ os.execute('git checkout ${context.old_commit}')
os.execute('git bisect old')
println(term.colorize(term.bright_yellow, term.header('', '-')))
- execute('git bisect run ${os.quoted_path(oldvexe)} --bisect -c "$context.command"')
+ execute('git bisect run ${os.quoted_path(oldvexe)} --bisect -c "${context.command}"')
println(term.colorize(term.bright_yellow, term.header('', '-')))
os.execute('git bisect reset')
os.execute('git checkout master')
}
fn execute(cmd string) int {
- eprintln('### $cmd')
+ eprintln('### ${cmd}')
return os.system(cmd)
}
diff --git a/cmd/tools/repeat.v b/cmd/tools/repeat.v
index 6e0571a2dd..e22f6206f3 100644
--- a/cmd/tools/repeat.v
+++ b/cmd/tools/repeat.v
@@ -183,7 +183,7 @@ fn (mut context Context) parse_options() ! {
scripting.set_verbose(true)
}
commands := fp.finalize() or {
- eprintln('Error: $err')
+ eprintln('Error: ${err}')
exit(1)
}
context.commands = context.expand_all_commands(commands)
@@ -249,7 +249,7 @@ fn (mut context Context) run() {
mut duration := 0
mut sum := 0
mut oldres := ''
- println('Series: ${si:4}/${context.series:-4}, command: $cmd')
+ println('Series: ${si:4}/${context.series:-4}, command: ${cmd}')
if context.warmup > 0 && run_warmups < context.commands.len {
for i in 1 .. context.warmup + 1 {
flushed_print('${context.cgoback}warming up run: ${i:4}/${context.warmup:-4} for ${cmd:-50s} took ${duration:6} ms ...')
@@ -273,7 +273,7 @@ fn (mut context Context) run() {
res := scripting.exec(cmd) or { continue }
duration = int(sw.elapsed().milliseconds())
if res.exit_code != 0 {
- eprintln('${i:10} non 0 exit code for cmd: $cmd')
+ eprintln('${i:10} non 0 exit code for cmd: ${cmd}')
continue
}
trimed_output := res.output.trim_right('\r\n')
@@ -308,7 +308,7 @@ fn (mut context Context) run() {
for k, v in m {
// show a temporary summary for the current series/cmd cycle
s := new_aints(v, context.nmins, context.nmaxs)
- println(' $k: $s')
+ println(' ${k}: ${s}')
summary[k] = s
}
// merge current raw results to the previous ones
@@ -346,7 +346,7 @@ fn (mut context Context) show_diff_summary() {
}
return 0
})
- println('Summary (commands are ordered by ascending mean time), after $context.series series of $context.count repetitions:')
+ println('Summary (commands are ordered by ascending mean time), after ${context.series} series of ${context.count} repetitions:')
base := context.results[0].atiming.average
mut first_cmd_percentage := f64(100.0)
mut first_marker := ''
@@ -357,14 +357,14 @@ fn (mut context Context) show_diff_summary() {
first_marker = bold('>')
first_cmd_percentage = cpercent
}
- println(' $first_marker${(i + 1):3} | ${cpercent:5.1f}% slower | ${r.cmd:-57s} | $r.atiming')
+ println(' ${first_marker}${(i + 1):3} | ${cpercent:5.1f}% slower | ${r.cmd:-57s} | ${r.atiming}')
}
$if debugcontext ? {
- println('context: $context')
+ println('context: ${context}')
}
if int(base) > context.fail_on_maxtime {
flushed_print(performance_regression_label)
- println('average time: ${base:6.1f} ms > $context.fail_on_maxtime ms threshold.')
+ println('average time: ${base:6.1f} ms > ${context.fail_on_maxtime} ms threshold.')
exit(2)
}
if context.fail_on_regress_percent == max_fail_percent || context.results.len < 2 {
diff --git a/cmd/tools/report_v_module_folders_without_tests.v b/cmd/tools/report_v_module_folders_without_tests.v
index b3036ec433..f9a8e5edbb 100644
--- a/cmd/tools/report_v_module_folders_without_tests.v
+++ b/cmd/tools/report_v_module_folders_without_tests.v
@@ -33,7 +33,7 @@ fn main() {
known_skip_patterns = known_skip_patterns_env.split(',').filter(it != '')
}
for path in places {
- eprintln('> Checking folder: `$path` ...')
+ eprintln('> Checking folder: `${path}` ...')
mut found := 0
files := os.walk_ext(path, '.v')
mut v_files := map[string]int{}
@@ -54,15 +54,15 @@ fn main() {
for folder, n_v_files in v_files {
n_test_v_files := v_test_files[folder]
if n_v_files > 1 && n_test_v_files == 0 {
- println('> ${n_test_v_files:5} _test.v files, with ${n_v_files:5} .v files, in folder: $folder')
+ println('> ${n_test_v_files:5} _test.v files, with ${n_v_files:5} .v files, in folder: ${folder}')
compilation := os.execute('${os.quoted_path(vexe)} -shared -W -Wfatal-errors -check ${os.quoted_path(folder)}')
if compilation.exit_code != 0 {
- eprintln('> $folder has parser/checker errors!')
+ eprintln('> ${folder} has parser/checker errors!')
eprintln(compilation.output)
}
found++
}
}
- eprintln('> Found $found module folders without _test.v files in `$path` .')
+ eprintln('> Found ${found} module folders without _test.v files in `${path}` .')
}
}
diff --git a/cmd/tools/test_if_v_test_system_works.v b/cmd/tools/test_if_v_test_system_works.v
index 54cf8d00a5..d58db904f5 100644
--- a/cmd/tools/test_if_v_test_system_works.v
+++ b/cmd/tools/test_if_v_test_system_works.v
@@ -17,7 +17,7 @@ fn get_vexe_path() string {
return env_vexe
}
me := os.executable()
- eprintln('me: $me')
+ eprintln('me: ${me}')
mut vexe_ := os.join_path(os.dir(os.dir(os.dir(me))), 'v')
if os.user_os() == 'windows' {
vexe_ += '.exe'
@@ -34,7 +34,7 @@ fn new_tdir() string {
}
fn cleanup_tdir() {
- println('... removing tdir: $tdir')
+ println('... removing tdir: ${tdir}')
os.rmdir_all(tdir) or { eprintln(err) }
}
@@ -42,20 +42,20 @@ type MyResult = string
[noreturn]
fn (result MyResult) fail(reason string) {
- eprintln('> $reason, but it does not. Result:\n$result')
+ eprintln('> ${reason}, but it does not. Result:\n${result}')
exit(1)
}
fn (result MyResult) has(sub string) MyResult {
if !result.contains(sub) {
- result.fail(' result should have the substring `$sub`')
+ result.fail(' result should have the substring `${sub}`')
}
return result
}
fn (result MyResult) matches(gpattern string) MyResult {
if !result.match_glob(gpattern) {
- result.fail('result should match the glob pattern `$gpattern`')
+ result.fail('result should match the glob pattern `${gpattern}`')
}
return result
}
@@ -63,14 +63,14 @@ fn (result MyResult) matches(gpattern string) MyResult {
fn create_test(tname string, tcontent string) !string {
tpath := os.join_path(tdir, tname)
os.write_file(tpath, tcontent)!
- eprintln('>>>>>>>> tpath: $tpath | tcontent: $tcontent')
+ eprintln('>>>>>>>> tpath: ${tpath} | tcontent: ${tcontent}')
return os.quoted_path(tpath)
}
fn check_assert_continues_works() ! {
os.chdir(tdir)!
create_test('assert_continues_option_works_test.v', 'fn test_fail1() { assert 2==4\nassert 2==1\nassert 2==0 }\nfn test_ok(){ assert true }\nfn test_fail2() { assert false }')!
- result := check_fail('$vexe -assert continues assert_continues_option_works_test.v')
+ result := check_fail('${vexe} -assert continues assert_continues_option_works_test.v')
result.has('assert_continues_option_works_test.v:1: fn test_fail1')
result.has('assert_continues_option_works_test.v:2: fn test_fail1')
result.has('assert_continues_option_works_test.v:3: fn test_fail1')
@@ -78,7 +78,7 @@ fn check_assert_continues_works() ! {
result.has('> assert 2 == 4').has('> assert 2 == 1').has('> assert 2 == 0')
// Check if a test function, tagged with [assert_continues], has the same behaviour, without needing additional options
create_test('assert_continues_tag_works_test.v', '[assert_continues]fn test_fail1() { assert 2==4\nassert 2==1\nassert 2==0 }\nfn test_ok(){ assert true }\nfn test_fail2() { assert false\n assert false }')!
- tag_res := check_fail('$vexe assert_continues_tag_works_test.v')
+ tag_res := check_fail('${vexe} assert_continues_tag_works_test.v')
tag_res.has('assert_continues_tag_works_test.v:1: fn test_fail1')
tag_res.has('assert_continues_tag_works_test.v:2: fn test_fail1')
tag_res.has('assert_continues_tag_works_test.v:3: fn test_fail1')
@@ -89,20 +89,20 @@ fn check_assert_continues_works() ! {
}
fn check_ok(cmd string) MyResult {
- println('> check_ok cmd: $cmd')
+ println('> check_ok cmd: ${cmd}')
res := os.execute(cmd)
if res.exit_code != 0 {
- eprintln('> check_ok failed.\n$res.output')
+ eprintln('> check_ok failed.\n${res.output}')
exit(1)
}
return res.output
}
fn check_fail(cmd string) MyResult {
- println('> check_fail cmd: $cmd')
+ println('> check_fail cmd: ${cmd}')
res := os.execute(cmd)
if res.exit_code == 0 {
- eprintln('> check_fail succeeded, but it should have failed.\n$res.output')
+ eprintln('> check_fail succeeded, but it should have failed.\n${res.output}')
exit(1)
}
return res.output
@@ -112,23 +112,23 @@ fn main() {
defer {
os.chdir(os.wd_at_startup) or {}
}
- println('> vroot: $vroot | vexe: $vexe | tdir: $tdir')
+ println('> vroot: ${vroot} | vexe: ${vexe} | tdir: ${tdir}')
ok_fpath := create_test('a_single_ok_test.v', 'fn test_ok(){ assert true }')!
- if check_ok('$vexe $ok_fpath') != '' {
+ if check_ok('${vexe} ${ok_fpath}') != '' {
exit(1)
}
- check_ok('$vexe test $ok_fpath').matches('*OK*a_single_ok_test.v*')
- check_ok('$vexe test "$tdir"').matches('*OK*a_single_ok_test.v*')
+ check_ok('${vexe} test ${ok_fpath}').matches('*OK*a_single_ok_test.v*')
+ check_ok('${vexe} test "${tdir}"').matches('*OK*a_single_ok_test.v*')
//
fail_fpath := create_test('a_single_failing_test.v', 'fn test_fail(){ assert 1 == 2 }')!
- check_fail('$vexe $fail_fpath').has('> assert 1 == 2').has('a_single_failing_test.v:1: fn test_fail')
- check_fail('$vexe test $fail_fpath').has('> assert 1 == 2').has('a_single_failing_test.v:1: fn test_fail')
- check_fail('$vexe test "$tdir"').has('> assert 1 == 2')
+ check_fail('${vexe} ${fail_fpath}').has('> assert 1 == 2').has('a_single_failing_test.v:1: fn test_fail')
+ check_fail('${vexe} test ${fail_fpath}').has('> assert 1 == 2').has('a_single_failing_test.v:1: fn test_fail')
+ check_fail('${vexe} test "${tdir}"').has('> assert 1 == 2')
rel_dir := os.join_path(tdir, rand.ulid())
os.mkdir(rel_dir)!
os.chdir(rel_dir)!
relative_path := '..' + os.path_separator + 'a_single_ok_test.v'
- check_ok('$vexe test ${os.quoted_path(relative_path)}').has('OK').has('a_single_ok_test.v')
+ check_ok('${vexe} test ${os.quoted_path(relative_path)}').has('OK').has('a_single_ok_test.v')
//
check_assert_continues_works()!
println('> all done')
diff --git a/cmd/tools/test_os_process.v b/cmd/tools/test_os_process.v
index 40000b8002..90fa64f213 100644
--- a/cmd/tools/test_os_process.v
+++ b/cmd/tools/test_os_process.v
@@ -35,10 +35,10 @@ fn (mut ctx Context) println(s string) {
ctx.omode = if ctx.omode == .stderr { Target.stdout } else { Target.stderr }
}
if ctx.target in [.both, .stdout] || ctx.omode == .stdout {
- println('stdout, $s')
+ println('stdout, ${s}')
}
if ctx.target in [.both, .stderr] || ctx.omode == .stderr {
- eprintln('stderr, $s')
+ eprintln('stderr, ${s}')
}
}
@@ -71,11 +71,11 @@ fn main() {
ctx.omode = .stdout
}
if ctx.is_verbose {
- eprintln('> args: $args | context: $ctx')
+ eprintln('> args: ${args} | context: ${ctx}')
}
spawn do_timeout(&ctx)
for i := 1; true; i++ {
- ctx.println('$i')
+ ctx.println('${i}')
time.sleep(ctx.period_ms * time.millisecond)
}
time.sleep(100 * time.second)
diff --git a/cmd/tools/translate.v b/cmd/tools/translate.v
index ad30f80b6a..bafb27c525 100644
--- a/cmd/tools/translate.v
+++ b/cmd/tools/translate.v
@@ -17,7 +17,7 @@ fn main() {
// Git clone c2v
if !os.exists(c2v_dir) {
os.mkdir_all(vmodules)!
- println('C2V is not installed. Cloning C2V to $c2v_dir ...')
+ println('C2V is not installed. Cloning C2V to ${c2v_dir} ...')
os.chdir(vmodules)!
res := os.execute('git clone https://github.com/vlang/c2v')
if res.exit_code != 0 {
@@ -43,10 +43,10 @@ fn main() {
passed_args := util.args_quote_paths(os.args[2..])
// println(passed_args)
os.chdir(os.wd_at_startup)!
- c2v_cmd := '${os.quoted_path(c2v_bin)} $passed_args'
+ c2v_cmd := '${os.quoted_path(c2v_bin)} ${passed_args}'
res := os.system(c2v_cmd)
if res != 0 {
- eprintln('C2V command: $c2v_cmd')
+ eprintln('C2V command: ${c2v_cmd}')
eprintln('C2V failed to translate the C files. Please report it via GitHub.')
exit(4)
}
diff --git a/cmd/tools/vast/vast.v b/cmd/tools/vast/vast.v
index 58e6a739ae..49375294ee 100644
--- a/cmd/tools/vast/vast.v
+++ b/cmd/tools/vast/vast.v
@@ -60,7 +60,7 @@ fn (ctx Context) write_file_or_print(file string) {
if ctx.is_print {
println(json(file))
} else {
- println('$time.now(): AST written to: ' + json_file(file))
+ println('${time.now()}: AST written to: ' + json_file(file))
}
}
@@ -74,7 +74,7 @@ fn (ctx Context) watch_for_changes(file string) {
ctx.write_file_or_print(file)
if ctx.is_compile {
file_name := file[0..(file.len - os.file_ext(file).len)]
- os.system('v -o ${file_name}.c $file')
+ os.system('v -o ${file_name}.c ${file}')
}
}
timestamp = new_timestamp
@@ -96,11 +96,11 @@ fn get_abs_path(path string) string {
// check file is v file and exists
fn check_file(file string) {
if os.file_ext(file) !in ['.v', '.vv', '.vsh'] {
- eprintln('the file `$file` must be a v file or vsh file')
+ eprintln('the file `${file}` must be a v file or vsh file')
exit(1)
}
if !os.exists(file) {
- eprintln('the v file `$file` does not exist')
+ eprintln('the v file `${file}` does not exist')
exit(1)
}
}
@@ -224,12 +224,12 @@ fn (t Tree) type_node(typ ast.Type) &Node {
// token type node
fn (t Tree) token_node(tok_kind token.Kind) &Node {
- return t.string_node('token:${int(tok_kind)}($tok_kind.str())')
+ return t.string_node('token:${int(tok_kind)}(${tok_kind.str()})')
}
// enum type node
fn (t Tree) enum_node(value T) &Node {
- return t.string_node('enum:${int(value)}($value)')
+ return t.string_node('enum:${int(value)}(${value})')
}
// for [][]comment
diff --git a/cmd/tools/vbin2v.v b/cmd/tools/vbin2v.v
index 3c4afbd9e5..7a05ca27b6 100644
--- a/cmd/tools/vbin2v.v
+++ b/cmd/tools/vbin2v.v
@@ -20,22 +20,22 @@ mut:
fn (context Context) header() string {
mut header_s := ''
- header_s += 'module $context.module_name\n'
+ header_s += 'module ${context.module_name}\n'
header_s += '\n'
allfiles := context.files.join(' ')
mut options := []string{}
if context.prefix.len > 0 {
- options << '-p $context.prefix'
+ options << '-p ${context.prefix}'
}
if context.module_name.len > 0 {
- options << '-m $context.module_name'
+ options << '-m ${context.module_name}'
}
if context.write_file.len > 0 {
- options << '-w $context.write_file'
+ options << '-w ${context.write_file}'
}
soptions := options.join(' ')
header_s += '// File generated by:\n'
- header_s += '// v bin2v $allfiles $soptions\n'
+ header_s += '// v bin2v ${allfiles} ${soptions}\n'
header_s += '// Please, do not edit this file.\n'
header_s += '// Your changes may be overwritten.\n'
header_s += 'const (\n'
@@ -49,9 +49,9 @@ fn (context Context) footer() string {
fn (context Context) file2v(bname string, fbytes []u8, bn_max int) string {
mut sb := strings.new_builder(1000)
bn_diff_len := bn_max - bname.len
- sb.write_string('\t${bname}_len' + ' '.repeat(bn_diff_len - 4) + ' = $fbytes.len\n')
+ sb.write_string('\t${bname}_len' + ' '.repeat(bn_diff_len - 4) + ' = ${fbytes.len}\n')
fbyte := fbytes[0]
- bnmae_line := '\t$bname' + ' '.repeat(bn_diff_len) + ' = [u8($fbyte), '
+ bnmae_line := '\t${bname}' + ' '.repeat(bn_diff_len) + ' = [u8(${fbyte}), '
sb.write_string(bnmae_line)
mut line_len := bnmae_line.len + 3
for i := 1; i < fbytes.len; i++ {
@@ -65,7 +65,7 @@ fn (context Context) file2v(bname string, fbytes []u8, bn_max int) string {
sb.write_string(b)
line_len += b.len
} else {
- sb.write_string('$b, ')
+ sb.write_string('${b}, ')
line_len += b.len + 2
}
}
@@ -76,8 +76,8 @@ fn (context Context) file2v(bname string, fbytes []u8, bn_max int) string {
fn (context Context) bname_and_bytes(file string) !(string, []u8) {
fname := os.file_name(file)
fname_escaped := fname.replace_each(['.', '_', '-', '_'])
- byte_name := '$context.prefix$fname_escaped'.to_lower()
- fbytes := os.read_bytes(file) or { return error('Error: $err.msg()') }
+ byte_name := '${context.prefix}${fname_escaped}'.to_lower()
+ fbytes := os.read_bytes(file) or { return error('Error: ${err.msg()}') }
return byte_name, fbytes
}
@@ -108,7 +108,7 @@ fn main() {
exit(0)
}
files := fp.finalize() or {
- eprintln('Error: $err.msg()')
+ eprintln('Error: ${err.msg()}')
exit(1)
}
real_files := files.filter(it != 'bin2v')
diff --git a/cmd/tools/vbug.v b/cmd/tools/vbug.v
index f5cd708452..8adec5973b 100644
--- a/cmd/tools/vbug.v
+++ b/cmd/tools/vbug.v
@@ -8,9 +8,9 @@ const vroot = @VMODROOT
fn get_vdoctor_output(is_verbose bool) string {
vexe := os.getenv('VEXE')
verbose_flag := if is_verbose { '-v' } else { '' }
- result := os.execute('${os.quoted_path(vexe)} $verbose_flag doctor')
+ result := os.execute('${os.quoted_path(vexe)} ${verbose_flag} doctor')
if result.exit_code != 0 {
- eprintln('unable to get `v doctor` output: $result.output')
+ eprintln('unable to get `v doctor` output: ${result.output}')
return ''
}
return result.output
@@ -23,21 +23,21 @@ fn get_v_build_output(is_verbose bool, is_yes bool, file_path string) string {
wd := os.getwd()
os.chdir(vroot) or {}
verbose_flag := if is_verbose { '-v' } else { '' }
- vdbg_path := $if windows { '$vroot/vdbg.exe' } $else { '$vroot/vdbg' }
- vdbg_compilation_cmd := '${os.quoted_path(vexe)} $verbose_flag -g -o ${os.quoted_path(vdbg_path)} cmd/v'
+ vdbg_path := $if windows { '${vroot}/vdbg.exe' } $else { '${vroot}/vdbg' }
+ vdbg_compilation_cmd := '${os.quoted_path(vexe)} ${verbose_flag} -g -o ${os.quoted_path(vdbg_path)} cmd/v'
vdbg_result := os.execute(vdbg_compilation_cmd)
os.chdir(wd) or {}
if vdbg_result.exit_code == 0 {
vexe = vdbg_path
} else {
- eprintln('unable to compile V in debug mode: $vdbg_result.output\ncommand: $vdbg_compilation_cmd\n')
+ eprintln('unable to compile V in debug mode: ${vdbg_result.output}\ncommand: ${vdbg_compilation_cmd}\n')
}
//
- mut result := os.execute('${os.quoted_path(vexe)} $verbose_flag ${os.quoted_path(file_path)}')
+ mut result := os.execute('${os.quoted_path(vexe)} ${verbose_flag} ${os.quoted_path(file_path)}')
defer {
os.rm(vdbg_path) or {
if is_verbose {
- eprintln('unable to delete `vdbg`: $err')
+ eprintln('unable to delete `vdbg`: ${err}')
}
}
}
@@ -49,14 +49,14 @@ fn get_v_build_output(is_verbose bool, is_yes bool, file_path string) string {
}
os.rm(generated_file) or {
if is_verbose {
- eprintln('unable to delete generated file: $err')
+ eprintln('unable to delete generated file: ${err}')
}
}
}
run := is_yes
|| ask('It looks like the compilation went well, do you want to run the file?')
if run {
- result = os.execute('${os.quoted_path(vexe)} $verbose_flag run ${os.quoted_path(file_path)}')
+ result = os.execute('${os.quoted_path(vexe)} ${verbose_flag} run ${os.quoted_path(file_path)}')
if result.exit_code == 0 && !is_yes {
confirm_or_exit('It looks like the file ran correctly as well, are you sure you want to continue?')
}
@@ -66,7 +66,7 @@ fn get_v_build_output(is_verbose bool, is_yes bool, file_path string) string {
}
fn ask(msg string) bool {
- prompt := os.input_opt('$msg [Y/n] ') or { 'y' }
+ prompt := os.input_opt('${msg} [Y/n] ') or { 'y' }
return prompt == '' || prompt[0].ascii_str().to_lower() != 'n'
}
@@ -90,7 +90,7 @@ fn main() {
}
else {
if !arg.ends_with('.v') && !arg.ends_with('.vsh') && !arg.ends_with('.vv') {
- eprintln('unknown argument: `$arg`')
+ eprintln('unknown argument: `${arg}`')
exit(1)
}
if file_path != '' {
@@ -111,7 +111,7 @@ fn main() {
vdoctor_output := get_vdoctor_output(is_verbose)
// file content
file_content := os.read_file(file_path) or {
- eprintln('unable to get file "$file_path" content: $err')
+ eprintln('unable to get file "${file_path}" content: ${err}')
''
}
// output from `v -g -o vdbg cmd/v && vdbg file.v`
@@ -136,26 +136,26 @@ fn main() {
raw_body := '
**V doctor:**
```
-$vdoctor_output
+${vdoctor_output}
```
**What did you do?**
-`v -g -o vdbg cmd/v && vdbg $file_path`
+`v -g -o vdbg cmd/v && vdbg ${file_path}`
{file_content}
**What did you expect to see?**
-$expected_result
+${expected_result}
**What did you see instead?**
```
-$build_output```'
- mut encoded_body := urllib.query_escape(raw_body.replace_once('{file_content}', '```v\n$file_content\n```'))
- mut generated_uri := 'https://github.com/vlang/v/issues/new?labels=Bug&body=$encoded_body'
+${build_output}```'
+ mut encoded_body := urllib.query_escape(raw_body.replace_once('{file_content}', '```v\n${file_content}\n```'))
+ mut generated_uri := 'https://github.com/vlang/v/issues/new?labels=Bug&body=${encoded_body}'
if generated_uri.len > 8192 {
// GitHub doesn't support URLs longer than 8192 characters
- encoded_body = urllib.query_escape(raw_body.replace_once('{file_content}', 'See attached file `$file_path`'))
- generated_uri = 'https://github.com/vlang/v/issues/new?labels=Bug&body=$encoded_body'
+ encoded_body = urllib.query_escape(raw_body.replace_once('{file_content}', 'See attached file `${file_path}`'))
+ generated_uri = 'https://github.com/vlang/v/issues/new?labels=Bug&body=${encoded_body}'
println('Your file is too big to be submitted. Head over to the following URL and attach your file.')
println(generated_uri)
} else {
diff --git a/cmd/tools/vbuild-tools.v b/cmd/tools/vbuild-tools.v
index b0ee17e109..dce79ec7ed 100644
--- a/cmd/tools/vbuild-tools.v
+++ b/cmd/tools/vbuild-tools.v
@@ -26,8 +26,8 @@ fn main() {
os.chdir(vroot)!
folder := os.join_path('cmd', 'tools')
tfolder := os.join_path(vroot, 'cmd', 'tools')
- main_label := 'Building $folder ...'
- finish_label := 'building $folder'
+ main_label := 'Building ${folder} ...'
+ finish_label := 'building ${folder}'
//
mut skips := []string{}
for stool in tools_in_subfolders {
@@ -68,7 +68,7 @@ fn main() {
os.mv_by_cp(tpath, target_path) or {
emsg := err.msg()
if !emsg.contains('vbuild-tools') && !emsg.contains('vtest-all') {
- eprintln('error while moving $tpath to $target_path: $emsg')
+ eprintln('error while moving ${tpath} to ${target_path}: ${emsg}')
}
continue
}
diff --git a/cmd/tools/vbump.v b/cmd/tools/vbump.v
index f782fd235e..c47dc704ae 100644
--- a/cmd/tools/vbump.v
+++ b/cmd/tools/vbump.v
@@ -68,7 +68,7 @@ fn get_replacement_function(options Options) ReplacementFunction {
}
fn process_file(input_file string, options Options) {
- lines := os.read_lines(input_file) or { panic('Failed to read file: $input_file') }
+ lines := os.read_lines(input_file) or { panic('Failed to read file: ${input_file}') }
mut re := regex.regex_opt(semver_query) or { panic('Could not create a RegEx parser.') }
@@ -103,28 +103,28 @@ fn process_file(input_file string, options Options) {
os.rm(backup_file) or {}
// Rename the original to the backup.
- os.mv(input_file, backup_file) or { panic('Failed to copy file: $input_file') }
+ os.mv(input_file, backup_file) or { panic('Failed to copy file: ${input_file}') }
// Process the old file and write it back to the original.
os.write_file(input_file, new_lines.join_lines()) or {
- panic('Failed to write file: $input_file')
+ panic('Failed to write file: ${input_file}')
}
// Remove the backup file.
os.rm(backup_file) or {}
if replacement_complete {
- println('Bumped version in $input_file')
+ println('Bumped version in ${input_file}')
} else {
- println('No changes made in $input_file')
+ println('No changes made in ${input_file}')
}
}
fn main() {
if os.args.len < 2 {
- println('Usage: $tool_name [options] [file1 file2 ...]
-$tool_description
-Try $tool_name -h for more help...')
+ println('Usage: ${tool_name} [options] [file1 file2 ...]
+${tool_description}
+Try ${tool_name} -h for more help...')
exit(1)
}
@@ -162,7 +162,7 @@ Try $tool_name -h for more help...')
for input_file in files {
if !os.exists(input_file) {
- println('File not found: $input_file')
+ println('File not found: ${input_file}')
exit(1)
}
process_file(input_file, options)
diff --git a/cmd/tools/vcheck-md.v b/cmd/tools/vcheck-md.v
index f4099fb023..685cf967d3 100644
--- a/cmd/tools/vcheck-md.v
+++ b/cmd/tools/vcheck-md.v
@@ -21,7 +21,7 @@ const (
show_progress = os.getenv('GITHUB_JOB') == '' && '-silent' !in os.args
non_option_args = cmdline.only_non_options(os.args[2..])
is_verbose = os.getenv('VERBOSE') != ''
- vcheckfolder = os.join_path(os.vtmp_dir(), 'v', 'vcheck_$os.getuid()')
+ vcheckfolder = os.join_path(os.vtmp_dir(), 'v', 'vcheck_${os.getuid()}')
should_autofix = os.getenv('VAUTOFIX') != ''
vexe = @VEXE
)
@@ -71,7 +71,7 @@ fn main() {
}
real_path := os.real_path(file_path)
lines := os.read_lines(real_path) or {
- println('"$file_path" does not exist')
+ println('"${file_path}" does not exist')
res.warnings++
continue
}
@@ -85,7 +85,7 @@ fn main() {
clear_previous_line()
}
if res.warnings > 0 || res.errors > 0 || res.oks > 0 {
- println('\nWarnings: $res.warnings | Errors: $res.errors | OKs: $res.oks')
+ println('\nWarnings: ${res.warnings} | Errors: ${res.errors} | OKs: ${res.oks}')
}
if res.errors > 0 {
exit(1)
@@ -130,12 +130,12 @@ fn rtext(s string) string {
}
fn wline(file_path string, lnumber int, column int, message string) string {
- return btext('$file_path:${lnumber + 1}:${column + 1}:') + btext(mtext(' warn:')) +
- rtext(' $message')
+ return btext('${file_path}:${lnumber + 1}:${column + 1}:') + btext(mtext(' warn:')) +
+ rtext(' ${message}')
}
fn eline(file_path string, lnumber int, column int, message string) string {
- return btext('$file_path:${lnumber + 1}:${column + 1}:') + btext(rtext(' error: $message'))
+ return btext('${file_path}:${lnumber + 1}:${column + 1}:') + btext(rtext(' error: ${message}'))
}
const default_command = 'compile'
@@ -166,7 +166,7 @@ mut:
fn (mut f MDFile) progress(message string) {
if show_progress {
clear_previous_line()
- println('File: ${f.path:-30s}, Lines: ${f.lines.len:5}, $message')
+ println('File: ${f.path:-30s}, Lines: ${f.lines.len:5}, ${message}')
}
}
@@ -177,30 +177,30 @@ fn (mut f MDFile) check() CheckResult {
// f.progress('line: $j')
if f.state == .vexample {
if line.len > too_long_line_length_example {
- wprintln(wline(f.path, j, line.len, 'example lines must be less than $too_long_line_length_example characters'))
+ wprintln(wline(f.path, j, line.len, 'example lines must be less than ${too_long_line_length_example} characters'))
wprintln(line)
res.warnings++
}
} else if f.state == .codeblock {
if line.len > too_long_line_length_codeblock {
- wprintln(wline(f.path, j, line.len, 'code lines must be less than $too_long_line_length_codeblock characters'))
+ wprintln(wline(f.path, j, line.len, 'code lines must be less than ${too_long_line_length_codeblock} characters'))
wprintln(line)
res.warnings++
}
} else if line.starts_with('|') {
if line.len > too_long_line_length_table {
- wprintln(wline(f.path, j, line.len, 'table lines must be less than $too_long_line_length_table characters'))
+ wprintln(wline(f.path, j, line.len, 'table lines must be less than ${too_long_line_length_table} characters'))
wprintln(line)
res.warnings++
}
} else if line.contains('http') {
if line.all_after('https').len > too_long_line_length_link {
- wprintln(wline(f.path, j, line.len, 'link lines must be less than $too_long_line_length_link characters'))
+ wprintln(wline(f.path, j, line.len, 'link lines must be less than ${too_long_line_length_link} characters'))
wprintln(line)
res.warnings++
}
} else if line.len > too_long_line_length_other {
- eprintln(eline(f.path, j, line.len, 'must be less than $too_long_line_length_other characters'))
+ eprintln(eline(f.path, j, line.len, 'must be less than ${too_long_line_length_other} characters'))
eprintln(line)
res.errors++
}
@@ -224,7 +224,7 @@ fn (mut f MDFile) parse_line(lnumber int, line string) {
if command == '' {
command = default_command
} else if command == 'nofmt' {
- command += ' $default_command'
+ command += ' ${default_command}'
}
f.current = VCodeExample{
sline: lnumber
@@ -331,14 +331,14 @@ fn (mut ad AnchorData) check_link_target_match(fpath string, mut res CheckResult
found_error_warning = true
res.errors++
for anchordata in ad.anchors[link] {
- eprintln(eline(fpath, anchordata.line, 0, 'multiple link targets of existing link (#$link)'))
+ eprintln(eline(fpath, anchordata.line, 0, 'multiple link targets of existing link (#${link})'))
}
}
} else {
found_error_warning = true
res.errors++
for brokenlink in linkdata {
- eprintln(eline(fpath, brokenlink.line, 0, 'no link target found for existing link [$brokenlink.lable](#$link)'))
+ eprintln(eline(fpath, brokenlink.line, 0, 'no link target found for existing link [${brokenlink.lable}](#${link})'))
}
}
}
@@ -354,7 +354,7 @@ fn (mut ad AnchorData) check_link_target_match(fpath string, mut res CheckResult
anchor.line
}
}
- wprintln(wline(fpath, line, 0, 'multiple link target for non existing link (#$link)'))
+ wprintln(wline(fpath, line, 0, 'multiple link target for non existing link (#${link})'))
found_error_warning = true
res.warnings++
}
@@ -394,7 +394,7 @@ fn create_ref_link(s string) string {
fn (mut f MDFile) debug() {
for e in f.examples {
- eprintln('f.path: $f.path | example: $e')
+ eprintln('f.path: ${f.path} | example: ${e}')
}
}
@@ -442,7 +442,7 @@ fn (mut f MDFile) check_examples() CheckResult {
mut acommands := e.command.split(' ')
nofmt := 'nofmt' in acommands
for command in acommands {
- f.progress('example from $e.sline to $e.eline, command: $command')
+ f.progress('example from ${e.sline} to ${e.eline}, command: ${command}')
fmt_res := if nofmt { 0 } else { get_fmt_exit_code(vfile, vexe) }
match command {
'compile' {
@@ -598,7 +598,7 @@ fn (mut f MDFile) check_examples() CheckResult {
}
'nofmt' {}
else {
- eprintln(eline(f.path, e.sline, 0, 'unrecognized command: "$command", use one of: wip/ignore/compile/failcompile/okfmt/nofmt/oksyntax/badsyntax/cgen/globals/live/shared'))
+ eprintln(eline(f.path, e.sline, 0, 'unrecognized command: "${command}", use one of: wip/ignore/compile/failcompile/okfmt/nofmt/oksyntax/badsyntax/cgen/globals/live/shared'))
should_cleanup_vfile = false
errors++
}
@@ -639,10 +639,10 @@ fn (mut f MDFile) report_not_formatted_example_if_needed(e VCodeExample, fmt_res
}
f.autofix_example(e, vfile) or {
if err is ExampleWasRewritten {
- eprintln('>> f.path: $f.path | example from $e.sline to $e.eline was re-formated by vfmt')
+ eprintln('>> f.path: ${f.path} | example from ${e.sline} to ${e.eline} was re-formated by vfmt')
return err
}
- eprintln('>> f.path: $f.path | encountered error while autofixing the example: $err')
+ eprintln('>> f.path: ${f.path} | encountered error while autofixing the example: ${err}')
}
}
@@ -651,7 +651,7 @@ struct ExampleWasRewritten {
}
fn (mut f MDFile) autofix_example(e VCodeExample, vfile string) ! {
- eprintln('>>> AUTOFIXING f.path: $f.path | e.sline: $e.sline | vfile: $vfile')
+ eprintln('>>> AUTOFIXING f.path: ${f.path} | e.sline: ${e.sline} | vfile: ${vfile}')
res := cmdexecute('${os.quoted_path(vexe)} fmt -w ${os.quoted_path(vfile)}')
if res != 0 {
return error('could not autoformat the example')
diff --git a/cmd/tools/vcomplete.v b/cmd/tools/vcomplete.v
index d36ea9f7cd..79eb4ccf63 100644
--- a/cmd/tools/vcomplete.v
+++ b/cmd/tools/vcomplete.v
@@ -295,12 +295,12 @@ fn auto_complete(args []string) {
println(setup_for_shell(shell_name))
exit(0)
}
- eprintln('Unknown shell ${shell_name}. Supported shells are: $auto_complete_shells')
+ eprintln('Unknown shell ${shell_name}. Supported shells are: ${auto_complete_shells}')
exit(1)
}
eprintln('auto completion require arguments to work.')
} else {
- eprintln('auto completion failed for "$args".')
+ eprintln('auto completion failed for "${args}".')
}
exit(1)
}
@@ -309,7 +309,7 @@ fn auto_complete(args []string) {
match sub {
'setup' {
if sub_args.len <= 1 || sub_args[1] !in auto_complete_shells {
- eprintln('please specify a shell to setup auto completion for ($auto_complete_shells).')
+ eprintln('please specify a shell to setup auto completion for (${auto_complete_shells}).')
exit(1)
}
shell := sub_args[1]
@@ -322,7 +322,7 @@ fn auto_complete(args []string) {
mut lines := []string{}
list := auto_complete_request(sub_args[1..])
for entry in list {
- lines << "COMPREPLY+=('$entry')"
+ lines << "COMPREPLY+=('${entry}')"
}
println(lines.join('\n'))
}
@@ -333,7 +333,7 @@ fn auto_complete(args []string) {
mut lines := []string{}
list := auto_complete_request(sub_args[1..])
for entry in list {
- lines << '$entry'
+ lines << '${entry}'
}
println(lines.join('\n'))
}
@@ -344,7 +344,7 @@ fn auto_complete(args []string) {
mut lines := []string{}
list := auto_complete_request(sub_args[1..])
for entry in list {
- lines << 'compadd -U -S' + '""' + ' -- ' + "'$entry';"
+ lines << 'compadd -U -S' + '""' + ' -- ' + "'${entry}';"
}
println(lines.join('\n'))
}
@@ -542,7 +542,7 @@ _v_completions() {
local limit
# Send all words up to the word the cursor is currently on
let limit=1+\$COMP_CWORD
- src=\$($vexe complete bash \$(printf "%s\\n" \${COMP_WORDS[@]: 0:\$limit}))
+ src=\$(${vexe} complete bash \$(printf "%s\\n" \${COMP_WORDS[@]: 0:\$limit}))
if [[ \$? == 0 ]]; then
eval \${src}
#echo \${src}
@@ -556,7 +556,7 @@ complete -o nospace -F _v_completions v
setup = '
function __v_completions
# Send all words up to the one before the cursor
- $vexe complete fish (commandline -cop)
+ ${vexe} complete fish (commandline -cop)
end
complete -f -c v -a "(__v_completions)"
'
@@ -567,7 +567,7 @@ complete -f -c v -a "(__v_completions)"
_v() {
local src
# Send all words up to the word the cursor is currently on
- src=\$($vexe complete zsh \$(printf "%s\\n" \${(@)words[1,\$CURRENT]}))
+ src=\$(${vexe} complete zsh \$(printf "%s\\n" \${(@)words[1,\$CURRENT]}))
if [[ \$? == 0 ]]; then
eval \${src}
#echo \${src}
@@ -580,7 +580,7 @@ compdef _v v
setup = '
Register-ArgumentCompleter -Native -CommandName v -ScriptBlock {
param(\$commandName, \$wordToComplete, \$cursorPosition)
- $vexe complete powershell "\$wordToComplete" | ForEach-Object {
+ ${vexe} complete powershell "\$wordToComplete" | ForEach-Object {
[System.Management.Automation.CompletionResult]::new(\$_, \$_, \'ParameterValue\', \$_)
}
}
diff --git a/cmd/tools/vcompress.v b/cmd/tools/vcompress.v
index bcd18e8f90..13865d62d9 100644
--- a/cmd/tools/vcompress.v
+++ b/cmd/tools/vcompress.v
@@ -24,13 +24,13 @@ fn main() {
}
path := os.args[3]
content := os.read_bytes(path) or {
- eprintln('unable to read "$path": $err')
+ eprintln('unable to read "${path}": ${err}')
exit(1)
}
compressed := match compression_type {
.zlib {
zlib.compress(content) or {
- eprintln('compression error: $err')
+ eprintln('compression error: ${err}')
exit(1)
}
}
@@ -38,7 +38,7 @@ fn main() {
out_path := os.args[4]
os.write_file_array(out_path, compressed) or {
- eprintln('failed to write "$out_path": $err')
+ eprintln('failed to write "${out_path}": ${err}')
exit(1)
}
}
diff --git a/cmd/tools/vcreate.v b/cmd/tools/vcreate.v
index c3c4b56b3f..f4e9a7d9cf 100644
--- a/cmd/tools/vcreate.v
+++ b/cmd/tools/vcreate.v
@@ -17,7 +17,7 @@ mut:
}
fn cerror(e string) {
- eprintln('\nerror: $e')
+ eprintln('\nerror: ${e}')
}
fn check_name(name string) string {
@@ -30,12 +30,12 @@ fn check_name(name string) string {
if cname.contains(' ') {
cname = cname.replace(' ', '_')
}
- eprintln('warning: the project name cannot be capitalized, the name will be changed to `$cname`')
+ eprintln('warning: the project name cannot be capitalized, the name will be changed to `${cname}`')
return cname
}
if name.contains(' ') {
cname := name.replace(' ', '_')
- eprintln('warning: the project name cannot contain spaces, the name will be changed to `$cname`')
+ eprintln('warning: the project name cannot contain spaces, the name will be changed to `${cname}`')
return cname
}
return name
@@ -43,10 +43,10 @@ fn check_name(name string) string {
fn vmod_content(c Create) string {
return "Module {
- name: '$c.name'
- description: '$c.description'
- version: '$c.version'
- license: '$c.license'
+ name: '${c.name}'
+ description: '${c.description}'
+ version: '${c.version}'
+ license: '${c.license}'
dependencies: []
}
"
@@ -64,7 +64,7 @@ fn main() {
fn gen_gitignore(name string) string {
return '# Binaries for programs and plugins
main
-$name
+${name}
*.exe
*.exe~
*.so
@@ -104,7 +104,7 @@ indent_size = 4
}
fn (c &Create) write_vmod(new bool) {
- vmod_path := if new { '$c.name/v.mod' } else { 'v.mod' }
+ vmod_path := if new { '${c.name}/v.mod' } else { 'v.mod' }
os.write_file(vmod_path, vmod_content(c)) or { panic(err) }
}
@@ -112,12 +112,12 @@ fn (c &Create) write_main(new bool) {
if !new && (os.exists('${c.name}.v') || os.exists('src/${c.name}.v')) {
return
}
- main_path := if new { '$c.name/${c.name}.v' } else { '${c.name}.v' }
+ main_path := if new { '${c.name}/${c.name}.v' } else { '${c.name}.v' }
os.write_file(main_path, main_content()) or { panic(err) }
}
fn (c &Create) write_gitattributes(new bool) {
- gitattributes_path := if new { '$c.name/.gitattributes' } else { '.gitattributes' }
+ gitattributes_path := if new { '${c.name}/.gitattributes' } else { '.gitattributes' }
if !new && os.exists(gitattributes_path) {
return
}
@@ -125,7 +125,7 @@ fn (c &Create) write_gitattributes(new bool) {
}
fn (c &Create) write_editorconfig(new bool) {
- editorconfig_path := if new { '$c.name/.editorconfig' } else { '.editorconfig' }
+ editorconfig_path := if new { '${c.name}/.editorconfig' } else { '.editorconfig' }
if !new && os.exists(editorconfig_path) {
return
}
@@ -134,14 +134,14 @@ fn (c &Create) write_editorconfig(new bool) {
fn (c &Create) create_git_repo(dir string) {
// Create Git Repo and .gitignore file
- if !os.is_dir('$dir/.git') {
- res := os.execute('git init $dir')
+ if !os.is_dir('${dir}/.git') {
+ res := os.execute('git init ${dir}')
if res.exit_code != 0 {
cerror('Unable to create git repo')
exit(4)
}
}
- gitignore_path := '$dir/.gitignore'
+ gitignore_path := '${dir}/.gitignore'
if !os.exists(gitignore_path) {
os.write_file(gitignore_path, gen_gitignore(c.name)) or {}
}
@@ -155,21 +155,21 @@ fn create(args []string) {
exit(1)
}
if c.name.contains('-') {
- cerror('"$c.name" should not contain hyphens')
+ cerror('"${c.name}" should not contain hyphens')
exit(1)
}
if os.is_dir(c.name) {
- cerror('$c.name folder already exists')
+ cerror('${c.name} folder already exists')
exit(3)
}
c.description = if args.len > 1 { args[1] } else { os.input('Input your project description: ') }
default_version := '0.0.0'
- c.version = os.input('Input your project version: ($default_version) ')
+ c.version = os.input('Input your project version: (${default_version}) ')
if c.version == '' {
c.version = default_version
}
default_license := os.getenv_opt('VLICENSE') or { 'MIT' }
- c.license = os.input('Input your project license: ($default_license) ')
+ c.license = os.input('Input your project license: (${default_license}) ')
if c.license == '' {
c.license = default_license
}
@@ -206,7 +206,7 @@ fn main() {
init_project()
}
else {
- cerror('unknown command: $cmd')
+ cerror('unknown command: ${cmd}')
exit(1)
}
}
diff --git a/cmd/tools/vdoc/html.v b/cmd/tools/vdoc/html.v
index 392567ad80..06b607f189 100644
--- a/cmd/tools/vdoc/html.v
+++ b/cmd/tools/vdoc/html.v
@@ -59,8 +59,8 @@ fn (vd VDoc) render_search_index(out Output) {
js_search_data.write_string('var searchModuleData = [')
for i, title in vd.search_module_index {
data := vd.search_module_data[i]
- js_search_index.write_string('"$title",')
- js_search_data.write_string('["$data.description","$data.link"],')
+ js_search_index.write_string('"${title}",')
+ js_search_data.write_string('["${data.description}","${data.link}"],')
}
js_search_index.writeln('];')
js_search_index.write_string('var searchIndex = [')
@@ -68,9 +68,9 @@ fn (vd VDoc) render_search_index(out Output) {
js_search_data.write_string('var searchData = [')
for i, title in vd.search_index {
data := vd.search_data[i]
- js_search_index.write_string('"$title",')
+ js_search_index.write_string('"${title}",')
// array instead of object to reduce file size
- js_search_data.write_string('["$data.badge","$data.description","$data.link","$data.prefix"],')
+ js_search_data.write_string('["${data.badge}","${data.description}","${data.link}","${data.prefix}"],')
}
js_search_index.writeln('];')
js_search_data.writeln('];')
@@ -94,7 +94,7 @@ fn (mut vd VDoc) render_static_html(out Output) {
fn (vd VDoc) get_resource(name string, out Output) string {
cfg := vd.cfg
path := os.join_path(cfg.theme_dir, name)
- mut res := os.read_file(path) or { panic('vdoc: could not read $path') }
+ mut res := os.read_file(path) or { panic('vdoc: could not read ${path}') }
/*
if minify {
if name.ends_with('.js') {
@@ -110,7 +110,7 @@ fn (vd VDoc) get_resource(name string, out Output) string {
} else {
output_path := os.join_path(out.path, name)
if !os.exists(output_path) {
- println('Generating $out.typ in "$output_path"')
+ println('Generating ${out.typ} in "${output_path}"')
os.write_file(output_path, res) or { panic(err) }
}
return name
@@ -150,7 +150,7 @@ fn (mut vd VDoc) create_search_results(mod string, dn doc.DocNode, out Output) {
dn_description := trim_doc_node_description(comments)
vd.search_index << dn.name
vd.search_data << SearchResult{
- prefix: if dn.parent_name != '' { '$dn.kind ($dn.parent_name)' } else { '$dn.kind ' }
+ prefix: if dn.parent_name != '' { '${dn.kind} (${dn.parent_name})' } else { '${dn.kind} ' }
description: dn_description
badge: mod
link: vd.get_file_name(mod, out) + '#' + get_node_id(dn)
@@ -164,7 +164,7 @@ fn (vd VDoc) write_content(cn &doc.DocNode, d &doc.Doc, mut hw strings.Builder)
cfg := vd.cfg
base_dir := os.dir(os.real_path(cfg.input_path))
file_path_name := if cfg.is_multi {
- cn.file_path.replace('$base_dir/', '')
+ cn.file_path.replace('${base_dir}/', '')
} else {
os.file_name(cn.file_path)
}
@@ -173,7 +173,7 @@ fn (vd VDoc) write_content(cn &doc.DocNode, d &doc.Doc, mut hw strings.Builder)
hw.write_string(doc_node_html(cn, src_link, false, cfg.include_examples, d.table))
}
for child in cn.children {
- child_file_path_name := child.file_path.replace('$base_dir/', '')
+ child_file_path_name := child.file_path.replace('${base_dir}/', '')
child_src_link := get_src_link(vd.manifest.repo_url, child_file_path_name,
child.pos.line_nr + 1)
hw.write_string(doc_node_html(child, child_src_link, false, cfg.include_examples,
@@ -223,7 +223,7 @@ fn (vd VDoc) gen_html(d doc.Doc) string {
submodules := vd.docs.filter(it.head.name.starts_with(submod_prefix + '.'))
dropdown := if submodules.len > 0 { vd.assets['arrow_icon'] } else { '' }
active_class := if dc.head.name == d.head.name { ' active' } else { '' }
- modules_toc.write_string('')
+ modules_toc.write_string('')
for j, cdoc in submodules {
if j == 0 {
modules_toc.write_string('')
@@ -234,7 +234,7 @@ fn (vd VDoc) gen_html(d doc.Doc) string {
} else {
''
}
- modules_toc.write_string('- $submod_name
')
+ modules_toc.write_string('- ${submod_name}
')
if j == submodules.len - 1 {
modules_toc.write_string('
')
}
@@ -280,15 +280,15 @@ fn get_src_link(repo_url string, file_name string, line_nr int) string {
return ''
}
url.path = url.path.trim_right('/') + match url.host {
- 'github.com' { '/blob/master/$file_name' }
- 'gitlab.com' { '/-/blob/master/$file_name' }
- 'git.sir.ht' { '/tree/master/$file_name' }
+ 'github.com' { '/blob/master/${file_name}' }
+ 'gitlab.com' { '/-/blob/master/${file_name}' }
+ 'git.sir.ht' { '/tree/master/${file_name}' }
else { '' }
}
if url.path == '/' {
return ''
}
- url.fragment = 'L$line_nr'
+ url.fragment = 'L${line_nr}'
return url.str()
}
@@ -299,18 +299,18 @@ fn html_highlight(code string, tb &ast.Table) string {
lit := if typ in [.unone, .operator, .punctuation] {
tok.kind.str()
} else if typ == .string {
- "'$tok.lit'"
+ "'${tok.lit}'"
} else if typ == .char {
- '`$tok.lit`'
+ '`${tok.lit}`'
} else if typ == .comment {
- if tok.lit != '' && tok.lit[0] == 1 { '//${tok.lit[1..]}' } else { '//$tok.lit' }
+ if tok.lit != '' && tok.lit[0] == 1 { '//${tok.lit[1..]}' } else { '//${tok.lit}' }
} else {
tok.lit
}
if typ in [.unone, .name] {
return lit
}
- return '$lit'
+ return '${lit}'
}
mut s := scanner.new_scanner(code, .parse_comments, &pref.Preferences{})
mut tok := s.scan()
@@ -398,44 +398,44 @@ fn doc_node_html(dn doc.DocNode, link string, head bool, include_examples bool,
mut tags := dn.tags.filter(!it.starts_with('deprecated'))
tags.sort()
mut node_id := get_node_id(dn)
- mut hash_link := if !head { ' #' } else { '' }
+ mut hash_link := if !head { ' #' } else { '' }
if head && is_module_readme(dn) {
- node_id = 'readme_$node_id'
- hash_link = ' #'
+ node_id = 'readme_${node_id}'
+ hash_link = ' #'
}
- dnw.writeln('${tabs[1]}')
+ dnw.writeln('${tabs[1]}')
if dn.name.len > 0 {
if dn.kind == .const_group {
- dnw.write_string('${tabs[2]}<$head_tag>$sym_name$hash_link$head_tag>')
+ dnw.write_string('${tabs[2]}
<${head_tag}>${sym_name}${hash_link}${head_tag}>')
} else {
- dnw.write_string('${tabs[2]}
<$head_tag>$dn.kind $sym_name$hash_link$head_tag>')
+ dnw.write_string('${tabs[2]}
<${head_tag}>${dn.kind} ${sym_name}${hash_link}${head_tag}>')
}
if link.len != 0 {
- dnw.write_string('
$link_svg')
+ dnw.write_string('
${link_svg}')
}
dnw.write_string('
')
}
if deprecated_tags.len > 0 {
attributes := deprecated_tags.map('
${no_quotes(it)}
').join('')
- dnw.writeln('
$attributes
')
+ dnw.writeln('
${attributes}
')
}
if tags.len > 0 {
- attributes := tags.map('
$it
').join('')
- dnw.writeln('
$attributes
')
+ attributes := tags.map('
${it}
').join('')
+ dnw.writeln('
${attributes}
')
}
if !head && dn.content.len > 0 {
- dnw.writeln('
$highlighted_code
')
+ dnw.writeln('
${highlighted_code}
')
}
// do not mess with md_content further, its formatting is important, just output it 1:1 !
- dnw.writeln('$md_content\n')
+ dnw.writeln('${md_content}\n')
// Write examples if any found
examples := dn.examples()
if include_examples && examples.len > 0 {
example_title := if examples.len > 1 { 'Examples' } else { 'Example' }
- dnw.writeln('
$example_title
')
+ dnw.writeln('${example_title}
')
for example in examples {
hl_example := html_highlight(example, tb)
- dnw.writeln('$hl_example
')
+ dnw.writeln('${hl_example}
')
}
dnw.writeln('')
}
@@ -488,17 +488,17 @@ fn write_toc(dn doc.DocNode, mut toc strings.Builder) {
if dn.comments.len == 0 || (dn.comments.len > 0 && dn.comments[0].text.len == 0) {
return
}
- toc.write_string('README')
+ toc.write_string('README')
} else if dn.name != 'Constants' {
- toc.write_string('$dn.kind $dn.name')
+ toc.write_string('${dn.kind} ${dn.name}')
toc.writeln(' ')
} else {
- toc.write_string('$dn.name')
+ toc.write_string('${dn.name}')
}
toc.writeln('')
}
diff --git a/cmd/tools/vdoc/markdown.v b/cmd/tools/vdoc/markdown.v
index cc24ad2bfa..8656abd531 100644
--- a/cmd/tools/vdoc/markdown.v
+++ b/cmd/tools/vdoc/markdown.v
@@ -10,40 +10,40 @@ fn markdown_escape_script_tags(str string) string {
fn (vd VDoc) gen_markdown(d doc.Doc, with_toc bool) string {
mut hw := strings.new_builder(200)
mut cw := strings.new_builder(200)
- hw.writeln('# $d.head.content\n')
+ hw.writeln('# ${d.head.content}\n')
if d.head.comments.len > 0 {
comments := if vd.cfg.include_examples {
d.head.merge_comments()
} else {
d.head.merge_comments_without_examples()
}
- hw.writeln('$comments\n')
+ hw.writeln('${comments}\n')
}
if with_toc {
hw.writeln('## Contents')
}
vd.write_markdown_content(d.contents.arr(), mut cw, mut hw, 0, with_toc)
footer_text := gen_footer_text(d, !vd.cfg.no_timestamp)
- cw.writeln('#### $footer_text')
+ cw.writeln('#### ${footer_text}')
return hw.str() + '\n' + cw.str()
}
fn (vd VDoc) write_markdown_content(contents []doc.DocNode, mut cw strings.Builder, mut hw strings.Builder, indent int, with_toc bool) {
for cn in contents {
if with_toc && cn.name.len > 0 {
- hw.writeln(' '.repeat(2 * indent) + '- [${slug(cn.name)}](#$cn.name)')
- cw.writeln('## $cn.name')
+ hw.writeln(' '.repeat(2 * indent) + '- [${slug(cn.name)}](#${cn.name})')
+ cw.writeln('## ${cn.name}')
}
if cn.content.len > 0 {
comments := cn.merge_comments_without_examples()
- cw.writeln('```v\n$cn.content\n```\n$comments\n')
+ cw.writeln('```v\n${cn.content}\n```\n${comments}\n')
// Write examples if any found
examples := cn.examples()
if vd.cfg.include_examples && examples.len > 0 {
example_title := if examples.len > 1 { 'Examples' } else { 'Example' }
- cw.writeln('$example_title\n```v\n')
+ cw.writeln('${example_title}\n```v\n')
for example in examples {
- cw.writeln('$example\n')
+ cw.writeln('${example}\n')
}
cw.writeln('```\n')
}
diff --git a/cmd/tools/vdoc/utils.v b/cmd/tools/vdoc/utils.v
index 691fe40d4b..4bd6f504ed 100644
--- a/cmd/tools/vdoc/utils.v
+++ b/cmd/tools/vdoc/utils.v
@@ -20,7 +20,7 @@ fn escape(str string) string {
fn get_sym_name(dn doc.DocNode) string {
sym_name := if dn.parent_name.len > 0 && dn.parent_name != 'void' {
- '($dn.parent_name) $dn.name'
+ '(${dn.parent_name}) ${dn.name}'
} else {
dn.name
}
@@ -29,7 +29,7 @@ fn get_sym_name(dn doc.DocNode) string {
fn get_node_id(dn doc.DocNode) string {
tag := if dn.parent_name.len > 0 && dn.parent_name != 'void' {
- '${dn.parent_name}.$dn.name'
+ '${dn.parent_name}.${dn.name}'
} else {
dn.name
}
@@ -37,7 +37,7 @@ fn get_node_id(dn doc.DocNode) string {
}
fn is_module_readme(dn doc.DocNode) bool {
- if dn.comments.len > 0 && dn.content == 'module $dn.name' {
+ if dn.comments.len > 0 && dn.content == 'module ${dn.name}' {
return true
}
return false
@@ -133,8 +133,8 @@ fn gen_footer_text(d &doc.Doc, include_timestamp bool) string {
return footer_text
}
generated_time := d.time_generated
- time_str := '$generated_time.day $generated_time.smonth() $generated_time.year $generated_time.hhmmss()'
- return '$footer_text Generated on: $time_str'
+ time_str := '${generated_time.day} ${generated_time.smonth()} ${generated_time.year} ${generated_time.hhmmss()}'
+ return '${footer_text} Generated on: ${time_str}'
}
fn color_highlight(code string, tb &ast.Table) string {
@@ -152,20 +152,20 @@ fn color_highlight(code string, tb &ast.Table) string {
'"'])
if use_double_quote {
s := unescaped_val.replace_each(['\x01', '\\\\', '"', '\\"'])
- lit = term.yellow('"$s"')
+ lit = term.yellow('"${s}"')
} else {
s := unescaped_val.replace_each(['\x01', '\\\\', "'", "\\'"])
- lit = term.yellow("'$s'")
+ lit = term.yellow("'${s}'")
}
}
.char {
- lit = term.yellow('`$tok.lit`')
+ lit = term.yellow('`${tok.lit}`')
}
.comment {
lit = if tok.lit != '' && tok.lit[0] == 1 {
'//${tok.lit[1..]}'
} else {
- '//$tok.lit'
+ '//${tok.lit}'
}
}
.keyword {
diff --git a/cmd/tools/vdoc/vdoc.v b/cmd/tools/vdoc/vdoc.v
index cb1125f649..681da0b222 100644
--- a/cmd/tools/vdoc/vdoc.v
+++ b/cmd/tools/vdoc/vdoc.v
@@ -82,9 +82,9 @@ fn (vd VDoc) gen_json(d doc.Doc) string {
} else {
d.head.merge_comments_without_examples()
}
- jw.write_string('{"module_name":"$d.head.name","description":"${escape(comments)}","contents":')
+ jw.write_string('{"module_name":"${d.head.name}","description":"${escape(comments)}","contents":')
jw.write_string(json.encode(d.contents.keys().map(d.contents[it])))
- jw.write_string(',"generator":"vdoc","time_generated":"$d.time_generated.str()"}')
+ jw.write_string(',"generator":"vdoc","time_generated":"${d.time_generated.str()}"}')
return jw.str()
}
@@ -95,7 +95,7 @@ fn (vd VDoc) gen_plaintext(d doc.Doc) string {
content_arr := d.head.content.split(' ')
pw.writeln('${term.bright_blue(content_arr[0])} ${term.green(content_arr[1])}\n')
} else {
- pw.writeln('$d.head.content\n')
+ pw.writeln('${d.head.content}\n')
}
if cfg.include_comments {
comments := if cfg.include_examples {
@@ -145,7 +145,7 @@ fn (vd VDoc) write_plaintext_content(contents []doc.DocNode, mut pw strings.Buil
}
}
if cfg.show_loc {
- pw.writeln('Location: $cn.file_path:${cn.pos.line_nr + 1}\n')
+ pw.writeln('Location: ${cn.file_path}:${cn.pos.line_nr + 1}\n')
}
}
vd.write_plaintext_content(cn.children, mut pw)
@@ -193,7 +193,7 @@ fn (vd VDoc) work_processor(mut work sync.Channel, mut wg sync.WaitGroup) {
}
file_name, content := vd.render_doc(pdoc.d, pdoc.out)
output_path := os.join_path(pdoc.out.path, file_name)
- println('Generating $pdoc.out.typ in "$output_path"')
+ println('Generating ${pdoc.out.typ} in "${output_path}"')
os.write_file(output_path, content) or { panic(err) }
}
wg.done()
@@ -237,7 +237,7 @@ fn (vd VDoc) get_readme(path string) string {
return ''
}
readme_path := os.join_path(path, '${fname}.md')
- vd.vprintln('Reading README file from $readme_path')
+ vd.vprintln('Reading README file from ${readme_path}')
readme_contents := os.read_file(readme_path) or { '' }
return readme_contents
}
@@ -287,7 +287,7 @@ fn (mut vd VDoc) generate_docs_from_file() {
}
manifest_path := os.join_path(dir_path, 'v.mod')
if os.exists(manifest_path) {
- vd.vprintln('Reading v.mod info from $manifest_path')
+ vd.vprintln('Reading v.mod info from ${manifest_path}')
if manifest := vmod.from_file(manifest_path) {
vd.manifest = manifest
}
@@ -313,7 +313,7 @@ fn (mut vd VDoc) generate_docs_from_file() {
cfg.input_path,
] }
for dirpath in dirs {
- vd.vprintln('Generating $out.typ docs for "$dirpath"')
+ vd.vprintln('Generating ${out.typ} docs for "${dirpath}"')
mut dcs := doc.generate(dirpath, cfg.pub_only, true, cfg.platform, cfg.symbol_name) or {
vd.emit_generate_err(err)
exit(1)
@@ -410,7 +410,7 @@ fn (mut vd VDoc) generate_docs_from_file() {
fn (vd VDoc) vprintln(str string) {
if vd.cfg.is_verbose {
- println('vdoc: $str')
+ println('vdoc: ${str}')
}
}
@@ -428,7 +428,7 @@ fn parse_arguments(args []string) Config {
format := cmdline.option(current_args, '-f', '')
if format !in allowed_formats {
allowed_str := allowed_formats.join(', ')
- eprintln('vdoc: "$format" is not a valid format. Only $allowed_str are allowed.')
+ eprintln('vdoc: "${format}" is not a valid format. Only ${allowed_str} are allowed.')
exit(1)
}
cfg.output_type = set_output_type_from_str(format)
@@ -517,7 +517,7 @@ fn parse_arguments(args []string) Config {
} else if !is_path {
// TODO vd.vprintln('Input "$cfg.input_path" is not a valid path. Looking for modules named "$cfg.input_path"...')
mod_path := doc.lookup_module(cfg.input_path) or {
- eprintln('vdoc: $err')
+ eprintln('vdoc: ${err}')
exit(1)
}
cfg.input_path = mod_path
@@ -544,6 +544,6 @@ fn main() {
repo_url: ''
}
}
- vd.vprintln('Setting output type to "$cfg.output_type"')
+ vd.vprintln('Setting output type to "${cfg.output_type}"')
vd.generate_docs_from_file()
}
diff --git a/cmd/tools/vdoctor.v b/cmd/tools/vdoctor.v
index f5243fa40b..20d7deff56 100644
--- a/cmd/tools/vdoctor.v
+++ b/cmd/tools/vdoctor.v
@@ -17,7 +17,7 @@ fn (mut a App) println(s string) {
fn (mut a App) collect_info() {
mut os_kind := os.user_os()
mut arch_details := []string{}
- arch_details << '$runtime.nr_cpus() cpus'
+ arch_details << '${runtime.nr_cpus()} cpus'
if runtime.is_32bit() {
arch_details << '32bit'
}
@@ -89,12 +89,12 @@ fn (mut a App) collect_info() {
)
p := a.parse(wmic_info, '=')
caption, build_number, os_arch := p['caption'], p['buildnumber'], p['osarchitecture']
- os_details = '$caption v$build_number $os_arch'
+ os_details = '${caption} v${build_number} ${os_arch}'
} else {
ouname := os.uname()
- os_details = '$ouname.release, $ouname.version'
+ os_details = '${ouname.release}, ${ouname.version}'
}
- a.line('OS', '$os_kind, $os_details')
+ a.line('OS', '${os_kind}, ${os_details}')
a.line('Processor', arch_details.join(', '))
a.line('CC version', a.cmd(command: 'cc --version'))
a.println('')
@@ -113,11 +113,11 @@ fn (mut a App) collect_info() {
a.line('V full version', version.full_v_version(true))
vtmp := os.getenv('VTMP')
if vtmp != '' {
- a.line('env VTMP', '"$vtmp"')
+ a.line('env VTMP', '"${vtmp}"')
}
vflags := os.getenv('VFLAGS')
if vflags != '' {
- a.line('env VFLAGS', '"$vflags"')
+ a.line('env VFLAGS', '"${vflags}"')
}
a.println('')
a.line('Git version', a.cmd(command: 'git --version'))
@@ -146,11 +146,11 @@ fn (mut a App) cmd(c CmdConfig) string {
return output[c.line]
}
}
- return 'Error: $x.output'
+ return 'Error: ${x.output}'
}
fn (mut a App) line(label string, value string) {
- a.println('$label: ${term.colorize(term.bold, value)}')
+ a.println('${label}: ${term.colorize(term.bold, value)}')
}
fn (app &App) parse(config string, sep string) map[string]string {
@@ -204,7 +204,7 @@ fn (mut a App) get_linux_os_name() string {
}
'uname' {
ouname := os.uname()
- os_details = '$ouname.release, $ouname.version'
+ os_details = '${ouname.release}, ${ouname.version}'
break
}
else {}
@@ -231,7 +231,7 @@ fn (mut a App) git_info() string {
os.execute('git -C . fetch V_REPO')
commit_count := a.cmd(command: 'git rev-list @{0}...V_REPO/master --right-only --count').int()
if commit_count > 0 {
- out += ' ($commit_count commit(s) behind V master)'
+ out += ' (${commit_count} commit(s) behind V master)'
}
return out
}
@@ -247,7 +247,7 @@ fn (mut a App) report_tcc_version(tccfolder string) {
tcc_commit := a.cmd(
command: 'git -C ${os.quoted_path(tccfolder)} describe --abbrev=8 --dirty --always --tags'
)
- a.line('$tccfolder status', '$tcc_branch_name $tcc_commit')
+ a.line('${tccfolder} status', '${tcc_branch_name} ${tcc_commit}')
}
fn (mut a App) report_info() {
diff --git a/cmd/tools/vfmt.v b/cmd/tools/vfmt.v
index 4333396f0e..a50937e214 100644
--- a/cmd/tools/vfmt.v
+++ b/cmd/tools/vfmt.v
@@ -61,7 +61,7 @@ fn main() {
if term_colors {
os.setenv('VCOLORS', 'always', true)
}
- foptions.vlog('vfmt foptions: $foptions')
+ foptions.vlog('vfmt foptions: ${foptions}')
if foptions.is_worker {
// -worker should be added by a parent vfmt process.
// We launch a sub process for each file because
@@ -74,7 +74,7 @@ fn main() {
// we are NOT a worker at this stage, i.e. we are a parent vfmt process
possible_files := cmdline.only_non_options(cmdline.options_after(args, ['fmt']))
if foptions.is_verbose {
- eprintln('vfmt toolexe: $toolexe')
+ eprintln('vfmt toolexe: ${toolexe}')
eprintln('vfmt args: ' + os.args.str())
eprintln('vfmt env_vflags_and_os_args: ' + args.str())
eprintln('vfmt possible_files: ' + possible_files.str())
@@ -107,7 +107,7 @@ fn main() {
mut worker_command_array := cli_args_no_files.clone()
worker_command_array << ['-worker', util.quote_path(fpath)]
worker_cmd := worker_command_array.join(' ')
- foptions.vlog('vfmt worker_cmd: $worker_cmd')
+ foptions.vlog('vfmt worker_cmd: ${worker_cmd}')
worker_result := os.execute(worker_cmd)
// Guard against a possibly crashing worker process.
if worker_result.exit_code != 0 {
@@ -133,7 +133,7 @@ fn main() {
errors++
}
if errors > 0 {
- eprintln('Encountered a total of: $errors errors.')
+ eprintln('Encountered a total of: ${errors} errors.')
if foptions.is_noerror {
exit(0)
}
@@ -162,17 +162,17 @@ fn (foptions &FormatOptions) vlog(msg string) {
}
fn (foptions &FormatOptions) format_file(file string) {
- foptions.vlog('vfmt2 running fmt.fmt over file: $file')
+ foptions.vlog('vfmt2 running fmt.fmt over file: ${file}')
prefs, table := setup_preferences_and_table()
file_ast := parser.parse_file(file, table, .parse_comments, prefs)
// checker.new_checker(table, prefs).check(file_ast)
formatted_content := fmt.fmt(file_ast, table, prefs, foptions.is_debug)
file_name := os.file_name(file)
ulid := rand.ulid()
- vfmt_output_path := os.join_path(vtmp_folder, 'vfmt_${ulid}_$file_name')
+ vfmt_output_path := os.join_path(vtmp_folder, 'vfmt_${ulid}_${file_name}')
os.write_file(vfmt_output_path, formatted_content) or { panic(err) }
- foptions.vlog('fmt.fmt worked and $formatted_content.len bytes were written to $vfmt_output_path .')
- eprintln('$formatted_file_token$vfmt_output_path')
+ foptions.vlog('fmt.fmt worked and ${formatted_content.len} bytes were written to ${vfmt_output_path} .')
+ eprintln('${formatted_file_token}${vfmt_output_path}')
}
fn (foptions &FormatOptions) format_pipe() {
@@ -184,20 +184,20 @@ fn (foptions &FormatOptions) format_pipe() {
formatted_content := fmt.fmt(file_ast, table, prefs, foptions.is_debug, source_text: input_text)
print(formatted_content)
flush_stdout()
- foptions.vlog('fmt.fmt worked and $formatted_content.len bytes were written to stdout.')
+ foptions.vlog('fmt.fmt worked and ${formatted_content.len} bytes were written to stdout.')
}
fn print_compiler_options(compiler_params &pref.Preferences) {
eprintln(' os: ' + compiler_params.os.str())
- eprintln(' ccompiler: $compiler_params.ccompiler')
- eprintln(' path: $compiler_params.path ')
- eprintln(' out_name: $compiler_params.out_name ')
- eprintln(' vroot: $compiler_params.vroot ')
- eprintln('lookup_path: $compiler_params.lookup_path ')
- eprintln(' out_name: $compiler_params.out_name ')
- eprintln(' cflags: $compiler_params.cflags ')
- eprintln(' is_test: $compiler_params.is_test ')
- eprintln(' is_script: $compiler_params.is_script ')
+ eprintln(' ccompiler: ${compiler_params.ccompiler}')
+ eprintln(' path: ${compiler_params.path} ')
+ eprintln(' out_name: ${compiler_params.out_name} ')
+ eprintln(' vroot: ${compiler_params.vroot} ')
+ eprintln('lookup_path: ${compiler_params.lookup_path} ')
+ eprintln(' out_name: ${compiler_params.out_name} ')
+ eprintln(' cflags: ${compiler_params.cflags} ')
+ eprintln(' is_test: ${compiler_params.is_test} ')
+ eprintln(' is_script: ${compiler_params.is_script} ')
}
fn (mut foptions FormatOptions) find_diff_cmd() string {
@@ -218,11 +218,11 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa
return
}
fc := os.read_file(file) or {
- eprintln('File $file could not be read')
+ eprintln('File ${file} could not be read')
return
}
formatted_fc := os.read_file(formatted_file_path) or {
- eprintln('File $formatted_file_path could not be read')
+ eprintln('File ${formatted_file_path} could not be read')
return
}
is_formatted_different := fc != formatted_fc
@@ -231,7 +231,7 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa
return
}
diff_cmd := foptions.find_diff_cmd()
- foptions.vlog('Using diff command: $diff_cmd')
+ foptions.vlog('Using diff command: ${diff_cmd}')
diff := diff.color_compare_files(diff_cmd, file, formatted_file_path)
if diff.len > 0 {
println(diff)
@@ -242,19 +242,19 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa
if !is_formatted_different {
return
}
- println("$file is not vfmt'ed")
+ println("${file} is not vfmt'ed")
return error('')
}
if foptions.is_c {
if is_formatted_different {
- eprintln('File is not formatted: $file')
+ eprintln('File is not formatted: ${file}')
return error('')
}
return
}
if foptions.is_l {
if is_formatted_different {
- eprintln('File needs formatting: $file')
+ eprintln('File needs formatting: ${file}')
}
return
}
@@ -273,9 +273,9 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa
$if !windows {
os.chmod(file, int(perms_to_restore)) or { panic(err) }
}
- eprintln('Reformatted file: $file')
+ eprintln('Reformatted file: ${file}')
} else {
- eprintln('Already formatted file: $file')
+ eprintln('Already formatted file: ${file}')
}
return
}
@@ -285,9 +285,9 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa
fn (f FormatOptions) str() string {
return
- 'FormatOptions{ is_l: $f.is_l, is_w: $f.is_w, is_diff: $f.is_diff, is_verbose: $f.is_verbose,' +
- ' is_all: $f.is_all, is_worker: $f.is_worker, is_debug: $f.is_debug, is_noerror: $f.is_noerror,' +
- ' is_verify: $f.is_verify" }'
+ 'FormatOptions{ is_l: ${f.is_l}, is_w: ${f.is_w}, is_diff: ${f.is_diff}, is_verbose: ${f.is_verbose},' +
+ ' is_all: ${f.is_all}, is_worker: ${f.is_worker}, is_debug: ${f.is_debug}, is_noerror: ${f.is_noerror},' +
+ ' is_verify: ${f.is_verify}" }'
}
fn file_to_mod_name_and_is_module_file(file string) (string, bool) {
@@ -308,7 +308,7 @@ fn file_to_mod_name_and_is_module_file(file string) (string, bool) {
}
fn read_source_lines(file string) ?[]string {
- source_lines := os.read_lines(file) or { return error('can not read $file') }
+ source_lines := os.read_lines(file) or { return error('can not read ${file}') }
return source_lines
}
diff --git a/cmd/tools/vgret.v b/cmd/tools/vgret.v
index e322665fd5..1b6b6d3922 100644
--- a/cmd/tools/vgret.v
+++ b/cmd/tools/vgret.v
@@ -106,7 +106,7 @@ mut:
fn (co CaptureOptions) validate() ! {
if co.method !in supported_capture_methods {
- return error('capture method "$co.method" is not supported. Supported methods are: $supported_capture_methods')
+ return error('capture method "${co.method}" is not supported. Supported methods are: ${supported_capture_methods}')
}
}
@@ -129,7 +129,7 @@ mut:
}
fn (opt Options) verbose_execute(cmd string) os.Result {
- opt.verbose_eprintln('Running `$cmd`')
+ opt.verbose_eprintln('Running `${cmd}`')
return os.execute(cmd)
}
@@ -141,11 +141,11 @@ fn (opt Options) verbose_eprintln(msg string) {
fn main() {
if runtime_os !in supported_hosts {
- eprintln('$tool_name is currently only supported on $supported_hosts hosts')
+ eprintln('${tool_name} is currently only supported on ${supported_hosts} hosts')
exit(1)
}
if os.args.len == 1 {
- eprintln('Usage: $tool_name PATH \n$tool_description\n$tool_name -h for more help...')
+ eprintln('Usage: ${tool_name} PATH \n${tool_description}\n${tool_name} -h for more help...')
exit(1)
}
@@ -194,12 +194,12 @@ fn main() {
all_paths_in_use := [path, gen_in_path, target_path]
for path_in_use in all_paths_in_use {
if !os.is_dir(path_in_use) {
- eprintln('`$path_in_use` is not a directory')
+ eprintln('`${path_in_use}` is not a directory')
exit(1)
}
}
if path == target_path || gen_in_path == target_path || gen_in_path == path {
- eprintln('Compare paths can not be the same directory `$path`/`$target_path`/`$gen_in_path`')
+ eprintln('Compare paths can not be the same directory `${path}`/`${target_path}`/`${gen_in_path}`')
exit(1)
}
compare_screenshots(opt, gen_in_path, target_path)!
@@ -212,7 +212,7 @@ fn generate_screenshots(mut opt Options, output_path string) ! {
dst_path := output_path.trim_right('/')
if !os.is_dir(path) {
- return error('`$path` is not a directory')
+ return error('`${path}` is not a directory')
}
for mut app_config in opt.config.apps {
@@ -227,29 +227,29 @@ fn generate_screenshots(mut opt Options, output_path string) ! {
}
if app_config.capture.method == 'gg_record' {
- opt.verbose_eprintln('Compiling shaders (if needed) for `$file`')
+ opt.verbose_eprintln('Compiling shaders (if needed) for `${file}`')
sh_result := opt.verbose_execute('${os.quoted_path(v_exe)} shader ${os.quoted_path(app_path)}')
if sh_result.exit_code != 0 {
- opt.verbose_eprintln('Skipping shader compile for `$file` v shader failed with:\n$sh_result.output')
+ opt.verbose_eprintln('Skipping shader compile for `${file}` v shader failed with:\n${sh_result.output}')
continue
}
}
if !os.exists(dst_path) {
- opt.verbose_eprintln('Creating output path `$dst_path`')
- os.mkdir_all(dst_path) or { return error('Failed making directory `$dst_path`') }
+ opt.verbose_eprintln('Creating output path `${dst_path}`')
+ os.mkdir_all(dst_path) or { return error('Failed making directory `${dst_path}`') }
}
screenshot_path := os.join_path(dst_path, rel_out_path)
if !os.exists(screenshot_path) {
os.mkdir_all(screenshot_path) or {
- return error('Failed making screenshot path `$screenshot_path`')
+ return error('Failed making screenshot path `${screenshot_path}`')
}
}
app_config.screenshots_path = screenshot_path
app_config.screenshots = take_screenshots(opt, app_config) or {
- return error('Failed taking screenshots of `$app_path`:\n$err.msg()')
+ return error('Failed taking screenshots of `${app_path}`:\n${err.msg()}')
}
}
}
@@ -259,28 +259,28 @@ fn compare_screenshots(opt Options, output_path string, target_path string) ! {
mut warns := map[string]string{}
for app_config in opt.config.apps {
screenshots := app_config.screenshots
- opt.verbose_eprintln('Comparing $screenshots.len screenshots in `$output_path` with `$target_path`')
+ opt.verbose_eprintln('Comparing ${screenshots.len} screenshots in `${output_path}` with `${target_path}`')
for screenshot in screenshots {
relative_screenshot := screenshot.all_after(output_path + os.path_separator)
src := screenshot
target := os.join_path(target_path, relative_screenshot)
- opt.verbose_eprintln('Comparing `$src` with `$target` with $app_config.compare.method')
+ opt.verbose_eprintln('Comparing `${src}` with `${target}` with ${app_config.compare.method}')
if app_config.compare.method == 'idiff' {
if idiff_exe == '' {
- return error('$tool_name need the `idiff` tool installed. It can be installed on Ubuntu with `sudo apt install openimageio-tools`')
+ return error('${tool_name} need the `idiff` tool installed. It can be installed on Ubuntu with `sudo apt install openimageio-tools`')
}
diff_file := os.join_path(os.vtmp_dir(), os.file_name(src).all_before_last('.') +
'.diff.tif')
flags := app_config.compare.flags.join(' ')
- diff_cmd := '${os.quoted_path(idiff_exe)} $flags -abs -od -o ${os.quoted_path(diff_file)} -abs ${os.quoted_path(src)} ${os.quoted_path(target)}'
+ diff_cmd := '${os.quoted_path(idiff_exe)} ${flags} -abs -od -o ${os.quoted_path(diff_file)} -abs ${os.quoted_path(src)} ${os.quoted_path(target)}'
result := opt.verbose_execute(diff_cmd)
if result.exit_code == 0 {
- opt.verbose_eprintln('OUTPUT: \n$result.output')
+ opt.verbose_eprintln('OUTPUT: \n${result.output}')
}
if result.exit_code != 0 {
- eprintln('OUTPUT: \n$result.output')
+ eprintln('OUTPUT: \n${result.output}')
if result.exit_code == 1 {
warns[src] = target
} else {
@@ -295,30 +295,30 @@ fn compare_screenshots(opt Options, output_path string, target_path string) ! {
eprintln('--- WARNINGS ---')
eprintln('The following files had warnings when compared to their targets')
for warn_src, warn_target in warns {
- eprintln('$warn_src ~= $warn_target')
+ eprintln('${warn_src} ~= ${warn_target}')
}
}
if fails.len > 0 {
eprintln('--- ERRORS ---')
eprintln('The following files did not match their targets')
for fail_src, fail_target in fails {
- eprintln('$fail_src != $fail_target')
+ eprintln('${fail_src} != ${fail_target}')
}
first := fails.keys()[0]
fail_copy := os.join_path(os.vtmp_dir(), 'fail.' + first.all_after_last('.'))
os.cp(first, fail_copy)!
- eprintln('First failed file `$first` is copied to `$fail_copy`')
+ eprintln('First failed file `${first}` is copied to `${fail_copy}`')
diff_file := os.join_path(os.vtmp_dir(), os.file_name(first).all_before_last('.') +
'.diff.tif')
diff_copy := os.join_path(os.vtmp_dir(), 'diff.tif')
if os.is_file(diff_file) {
os.cp(diff_file, diff_copy)!
- eprintln('First failed diff file `$diff_file` is copied to `$diff_copy`')
- eprintln('Removing alpha channel from $diff_copy ...')
+ eprintln('First failed diff file `${diff_file}` is copied to `${diff_copy}`')
+ eprintln('Removing alpha channel from ${diff_copy} ...')
final_fail_result_file := os.join_path(os.vtmp_dir(), 'diff.png')
opt.verbose_execute('convert ${os.quoted_path(diff_copy)} -alpha off ${os.quoted_path(final_fail_result_file)}')
- eprintln('Final diff file: `$final_fail_result_file`')
+ eprintln('Final diff file: `${final_fail_result_file}`')
}
exit(1)
}
@@ -327,26 +327,26 @@ fn compare_screenshots(opt Options, output_path string, target_path string) ! {
fn take_screenshots(opt Options, app AppConfig) ![]string {
out_path := app.screenshots_path
if !opt.compare_only {
- opt.verbose_eprintln('Taking screenshot(s) of `$app.path` to `$out_path`')
+ opt.verbose_eprintln('Taking screenshot(s) of `${app.path}` to `${out_path}`')
match app.capture.method {
'gg_record' {
for k, v in app.capture.env {
rv := v.replace('\$OUT_PATH', out_path)
- opt.verbose_eprintln('Setting ENV `$k` = $rv ...')
- os.setenv('$k', rv, true)
+ opt.verbose_eprintln('Setting ENV `${k}` = ${rv} ...')
+ os.setenv('${k}', rv, true)
}
flags := app.capture.flags.join(' ')
- result := opt.verbose_execute('${os.quoted_path(v_exe)} $flags -d gg_record run ${os.quoted_path(app.abs_path)}')
+ result := opt.verbose_execute('${os.quoted_path(v_exe)} ${flags} -d gg_record run ${os.quoted_path(app.abs_path)}')
if result.exit_code != 0 {
- return error('Failed taking screenshot of `$app.abs_path`:\n$result.output')
+ return error('Failed taking screenshot of `${app.abs_path}`:\n${result.output}')
}
}
'generic_screenshot' {
for k, v in app.capture.env {
rv := v.replace('\$OUT_PATH', out_path)
- opt.verbose_eprintln('Setting ENV `$k` = $rv ...')
- os.setenv('$k', rv, true)
+ opt.verbose_eprintln('Setting ENV `${k}` = ${rv} ...')
+ os.setenv('${k}', rv, true)
}
existing_screenshots := get_app_screenshots(out_path, app)!
@@ -354,9 +354,9 @@ fn take_screenshots(opt Options, app AppConfig) ![]string {
flags := app.capture.flags
if !os.exists(app.abs_path) {
- return error('Failed starting app `$app.abs_path`, the path does not exist')
+ return error('Failed starting app `${app.abs_path}`, the path does not exist')
}
- opt.verbose_eprintln('Running $app.abs_path $flags')
+ opt.verbose_eprintln('Running ${app.abs_path} ${flags}')
mut p_app := os.new_process(app.abs_path)
p_app.set_args(flags)
p_app.set_redirect_stdio()
@@ -364,56 +364,56 @@ fn take_screenshots(opt Options, app AppConfig) ![]string {
if !p_app.is_alive() {
output := p_app.stdout_read() + '\n' + p_app.stderr_read()
- return error('Failed starting app `$app.abs_path` (before screenshot):\n$output')
+ return error('Failed starting app `${app.abs_path}` (before screenshot):\n${output}')
}
if app.capture.wait_ms > 0 {
- opt.verbose_eprintln('Waiting $app.capture.wait_ms before capturing')
+ opt.verbose_eprintln('Waiting ${app.capture.wait_ms} before capturing')
time.sleep(app.capture.wait_ms * time.millisecond)
}
if !p_app.is_alive() {
output := p_app.stdout_slurp() + '\n' + p_app.stderr_slurp()
- return error('App `$app.abs_path` exited ($p_app.code) before a screenshot could be captured:\n$output')
+ return error('App `${app.abs_path}` exited (${p_app.code}) before a screenshot could be captured:\n${output}')
}
// Use ImageMagick's `import` tool to take the screenshot
out_file := os.join_path(out_path, os.file_name(app.path) +
'_screenshot_${existing_screenshots.len:02}.png')
- result := opt.verbose_execute('import -window root "$out_file"')
+ result := opt.verbose_execute('import -window root "${out_file}"')
if result.exit_code != 0 {
p_app.signal_kill()
- return error('Failed taking screenshot of `$app.abs_path` to "$out_file":\n$result.output')
+ return error('Failed taking screenshot of `${app.abs_path}` to "${out_file}":\n${result.output}')
}
// When using regions the capture is split up into regions.len
// And name the output based on each region's properties
if app.capture.regions.len > 0 {
for region in app.capture.regions {
- region_id := 'x${region.x}y${region.y}w${region.width}h$region.height'
+ region_id := 'x${region.x}y${region.y}w${region.width}h${region.height}'
region_out_file := os.join_path(out_path, os.file_name(app.path) +
'_screenshot_${existing_screenshots.len:02}_region_${region_id}.png')
// If the region is empty (w, h == 0, 0) infer a full screenshot,
// This allows for capturing both regions *and* the complete screen
if region.is_empty() {
os.cp(out_file, region_out_file) or {
- return error('Failed copying original screenshot "$out_file" to region file "$region_out_file"')
+ return error('Failed copying original screenshot "${out_file}" to region file "${region_out_file}"')
}
continue
}
- extract_result := opt.verbose_execute('convert -extract ${region.width}x$region.height+$region.x+$region.y "$out_file" "$region_out_file"')
+ extract_result := opt.verbose_execute('convert -extract ${region.width}x${region.height}+${region.x}+${region.y} "${out_file}" "${region_out_file}"')
if extract_result.exit_code != 0 {
p_app.signal_kill()
- return error('Failed extracting region $region_id from screenshot of `$app.abs_path` to "$region_out_file":\n$result.output')
+ return error('Failed extracting region ${region_id} from screenshot of `${app.abs_path}` to "${region_out_file}":\n${result.output}')
}
}
// When done, remove the original file that was split into regions.
- opt.verbose_eprintln('Removing "$out_file" (region mode)')
+ opt.verbose_eprintln('Removing "${out_file}" (region mode)')
os.rm(out_file) or {
- return error('Failed removing original screenshot "$out_file"')
+ return error('Failed removing original screenshot "${out_file}"')
}
}
p_app.signal_kill()
}
else {
- return error('Unsupported capture method "$app.capture.method"')
+ return error('Unsupported capture method "${app.capture.method}"')
}
}
}
@@ -422,7 +422,7 @@ fn take_screenshots(opt Options, app AppConfig) ![]string {
fn get_app_screenshots(path string, app AppConfig) ![]string {
mut screenshots := []string{}
- shots := os.ls(path) or { return error('Failed listing dir `$path`') }
+ shots := os.ls(path) or { return error('Failed listing dir `${path}`') }
for shot in shots {
if shot.starts_with(os.file_name(app.path).all_before_last('.')) {
screenshots << os.join_path(path, shot)
diff --git a/cmd/tools/vls.v b/cmd/tools/vls.v
index 2694621d89..eea79927df 100644
--- a/cmd/tools/vls.v
+++ b/cmd/tools/vls.v
@@ -226,7 +226,7 @@ fn (upd VlsUpdater) download_prebuilt() ! {
}
fn (upd VlsUpdater) print_new_vls_version(new_vls_exec_path string) {
- exec_version := os.execute('$new_vls_exec_path --version')
+ exec_version := os.execute('${new_vls_exec_path} --version')
if exec_version.exit_code == 0 {
upd.log('VLS was updated to version: ${exec_version.output.all_after('vls version ').trim_space()}')
}
@@ -242,13 +242,13 @@ fn (upd VlsUpdater) compile_from_source() ! {
if !os.exists(vls_src_folder) {
upd.log('Cloning VLS repo...')
- clone_result := os.execute('$git clone https://github.com/nedpals/vls $vls_src_folder')
+ clone_result := os.execute('${git} clone https://github.com/nedpals/vls ${vls_src_folder}')
if clone_result.exit_code != 0 {
- return error('Failed to build VLS from source. Reason: $clone_result.output')
+ return error('Failed to build VLS from source. Reason: ${clone_result.output}')
}
} else {
upd.log('Updating VLS repo...')
- pull_result := os.execute('$git -C $vls_src_folder pull')
+ pull_result := os.execute('${git} -C ${vls_src_folder} pull')
if !upd.is_force && pull_result.output.trim_space() == 'Already up to date.' {
upd.log("VLS was already updated to it's latest version.")
return
@@ -271,7 +271,7 @@ fn (upd VlsUpdater) compile_from_source() ! {
compile_result := os.execute('v run ${os.join_path(vls_src_folder, 'build.vsh')} ${possible_compilers[selected_compiler_idx]}')
if compile_result.exit_code != 0 {
- return error('Cannot compile VLS from source: $compile_result.output')
+ return error('Cannot compile VLS from source: ${compile_result.output}')
}
exec_path := os.join_path(vls_src_folder, 'bin', 'vls')
@@ -372,10 +372,10 @@ fn (mut upd VlsUpdater) parse(mut fp flag.FlagParser) ! {
fn (upd VlsUpdater) log(msg string) {
match upd.output {
.text {
- println('> $msg')
+ println('> ${msg}')
}
.json {
- print('{"message":"$msg"}')
+ print('{"message":"${msg}"}')
flush_stdout()
}
.silent {}
@@ -397,7 +397,7 @@ fn (upd VlsUpdater) error_details(err IError) string {
the specified path exists and is a valid executable.
- If you have an existing installation of VLS, be sure
to remove "vls.config.json" and "bin" located inside
- "$vls_dir_shortened" and re-install.
+ "${vls_dir_shortened}" and re-install.
If none of the options listed have solved your issue,
please report it at https://github.com/vlang/v/issues
@@ -413,7 +413,7 @@ fn (upd VlsUpdater) error_details(err IError) string {
fn (upd VlsUpdater) cli_error(err IError) {
match upd.output {
.text {
- eprintln('v ls error: $err.msg() ($err.code())')
+ eprintln('v ls error: ${err.msg()} (${err.code()})')
if err !is none {
eprintln(upd.error_details(err))
}
@@ -421,7 +421,7 @@ fn (upd VlsUpdater) cli_error(err IError) {
print_backtrace()
}
.json {
- print('{"error":{"message":${json.encode(err.msg())},"code":"$err.code()","details":${json.encode(upd.error_details(err).trim_space())}}}')
+ print('{"error":{"message":${json.encode(err.msg())},"code":"${err.code()}","details":${json.encode(upd.error_details(err).trim_space())}}}')
flush_stdout()
}
.silent {}
@@ -433,7 +433,7 @@ fn (upd VlsUpdater) check_installation() {
if upd.ls_path.len == 0 {
upd.log('Language server is not installed')
} else {
- upd.log('Language server is installed at: $upd.ls_path')
+ upd.log('Language server is installed at: ${upd.ls_path}')
}
}
@@ -457,7 +457,7 @@ fn (upd VlsUpdater) run(fp flag.FlagParser) ! {
}
}
} else if upd.pass_to_ls {
- exit(os.system('$upd.ls_path ${upd.args.join(' ')}'))
+ exit(os.system('${upd.ls_path} ${upd.args.join(' ')}'))
} else if upd.is_help {
println(fp.usage())
exit(0)
diff --git a/cmd/tools/vmissdoc.v b/cmd/tools/vmissdoc.v
index a2849a0228..eb7351554a 100644
--- a/cmd/tools/vmissdoc.v
+++ b/cmd/tools/vmissdoc.v
@@ -110,12 +110,12 @@ fn (opt &Options) report_undocumented_functions_in_path(path string) int {
fn (opt &Options) report_undocumented_functions(list []UndocumentedFN) {
if list.len > 0 {
for undocumented_fn in list {
- mut line_numbers := '$undocumented_fn.line:0:'
+ mut line_numbers := '${undocumented_fn.line}:0:'
if opt.no_line_numbers {
line_numbers = ''
}
tags_str := if opt.collect_tags && undocumented_fn.tags.len > 0 {
- '$undocumented_fn.tags'
+ '${undocumented_fn.tags}'
} else {
''
}
@@ -126,7 +126,7 @@ fn (opt &Options) report_undocumented_functions(list []UndocumentedFN) {
os.real_path(file)
}
if opt.deprecated {
- println('$ofile:$line_numbers$undocumented_fn.signature $tags_str')
+ println('${ofile}:${line_numbers}${undocumented_fn.signature} ${tags_str}')
} else {
mut has_deprecation_tag := false
for tag in undocumented_fn.tags {
@@ -136,7 +136,7 @@ fn (opt &Options) report_undocumented_functions(list []UndocumentedFN) {
}
}
if !has_deprecation_tag {
- println('$ofile:$line_numbers$undocumented_fn.signature $tags_str')
+ println('${ofile}:${line_numbers}${undocumented_fn.signature} ${tags_str}')
}
}
}
@@ -247,7 +247,7 @@ fn main() {
}
if opt.additional_args.len == 0 {
println(fp.usage())
- eprintln('Error: $tool_name is missing PATH input')
+ eprintln('Error: ${tool_name} is missing PATH input')
exit(1)
}
// Allow short-long versions to prevent false positive situations, should
@@ -262,7 +262,7 @@ fn main() {
if opt.diff {
if opt.additional_args.len < 2 {
println(fp.usage())
- eprintln('Error: $tool_name --diff needs two valid PATH inputs')
+ eprintln('Error: ${tool_name} --diff needs two valid PATH inputs')
exit(1)
}
path_old := opt.additional_args[0]
@@ -270,7 +270,7 @@ fn main() {
if !(os.is_file(path_old) || os.is_dir(path_old)) || !(os.is_file(path_new)
|| os.is_dir(path_new)) {
println(fp.usage())
- eprintln('Error: $tool_name --diff needs two valid PATH inputs')
+ eprintln('Error: ${tool_name} --diff needs two valid PATH inputs')
exit(1)
}
list := opt.diff_undocumented_functions_in_paths(path_old, path_new)
diff --git a/cmd/tools/vpm.v b/cmd/tools/vpm.v
index f639b72e57..39de1d7735 100644
--- a/cmd/tools/vpm.v
+++ b/cmd/tools/vpm.v
@@ -66,7 +66,7 @@ fn main() {
// args are: vpm [options] SUBCOMMAND module names
params := cmdline.only_non_options(os.args[1..])
options := cmdline.only_options(os.args[1..])
- verbose_println('cli params: $params')
+ verbose_println('cli params: ${params}')
if params.len < 1 {
vpm_help()
exit(5)
@@ -126,10 +126,10 @@ fn main() {
vpm_show(module_names)
}
else {
- eprintln('Error: you tried to run "v $vpm_command"')
+ eprintln('Error: you tried to run "v ${vpm_command}"')
eprintln('... but the v package management tool vpm only knows about these commands:')
for validcmd in valid_vpm_commands {
- eprintln(' v $validcmd')
+ eprintln(' v ${validcmd}')
}
exit(3)
}
@@ -156,7 +156,7 @@ fn vpm_search(keywords []string) {
continue
}
if index == 0 {
- println('Search results for "$joined":\n')
+ println('Search results for "${joined}":\n')
}
index++
mut parts := mod.split('.')
@@ -168,17 +168,17 @@ fn vpm_search(keywords []string) {
parts[0] = ' by ${parts[0]} '
}
installed := if mod in installed_modules { ' (installed)' } else { '' }
- println('${index}. ${parts[1]}${parts[0]}[$mod]$installed')
+ println('${index}. ${parts[1]}${parts[0]}[${mod}]${installed}')
break
}
}
if index == 0 {
vexe := os.getenv('VEXE')
vroot := os.real_path(os.dir(vexe))
- mut messages := ['No module(s) found for `$joined` .']
+ mut messages := ['No module(s) found for `${joined}` .']
for vlibmod in search_keys {
if os.is_dir(os.join_path(vroot, 'vlib', vlibmod)) {
- messages << 'There is already an existing "$vlibmod" module in vlib, so you can just `import $vlibmod` .'
+ messages << 'There is already an existing "${vlibmod}" module in vlib, so you can just `import ${vlibmod}` .'
}
}
for m in messages {
@@ -195,7 +195,7 @@ fn vpm_install_from_vpm(module_names []string) {
name := n.trim_space().replace('_', '-')
mod := get_module_meta_info(name) or {
errors++
- eprintln('Errors while retrieving meta data for module $name:')
+ eprintln('Errors while retrieving meta data for module ${name}:')
eprintln(err)
continue
}
@@ -205,12 +205,12 @@ fn vpm_install_from_vpm(module_names []string) {
}
if vcs !in supported_vcs_systems {
errors++
- eprintln('Skipping module "$name", since it uses an unsupported VCS {$vcs} .')
+ eprintln('Skipping module "${name}", since it uses an unsupported VCS {${vcs}} .')
continue
}
if !ensure_vcs_is_installed(vcs) {
errors++
- eprintln('VPM needs `$vcs` to be installed.')
+ eprintln('VPM needs `${vcs}` to be installed.')
continue
}
//
@@ -219,14 +219,14 @@ fn vpm_install_from_vpm(module_names []string) {
vpm_update([name])
continue
}
- println('Installing module "$name" from "$mod.url" to "$minfo.final_module_path" ...')
+ println('Installing module "${name}" from "${mod.url}" to "${minfo.final_module_path}" ...')
vcs_install_cmd := supported_vcs_install_cmds[vcs]
- cmd := '$vcs_install_cmd "$mod.url" "$minfo.final_module_path"'
- verbose_println(' command: $cmd')
+ cmd := '${vcs_install_cmd} "${mod.url}" "${minfo.final_module_path}"'
+ verbose_println(' command: ${cmd}')
cmdres := os.execute(cmd)
if cmdres.exit_code != 0 {
errors++
- eprintln('Failed installing module "$name" to "$minfo.final_module_path" .')
+ eprintln('Failed installing module "${name}" to "${minfo.final_module_path}" .')
print_failed_cmd(cmd, cmdres)
continue
}
@@ -238,8 +238,8 @@ fn vpm_install_from_vpm(module_names []string) {
}
fn print_failed_cmd(cmd string, cmdres os.Result) {
- verbose_println('Failed command: $cmd')
- verbose_println('Failed command output:\n$cmdres.output')
+ verbose_println('Failed command: ${cmd}')
+ verbose_println('Failed command output:\n${cmdres.output}')
}
fn ensure_vcs_is_installed(vcs string) bool {
@@ -260,7 +260,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
first_cut_pos := url.last_index('/') or {
errors++
- eprintln('Errors while retrieving name for module "$url" :')
+ eprintln('Errors while retrieving name for module "${url}" :')
eprintln(err)
continue
}
@@ -269,7 +269,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
second_cut_pos := url.substr(0, first_cut_pos).last_index('/') or {
errors++
- eprintln('Errors while retrieving name for module "$url" :')
+ eprintln('Errors while retrieving name for module "${url}" :')
eprintln(err)
continue
}
@@ -284,17 +284,17 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
}
if !ensure_vcs_is_installed(vcs_key) {
errors++
- eprintln('VPM needs `$vcs_key` to be installed.')
+ eprintln('VPM needs `${vcs_key}` to be installed.')
continue
}
- println('Installing module "$name" from "$url" to "$final_module_path" ...')
+ println('Installing module "${name}" from "${url}" to "${final_module_path}" ...')
vcs_install_cmd := supported_vcs_install_cmds[vcs_key]
- cmd := '$vcs_install_cmd "$url" "$final_module_path"'
- verbose_println(' command: $cmd')
+ cmd := '${vcs_install_cmd} "${url}" "${final_module_path}"'
+ verbose_println(' command: ${cmd}')
cmdres := os.execute(cmd)
if cmdres.exit_code != 0 {
errors++
- eprintln('Failed installing module "$name" to "$final_module_path" .')
+ eprintln('Failed installing module "${name}" to "${final_module_path}" .')
print_failed_cmd(cmd, cmdres)
continue
}
@@ -306,30 +306,30 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
return
}
minfo := mod_name_info(vmod.name)
- println('Relocating module from "$name" to "$vmod.name" ( "$minfo.final_module_path" ) ...')
+ println('Relocating module from "${name}" to "${vmod.name}" ( "${minfo.final_module_path}" ) ...')
if os.exists(minfo.final_module_path) {
- eprintln('Warning module "$minfo.final_module_path" already exsits!')
- eprintln('Removing module "$minfo.final_module_path" ...')
- eprintln('Warning module "${minfo.final_module_path}" already exists!')
+ eprintln('Removing module "${minfo.final_module_path}" ...')
os.rmdir_all(minfo.final_module_path) or {
errors++
- println('Errors while removing "$minfo.final_module_path" :')
+ println('Errors while removing "${minfo.final_module_path}" :')
println(err)
continue
}
}
os.mv(final_module_path, minfo.final_module_path) or {
errors++
- eprintln('Errors while relocating module "$name" :')
+ eprintln('Errors while relocating module "${name}" :')
eprintln(err)
os.rmdir_all(final_module_path) or {
errors++
- eprintln('Errors while removing "$final_module_path" :')
+ eprintln('Errors while removing "${final_module_path}" :')
eprintln(err)
continue
}
continue
}
- println('Module "$name" relocated to "$vmod.name" successfully.')
+ println('Module "${name}" relocated to "${vmod.name}" successfully.')
final_module_path = minfo.final_module_path
name = vmod.name
}
@@ -387,23 +387,23 @@ fn vpm_update(m []string) {
zname := url_to_module_name(modulename)
final_module_path := valid_final_path_of_existing_module(modulename) or { continue }
os.chdir(final_module_path) or {}
- println('Updating module "$zname" in "$final_module_path" ...')
+ println('Updating module "${zname}" in "${final_module_path}" ...')
vcs := vcs_used_in_dir(final_module_path) or { continue }
if !ensure_vcs_is_installed(vcs[0]) {
errors++
- println('VPM needs `$vcs` to be installed.')
+ println('VPM needs `${vcs}` to be installed.')
continue
}
vcs_cmd := supported_vcs_update_cmds[vcs[0]]
- verbose_println(' command: $vcs_cmd')
- vcs_res := os.execute('$vcs_cmd')
+ verbose_println(' command: ${vcs_cmd}')
+ vcs_res := os.execute('${vcs_cmd}')
if vcs_res.exit_code != 0 {
errors++
- println('Failed updating module "$zname" in "$final_module_path" .')
+ println('Failed updating module "${zname}" in "${final_module_path}" .')
print_failed_cmd(vcs_cmd, vcs_res)
continue
} else {
- verbose_println(' $vcs_res.output.trim_space()')
+ verbose_println(' ${vcs_res.output.trim_space()}')
}
resolve_dependencies(modulename, final_module_path, module_names)
}
@@ -424,9 +424,9 @@ fn get_outdated() ?[]string {
for step in vcs_cmd_steps {
res := os.execute(step)
if res.exit_code < 0 {
- verbose_println('Error command: $step')
- verbose_println('Error details:\n$res.output')
- return error('Error while checking latest commits for "$name" .')
+ verbose_println('Error command: ${step}')
+ verbose_println('Error details:\n${res.output}')
+ return error('Error while checking latest commits for "${name}" .')
}
if vcs[0] == 'hg' {
if res.exit_code == 1 {
@@ -457,7 +457,7 @@ fn vpm_outdated() {
if outdated.len > 0 {
eprintln('Outdated modules:')
for m in outdated {
- eprintln(' $m')
+ eprintln(' ${m}')
}
} else {
println('Modules are up to date.')
@@ -486,10 +486,10 @@ fn vpm_remove(module_names []string) {
}
for name in module_names {
final_module_path := valid_final_path_of_existing_module(name) or { continue }
- eprintln('Removing module "$name" ...')
- verbose_println('removing folder $final_module_path')
+ eprintln('Removing module "${name}" ...')
+ verbose_println('removing folder ${final_module_path}')
os.rmdir_all(final_module_path) or {
- verbose_println('error while removing "$final_module_path": $err.msg()')
+ verbose_println('error while removing "${final_module_path}": ${err.msg()}')
}
// delete author directory if it is empty
author := name.split('.')[0]
@@ -498,9 +498,9 @@ fn vpm_remove(module_names []string) {
continue
}
if os.is_dir_empty(author_dir) {
- verbose_println('removing author folder $author_dir')
+ verbose_println('removing author folder ${author_dir}')
os.rmdir(author_dir) or {
- verbose_println('error while removing "$author_dir": $err.msg()')
+ verbose_println('error while removing "${author_dir}": ${err.msg()}')
}
}
}
@@ -510,15 +510,15 @@ fn valid_final_path_of_existing_module(modulename string) ?string {
name := if mod := get_mod_by_url(modulename) { mod.name } else { modulename }
minfo := mod_name_info(name)
if !os.exists(minfo.final_module_path) {
- eprintln('No module with name "$minfo.mname_normalised" exists at $minfo.final_module_path')
+ eprintln('No module with name "${minfo.mname_normalised}" exists at ${minfo.final_module_path}')
return none
}
if !os.is_dir(minfo.final_module_path) {
- eprintln('Skipping "$minfo.final_module_path", since it is not a folder.')
+ eprintln('Skipping "${minfo.final_module_path}", since it is not a folder.')
return none
}
vcs_used_in_dir(minfo.final_module_path) or {
- eprintln('Skipping "$minfo.final_module_path", since it does not use a supported vcs.')
+ eprintln('Skipping "${minfo.final_module_path}", since it does not use a supported vcs.')
return none
}
return minfo.final_module_path
@@ -526,7 +526,7 @@ fn valid_final_path_of_existing_module(modulename string) ?string {
fn ensure_vmodules_dir_exist() {
if !os.is_dir(settings.vmodules_path) {
- println('Creating "$settings.vmodules_path/" ...')
+ println('Creating "${settings.vmodules_path}/" ...')
os.mkdir(settings.vmodules_path) or { panic(err) }
}
}
@@ -566,7 +566,7 @@ fn get_installed_modules() []string {
mods := os.ls(adir) or { continue }
for m in mods {
vcs_used_in_dir(os.join_path(adir, m)) or { continue }
- modules << '${author}.$m'
+ modules << '${author}.${m}'
}
}
return modules
@@ -601,7 +601,7 @@ fn get_all_modules() []string {
url := get_working_server_url()
r := http.get(url) or { panic(err) }
if r.status_code != 200 {
- eprintln('Failed to search vpm.vlang.io. Status code: $r.status_code')
+ eprintln('Failed to search vpm.vlang.io. Status code: ${r.status_code}')
exit(1)
}
s := r.body
@@ -656,14 +656,14 @@ fn resolve_dependencies(name string, module_path string, module_names []string)
}
}
if deps.len > 0 {
- println('Resolving $deps.len dependencies for module "$name" ...')
- verbose_println('Found dependencies: $deps')
+ println('Resolving ${deps.len} dependencies for module "${name}" ...')
+ verbose_println('Found dependencies: ${deps}')
vpm_install(deps, Source.vpm)
}
}
fn parse_vmod(data string) !Vmod {
- manifest := vmod.decode(data) or { return error('Parsing v.mod file failed, $err') }
+ manifest := vmod.decode(data) or { return error('Parsing v.mod file failed, ${err}') }
mut vmod := Vmod{}
vmod.name = manifest.name
vmod.version = manifest.version
@@ -678,9 +678,9 @@ fn get_working_server_url() string {
vpm_server_urls
}
for url in server_urls {
- verbose_println('Trying server url: $url')
+ verbose_println('Trying server url: ${url}')
http.head(url) or {
- verbose_println(' $url failed.')
+ verbose_println(' ${url} failed.')
continue
}
return url
@@ -720,7 +720,7 @@ fn verbose_println(s string) {
fn get_mod_by_url(name string) ?Mod {
if purl := urllib.parse(name) {
- verbose_println('purl: $purl')
+ verbose_println('purl: ${purl}')
mod := Mod{
name: purl.path.trim_left('/').trim_right('/').replace('/', '.')
url: name
@@ -728,7 +728,7 @@ fn get_mod_by_url(name string) ?Mod {
verbose_println(mod.str())
return mod
}
- return error('invalid url: $name')
+ return error('invalid url: ${name}')
}
fn get_module_meta_info(name string) ?Mod {
@@ -738,19 +738,19 @@ fn get_module_meta_info(name string) ?Mod {
mut errors := []string{}
for server_url in vpm_server_urls {
- modurl := server_url + '/jsmod/$name'
- verbose_println('Retrieving module metadata from: "$modurl" ...')
+ modurl := server_url + '/jsmod/${name}'
+ verbose_println('Retrieving module metadata from: "${modurl}" ...')
r := http.get(modurl) or {
- errors << 'Http server did not respond to our request for "$modurl" .'
- errors << 'Error details: $err'
+ errors << 'Http server did not respond to our request for "${modurl}" .'
+ errors << 'Error details: ${err}'
continue
}
if r.status_code == 404 || r.body.trim_space() == '404' {
- errors << 'Skipping module "$name", since "$server_url" reported that "$name" does not exist.'
+ errors << 'Skipping module "${name}", since "${server_url}" reported that "${name}" does not exist.'
continue
}
if r.status_code != 200 {
- errors << 'Skipping module "$name", since "$server_url" responded with $r.status_code http status code. Please try again later.'
+ errors << 'Skipping module "${name}", since "${server_url}" responded with ${r.status_code} http status code. Please try again later.'
continue
}
s := r.body
@@ -760,11 +760,11 @@ fn get_module_meta_info(name string) ?Mod {
continue
}
mod := json.decode(Mod, s) or {
- errors << 'Skipping module "$name", since its information is not in json format.'
+ errors << 'Skipping module "${name}", since its information is not in json format.'
continue
}
if '' == mod.url || '' == mod.name {
- errors << 'Skipping module "$name", since it is missing name or url information.'
+ errors << 'Skipping module "${name}", since it is missing name or url information.'
continue
}
return mod
@@ -778,9 +778,9 @@ fn vpm_show(module_names []string) {
if module_name !in installed_modules {
module_meta_info := get_module_meta_info(module_name) or { continue }
print('
-Name: $module_meta_info.name
-Homepage: $module_meta_info.url
-Downloads: $module_meta_info.nr_downloads
+Name: ${module_meta_info.name}
+Homepage: ${module_meta_info.url}
+Downloads: ${module_meta_info.nr_downloads}
Installed: False
--------
')
@@ -788,13 +788,13 @@ Installed: False
}
path := os.join_path(os.vmodules_dir(), module_name.replace('.', os.path_separator))
mod := vmod.from_file(os.join_path(path, 'v.mod')) or { continue }
- print('Name: $mod.name
-Version: $mod.version
-Description: $mod.description
-Homepage: $mod.repo_url
-Author: $mod.author
-License: $mod.license
-Location: $path
+ print('Name: ${mod.name}
+Version: ${mod.version}
+Description: ${mod.description}
+Homepage: ${mod.repo_url}
+Author: ${mod.author}
+License: ${mod.license}
+Location: ${path}
Requires: ${mod.dependencies.join(', ')}
--------
')
diff --git a/cmd/tools/vrepl.v b/cmd/tools/vrepl.v
index 0dbb921593..370d96b1e9 100644
--- a/cmd/tools/vrepl.v
+++ b/cmd/tools/vrepl.v
@@ -107,7 +107,7 @@ fn (mut r Repl) checks() bool {
fn (r &Repl) function_call(line string) (bool, FnType) {
for function in r.functions_name {
- is_function_definition := line.replace(' ', '').starts_with('$function:=')
+ is_function_definition := line.replace(' ', '').starts_with('${function}:=')
if line.starts_with(function) && !is_function_definition {
// TODO(vincenzopalazzo) store the type of the function here
fntype := r.check_fn_type_kind(line)
@@ -141,7 +141,7 @@ fn (r &Repl) is_function_call(line string) bool {
fn (r &Repl) import_to_source_code() []string {
mut imports_line := []string{}
for mod in r.modules {
- mut import_str := 'import $mod'
+ mut import_str := 'import ${mod}'
if mod in r.alias {
import_str += ' as ${r.alias[mod]}'
}
@@ -177,7 +177,7 @@ fn (r &Repl) current_source_code(should_add_temp_lines bool, not_add_print bool)
// do not return anything, while others return results.
// This function checks which one we have:
fn (r &Repl) check_fn_type_kind(new_line string) FnType {
- source_code := r.current_source_code(true, false) + '\nprintln($new_line)'
+ source_code := r.current_source_code(true, false) + '\nprintln(${new_line})'
check_file := os.join_path(r.folder, '${rand.ulid()}.vrepl.check.v')
os.write_file(check_file, source_code) or { panic(err) }
defer {
@@ -228,11 +228,11 @@ fn (mut r Repl) list_source() {
}
fn highlight_console_command(command string) string {
- return term.bright_white(term.bright_bg_black(' $command '))
+ return term.bright_white(term.bright_bg_black(' ${command} '))
}
fn highlight_repl_command(command string) string {
- return term.bright_white(term.bg_blue(' $command '))
+ return term.bright_white(term.bg_blue(' ${command} '))
}
fn print_welcome_screen() {
@@ -253,19 +253,19 @@ fn print_welcome_screen() {
term.bright_blue(r' \__/ '),
]
help_text := [
- 'Welcome to the V REPL (for help with V itself, type $cmd_exit, then run $cmd_v_help).',
+ 'Welcome to the V REPL (for help with V itself, type ${cmd_exit}, then run ${cmd_v_help}).',
'Note: the REPL is highly experimental. For best V experience, use a text editor, ',
- 'save your code in a $file_main file and execute: $cmd_v_run',
- '${version.full_v_version(false)} . Use $cmd_list to see the accumulated program so far.',
- 'Use Ctrl-C or $cmd_exit to exit, or $cmd_help to see other available commands.',
+ 'save your code in a ${file_main} file and execute: ${cmd_v_run}',
+ '${version.full_v_version(false)} . Use ${cmd_list} to see the accumulated program so far.',
+ 'Use Ctrl-C or ${cmd_exit} to exit, or ${cmd_help} to see other available commands.',
]
if width >= 97 {
eprintln('${vlogo[0]}')
- eprintln('${vlogo[1]} $vbar ${help_text[0]}')
- eprintln('${vlogo[2]} $vbar ${help_text[1]}')
- eprintln('${vlogo[3]} $vbar ${help_text[2]}')
- eprintln('${vlogo[4]} $vbar ${help_text[3]}')
- eprintln('${vlogo[5]} $vbar ${help_text[4]}')
+ eprintln('${vlogo[1]} ${vbar} ${help_text[0]}')
+ eprintln('${vlogo[2]} ${vbar} ${help_text[1]}')
+ eprintln('${vlogo[3]} ${vbar} ${help_text[2]}')
+ eprintln('${vlogo[4]} ${vbar} ${help_text[3]}')
+ eprintln('${vlogo[5]} ${vbar} ${help_text[4]}')
eprintln('')
} else {
if width >= 14 {
@@ -286,7 +286,7 @@ fn run_repl(workdir string, vrepl_prefix string) int {
if vstartup != '' {
result := repl_run_vfile(vstartup) or {
os.Result{
- output: '$vstartup file not found'
+ output: '${vstartup} file not found'
}
}
print('\n')
@@ -359,7 +359,7 @@ fn run_repl(workdir string, vrepl_prefix string) int {
r.line = ''
}
if r.line == 'debug_repl' {
- eprintln('repl: $r')
+ eprintln('repl: ${r}')
continue
}
if r.line == 'reset' {
@@ -385,7 +385,7 @@ fn run_repl(workdir string, vrepl_prefix string) int {
r.line = 'println(' + r.line[1..] + ')'
}
if r.line.starts_with('print') {
- source_code := r.current_source_code(false, false) + '\n$r.line\n'
+ source_code := r.current_source_code(false, false) + '\n${r.line}\n'
os.write_file(temp_file, source_code) or { panic(err) }
s := repl_run_vfile(temp_file) or { return 1 }
if s.output.len > r.last_output.len {
@@ -438,8 +438,8 @@ fn run_repl(workdir string, vrepl_prefix string) int {
is_statement = true
}
if !is_statement && (!func_call || fntype == FnType.fn_type) && r.line != '' {
- temp_line = 'println($r.line)'
- source_code := r.current_source_code(false, false) + '\n$temp_line\n'
+ temp_line = 'println(${r.line})'
+ source_code := r.current_source_code(false, false) + '\n${temp_line}\n'
os.write_file(temp_file, source_code) or { panic(err) }
s := repl_run_vfile(temp_file) or { return 1 }
if s.output.len > r.last_output.len {
@@ -452,10 +452,10 @@ fn run_repl(workdir string, vrepl_prefix string) int {
if temp_line.starts_with('import ') {
mod := r.line.fields()[1]
if mod !in r.modules {
- temp_source_code = '$temp_line\n' + r.current_source_code(false, true)
+ temp_source_code = '${temp_line}\n' + r.current_source_code(false, true)
}
} else if temp_line.starts_with('#include ') {
- temp_source_code = '$temp_line\n' + r.current_source_code(false, false)
+ temp_source_code = '${temp_line}\n' + r.current_source_code(false, false)
} else {
for i, l in r.lines {
if (l.starts_with('for ') || l.starts_with('if ')) && l.contains('println') {
@@ -463,7 +463,7 @@ fn run_repl(workdir string, vrepl_prefix string) int {
break
}
}
- temp_source_code = r.current_source_code(true, false) + '\n$temp_line\n'
+ temp_source_code = r.current_source_code(true, false) + '\n${temp_line}\n'
}
os.write_file(temp_file, temp_source_code) or { panic(err) }
s := repl_run_vfile(temp_file) or { return 1 }
@@ -547,7 +547,7 @@ fn main() {
}
fn rerror(s string) {
- println('V repl error: $s')
+ println('V repl error: ${s}')
os.flush()
}
@@ -578,7 +578,7 @@ fn cleanup_files(file string) {
fn repl_run_vfile(file string) ?os.Result {
$if trace_repl_temp_files ? {
- eprintln('>> repl_run_vfile file: $file')
+ eprintln('>> repl_run_vfile file: ${file}')
}
s := os.execute('${os.quoted_path(vexe)} -repl run ${os.quoted_path(file)}')
if s.exit_code < 0 {
diff --git a/cmd/tools/vscan.v b/cmd/tools/vscan.v
index 4266adc8d2..052a6ccf10 100644
--- a/cmd/tools/vscan.v
+++ b/cmd/tools/vscan.v
@@ -21,8 +21,8 @@ fn main() {
for tok.kind != .eof {
tok = scanner.scan()
pos := tok.pos()
- location := '$path:${pos.line_nr + 1}:${pos.col + 1}:'
- println('${location:-32} | pos: ${pos.pos:-5} | $tok.debug()')
+ location := '${path}:${pos.line_nr + 1}:${pos.col + 1}:'
+ println('${location:-32} | pos: ${pos.pos:-5} | ${tok.debug()}')
}
}
}
diff --git a/cmd/tools/vself.v b/cmd/tools/vself.v
index 5b5f59fddd..a4bfe97b10 100644
--- a/cmd/tools/vself.v
+++ b/cmd/tools/vself.v
@@ -20,15 +20,15 @@ fn main() {
vexe_name := os.file_name(vexe)
short_v_name := vexe_name.all_before('.')
//
- recompilation.must_be_enabled(vroot, 'Please install V from source, to use `$vexe_name self` .')
+ recompilation.must_be_enabled(vroot, 'Please install V from source, to use `${vexe_name} self` .')
os.chdir(vroot)!
os.setenv('VCOLORS', 'always', true)
args := os.args[1..].filter(it != 'self')
jargs := args.join(' ')
obinary := cmdline.option(args, '-o', '')
- sargs := if obinary != '' { jargs } else { '$jargs -o v2' }
- cmd := '${os.quoted_path(vexe)} $sargs ${os.quoted_path('cmd/v')}'
- options := if args.len > 0 { '($sargs)' } else { '' }
+ sargs := if obinary != '' { jargs } else { '${jargs} -o v2' }
+ cmd := '${os.quoted_path(vexe)} ${sargs} ${os.quoted_path('cmd/v')}'
+ options := if args.len > 0 { '(${sargs})' } else { '' }
println('V self compiling ${options}...')
compile(vroot, cmd)
if obinary != '' {
@@ -37,13 +37,13 @@ fn main() {
return
}
backup_old_version_and_rename_newer(short_v_name) or { panic(err.msg()) }
- println('V built successfully as executable "$vexe_name".')
+ println('V built successfully as executable "${vexe_name}".')
}
fn compile(vroot string, cmd string) {
result := os.execute_or_exit(cmd)
if result.exit_code != 0 {
- eprintln('cannot compile to `$vroot`: \n$result.output')
+ eprintln('cannot compile to `${vroot}`: \n${result.output}')
exit(1)
}
if result.output.len > 0 {
@@ -59,34 +59,34 @@ fn list_folder(short_v_name string, bmessage string, message string) {
println(bmessage)
}
if os.user_os() == 'windows' {
- os.system('dir $short_v_name*.exe')
+ os.system('dir ${short_v_name}*.exe')
} else {
- os.system('ls -lartd $short_v_name*')
+ os.system('ls -lartd ${short_v_name}*')
}
println(message)
}
fn backup_old_version_and_rename_newer(short_v_name string) !bool {
mut errors := []string{}
- short_v_file := if os.user_os() == 'windows' { '${short_v_name}.exe' } else { '$short_v_name' }
+ short_v_file := if os.user_os() == 'windows' { '${short_v_name}.exe' } else { '${short_v_name}' }
short_v2_file := if os.user_os() == 'windows' { 'v2.exe' } else { 'v2' }
short_bak_file := if os.user_os() == 'windows' { 'v_old.exe' } else { 'v_old' }
v_file := os.real_path(short_v_file)
v2_file := os.real_path(short_v2_file)
bak_file := os.real_path(short_bak_file)
- list_folder(short_v_name, 'before:', 'removing $bak_file ...')
+ list_folder(short_v_name, 'before:', 'removing ${bak_file} ...')
if os.exists(bak_file) {
- os.rm(bak_file) or { errors << 'failed removing $bak_file: $err.msg()' }
+ os.rm(bak_file) or { errors << 'failed removing ${bak_file}: ${err.msg()}' }
}
- list_folder(short_v_name, '', 'moving $v_file to $bak_file ...')
+ list_folder(short_v_name, '', 'moving ${v_file} to ${bak_file} ...')
os.mv(v_file, bak_file) or { errors << err.msg() }
- list_folder(short_v_name, '', 'removing $v_file ...')
+ list_folder(short_v_name, '', 'removing ${v_file} ...')
os.rm(v_file) or {}
- list_folder(short_v_name, '', 'moving $v2_file to $v_file ...')
+ list_folder(short_v_name, '', 'moving ${v2_file} to ${v_file} ...')
os.mv_by_cp(v2_file, v_file) or { panic(err.msg()) }
list_folder(short_v_name, 'after:', '')
diff --git a/cmd/tools/vsetup-freetype.v b/cmd/tools/vsetup-freetype.v
index 5a608bb03f..c5e1f870b9 100644
--- a/cmd/tools/vsetup-freetype.v
+++ b/cmd/tools/vsetup-freetype.v
@@ -15,7 +15,7 @@ fn main() {
if os.is_dir(freetype_folder) {
println('Thirdparty "freetype" is already installed.')
} else {
- s := os.execute('git clone --depth=1 $freetype_repo_url $freetype_folder')
+ s := os.execute('git clone --depth=1 ${freetype_repo_url} ${freetype_folder}')
if s.exit_code != 0 {
panic(s.output)
}
diff --git a/cmd/tools/vshader.v b/cmd/tools/vshader.v
index 71b65ec0d1..be53e3fba5 100644
--- a/cmd/tools/vshader.v
+++ b/cmd/tools/vshader.v
@@ -53,9 +53,9 @@ const (
shdc_version = shdc_full_hash[0..8]
shdc_urls = {
- 'windows': 'https://github.com/floooh/sokol-tools-bin/raw/$shdc_full_hash/bin/win32/sokol-shdc.exe'
- 'macos': 'https://github.com/floooh/sokol-tools-bin/raw/$shdc_full_hash/bin/osx/sokol-shdc'
- 'linux': 'https://github.com/floooh/sokol-tools-bin/raw/$shdc_full_hash/bin/linux/sokol-shdc'
+ 'windows': 'https://github.com/floooh/sokol-tools-bin/raw/${shdc_full_hash}/bin/win32/sokol-shdc.exe'
+ 'macos': 'https://github.com/floooh/sokol-tools-bin/raw/${shdc_full_hash}/bin/osx/sokol-shdc'
+ 'linux': 'https://github.com/floooh/sokol-tools-bin/raw/${shdc_full_hash}/bin/linux/sokol-shdc'
}
shdc_version_file = os.join_path(cache_dir, 'sokol-shdc.version')
shdc = shdc_exe()
@@ -77,7 +77,7 @@ struct CompileOptions {
fn main() {
if os.args.len == 1 {
- println('Usage: $tool_name PATH \n$tool_description\n$tool_name -h for more help...')
+ println('Usage: ${tool_name} PATH \n${tool_description}\n${tool_name} -h for more help...')
exit(1)
}
mut fp := flag.new_flag_parser(os.args[1..])
@@ -91,7 +91,7 @@ fn main() {
show_help: fp.bool('help', `h`, false, 'Show this help text.')
force_update: fp.bool('force-update', `u`, false, 'Force update of the sokol-shdc tool.')
verbose: fp.bool('verbose', `v`, false, 'Be verbose about the tools progress.')
- slangs: fp.string_multi('slang', `l`, 'Shader dialects to generate code for. Default is all.\n Available dialects: $supported_slangs')
+ slangs: fp.string_multi('slang', `l`, 'Shader dialects to generate code for. Default is all.\n Available dialects: ${supported_slangs}')
}
if opt.show_help {
println(fp.usage())
@@ -124,7 +124,7 @@ fn shader_program_name(shader_file string) string {
// validate_shader_file returns an error if `shader_file` isn't valid.
fn validate_shader_file(shader_file string) ! {
shader_program := os.read_lines(shader_file) or {
- return error('shader program at "$shader_file" could not be opened for reading')
+ return error('shader program at "${shader_file}" could not be opened for reading')
}
mut has_program_directive := false
for line in shader_program {
@@ -134,7 +134,7 @@ fn validate_shader_file(shader_file string) ! {
}
}
if !has_program_directive {
- return error('shader program at "$shader_file" is missing a "@program" directive.')
+ return error('shader program at "${shader_file}" is missing a "@program" directive.')
}
}
@@ -152,7 +152,7 @@ fn compile_shaders(opt Options, input_path string) ! {
if shader_files.len == 0 {
if opt.verbose {
- eprintln('$tool_name found no shader files to compile for "$path"')
+ eprintln('${tool_name} found no shader files to compile for "${path}"')
}
return
}
@@ -190,23 +190,23 @@ fn compile_shader(opt CompileOptions, shader_file string) ! {
header_name := os.file_name(out_file)
if opt.verbose {
- eprintln('$tool_name generating shader code for $slangs in header "$header_name" in "$path" from $shader_file')
+ eprintln('${tool_name} generating shader code for ${slangs} in header "${header_name}" in "${path}" from ${shader_file}')
}
cmd :=
'${os.quoted_path(shdc)} --input ${os.quoted_path(shader_file)} --output ${os.quoted_path(out_file)} --slang ' +
os.quoted_path(slangs.join(':'))
if opt.verbose {
- eprintln('$tool_name executing:\n$cmd')
+ eprintln('${tool_name} executing:\n${cmd}')
}
res := os.execute(cmd)
if res.exit_code != 0 {
- eprintln('$tool_name failed generating shader includes:\n $res.output\n $cmd')
+ eprintln('${tool_name} failed generating shader includes:\n ${res.output}\n ${cmd}')
exit(1)
}
if opt.verbose {
program_name := shader_program_name(shader_file)
- eprintln('$tool_name usage example in V:\n\nimport sokol.gfx\n\n#include "$header_name"\n\nfn C.${program_name}_shader_desc(gfx.Backend) &gfx.ShaderDesc\n')
+ eprintln('${tool_name} usage example in V:\n\nimport sokol.gfx\n\n#include "${header_name}"\n\nfn C.${program_name}_shader_desc(gfx.Backend) &gfx.ShaderDesc\n')
}
}
@@ -245,7 +245,7 @@ fn ensure_external_tools(opt Options) ! {
if is_shdc_available && is_shdc_executable {
if opt.verbose {
version := os.read_file(shdc_version_file) or { 'unknown' }
- eprintln('$tool_name using sokol-shdc version $version at "$shdc"')
+ eprintln('${tool_name} using sokol-shdc version ${version} at "${shdc}"')
}
return
}
@@ -265,15 +265,15 @@ fn download_shdc(opt Options) ! {
// We want to use the same, runtime, OS type as this tool is invoked on.
download_url := shdc_urls[runtime_os] or { '' }
if download_url == '' {
- return error('$tool_name failed to download an external dependency "sokol-shdc" for ${runtime_os}.\nThe supported host platforms for shader compilation is $supported_hosts')
+ return error('${tool_name} failed to download an external dependency "sokol-shdc" for ${runtime_os}.\nThe supported host platforms for shader compilation is ${supported_hosts}')
}
update_to_shdc_version := os.read_file(shdc_version_file) or { shdc_version }
file := shdc_exe()
if opt.verbose {
if shdc_version != update_to_shdc_version && os.exists(file) {
- eprintln('$tool_name updating sokol-shdc to version $update_to_shdc_version ...')
+ eprintln('${tool_name} updating sokol-shdc to version ${update_to_shdc_version} ...')
} else {
- eprintln('$tool_name installing sokol-shdc version $update_to_shdc_version ...')
+ eprintln('${tool_name} installing sokol-shdc version ${update_to_shdc_version} ...')
}
}
if os.exists(file) {
@@ -283,11 +283,11 @@ fn download_shdc(opt Options) ! {
mut dtmp_file, dtmp_path := util.temp_file(util.TempFileOptions{ path: os.dir(file) })!
dtmp_file.close()
if opt.verbose {
- eprintln('$tool_name downloading sokol-shdc from $download_url')
+ eprintln('${tool_name} downloading sokol-shdc from ${download_url}')
}
http.download_file(download_url, dtmp_path) or {
os.rm(dtmp_path)!
- return error('$tool_name failed to download sokol-shdc needed for shader compiling: $err')
+ return error('${tool_name} failed to download sokol-shdc needed for shader compiling: ${err}')
}
// Make it executable
os.chmod(dtmp_path, 0o775)!
diff --git a/cmd/tools/vshould-compile-all.v b/cmd/tools/vshould-compile-all.v
index 5213a90b62..59e5b5564b 100644
--- a/cmd/tools/vshould-compile-all.v
+++ b/cmd/tools/vshould-compile-all.v
@@ -29,17 +29,17 @@ fn main() {
mut failed_commands := []string{}
for idx, example in files {
cmd := '${os.quoted_path(@VEXE)} ${os.quoted_path(example)}'
- println('> compiling ${idx + 1:4}/${files.len:-4}: $cmd')
+ println('> compiling ${idx + 1:4}/${files.len:-4}: ${cmd}')
if 0 != os.system(cmd) {
failed_commands << cmd
}
}
if failed_commands.len > 0 {
for idx, fcmd in failed_commands {
- eprintln('>>> FAILED command ${idx + 1:4}/${failed_commands.len:-4}: $fcmd')
+ eprintln('>>> FAILED command ${idx + 1:4}/${failed_commands.len:-4}: ${fcmd}')
}
println('Summary: ${failed_commands.len:4}/${files.len:-4} file(s) failed to compile.')
exit(1)
}
- println('Summary: all $files.len file(s) compiled successfully.')
+ println('Summary: all ${files.len} file(s) compiled successfully.')
}
diff --git a/cmd/tools/vsymlink.v b/cmd/tools/vsymlink.v
index 5ebc2fb45a..e8d22e62c4 100644
--- a/cmd/tools/vsymlink.v
+++ b/cmd/tools/vsymlink.v
@@ -44,7 +44,7 @@ fn setup_symlink_github() {
mut content := os.read_file(os.getenv('GITHUB_PATH')) or {
panic('Failed to read GITHUB_PATH.')
}
- content += '\n$os.getwd()\n'
+ content += '\n${os.getwd()}\n'
os.write_file(os.getenv('GITHUB_PATH'), content) or { panic('Failed to write to GITHUB_PATH.') }
}
@@ -59,7 +59,7 @@ fn setup_symlink_unix(vexe string) {
}
os.rm(link_path) or {}
os.symlink(vexe, link_path) or {
- eprintln('Failed to create symlink "$link_path". Try again with sudo.')
+ eprintln('Failed to create symlink "${link_path}". Try again with sudo.')
exit(1)
}
}
@@ -90,19 +90,19 @@ fn setup_symlink_windows(vexe string) {
os.symlink(vsymlink, vexe) or {
// typically only fails if you're on a network drive (VirtualBox)
// do batch file creation instead
- eprintln('Could not create a native symlink: $err')
+ eprintln('Could not create a native symlink: ${err}')
eprintln('Creating a batch file instead...')
vsymlink = os.join_path(vsymlinkdir, 'v.bat')
if os.exists(vsymlink) {
os.rm(vsymlink) or { panic(err) }
}
- os.write_file(vsymlink, '@echo off\n"$vexe" %*') or { panic(err) }
- eprintln('$vsymlink file written.')
+ os.write_file(vsymlink, '@echo off\n"${vexe}" %*') or { panic(err) }
+ eprintln('${vsymlink} file written.')
}
if !os.exists(vsymlink) {
- warn_and_exit('Could not create $vsymlink')
+ warn_and_exit('Could not create ${vsymlink}')
}
- println('Symlink $vsymlink to $vexe created.')
+ println('Symlink ${vsymlink} to ${vexe} created.')
println('Checking system %PATH%...')
reg_sys_env_handle := get_reg_sys_env_handle() or {
warn_and_exit(err.msg())
@@ -114,7 +114,7 @@ fn setup_symlink_windows(vexe string) {
// }
// if the above succeeded, and we cannot get the value, it may simply be empty
sys_env_path := get_reg_value(reg_sys_env_handle, 'Path') or { '' }
- current_sys_paths := sys_env_path.split(os.path_delimiter).map(it.trim('/$os.path_separator'))
+ current_sys_paths := sys_env_path.split(os.path_delimiter).map(it.trim('/${os.path_separator}'))
mut new_paths := [vsymlinkdir]
for p in current_sys_paths {
if p == '' {
@@ -161,7 +161,7 @@ fn get_reg_sys_env_handle() ?voidptr {
reg_key_path := 'Environment'
reg_env_key := unsafe { nil } // or HKEY (HANDLE)
if C.RegOpenKeyEx(os.hkey_current_user, reg_key_path.to_wide(), 0, 1 | 2, ®_env_key) != 0 {
- return error('Could not open "$reg_key_path" in the registry')
+ return error('Could not open "${reg_key_path}" in the registry')
}
return reg_env_key
}
@@ -175,7 +175,7 @@ fn get_reg_value(reg_env_key voidptr, key string) ?string {
reg_value_size := u32(4095) // this is the max length (not for the registry, but for the system %PATH%)
mut reg_value := unsafe { &u16(malloc(int(reg_value_size))) }
if C.RegQueryValueExW(reg_env_key, key.to_wide(), 0, 0, reg_value, ®_value_size) != 0 {
- return error('Unable to get registry value for "$key".')
+ return error('Unable to get registry value for "${key}".')
}
return unsafe { string_from_wide(reg_value) }
}
@@ -187,7 +187,7 @@ fn set_reg_value(reg_key voidptr, key string, value string) ?bool {
$if windows {
if C.RegSetValueExW(reg_key, key.to_wide(), 0, C.REG_EXPAND_SZ, value.to_wide(),
value.len * 2) != 0 {
- return error('Unable to set registry value for "$key". %PATH% may be too long.')
+ return error('Unable to set registry value for "${key}". %PATH% may be too long.')
}
return true
}
diff --git a/cmd/tools/vtest-all.v b/cmd/tools/vtest-all.v
index 60be08b0a0..4d3b7672f8 100644
--- a/cmd/tools/vtest-all.v
+++ b/cmd/tools/vtest-all.v
@@ -29,14 +29,14 @@ fn main() {
flush_stdout()
println('')
println(term.header_left(term_highlight('Summary of `v test-all`:'), '-'))
- println(term_highlight('Total runtime: $spent ms'))
+ println(term_highlight('Total runtime: ${spent} ms'))
for ocmd in oks {
msg := if ocmd.okmsg != '' { ocmd.okmsg } else { ocmd.line }
- println(term.colorize(term.green, '> OK: $msg '))
+ println(term.colorize(term.green, '> OK: ${msg} '))
}
for fcmd in fails {
msg := if fcmd.errmsg != '' { fcmd.errmsg } else { fcmd.line }
- println(term.failed('> Failed:') + ' $msg')
+ println(term.failed('> Failed:') + ' ${msg}')
}
flush_stdout()
if fails.len > 0 {
@@ -76,28 +76,28 @@ mut:
fn get_all_commands() []Command {
mut res := []Command{}
res << Command{
- line: '$vexe examples/hello_world.v'
+ line: '${vexe} examples/hello_world.v'
okmsg: 'V can compile hello world.'
rmfile: 'examples/hello_world'
}
res << Command{
- line: '$vexe -o hhww.c examples/hello_world.v'
+ line: '${vexe} -o hhww.c examples/hello_world.v'
okmsg: 'V can output a .c file, without compiling further.'
rmfile: 'hhww.c'
}
res << Command{
- line: '$vexe -skip-unused examples/hello_world.v'
+ line: '${vexe} -skip-unused examples/hello_world.v'
okmsg: 'V can compile hello world with -skip-unused.'
rmfile: 'examples/hello_world'
}
res << Command{
- line: '$vexe -skip-unused -profile - examples/hello_world.v'
+ line: '${vexe} -skip-unused -profile - examples/hello_world.v'
okmsg: 'V can compile hello world with both -skip-unused and -profile .'
rmfile: 'examples/hello_world'
}
$if linux || macos {
res << Command{
- line: '$vexe run examples/hello_world.v'
+ line: '${vexe} run examples/hello_world.v'
okmsg: 'V can run hello world.'
runcmd: .execute
expect: 'Hello, World!\n'
@@ -106,8 +106,8 @@ fn get_all_commands() []Command {
for compiler_name in ['clang', 'gcc'] {
if _ := os.find_abs_path_of_executable(compiler_name) {
res << Command{
- line: '$vexe -cc $compiler_name -gc boehm run examples/hello_world.v'
- okmsg: '`v -cc $compiler_name -gc boehm run examples/hello_world.v` works'
+ line: '${vexe} -cc ${compiler_name} -gc boehm run examples/hello_world.v'
+ okmsg: '`v -cc ${compiler_name} -gc boehm run examples/hello_world.v` works'
runcmd: .execute
expect: 'Hello, World!\n'
}
@@ -115,13 +115,13 @@ fn get_all_commands() []Command {
}
}
res << Command{
- line: '$vexe interpret examples/hello_world.v'
+ line: '${vexe} interpret examples/hello_world.v'
okmsg: 'V can interpret hello world.'
runcmd: .execute
expect: 'Hello, World!\n'
}
res << Command{
- line: '$vexe interpret examples/hanoi.v'
+ line: '${vexe} interpret examples/hanoi.v'
okmsg: 'V can interpret hanoi.v'
runcmd: .execute
starts_with: 'Disc 1 from A to C...\n'
@@ -129,121 +129,121 @@ fn get_all_commands() []Command {
contains: 'Disc 7 from A to C...\n'
}
res << Command{
- line: '$vexe -o - examples/hello_world.v | grep "#define V_COMMIT_HASH" > /dev/null'
+ line: '${vexe} -o - examples/hello_world.v | grep "#define V_COMMIT_HASH" > /dev/null'
okmsg: 'V prints the generated source code to stdout with `-o -` .'
}
res << Command{
- line: '$vexe run examples/v_script.vsh > /dev/null'
+ line: '${vexe} run examples/v_script.vsh > /dev/null'
okmsg: 'V can run the .VSH script file examples/v_script.vsh'
}
$if linux {
res << Command{
- line: '$vexe -b native run examples/native/hello_world.v > /dev/null'
+ line: '${vexe} -b native run examples/native/hello_world.v > /dev/null'
okmsg: 'V compiles and runs examples/native/hello_world.v on the native backend for linux'
}
}
// only compilation:
res << Command{
- line: '$vexe -os linux -b native -o hw.linux examples/hello_world.v'
+ line: '${vexe} -os linux -b native -o hw.linux examples/hello_world.v'
okmsg: 'V compiles hello_world.v on the native backend for linux'
rmfile: 'hw.linux'
}
res << Command{
- line: '$vexe -os macos -b native -o hw.macos examples/hello_world.v'
+ line: '${vexe} -os macos -b native -o hw.macos examples/hello_world.v'
okmsg: 'V compiles hello_world.v on the native backend for macos'
rmfile: 'hw.macos'
}
res << Command{
- line: '$vexe -os windows -b native -o hw.exe examples/hello_world.v'
+ line: '${vexe} -os windows -b native -o hw.exe examples/hello_world.v'
okmsg: 'V compiles hello_world.v on the native backend for windows'
rmfile: 'hw.exe'
}
//
res << Command{
- line: '$vexe -b js -o hw.js examples/hello_world.v'
+ line: '${vexe} -b js -o hw.js examples/hello_world.v'
okmsg: 'V compiles hello_world.v on the JS backend'
rmfile: 'hw.js'
}
res << Command{
- line: '$vexe -skip-unused -b js -o hw_skip_unused.js examples/hello_world.v'
+ line: '${vexe} -skip-unused -b js -o hw_skip_unused.js examples/hello_world.v'
okmsg: 'V compiles hello_world.v on the JS backend, with -skip-unused'
rmfile: 'hw_skip_unused.js'
}
res << Command{
- line: '$vexe -skip-unused examples/2048'
+ line: '${vexe} -skip-unused examples/2048'
okmsg: 'V can compile 2048 with -skip-unused.'
rmfile: 'examples/2048/2048'
}
}
res << Command{
- line: '$vexe -o vtmp cmd/v'
+ line: '${vexe} -o vtmp cmd/v'
okmsg: 'V can compile itself.'
rmfile: 'vtmp'
}
res << Command{
- line: '$vexe -o vtmp_werror -cstrict cmd/v'
+ line: '${vexe} -o vtmp_werror -cstrict cmd/v'
okmsg: 'V can compile itself with -cstrict.'
rmfile: 'vtmp_werror'
}
res << Command{
- line: '$vexe -o vtmp_autofree -autofree cmd/v'
+ line: '${vexe} -o vtmp_autofree -autofree cmd/v'
okmsg: 'V can compile itself with -autofree.'
rmfile: 'vtmp_autofree'
}
res << Command{
- line: '$vexe -o vtmp_prealloc -prealloc cmd/v'
+ line: '${vexe} -o vtmp_prealloc -prealloc cmd/v'
okmsg: 'V can compile itself with -prealloc.'
rmfile: 'vtmp_prealloc'
}
res << Command{
- line: '$vexe -o vtmp_unused -skip-unused cmd/v'
+ line: '${vexe} -o vtmp_unused -skip-unused cmd/v'
okmsg: 'V can compile itself with -skip-unused.'
rmfile: 'vtmp_unused'
}
$if linux {
res << Command{
- line: '$vexe -cc gcc -keepc -freestanding -o bel vlib/os/bare/bare_example_linux.v'
+ line: '${vexe} -cc gcc -keepc -freestanding -o bel vlib/os/bare/bare_example_linux.v'
okmsg: 'V can compile with -freestanding on Linux with GCC.'
rmfile: 'bel'
}
res << Command{
- line: '$vexe -cc gcc -keepc -freestanding -o str_array vlib/strconv/bare/str_array_example.v'
+ line: '${vexe} -cc gcc -keepc -freestanding -o str_array vlib/strconv/bare/str_array_example.v'
okmsg: 'V can compile & allocate memory with -freestanding on Linux with GCC.'
rmfile: 'str_array'
}
}
res << Command{
- line: '$vexe $vargs -progress test-cleancode'
+ line: '${vexe} ${vargs} -progress test-cleancode'
okmsg: 'All .v files are invariant when processed with `v fmt`'
}
res << Command{
- line: '$vexe $vargs -progress test-fmt'
+ line: '${vexe} ${vargs} -progress test-fmt'
okmsg: 'All .v files can be processed with `v fmt`. Note: the result may not always be compilable, but `v fmt` should not crash.'
}
res << Command{
- line: '$vexe $vargs -progress test-self'
+ line: '${vexe} ${vargs} -progress test-self'
okmsg: 'There are no _test.v file regressions.'
}
res << Command{
- line: '$vexe $vargs -progress -W build-tools'
+ line: '${vexe} ${vargs} -progress -W build-tools'
okmsg: 'All tools can be compiled.'
}
res << Command{
- line: '$vexe $vargs -progress -W build-examples'
+ line: '${vexe} ${vargs} -progress -W build-examples'
okmsg: 'All examples can be compiled.'
}
res << Command{
- line: '$vexe check-md -hide-warnings .'
+ line: '${vexe} check-md -hide-warnings .'
label: 'Check ```v ``` code examples and formatting of .MD files...'
okmsg: 'All .md files look good.'
}
res << Command{
- line: '$vexe install nedpals.args'
+ line: '${vexe} install nedpals.args'
okmsg: '`v install` works.'
}
res << Command{
- line: '$vexe -usecache -cg examples/hello_world.v'
+ line: '${vexe} -usecache -cg examples/hello_world.v'
okmsg: '`v -usecache -cg` works.'
rmfile: 'examples/hello_world'
}
@@ -251,13 +251,13 @@ fn get_all_commands() []Command {
// own #flags (tetris depends on gg, which uses sokol) can be compiled
// with -usecache:
res << Command{
- line: '$vexe -usecache examples/tetris/tetris.v'
+ line: '${vexe} -usecache examples/tetris/tetris.v'
okmsg: '`v -usecache` works.'
rmfile: 'examples/tetris/tetris'
}
$if macos || linux {
res << Command{
- line: '$vexe -o v.c cmd/v && cc -Werror v.c -lpthread -lm && rm -rf a.out'
+ line: '${vexe} -o v.c cmd/v && cc -Werror v.c -lpthread -lm && rm -rf a.out'
label: 'v.c should be buildable with no warnings...'
okmsg: 'v.c can be compiled without warnings. This is good :)'
rmfile: 'v.c'
@@ -265,7 +265,7 @@ fn get_all_commands() []Command {
}
$if linux {
res << Command{
- line: '$vexe vlib/v/tests/bench/bench_stbi_load.v && prlimit -v10485760 vlib/v/tests/bench/bench_stbi_load'
+ line: '${vexe} vlib/v/tests/bench/bench_stbi_load.v && prlimit -v10485760 vlib/v/tests/bench/bench_stbi_load'
okmsg: 'STBI load does not leak with GC on, when loading images multiple times (use < 10MB)'
runcmd: .execute
contains: 'logo.png 1000 times.'
@@ -274,7 +274,7 @@ fn get_all_commands() []Command {
}
$if !windows {
res << Command{
- line: '$vexe -raw-vsh-tmp-prefix tmp vlib/v/tests/script_with_no_extension'
+ line: '${vexe} -raw-vsh-tmp-prefix tmp vlib/v/tests/script_with_no_extension'
okmsg: 'V can crun a script, that lacks a .vsh extension'
runcmd: .execute
expect: 'Test\n'
@@ -282,7 +282,7 @@ fn get_all_commands() []Command {
}
res << Command{
- line: '$vexe -raw-vsh-tmp-prefix tmp run vlib/v/tests/script_with_no_extension'
+ line: '${vexe} -raw-vsh-tmp-prefix tmp run vlib/v/tests/script_with_no_extension'
okmsg: 'V can run a script, that lacks a .vsh extension'
runcmd: .execute
expect: 'Test\n'
@@ -344,23 +344,23 @@ fn (mut cmd Command) run() {
}
//
run_label := if is_failed { term.failed('FAILED') } else { term_highlight('OK') }
- println('> Running: "$cmd.line" took: $spent ms ... $run_label')
+ println('> Running: "${cmd.line}" took: ${spent} ms ... ${run_label}')
//
if is_failed && is_failed_expected {
- eprintln('> expected:\n$cmd.expect')
- eprintln('> output:\n$cmd.output')
+ eprintln('> expected:\n${cmd.expect}')
+ eprintln('> output:\n${cmd.output}')
}
if is_failed && is_failed_starts_with {
- eprintln('> expected to start with:\n$cmd.starts_with')
+ eprintln('> expected to start with:\n${cmd.starts_with}')
eprintln('> output:\n${cmd.output#[..cmd.starts_with.len]}')
}
if is_failed && is_failed_ends_with {
- eprintln('> expected to end with:\n$cmd.ends_with')
+ eprintln('> expected to end with:\n${cmd.ends_with}')
eprintln('> output:\n${cmd.output#[-cmd.starts_with.len..]}')
}
if is_failed && is_failed_contains {
- eprintln('> expected to contain:\n$cmd.contains')
- eprintln('> output:\n$cmd.output')
+ eprintln('> expected to contain:\n${cmd.contains}')
+ eprintln('> output:\n${cmd.output}')
}
if vtest_nocleanup {
return
@@ -371,7 +371,7 @@ fn (mut cmd Command) run() {
file_existed = file_existed || rm_existing(cmd.rmfile + '.exe')
}
if !file_existed {
- eprintln('Expected file did not exist: $cmd.rmfile')
+ eprintln('Expected file did not exist: ${cmd.rmfile}')
cmd.ecode = 999
}
}
diff --git a/cmd/tools/vtest-cleancode.v b/cmd/tools/vtest-cleancode.v
index 4e8b6e53af..9862750ddd 100644
--- a/cmd/tools/vtest-cleancode.v
+++ b/cmd/tools/vtest-cleancode.v
@@ -58,9 +58,9 @@ fn main() {
fn tsession(vargs string, tool_source string, tool_cmd string, tool_args string, flist []string, slist []string) testing.TestSession {
os.chdir(vroot) or {}
- title_message := 'running $tool_cmd over most .v files'
+ title_message := 'running ${tool_cmd} over most .v files'
testing.eheader(title_message)
- mut test_session := testing.new_test_session('$vargs $tool_args', false)
+ mut test_session := testing.new_test_session('${vargs} ${tool_args}', false)
test_session.files << flist
test_session.skip_files << slist
util.prepare_tool_when_needed(tool_source)
@@ -93,10 +93,10 @@ fn v_test_vetting(vargs string) {
if vet_session.benchmark.nfail > 0 || verify_session.benchmark.nfail > 0 {
eprintln('\n')
if vet_session.benchmark.nfail > 0 {
- eprintln('WARNING: `v vet` failed $vet_session.benchmark.nfail times.')
+ eprintln('WARNING: `v vet` failed ${vet_session.benchmark.nfail} times.')
}
if verify_session.benchmark.nfail > 0 {
- eprintln('WARNING: `v fmt -verify` failed $verify_session.benchmark.nfail times.')
+ eprintln('WARNING: `v fmt -verify` failed ${verify_session.benchmark.nfail} times.')
}
exit(1)
}
diff --git a/cmd/tools/vtest-fmt.v b/cmd/tools/vtest-fmt.v
index b1ebb81ef6..e24d3ad8d0 100644
--- a/cmd/tools/vtest-fmt.v
+++ b/cmd/tools/vtest-fmt.v
@@ -19,13 +19,13 @@ fn v_test_formatting(vargs string) {
all_v_files := v_files()
util.prepare_tool_when_needed('vfmt.v')
testing.eheader('Run "v fmt" over all .v files')
- mut vfmt_test_session := testing.new_test_session('$vargs fmt -worker', false)
+ mut vfmt_test_session := testing.new_test_session('${vargs} fmt -worker', false)
vfmt_test_session.files << all_v_files
vfmt_test_session.skip_files << known_failing_exceptions
vfmt_test_session.test()
eprintln(vfmt_test_session.benchmark.total_message('running vfmt over V files'))
if vfmt_test_session.benchmark.nfail > 0 {
- eprintln('\nWARNING: v fmt failed $vfmt_test_session.benchmark.nfail times.\n')
+ eprintln('\nWARNING: v fmt failed ${vfmt_test_session.benchmark.nfail} times.\n')
exit(1)
}
}
diff --git a/cmd/tools/vtest-parser.v b/cmd/tools/vtest-parser.v
index ae19cf3f0a..3c8a34aaf8 100644
--- a/cmd/tools/vtest-parser.v
+++ b/cmd/tools/vtest-parser.v
@@ -44,7 +44,7 @@ fn main() {
mut context := process_cli_args()
if context.is_worker {
pid := os.getpid()
- context.log('> worker ${pid:5} starts parsing at cut_index: ${context.cut_index:5} | $context.path')
+ context.log('> worker ${pid:5} starts parsing at cut_index: ${context.cut_index:5} | ${context.path}')
// A worker's process job is to try to parse a single given file in context.path.
// It can crash/panic freely.
context.table = ast.new_table()
@@ -62,7 +62,7 @@ fn main() {
exit(ecode_timeout)
}(context.timeout_ms)
_ := parser.parse_text(source, context.path, context.table, .skip_comments, context.pref)
- context.log('> worker ${pid:5} finished parsing $context.path')
+ context.log('> worker ${pid:5} finished parsing ${context.path}')
exit(0)
} else {
// The process supervisor should NOT crash/panic, unlike the workers.
@@ -161,17 +161,17 @@ fn (mut context Context) log(msg string) {
if context.is_verbose {
label := yellow('info')
ts := time.now().format_ss_micro()
- eprintln('$label: $ts | $msg')
+ eprintln('${label}: ${ts} | ${msg}')
}
}
fn (mut context Context) error(msg string) {
label := red('error')
- eprintln('$label: $msg')
+ eprintln('${label}: ${msg}')
}
fn (mut context Context) expand_all_paths() {
- context.log('> context.all_paths before: $context.all_paths')
+ context.log('> context.all_paths before: ${context.all_paths}')
mut files := []string{}
for path in context.all_paths {
if os.is_dir(path) {
@@ -180,24 +180,24 @@ fn (mut context Context) expand_all_paths() {
continue
}
if !path.ends_with('.v') && !path.ends_with('.vv') && !path.ends_with('.vsh') {
- context.error('`v test-parser` can only be used on .v/.vv/.vsh files.\nOffending file: "$path".')
+ context.error('`v test-parser` can only be used on .v/.vv/.vsh files.\nOffending file: "${path}".')
continue
}
if !os.exists(path) {
- context.error('"$path" does not exist.')
+ context.error('"${path}" does not exist.')
continue
}
files << path
}
context.all_paths = files
- context.log('> context.all_paths after: $context.all_paths')
+ context.log('> context.all_paths after: ${context.all_paths}')
}
fn (mut context Context) process_whole_file_in_worker(path string) (int, int) {
context.path = path // needed for the progress bar
- context.log('> context.process_whole_file_in_worker path: $path')
+ context.log('> context.process_whole_file_in_worker path: ${path}')
if !(os.is_file(path) && os.is_readable(path)) {
- context.error('$path is not readable')
+ context.error('${path} is not readable')
return 1, 0
}
source := os.read_file(path) or { '' }
@@ -212,10 +212,10 @@ fn (mut context Context) process_whole_file_in_worker(path string) (int, int) {
for i in 0 .. len {
verbosity := if context.is_verbose { '-v' } else { '' }
context.cut_index = i // needed for the progress bar
- cmd := '${os.quoted_path(context.myself)} $verbosity --worker --timeout_ms ${context.timeout_ms:5} --cut_index ${i:5} --path ${os.quoted_path(path)} '
+ cmd := '${os.quoted_path(context.myself)} ${verbosity} --worker --timeout_ms ${context.timeout_ms:5} --cut_index ${i:5} --path ${os.quoted_path(path)} '
context.log(cmd)
mut res := os.execute(cmd)
- context.log('worker exit_code: $res.exit_code | worker output:\n$res.output')
+ context.log('worker exit_code: ${res.exit_code} | worker output:\n${res.output}')
if res.exit_code != 0 {
fails++
mut is_panic := false
@@ -232,10 +232,10 @@ fn (mut context Context) process_whole_file_in_worker(path string) (int, int) {
} else {
red('parser failure: crash, ${ecode_details[res.exit_code]}')
}
- path_to_line := bold('$path:$line:$col:')
+ path_to_line := bold('${path}:${line}:${col}:')
err_line := last_line.trim_left('\t')
- println('$path_to_line $err')
- println('\t$line | $err_line')
+ println('${path_to_line} ${err}')
+ println('\t${line} | ${err_line}')
println('')
eprintln(res.output)
}
@@ -269,7 +269,7 @@ fn (mut context Context) print_status() {
return
}
term.cursor_up(1)
- eprint('\r $msg\n')
+ eprint('\r ${msg}\n')
}
fn (mut context Context) print_periodic_status() {
diff --git a/cmd/tools/vtest-self.v b/cmd/tools/vtest-self.v
index 84ddf02ceb..1ffc1a4ae1 100644
--- a/cmd/tools/vtest-self.v
+++ b/cmd/tools/vtest-self.v
@@ -381,7 +381,7 @@ fn main() {
tsession.test()
eprintln(tsession.benchmark.total_message(title))
if tsession.benchmark.nfail > 0 {
- eprintln('\nWARNING: failed $tsession.benchmark.nfail times.\n')
+ eprintln('\nWARNING: failed ${tsession.benchmark.nfail} times.\n')
exit(1)
}
}
diff --git a/cmd/tools/vtest.v b/cmd/tools/vtest.v
index 93de683bc4..6787f2d032 100644
--- a/cmd/tools/vtest.v
+++ b/cmd/tools/vtest.v
@@ -61,7 +61,7 @@ fn main() {
.ignore {}
}
} else {
- eprintln('\nUnrecognized test file `$targ`.\n `v test` can only be used with folders and/or _test.v files.\n')
+ eprintln('\nUnrecognized test file `${targ}`.\n `v test` can only be used with folders and/or _test.v files.\n')
show_usage()
exit(1)
}
@@ -198,7 +198,7 @@ fn (mut ctx Context) should_test_when_it_contains_matching_fns(path string, back
}
if tname.match_glob(pat) {
if ctx.verbose {
- println('> compiling path: $path, since test fn `$tname` matches glob pattern `$pat`')
+ println('> compiling path: ${path}, since test fn `${tname}` matches glob pattern `${pat}`')
}
return .test
}
diff --git a/cmd/tools/vtracev.v b/cmd/tools/vtracev.v
index 9010cca51d..fcd0d68dd1 100644
--- a/cmd/tools/vtracev.v
+++ b/cmd/tools/vtracev.v
@@ -11,7 +11,7 @@ fn main() {
self_idx := os.args.index('tracev')
args := os.args[1..self_idx]
args_str := args.join(' ')
- options := if args.len > 0 { '($args_str)' } else { '' }
+ options := if args.len > 0 { '(${args_str})' } else { '' }
println('Compiling a `tracev` executable ${options}...')
- os.system('${os.quoted_path(vexe)} -cg -d trace_parser -d trace_checker -d trace_gen -o tracev $args_str cmd/v')
+ os.system('${os.quoted_path(vexe)} -cg -d trace_parser -d trace_checker -d trace_gen -o tracev ${args_str} cmd/v')
}
diff --git a/cmd/tools/vup.v b/cmd/tools/vup.v
index 6a48be86ab..e373b57974 100644
--- a/cmd/tools/vup.v
+++ b/cmd/tools/vup.v
@@ -44,7 +44,7 @@ fn main() {
if !app.recompile_v() {
app.show_current_v_version()
eprintln('Recompiling V *failed*.')
- eprintln('Try running `$get_make_cmd_name()` .')
+ eprintln('Try running `${get_make_cmd_name()}` .')
exit(1)
}
app.recompile_vup()
@@ -75,14 +75,14 @@ fn (app App) update_from_master() {
fn (app App) recompile_v() bool {
// Note: app.vexe is more reliable than just v (which may be a symlink)
opts := if app.is_prod { '-prod' } else { '' }
- vself := '${os.quoted_path(app.vexe)} $opts self'
- app.vprintln('> recompiling v itself with `$vself` ...')
+ vself := '${os.quoted_path(app.vexe)} ${opts} self'
+ app.vprintln('> recompiling v itself with `${vself}` ...')
self_result := os.execute(vself)
if self_result.exit_code == 0 {
println(self_result.output.trim_space())
return true
} else {
- app.vprintln('`$vself` failed, running `make`...')
+ app.vprintln('`${vself}` failed, running `make`...')
app.vprintln(self_result.output.trim_space())
}
return app.make(vself)
@@ -102,7 +102,7 @@ fn (app App) make(vself string) bool {
make := get_make_cmd_name()
make_result := os.execute(make)
if make_result.exit_code != 0 {
- eprintln('> $make failed:')
+ eprintln('> ${make} failed:')
eprintln('> make output:')
eprintln(make_result.output)
return false
@@ -117,30 +117,30 @@ fn (app App) show_current_v_version() {
mut vversion := vout.output.trim_space()
if vout.exit_code == 0 {
latest_v_commit := vversion.split(' ').last().all_after('.')
- latest_v_commit_time := os.execute('git show -s --format=%ci $latest_v_commit')
+ latest_v_commit_time := os.execute('git show -s --format=%ci ${latest_v_commit}')
if latest_v_commit_time.exit_code == 0 {
vversion += ', timestamp: ' + latest_v_commit_time.output.trim_space()
}
}
- println('Current V version: $vversion')
+ println('Current V version: ${vversion}')
}
}
fn (app App) backup(file string) {
backup_file := '${file}_old.exe'
if os.exists(backup_file) {
- os.rm(backup_file) or { eprintln('failed removing $backup_file: $err.msg()') }
+ os.rm(backup_file) or { eprintln('failed removing ${backup_file}: ${err.msg()}') }
}
- os.mv(file, backup_file) or { eprintln('failed moving $file: $err.msg()') }
+ os.mv(file, backup_file) or { eprintln('failed moving ${file}: ${err.msg()}') }
}
fn (app App) git_command(command string) {
- app.vprintln('git_command: git $command')
- git_result := os.execute('git $command')
+ app.vprintln('git_command: git ${command}')
+ git_result := os.execute('git ${command}')
if git_result.exit_code < 0 {
app.get_git()
// Try it again with (maybe) git installed
- os.execute_or_exit('git $command')
+ os.execute_or_exit('git ${command}')
}
if git_result.exit_code != 0 {
eprintln(git_result.output)
@@ -153,7 +153,7 @@ fn (app App) get_git() {
$if windows {
println('Downloading git 32 bit for Windows, please wait.')
// We'll use 32 bit because maybe someone out there is using 32-bit windows
- res_download := os.execute('bitsadmin.exe /transfer "vgit" https://github.com/git-for-windows/git/releases/download/v2.30.0.windows.2/Git-2.30.0.2-32-bit.exe "$os.getwd()/git32.exe"')
+ res_download := os.execute('bitsadmin.exe /transfer "vgit" https://github.com/git-for-windows/git/releases/download/v2.30.0.windows.2/Git-2.30.0.2-32-bit.exe "${os.getwd()}/git32.exe"')
if res_download.exit_code != 0 {
eprintln('Unable to install git automatically: please install git manually')
panic(res_download.output)
diff --git a/cmd/tools/vvet/vvet.v b/cmd/tools/vvet/vvet.v
index 7004e4282e..55d8e2adee 100644
--- a/cmd/tools/vvet/vvet.v
+++ b/cmd/tools/vvet/vvet.v
@@ -50,14 +50,14 @@ fn main() {
}
for path in paths {
if !os.exists(path) {
- eprintln('File/folder $path does not exist')
+ eprintln('File/folder ${path} does not exist')
continue
}
if os.is_file(path) {
vt.vet_file(path)
}
if os.is_dir(path) {
- vt.vprintln("vetting folder: '$path' ...")
+ vt.vprintln("vetting folder: '${path}' ...")
vfiles := os.walk_ext(path, '.v')
vvfiles := os.walk_ext(path, '.vv')
mut files := []string{}
@@ -91,7 +91,7 @@ fn (mut vt Vet) vet_file(path string) {
// skip all /tests/ files, since usually their content is not
// important enough to be documented/vetted, and they may even
// contain intentionally invalid code.
- vt.vprintln("skipping test file: '$path' ...")
+ vt.vprintln("skipping test file: '${path}' ...")
return
}
vt.file = path
@@ -99,7 +99,7 @@ fn (mut vt Vet) vet_file(path string) {
prefs.is_vet = true
prefs.is_vsh = path.ends_with('.vsh')
table := ast.new_table()
- vt.vprintln("vetting file '$path'...")
+ vt.vprintln("vetting file '${path}'...")
_, errors := parser.parse_vet_file(path, table, prefs)
// Transfer errors from scanner and parser
vt.errors << errors
@@ -175,7 +175,7 @@ fn (mut vt Vet) vet_fn_documentation(lines []string, line string, lnumber int) {
}
if grab {
clean_line := line.all_before_last('{').trim(' ')
- vt.warn('Function documentation seems to be missing for "$clean_line".',
+ vt.warn('Function documentation seems to be missing for "${clean_line}".',
lnumber, .doc)
}
} else {
@@ -189,14 +189,15 @@ fn (mut vt Vet) vet_fn_documentation(lines []string, line string, lnumber int) {
prev_line := lines[j]
if prev_line.contains('}') { // We've looked back to the above scope, stop here
break
- } else if prev_line.starts_with('// $fn_name ') {
+ } else if prev_line.starts_with('// ${fn_name} ') {
grab = false
break
- } else if prev_line.starts_with('// $fn_name') && !prev_prev_line.starts_with('//') {
+ } else if prev_line.starts_with('// ${fn_name}')
+ && !prev_prev_line.starts_with('//') {
grab = false
clean_line := line.all_before_last('{').trim(' ')
- vt.warn('The documentation for "$clean_line" seems incomplete.', lnumber,
- .doc)
+ vt.warn('The documentation for "${clean_line}" seems incomplete.',
+ lnumber, .doc)
break
} else if prev_line.starts_with('[') {
tags << collect_tags(prev_line)
@@ -207,7 +208,7 @@ fn (mut vt Vet) vet_fn_documentation(lines []string, line string, lnumber int) {
}
if grab {
clean_line := line.all_before_last('{').trim(' ')
- vt.warn('A function name is missing from the documentation of "$clean_line".',
+ vt.warn('A function name is missing from the documentation of "${clean_line}".',
lnumber, .doc)
}
}
@@ -222,8 +223,8 @@ fn (vt &Vet) vprintln(s string) {
}
fn (vt &Vet) e2string(err vet.Error) string {
- mut kind := '$err.kind:'
- mut location := '$err.file_path:$err.pos.line_nr:'
+ mut kind := '${err.kind}:'
+ mut location := '${err.file_path}:${err.pos.line_nr}:'
if vt.opt.use_color {
kind = match err.kind {
.warning { term.magenta(kind) }
@@ -232,7 +233,7 @@ fn (vt &Vet) e2string(err vet.Error) string {
kind = term.bold(kind)
location = term.bold(location)
}
- return '$location $kind $err.message'
+ return '${location} ${kind} ${err.message}'
}
fn (mut vt Vet) error(msg string, line int, fix vet.FixKind) {
diff --git a/cmd/tools/vwatch.v b/cmd/tools/vwatch.v
index 625933244c..d315767546 100644
--- a/cmd/tools/vwatch.v
+++ b/cmd/tools/vwatch.v
@@ -99,11 +99,11 @@ mut:
[if debug_vwatch ?]
fn (mut context Context) elog(msg string) {
- eprintln('> vwatch $context.pid, $msg')
+ eprintln('> vwatch ${context.pid}, ${msg}')
}
fn (context &Context) str() string {
- return 'Context{ pid: $context.pid, is_worker: $context.is_worker, check_period_ms: $context.check_period_ms, vexe: $context.vexe, opts: $context.opts, is_exiting: $context.is_exiting, vfiles: $context.vfiles'
+ return 'Context{ pid: ${context.pid}, is_worker: ${context.is_worker}, check_period_ms: ${context.check_period_ms}, vexe: ${context.vexe}, opts: ${context.opts}, is_exiting: ${context.is_exiting}, vfiles: ${context.vfiles}'
}
fn (mut context Context) get_stats_for_affected_vfiles() []VFileStat {
@@ -112,7 +112,7 @@ fn (mut context Context) get_stats_for_affected_vfiles() []VFileStat {
// The next command will make V parse the program, and print all .v files,
// needed for its compilation, without actually compiling it.
copts := context.opts.join(' ')
- cmd := '"$context.vexe" -silent -print-v-files $copts'
+ cmd := '"${context.vexe}" -silent -print-v-files ${copts}'
// context.elog('> cmd: $cmd')
mut paths := []string{}
if context.add_files.len > 0 && context.add_files[0] != '' {
@@ -168,7 +168,7 @@ fn (mut context Context) get_changed_vfiles() int {
if existing_vfs.path == vfs.path {
found = true
if existing_vfs.mtime != vfs.mtime {
- context.elog('> new updates for file: $vfs')
+ context.elog('> new updates for file: ${vfs}')
changed++
}
break
@@ -181,7 +181,7 @@ fn (mut context Context) get_changed_vfiles() int {
}
context.vfiles = newfiles
if changed > 0 {
- context.elog('> get_changed_vfiles: $changed')
+ context.elog('> get_changed_vfiles: ${changed}')
}
return changed
}
@@ -219,23 +219,23 @@ fn (mut context Context) kill_pgroup() {
fn (mut context Context) run_before_cmd() {
if context.cmd_before_run != '' {
- context.elog('> run_before_cmd: "$context.cmd_before_run"')
+ context.elog('> run_before_cmd: "${context.cmd_before_run}"')
os.system(context.cmd_before_run)
}
}
fn (mut context Context) run_after_cmd() {
if context.cmd_after_run != '' {
- context.elog('> run_after_cmd: "$context.cmd_after_run"')
+ context.elog('> run_after_cmd: "${context.cmd_after_run}"')
os.system(context.cmd_after_run)
}
}
fn (mut context Context) compilation_runner_loop() {
- cmd := '"$context.vexe" ${context.opts.join(' ')}'
+ cmd := '"${context.vexe}" ${context.opts.join(' ')}'
_ := <-context.rerun_channel
for {
- context.elog('>> loop: v_cycles: $context.v_cycles')
+ context.elog('>> loop: v_cycles: ${context.v_cycles}')
if context.clear_terminal {
term.clear()
}
@@ -246,7 +246,7 @@ fn (mut context Context) compilation_runner_loop() {
context.child_process.set_args(context.opts)
context.child_process.run()
if !context.silent {
- eprintln('$timestamp: $cmd | pid: ${context.child_process.pid:7d} | reload cycle: ${context.v_cycles:5d}')
+ eprintln('${timestamp}: ${cmd} | pid: ${context.child_process.pid:7d} | reload cycle: ${context.v_cycles:5d}')
}
for {
mut notalive_count := 0
@@ -286,7 +286,7 @@ fn (mut context Context) compilation_runner_loop() {
}
}
if !context.child_process.is_alive() {
- context.elog('> child_process is no longer alive | notalive_count: $notalive_count')
+ context.elog('> child_process is no longer alive | notalive_count: ${notalive_count}')
context.child_process.wait()
context.child_process.close()
if notalive_count == 0 {
@@ -333,17 +333,17 @@ fn main() {
exit(0)
}
remaining_options := fp.finalize() or {
- eprintln('Error: $err')
+ eprintln('Error: ${err}')
exit(1)
}
context.opts = remaining_options
- context.elog('>>> context.pid: $context.pid')
- context.elog('>>> context.vexe: $context.vexe')
- context.elog('>>> context.opts: $context.opts')
- context.elog('>>> context.is_worker: $context.is_worker')
- context.elog('>>> context.clear_terminal: $context.clear_terminal')
- context.elog('>>> context.add_files: $context.add_files')
- context.elog('>>> context.ignore_exts: $context.ignore_exts')
+ context.elog('>>> context.pid: ${context.pid}')
+ context.elog('>>> context.vexe: ${context.vexe}')
+ context.elog('>>> context.opts: ${context.opts}')
+ context.elog('>>> context.is_worker: ${context.is_worker}')
+ context.elog('>>> context.clear_terminal: ${context.clear_terminal}')
+ context.elog('>>> context.add_files: ${context.add_files}')
+ context.elog('>>> context.ignore_exts: ${context.ignore_exts}')
if context.is_worker {
context.worker_main()
} else {
diff --git a/cmd/tools/vwhere/finder.v b/cmd/tools/vwhere/finder.v
index ea712c0683..76d29da7bf 100755
--- a/cmd/tools/vwhere/finder.v
+++ b/cmd/tools/vwhere/finder.v
@@ -41,14 +41,14 @@ fn (mut fdr Finder) configure_from_arguments(args []string) {
}
fdr.visib.set_from_str(cmdline.option(args, '-vis', '${Visibility.all}'))
if fdr.symbol == .var && fdr.visib != .all {
- make_and_print_error('-vis $fdr.visib just can be setted with symbol_type:',
+ make_and_print_error('-vis ${fdr.visib} just can be setted with symbol_type:',
['fn', 'method', 'const', 'struct', 'enum', 'interface', 'regexp'],
- '$fdr.symbol')
+ '${fdr.symbol}')
}
fdr.mutab.set_from_str(cmdline.option(args, '-mut', '${Mutability.any}'))
if fdr.symbol != .var && fdr.mutab != .any {
- make_and_print_error('-mut $fdr.mutab just can be setted with symbol_type:',
- ['var'], '$fdr.symbol')
+ make_and_print_error('-mut ${fdr.mutab} just can be setted with symbol_type:',
+ ['var'], '${fdr.symbol}')
}
fdr.modul = cmdline.option(args, '-mod', '')
fdr.dirs = cmdline.options(args, '-dir')
@@ -95,28 +95,32 @@ fn (mut fdr Finder) search_for_matches() {
cp := r'\)'
// Build regex query
- sy := '$fdr.symbol'
- st := if fdr.receiver != '' { '$sp$op$sp[a-z].*$sp$fdr.receiver$cp$sp' } else { '.*' }
- na := '$fdr.name'
+ sy := '${fdr.symbol}'
+ st := if fdr.receiver != '' {
+ '${sp}${op}${sp}[a-z].*${sp}${fdr.receiver}${cp}${sp}'
+ } else {
+ '.*'
+ }
+ na := '${fdr.name}'
query := match fdr.symbol {
.@fn {
- '.*$sy$sp$na$sp${op}.*${cp}.*'
+ '.*${sy}${sp}${na}${sp}${op}.*${cp}.*'
}
.method {
- '.*fn$st$na$sp${op}.*${cp}.*'
+ '.*fn${st}${na}${sp}${op}.*${cp}.*'
}
.var {
- '.*$na$sp:=.*'
+ '.*${na}${sp}:=.*'
}
.@const {
- '.*$na$sp = .*'
+ '.*${na}${sp} = .*'
}
.regexp {
- '$na'
+ '${na}'
}
else {
- '.*$sy$sp$na${sp}.*' // for struct, enum and interface
+ '.*${sy}${sp}${na}${sp}.*' // for struct, enum and interface
}
}
// println(query)
@@ -191,7 +195,7 @@ fn (fdr Finder) show_results() {
println(maybe_color(term.bright_yellow, 'No Matches found'))
} else if verbose || header {
print(fdr)
- println(maybe_color(term.bright_green, '$fdr.matches.len matches Found\n'))
+ println(maybe_color(term.bright_green, '${fdr.matches.len} matches Found\n'))
for result in fdr.matches {
result.show()
}
@@ -203,13 +207,13 @@ fn (fdr Finder) show_results() {
}
fn (fdr Finder) str() string {
- v := maybe_color(term.bright_red, '$fdr.visib')
- m := maybe_color(term.bright_red, '$fdr.mutab')
- st := if fdr.receiver != '' { ' ( _ $fdr.receiver)' } else { '' }
- s := maybe_color(term.bright_magenta, '$fdr.symbol')
- n := maybe_color(term.bright_cyan, '$fdr.name')
+ v := maybe_color(term.bright_red, '${fdr.visib}')
+ m := maybe_color(term.bright_red, '${fdr.mutab}')
+ st := if fdr.receiver != '' { ' ( _ ${fdr.receiver})' } else { '' }
+ s := maybe_color(term.bright_magenta, '${fdr.symbol}')
+ n := maybe_color(term.bright_cyan, '${fdr.name}')
- mm := if fdr.modul != '' { maybe_color(term.blue, '$fdr.modul') } else { '' }
+ mm := if fdr.modul != '' { maybe_color(term.blue, '${fdr.modul}') } else { '' }
dd := if fdr.dirs.len != 0 {
fdr.dirs.map(maybe_color(term.blue, it))
} else {
@@ -219,14 +223,14 @@ fn (fdr Finder) str() string {
dm := if fdr.dirs.len == 0 && fdr.modul == '' {
'all the project scope'
} else if fdr.dirs.len == 0 && fdr.modul != '' {
- 'module $mm'
+ 'module ${mm}'
} else if fdr.dirs.len != 0 && fdr.modul == '' {
- 'directories: $dd'
+ 'directories: ${dd}'
} else {
- 'module $mm searching within directories: $dd'
+ 'module ${mm} searching within directories: ${dd}'
}
- return '\nFind: $s$st $n | visibility: $v mutability: $m\nwithin $dm '
+ return '\nFind: ${s}${st} ${n} | visibility: ${v} mutability: ${m}\nwithin ${dm} '
}
// Match is one result of the search_for_matches() process
@@ -238,11 +242,11 @@ struct Match {
fn (mtc Match) show() {
path := maybe_color(term.bright_magenta, mtc.path)
- line := maybe_color(term.bright_yellow, '$mtc.line')
- text := maybe_color(term.bright_green, '$mtc.text')
+ line := maybe_color(term.bright_yellow, '${mtc.line}')
+ text := maybe_color(term.bright_green, '${mtc.text}')
if verbose || format {
- println('$path\n$line : [ $text ]\n')
+ println('${path}\n${line} : [ ${text} ]\n')
} else {
- println('$path:$line: $text')
+ println('${path}:${line}: ${text}')
}
}
diff --git a/cmd/tools/vwhere/finder_utils.v b/cmd/tools/vwhere/finder_utils.v
index 65a1ffa29b..312ca7a3de 100755
--- a/cmd/tools/vwhere/finder_utils.v
+++ b/cmd/tools/vwhere/finder_utils.v
@@ -177,6 +177,6 @@ fn resolve_module(path string) !string {
} else if os.is_dir(os.join_path(vlib_dir, path)) {
return os.join_path(vlib_dir, path)
} else {
- return error('Path: $path not found')
+ return error('Path: ${path} not found')
}
}
diff --git a/cmd/tools/vwipe-cache.v b/cmd/tools/vwipe-cache.v
index 4ce4ce848a..725110eeb8 100644
--- a/cmd/tools/vwipe-cache.v
+++ b/cmd/tools/vwipe-cache.v
@@ -13,5 +13,5 @@ fn wipe_path(cpath string, label string) {
os.rmdir_all(cpath) or {}
}
os.mkdir_all(cpath) or {}
- println('$label folder $cpath was wiped.')
+ println('${label} folder ${cpath} was wiped.')
}
diff --git a/cmd/v/v.v b/cmd/v/v.v
index 91acfa209e..d5ebc081a7 100755
--- a/cmd/v/v.v
+++ b/cmd/v/v.v
@@ -149,7 +149,7 @@ fn main() {
all_commands << external_tools
all_commands << other_commands
all_commands.sort()
- eprintln(util.new_suggestion(command, all_commands).say('v: unknown command `$command`'))
+ eprintln(util.new_suggestion(command, all_commands).say('v: unknown command `${command}`'))
eprintln('Run ${term.highlight_command('v help')} for usage.')
exit(1)
}
diff --git a/doc/docs.md b/doc/docs.md
index 4886cc9d24..e093daae93 100644
--- a/doc/docs.md
+++ b/doc/docs.md
@@ -416,9 +416,9 @@ In this way, their values can be swapped without an intermediary variable.
```v
mut a := 0
mut b := 1
-println('$a, $b') // 0, 1
+println('${a}, ${b}') // 0, 1
a, b = b, a
-println('$a, $b') // 1, 0
+println('${a}, ${b}') // 1, 0
```
### Declaration errors
@@ -605,7 +605,7 @@ converted to a string and embedded into the literal:
```v
name := 'Bob'
-println('Hello, $name!') // Hello, Bob!
+println('Hello, ${name}!') // Hello, Bob!
```
It also works with fields: `'age = $user.age'`. If you need more complex expressions, use `${}`:
@@ -697,7 +697,7 @@ or use string interpolation (preferred):
```v
age := 12
-println('age = $age')
+println('age = ${age}')
```
See all methods of [string](https://modules.vlang.io/index.html#string)
@@ -1334,7 +1334,7 @@ m := {
'abc': 'def'
}
if v := m['abc'] {
- println('the map value for that key is: $v')
+ println('the map value for that key is: ${v}')
}
```
@@ -1380,7 +1380,7 @@ import os
fn main() {
// read text from stdin
name := os.input('Enter your name: ')
- println('Hello, $name!')
+ println('Hello, ${name}!')
}
```
This program can use any public definitions from the `os` module, such
@@ -1404,7 +1404,7 @@ import os { input }
fn main() {
// read text from stdin
name := input('Enter your name: ')
- println('Hello, $name!')
+ println('Hello, ${name}!')
}
```
Note: This will import the module as well. Also, this is not allowed for
@@ -1416,7 +1416,7 @@ You can import several specific symbols at once:
import os { input, user_os }
name := input('Enter your name: ')
-println('Name: $name')
+println('Name: ${name}')
os := user_os()
println('Your OS is ${os}.')
```
@@ -1457,7 +1457,7 @@ fn main() {
day: 25
}
println(time.new_time(my_time).utc_string())
- println('Century: $my_time.century()')
+ println('Century: ${my_time.century()}')
}
```
@@ -1469,11 +1469,11 @@ fn main() {
a := 10
b := 20
if a < b {
- println('$a < $b')
+ println('${a} < ${b}')
} else if a > b {
- println('$a > $b')
+ println('${a} > ${b}')
} else {
- println('$a == $b')
+ println('${a} == ${b}')
}
```
@@ -1739,7 +1739,7 @@ for num in numbers {
}
names := ['Sam', 'Peter']
for i, name in names {
- println('$i) $name')
+ println('${i}) ${name}')
// Output: 0) Sam
// 1) Peter
}
@@ -1807,7 +1807,7 @@ m := {
'two': 2
}
for key, value in m {
- println('$key -> $value')
+ println('${key} -> ${value}')
// Output: one -> 1
// two -> 2
}
@@ -1971,7 +1971,7 @@ fn write_log(s State) !int {
// the file will be closed after the `error()` function
// has returned - so the error message will still report
// it as open
- return error('nothing written; file open: $f.is_opened')
+ return error('nothing written; file open: ${f.is_opened}')
}
// the file will be closed here, too
return 0
@@ -1979,10 +1979,10 @@ fn write_log(s State) !int {
fn main() {
n := write_log(.return_error) or {
- println('Error: $err')
+ println('Error: ${err}')
0
}
- println('$n bytes written')
+ println('${n} bytes written')
}
```
@@ -2444,7 +2444,7 @@ clr2 := Rgba32{
sz := sizeof(Rgba32)
unsafe {
- println('Size: ${sz}B,clr1.b: $clr1.b,clr2.b: $clr2.b')
+ println('Size: ${sz}B,clr1.b: ${clr1.b},clr2.b: ${clr2.b}')
}
```
@@ -2859,7 +2859,7 @@ struct Color {
}
pub fn (c Color) str() string {
- return '{$c.r, $c.g, $c.b}'
+ return '{${c.r}, ${c.g}, ${c.b}}'
}
red := Color{
@@ -3024,7 +3024,7 @@ enum Grocery {
g1 := int(Grocery.apple)
g2 := int(Grocery.orange)
g3 := int(Grocery.pear)
-println('Grocery IDs: $g1, $g2, $g3')
+println('Grocery IDs: ${g1}, ${g2}, ${g3}')
```
Output: `Grocery IDs: 0, 5, 6`.
@@ -3177,7 +3177,7 @@ fn main() {
arr << dog
arr << cat
for item in arr {
- println('a $item.breed says: $item.speak()')
+ println('a ${item.breed} says: ${item.speak()}')
}
}
```
@@ -3240,9 +3240,9 @@ interface Something {}
fn announce(s Something) {
if s is Dog {
- println('a $s.breed dog') // `s` is automatically cast to `Dog` (smart cast)
+ println('a ${s.breed} dog') // `s` is automatically cast to `Dog` (smart cast)
} else if s is Cat {
- println('a cat speaks $s.speak()')
+ println('a cat speaks ${s.speak()}')
} else {
println('something else')
}
@@ -3566,7 +3566,7 @@ fn (r Repo) find_user_by_id(id int) !User {
return user
}
}
- return error('User $id not found')
+ return error('User ${id} not found')
}
// A version of the function using an optional
@@ -3709,7 +3709,7 @@ struct PathError {
}
fn (err PathError) msg() string {
- return 'Failed to open path: $err.path'
+ return 'Failed to open path: ${err.path}'
}
fn try_open(path string) ? {
@@ -3750,7 +3750,7 @@ fn new_repo(db DB) Repo {
// This is a generic function. V will generate it for every type it's used with.
fn (r Repo) find_by_id(id int) ?T {
table_name := T.name // in this example getting the name of the type gives us the table name
- return r.db.query_one('select * from $table_name where id = ?', id)
+ return r.db.query_one('select * from ${table_name} where id = ?', id)
}
db := new_db()
@@ -3856,7 +3856,7 @@ fn main() {
g := spawn get_hypot(54.06, 2.08) // spawn thread and get handle to it
h1 := get_hypot(2.32, 16.74) // do some other calculation here
h2 := g.wait() // get result from spawned thread
- println('Results: $h1, $h2') // prints `Results: 16.9, 54.1`
+ println('Results: ${h1}, ${h2}') // prints `Results: 16.9, 54.1`
}
```
@@ -3867,9 +3867,9 @@ using an array of threads.
import time
fn task(id int, duration int) {
- println('task $id begin')
+ println('task ${id} begin')
time.sleep(duration * time.millisecond)
- println('task $id end')
+ println('task ${id} end')
}
fn main() {
@@ -3906,7 +3906,7 @@ fn main() {
}
// Join all tasks
r := threads.wait()
- println('All jobs finished: $r')
+ println('All jobs finished: ${r}')
}
// Output: All jobs finished: [1, 4, 9, 16, 25, 36, 49, 64, 81]
@@ -4008,16 +4008,16 @@ fn main() {
select {
a := <-ch {
// do something with `a`
- eprintln('> a: $a')
+ eprintln('> a: ${a}')
}
b = <-ch2 {
// do something with predeclared variable `b`
- eprintln('> b: $b')
+ eprintln('> b: ${b}')
}
ch3 <- c {
// do something if `c` was sent
time.sleep(5 * time.millisecond)
- eprintln('> c: $c was send on channel ch3')
+ eprintln('> c: ${c} was send on channel ch3')
}
500 * time.millisecond {
// do something if no channel has become ready within 0.5s
@@ -4141,7 +4141,7 @@ struct User {
data := '{ "name": "Frodo", "lastName": "Baggins", "age": 25 }'
user := json.decode(User, data) or {
- eprintln('Failed to decode json, error: $err')
+ eprintln('Failed to decode json, error: ${err}')
return
}
println(user.name)
@@ -4212,7 +4212,7 @@ strings that interpolate variables, etc.
```v
fn test_assertion_with_extra_message_failure() {
for i in 0 .. 100 {
- assert i * 2 - 45 < 75 + 10, 'assertion failed for i: $i'
+ assert i * 2 - 45 < 75 + 10, 'assertion failed for i: ${i}'
}
}
```
@@ -4388,8 +4388,8 @@ fn draw_scene() {
// ...
name1 := 'abc'
name2 := 'def ghi'
- draw_text('hello $name1', 10, 10)
- draw_text('hello $name2', 100, 10)
+ draw_text('hello ${name1}', 10, 10)
+ draw_text('hello ${name2}', 100, 10)
draw_text(strings.repeat(`X`, 10000), 10, 50)
// ...
}
@@ -4452,7 +4452,7 @@ struct RefStruct {
fn main() {
q, w := f()
- println('q: $q.r.n, w: $w.n')
+ println('q: ${q.r.n}, w: ${w.n}')
}
fn f() (RefStruct, &MyStruct) {
@@ -4469,7 +4469,7 @@ fn f() (RefStruct, &MyStruct) {
r: &b
}
x := a.n + c.n
- println('x: $x')
+ println('x: ${x}')
return e, &c
}
```
@@ -4494,7 +4494,7 @@ fn main() {
n: 13
}
x := q.f(&w) // references of `q` and `w` are passed
- println('q: $q\nx: $x')
+ println('q: ${q}\nx: ${x}')
}
fn (mut a MyStruct) f(b &MyStruct) int {
@@ -4537,7 +4537,7 @@ fn main() {
r: &m
}
r.g()
- println('r: $r')
+ println('r: ${r}')
}
fn (mut r RefStruct) g() {
@@ -4630,7 +4630,7 @@ fn use_stack() {
x := 7.5
y := 3.25
z := x + y
- println('$x $y $z')
+ println('${x} ${y} ${z}')
}
fn main() {
@@ -4640,7 +4640,7 @@ fn main() {
}
r.g()
use_stack() // to erase invalid stack contents
- println('r: $r')
+ println('r: ${r}')
}
fn (mut r RefStruct) g() {
@@ -4711,20 +4711,20 @@ sql db {
nr_customers := sql db {
select count from Customer
}
-println('number of all customers: $nr_customers')
+println('number of all customers: ${nr_customers}')
// V syntax can be used to build queries
uk_customers := sql db {
select from Customer where country == 'uk' && nr_orders > 0
}
println(uk_customers.len)
for customer in uk_customers {
- println('$customer.id - $customer.name')
+ println('${customer.id} - ${customer.name}')
}
// by adding `limit 1` we tell V that there will be only one object
customer := sql db {
select from Customer where id == 1 limit 1
}
-println('$customer.id - $customer.name')
+println('${customer.id} - ${customer.name}')
// insert a new customer
new_customer := Customer{
name: 'Bob'
@@ -5198,7 +5198,7 @@ Another example, is if you want to embed the version/name from v.mod *inside* yo
```v ignore
import v.vmod
vm := vmod.decode( @VMOD_FILE ) or { panic(err) }
-eprintln('$vm.name $vm.version\n $vm.description')
+eprintln('${vm.name} ${vm.version}\n ${vm.description}')
```
### Compile-time reflection
@@ -5215,7 +5215,7 @@ struct User {
fn main() {
$for field in User.fields {
$if field.typ is string {
- println('$field.name is of type string')
+ println('${field.name} is of type string')
}
}
}
@@ -5243,7 +5243,7 @@ fn main() {
}
// Usage as expression
os := $if windows { 'Windows' } $else { 'UNIX' }
- println('Using $os')
+ println('Using ${os}')
// $else-$if branches
$if tinyc {
println('tinyc')
@@ -5578,7 +5578,7 @@ struct Vec {
}
fn (a Vec) str() string {
- return '{$a.x, $a.y}'
+ return '{${a.x}, ${a.y}}'
}
fn (a Vec) + (b Vec) Vec {
@@ -5724,9 +5724,9 @@ fn main() {
}
races_won_by_change := t.wait()
atom_new := C.atomic_load_u32(&atom)
- println('atom: $atom_new, #exchanges: ${races_won_by_main + races_won_by_change}')
+ println('atom: ${atom_new}, #exchanges: ${races_won_by_main + races_won_by_change}')
// prints `atom: 31, #exchanges: 10000000`)
- println('races won by\n- `main()`: $races_won_by_main\n- `change()`: $races_won_by_change')
+ println('races won by\n- `main()`: ${races_won_by_main}\n- `change()`: ${races_won_by_change}')
}
```
@@ -5932,7 +5932,7 @@ fn main() {
C.sqlite3_step(stmt)
nr_users := C.sqlite3_column_int(stmt, 0)
C.sqlite3_finalize(stmt)
- println('There are $nr_users users in the database.')
+ println('There are ${nr_users} users in the database.')
//
error_msg := &char(0)
query_all_users := 'select * from users'
@@ -6219,9 +6219,9 @@ asm amd64 {
; r (a) as a // input
r (b) as b
}
-println('a: $a') // 100
-println('b: $b') // 20
-println('c: $c') // 120
+println('a: ${a}') // 100
+println('b: ${b}') // 20
+println('c: ${c}') // 120
```
For more examples, see [github.com/vlang/v/tree/master/vlib/v/tests/assembly/asm_test.amd64.v](https://github.com/vlang/v/tree/master/vlib/v/tests/assembly/asm_test.amd64.v)
@@ -6289,7 +6289,7 @@ An example `deploy.vsh`:
// print command then execute it
fn sh(cmd string) {
- println('❯ $cmd')
+ println('❯ ${cmd}')
print(execute_or_exit(cmd).output)
}
@@ -6314,7 +6314,7 @@ sh('ls')
// for file in files {
// if file.ends_with('.v') {
// mv(file, 'build/') or {
-// println('err: $err')
+// println('err: ${err}')
// return
// }
// }
diff --git a/examples/2048/2048.v b/examples/2048/2048.v
index 661412178f..e4f389e22d 100644
--- a/examples/2048/2048.v
+++ b/examples/2048/2048.v
@@ -492,7 +492,7 @@ fn (mut app App) ai_move() {
bestprediction = predictions[move_idx]
}
}
- eprintln('Simulation time: ${think_time:4}ms | best $bestprediction')
+ eprintln('Simulation time: ${think_time:4}ms | best ${bestprediction}')
app.move(bestprediction.move)
}
@@ -613,8 +613,8 @@ fn (app &App) draw() {
app.gg.draw_text(ww / 2, (m * 8 / 10) + ypad, msg2, app.label_format(.score_end))
}
// Draw at the end, so that it's on top of the victory / game over overlays
- app.gg.draw_text(labelx, labely, 'Points: $app.board.points', app.label_format(.points))
- app.gg.draw_text(ww - labelx, labely, 'Moves: $app.moves', app.label_format(.moves))
+ app.gg.draw_text(labelx, labely, 'Points: ${app.board.points}', app.label_format(.points))
+ app.gg.draw_text(ww - labelx, labely, 'Moves: ${app.moves}', app.label_format(.moves))
}
fn (app &App) draw_tiles() {
@@ -654,13 +654,13 @@ fn (app &App) draw_tiles() {
app.gg.draw_text(xpos, ypos, '${1 << tidx}', fmt)
}
.log {
- app.gg.draw_text(xpos, ypos, '$tidx', fmt)
+ app.gg.draw_text(xpos, ypos, '${tidx}', fmt)
}
.exponent {
app.gg.draw_text(xpos, ypos, '2', fmt)
fs2 := int(f32(fmt.size) * 0.67)
app.gg.draw_text(xpos + app.ui.tile_size / 10, ypos - app.ui.tile_size / 8,
- '$tidx', gx.TextCfg{
+ '${tidx}', gx.TextCfg{
...fmt
size: fs2
align: gx.HorizontalAlign.left
diff --git a/examples/asm.v b/examples/asm.v
index 88c75ec318..6a117358b4 100644
--- a/examples/asm.v
+++ b/examples/asm.v
@@ -12,7 +12,7 @@ fn main() {
r (b)
}
}
- println('a: $a') // 100
- println('b: $b') // 20
- println('c: $c') // 120
+ println('a: ${a}') // 100
+ println('b: ${b}') // 20
+ println('c: ${c}') // 120
}
diff --git a/examples/binary_search_tree.v b/examples/binary_search_tree.v
index 88a2d31746..2835c3f2f5 100644
--- a/examples/binary_search_tree.v
+++ b/examples/binary_search_tree.v
@@ -130,16 +130,16 @@ fn main() {
for i in vals {
tree = tree.insert(i)
}
- println('[1] after insertion tree size is $tree.size()') // 11
+ println('[1] after insertion tree size is ${tree.size()}') // 11
del_vals := [-0.3, 0.0, 0.3, 0.6, 1.0, 1.5]
for i in del_vals {
tree = tree.delete(i)
}
- print('[2] after deletion tree size is $tree.size(), ') // 7
+ print('[2] after deletion tree size is ${tree.size()}, ') // 7
print('and these elements were deleted: ') // 0.0 0.3 0.6 1.0
for i in vals {
if !tree.search(i) {
- print('$i ')
+ print('${i} ')
}
}
println('')
diff --git a/examples/brainvuck.v b/examples/brainvuck.v
index 81ebd726e8..a165ade52a 100644
--- a/examples/brainvuck.v
+++ b/examples/brainvuck.v
@@ -61,6 +61,6 @@ fn main() {
}
// print the state of the interpreter at the end
- println('Address: $address')
- println('Memory: $memory')
+ println('Address: ${address}')
+ println('Memory: ${memory}')
}
diff --git a/examples/buf_reader.v b/examples/buf_reader.v
index 380a9fabfd..4fb90d1369 100644
--- a/examples/buf_reader.v
+++ b/examples/buf_reader.v
@@ -12,7 +12,7 @@ fn main() {
mut r := io.new_buffered_reader(reader: conn)
for {
l := r.read_line() or { break }
- println('$l')
+ println('${l}')
// Make it nice and obvious that we are doing this line by line
time.sleep(100 * time.millisecond)
}
diff --git a/examples/c_interop_wkhtmltopdf.v b/examples/c_interop_wkhtmltopdf.v
index 54ba89b3f3..5f72408874 100644
--- a/examples/c_interop_wkhtmltopdf.v
+++ b/examples/c_interop_wkhtmltopdf.v
@@ -52,9 +52,9 @@ fn C.wkhtmltopdf_get_output(converter &C.wkhtmltopdf_converter, data &&char) int
fn main() {
// init
init := C.wkhtmltopdf_init(0)
- println('wkhtmltopdf_init: $init')
+ println('wkhtmltopdf_init: ${init}')
version := unsafe { cstring_to_vstring(&char(C.wkhtmltopdf_version())) }
- println('wkhtmltopdf_version: $version')
+ println('wkhtmltopdf_version: ${version}')
global_settings := C.wkhtmltopdf_create_global_settings()
println('wkhtmltopdf_create_global_settings: ${voidptr(global_settings)}')
object_settings := C.wkhtmltopdf_create_object_settings()
@@ -63,24 +63,24 @@ fn main() {
println('wkhtmltopdf_create_converter: ${voidptr(converter)}')
// convert
mut result := C.wkhtmltopdf_set_object_setting(object_settings, c'page', c'http://www.google.com.br')
- println('wkhtmltopdf_set_object_setting: $result [page = http://www.google.com.br]')
+ println('wkhtmltopdf_set_object_setting: ${result} [page = http://www.google.com.br]')
C.wkhtmltopdf_add_object(converter, object_settings, 0)
println('wkhtmltopdf_add_object')
result = C.wkhtmltopdf_convert(converter)
- println('wkhtmltopdf_convert: $result')
+ println('wkhtmltopdf_convert: ${result}')
error_code := C.wkhtmltopdf_http_error_code(converter)
- println('wkhtmltopdf_http_error_code: $error_code')
+ println('wkhtmltopdf_http_error_code: ${error_code}')
if result {
pdata := &char(0)
ppdata := &pdata
size := C.wkhtmltopdf_get_output(converter, voidptr(ppdata))
- println('wkhtmltopdf_get_output: $size bytes')
+ println('wkhtmltopdf_get_output: ${size} bytes')
mut file := os.open_file('./google.pdf', 'w+', 0o666) or {
- println('ERR: $err')
+ println('ERR: ${err}')
return
}
wrote := unsafe { file.write_ptr(pdata, size) }
- println('write_bytes: $wrote [./google.pdf]')
+ println('write_bytes: ${wrote} [./google.pdf]')
file.flush()
file.close()
}
@@ -92,5 +92,5 @@ fn main() {
C.wkhtmltopdf_destroy_global_settings(global_settings)
println('wkhtmltopdf_destroy_global_settings')
deinit := C.wkhtmltopdf_deinit()
- println('wkhtmltopdf_deinit: $deinit')
+ println('wkhtmltopdf_deinit: ${deinit}')
}
diff --git a/examples/cli.v b/examples/cli.v
index f28fdfd47b..afd4a2de6b 100644
--- a/examples/cli.v
+++ b/examples/cli.v
@@ -42,19 +42,21 @@ fn main() {
}
fn greet_func(cmd Command) ! {
- language := cmd.flags.get_string('language') or { panic('Failed to get `language` flag: $err') }
- times := cmd.flags.get_int('times') or { panic('Failed to get `times` flag: $err') }
+ language := cmd.flags.get_string('language') or {
+ panic('Failed to get `language` flag: ${err}')
+ }
+ times := cmd.flags.get_int('times') or { panic('Failed to get `times` flag: ${err}') }
name := cmd.args[0]
for _ in 0 .. times {
match language {
'english', 'en' {
- println('Welcome $name')
+ println('Welcome ${name}')
}
'german', 'de' {
- println('Willkommen $name')
+ println('Willkommen ${name}')
}
'dutch', 'nl' {
- println('Welkom $name')
+ println('Welkom ${name}')
}
else {
println('Unsupported language')
@@ -63,9 +65,9 @@ fn greet_func(cmd Command) ! {
}
}
}
- fun := cmd.flags.get_strings('fun') or { panic('Failed to get `fun` flag: $err') }
+ fun := cmd.flags.get_strings('fun') or { panic('Failed to get `fun` flag: ${err}') }
for f in fun {
- println('fun: $f')
+ println('fun: ${f}')
}
}
diff --git a/examples/compiletime/methods.v b/examples/compiletime/methods.v
index f3f90fa673..5221066677 100644
--- a/examples/compiletime/methods.v
+++ b/examples/compiletime/methods.v
@@ -13,25 +13,25 @@ fn (mut app App) method_three(s string) string {
fn main() {
$for method in App.methods {
$if method.typ is fn (string) string {
- println('$method.name IS `fn(string) string`')
+ println('${method.name} IS `fn(string) string`')
} $else {
- println('$method.name is NOT `fn(string) string`')
+ println('${method.name} is NOT `fn(string) string`')
}
$if method.return_type !is int {
- println('$method.name does NOT return `int`')
+ println('${method.name} does NOT return `int`')
} $else {
- println('$method.name DOES return `int`')
+ println('${method.name} DOES return `int`')
}
$if method.args[0].typ !is string {
- println("$method.name's first arg is NOT `string`")
+ println("${method.name}'s first arg is NOT `string`")
} $else {
- println("$method.name's first arg IS `string`")
+ println("${method.name}'s first arg IS `string`")
}
// TODO: Double inversion, should this even be allowed?
$if method.typ is fn () {
- println('$method.name IS a void method')
+ println('${method.name} IS a void method')
} $else {
- println('$method.name is NOT a void method')
+ println('${method.name} is NOT a void method')
}
println('')
}
diff --git a/examples/concurrency/concurrency.v b/examples/concurrency/concurrency.v
index 738c86347c..07bfa8603e 100644
--- a/examples/concurrency/concurrency.v
+++ b/examples/concurrency/concurrency.v
@@ -2,9 +2,9 @@ import time
// Simulate expensive computing using sleep function
fn expensive_computing(id int, duration int) {
- println('Executing expensive computing task ($id)...')
+ println('Executing expensive computing task (${id})...')
time.sleep(duration * time.millisecond)
- println('Finish task $id on $duration ms')
+ println('Finish task ${id} on ${duration} ms')
}
fn main() {
diff --git a/examples/concurrency/concurrency_returns.v b/examples/concurrency/concurrency_returns.v
index aa26c656d3..0922df7f5d 100644
--- a/examples/concurrency/concurrency_returns.v
+++ b/examples/concurrency/concurrency_returns.v
@@ -9,5 +9,5 @@ fn main() {
}
// Join all tasks
r := threads.wait()
- println('All jobs finished: $r')
+ println('All jobs finished: ${r}')
}
diff --git a/examples/database/sqlite.v b/examples/database/sqlite.v
index dcac9846e5..52d5207a11 100644
--- a/examples/database/sqlite.v
+++ b/examples/database/sqlite.v
@@ -9,13 +9,13 @@ fn main() {
db.exec("insert into users (name) values ('Kate')")
nr_users := db.q_int('select count(*) from users')
- println('nr users = $nr_users')
+ println('nr users = ${nr_users}')
name := db.q_string('select name from users where id = 1')
assert name == 'Sam'
users, code := db.exec('select * from users')
- println('SQL Result code: $code')
+ println('SQL Result code: ${code}')
for row in users {
println(row.vals)
}
diff --git a/examples/dynamic_library_loading/use.v b/examples/dynamic_library_loading/use.v
index a6a35c2255..4b1b857bab 100644
--- a/examples/dynamic_library_loading/use.v
+++ b/examples/dynamic_library_loading/use.v
@@ -15,5 +15,5 @@ fn main() {
f := FNAdder(dl.sym_opt(handle, 'add_1')!)
eprintln('f: ${ptr_str(f)}')
res := f(1, 2)
- eprintln('res: $res')
+ eprintln('res: ${res}')
}
diff --git a/examples/dynamic_library_loading/use_test.v b/examples/dynamic_library_loading/use_test.v
index d7042f1bdc..1f09e6d4b8 100644
--- a/examples/dynamic_library_loading/use_test.v
+++ b/examples/dynamic_library_loading/use_test.v
@@ -47,7 +47,7 @@ fn test_can_compile_and_use_library_with_skip_unused() {
}
fn v_compile(vopts string) os.Result {
- cmd := '${os.quoted_path(vexe)} -showcc $vopts'
+ cmd := '${os.quoted_path(vexe)} -showcc ${vopts}'
// dump(cmd)
res := os.execute_or_exit(cmd)
// dump(res)
diff --git a/examples/fetch.v b/examples/fetch.v
index a218ec0305..91f342b461 100644
--- a/examples/fetch.v
+++ b/examples/fetch.v
@@ -3,7 +3,7 @@ import net.http
fn main() {
resp := http.get('https://vlang.io/utc_now') or {
- eprintln('Failed to fetch data from the server. Error: $err')
+ eprintln('Failed to fetch data from the server. Error: ${err}')
return
}
diff --git a/examples/flappylearning/game.v b/examples/flappylearning/game.v
index c40f6e0b8e..97c57f219e 100644
--- a/examples/flappylearning/game.v
+++ b/examples/flappylearning/game.v
@@ -254,10 +254,10 @@ fn (app &App) display() {
app.bird)
}
}
- app.gg.draw_text_def(10, 25, 'Score: $app.score')
- app.gg.draw_text_def(10, 50, 'Max Score: $app.max_score')
- app.gg.draw_text_def(10, 75, 'Generation: $app.generation')
- app.gg.draw_text_def(10, 100, 'Alive: $app.alives / $app.nv.population')
+ app.gg.draw_text_def(10, 25, 'Score: ${app.score}')
+ app.gg.draw_text_def(10, 50, 'Max Score: ${app.max_score}')
+ app.gg.draw_text_def(10, 75, 'Generation: ${app.generation}')
+ app.gg.draw_text_def(10, 100, 'Alive: ${app.alives} / ${app.nv.population}')
}
fn (app &App) draw() {
diff --git a/examples/get_weather/get_weather.v b/examples/get_weather/get_weather.v
index e3e5415d27..ad018a8acf 100644
--- a/examples/get_weather/get_weather.v
+++ b/examples/get_weather/get_weather.v
@@ -38,7 +38,7 @@ fn main() {
}
rnd := rand.f32()
- url := 'https://api.caiyunapp.com/v2.5/96Ly7wgKGq6FhllM/116.391912,40.010711/weather.jsonp?hourlysteps=120&random=$rnd'
+ url := 'https://api.caiyunapp.com/v2.5/96Ly7wgKGq6FhllM/116.391912,40.010711/weather.jsonp?hourlysteps=120&random=${rnd}'
// println(url)
resp := http.fetch(http.FetchConfig{ ...config, url: url }) or {
diff --git a/examples/gg/arcs_and_slices.v b/examples/gg/arcs_and_slices.v
index f65fec6f6e..9d48018ef8 100644
--- a/examples/gg/arcs_and_slices.v
+++ b/examples/gg/arcs_and_slices.v
@@ -52,11 +52,11 @@ fn on_frame(mut app App) {
start := math.tau * app.mouse.y / (win_width * app.gg.scale)
end := math.tau * app.mouse.x / (win_width * app.gg.scale)
- segs := if app.sel == .segs { '[$app.segs]' } else { '$app.segs' }
- app.gg.draw_text_def(10, 10, 'Segments: $segs')
+ segs := if app.sel == .segs { '[${app.segs}]' } else { '${app.segs}' }
+ app.gg.draw_text_def(10, 10, 'Segments: ${segs}')
app.gg.draw_text_def(250, 10, 'Drawing Angles (radians)')
- app.gg.draw_text_def(200, 26, 'Start: $start°')
- app.gg.draw_text_def(350, 26, 'End: $end°')
+ app.gg.draw_text_def(200, 26, 'Start: ${start}°')
+ app.gg.draw_text_def(350, 26, 'End: ${end}°')
mut x, mut y := 0, -80
y += 150
diff --git a/examples/gg/drag_n_drop.v b/examples/gg/drag_n_drop.v
index 608703569b..04ce1c37cd 100644
--- a/examples/gg/drag_n_drop.v
+++ b/examples/gg/drag_n_drop.v
@@ -7,7 +7,7 @@ import sokol.sapp
const (
max_files = 12
- text = 'Drag&Drop here max $max_files files.'
+ text = 'Drag&Drop here max ${max_files} files.'
text_size = 16
)
@@ -62,7 +62,7 @@ fn frame(mut app App) {
mut y := 40
for c, f in app.dropped_file_list {
- app.gg.draw_text(12, y, '[$c] $f', txt_conf)
+ app.gg.draw_text(12, y, '[${c}] ${f}', txt_conf)
y += text_size
}
diff --git a/examples/gg/mandelbrot.v b/examples/gg/mandelbrot.v
index 0d11218d35..aa2619f09e 100644
--- a/examples/gg/mandelbrot.v
+++ b/examples/gg/mandelbrot.v
@@ -191,7 +191,7 @@ fn graphics_keydown(code gg.KeyCode, mod gg.Modifier, mut state AppState) {
// movement
mut d_x, mut d_y := 0.0, 0.0
if code == .enter {
- println('> ViewRect{$state.view.x_min, $state.view.x_max, $state.view.y_min, $state.view.y_max}')
+ println('> ViewRect{${state.view.x_min}, ${state.view.x_max}, ${state.view.y_min}, ${state.view.y_max}}')
}
if state.gg.pressed_keys[int(gg.KeyCode.left)] {
d_x -= s_x
diff --git a/examples/gg/worker_thread.v b/examples/gg/worker_thread.v
index 1d5231fd85..006c652190 100644
--- a/examples/gg/worker_thread.v
+++ b/examples/gg/worker_thread.v
@@ -77,7 +77,7 @@ fn frame(mut app App) {
app.counter += i64(f64(count) / time.second)
}
- label := '$app.counter'
+ label := '${app.counter}'
label_width := (f64(label.len * text_cfg.size) / 4.0)
label_height := (f64(1 * text_cfg.size) / 2.0)
mut x := f32(size.width) * 0.5 - label_width
diff --git a/examples/graphs/bellman-ford.v b/examples/graphs/bellman-ford.v
index 5b1c9c5b95..1ee12799d0 100644
--- a/examples/graphs/bellman-ford.v
+++ b/examples/graphs/bellman-ford.v
@@ -45,7 +45,7 @@ fn print_sol(dist []int) {
n_vertex := dist.len
print('\n Vertex Distance from Source')
for i in 0 .. n_vertex {
- print('\n $i --> ${dist[i]}')
+ print('\n ${i} --> ${dist[i]}')
}
}
@@ -154,7 +154,7 @@ fn main() {
graph = g_value.clone() // graphs_sample[g].clone() // choice your SAMPLE
// allways starting by node 0
start_node := 0
- println('\n\n Graph ${index + 1} using Bellman-Ford algorithm (source node: $start_node)')
+ println('\n\n Graph ${index + 1} using Bellman-Ford algorithm (source node: ${start_node})')
bellman_ford(graph, start_node)
}
println('\n BYE -- OK')
diff --git a/examples/graphs/bfs.v b/examples/graphs/bfs.v
index 99d7521ac7..1a2c286016 100644
--- a/examples/graphs/bfs.v
+++ b/examples/graphs/bfs.v
@@ -7,9 +7,9 @@ fn main() {
'E': ['B', 'F']
'F': ['C', 'E']
}
- println('Graph: $graph')
+ println('Graph: ${graph}')
path := breadth_first_search_path(graph, 'A', 'F')
- println('The shortest path from node A to node F is: $path')
+ println('The shortest path from node A to node F is: ${path}')
assert path == ['A', 'C', 'F']
}
diff --git a/examples/graphs/bfs2.v b/examples/graphs/bfs2.v
index 93d734e05f..203bc45317 100644
--- a/examples/graphs/bfs2.v
+++ b/examples/graphs/bfs2.v
@@ -10,9 +10,9 @@ fn main() {
'E': ['B', 'F']
'F': ['C', 'E']
}
- println('Graph: $graph')
+ println('Graph: ${graph}')
path := breadth_first_search_path(graph, 'A', 'F')
- println('\n The shortest path from node A to node F is: $path.reverse()')
+ println('\n The shortest path from node A to node F is: ${path.reverse()}')
}
// Breadth-First Search (BFS) allows you to find the shortest distance between two nodes in the graph.
@@ -35,7 +35,7 @@ fn breadth_first_search_path(graph map[string][]string, start string, target str
return path
}
// Expansion of node removed from queue
- print('\n Expansion of node $node (true/false): ${graph[node]}')
+ print('\n Expansion of node ${node} (true/false): ${graph[node]}')
// take all nodes from the node
for vertex in graph[node] { // println("\n ...${vertex}")
// not explored yet
@@ -43,7 +43,7 @@ fn breadth_first_search_path(graph map[string][]string, start string, target str
queue << vertex
}
}
- print('\n QUEUE: $queue (only not visited) \n Visited: $visited')
+ print('\n QUEUE: ${queue} (only not visited) \n Visited: ${visited}')
}
}
path = ['Path not found, problem in the Graph, start or end nodes! ']
@@ -70,7 +70,7 @@ fn visited_init(a_graph map[string][]string) map[string]bool {
// Based in the current node that is final, search for its parent, already visited, up to the root or start node
fn build_path_reverse(graph map[string][]string, start string, final string, visited map[string]bool) []string {
- print('\n\n Nodes visited (true) or no (false): $visited')
+ print('\n\n Nodes visited (true) or no (false): ${visited}')
array_of_nodes := graph.keys()
mut current := final
mut path := []string{}
diff --git a/examples/graphs/dfs.v b/examples/graphs/dfs.v
index f722345340..667eb3292c 100644
--- a/examples/graphs/dfs.v
+++ b/examples/graphs/dfs.v
@@ -25,9 +25,9 @@ fn main() {
}
// println('Graph: $graph')
path_01 := depth_first_search_path(graph_01, 'A', 'F')
- println('\n Graph_01: a first path from node A to node F is: $path_01.reverse()')
+ println('\n Graph_01: a first path from node A to node F is: ${path_01.reverse()}')
path_02 := depth_first_search_path(graph_02, 'A', 'H')
- println('\n Graph_02: a first path from node A to node F is: $path_02.reverse()')
+ println('\n Graph_02: a first path from node A to node H is: ${path_02.reverse()}')
}
// Depth-First Search (BFS) allows you to find a path between two nodes in the graph.
@@ -51,7 +51,7 @@ fn depth_first_search_path(graph map[string][]string, start string, target strin
return path
}
// Exploring of node removed from stack and add its relatives
- print('\n Exploring of node $node (true/false): ${graph[node]}')
+ print('\n Exploring of node ${node} (true/false): ${graph[node]}')
// graph[node].reverse() take a classical choice for DFS
// at most os left in this case.
// use vertex in graph[node] the choice is right
@@ -64,7 +64,7 @@ fn depth_first_search_path(graph map[string][]string, start string, target strin
stack << vertex
}
}
- print('\n Stack: $stack (only not visited) \n Visited: $visited')
+ print('\n Stack: ${stack} (only not visited) \n Visited: ${visited}')
}
}
path = ['Path not found, problem in the Graph, start or end nodes! ']
@@ -84,7 +84,7 @@ fn visited_init(a_graph map[string][]string) map[string]bool {
// Based in the current node that is final, search for his parent, that is already visited, up to the root or start node
fn build_path_reverse(graph map[string][]string, start string, final string, visited map[string]bool) []string {
- print('\n\n Nodes visited (true) or no (false): $visited')
+ print('\n\n Nodes visited (true) or no (false): ${visited}')
array_of_nodes := graph.keys()
mut current := final
mut path := []string{}
diff --git a/examples/graphs/dijkstra.v b/examples/graphs/dijkstra.v
index 72fa066996..1bc3323a49 100644
--- a/examples/graphs/dijkstra.v
+++ b/examples/graphs/dijkstra.v
@@ -69,7 +69,7 @@ fn updating_priority(mut prior_queue []T, search_data int, new_priority int)
i++
// all the list was examined
if i >= lenght_pq {
- print('\n This data $search_data does exist ... PRIORITY QUEUE problem\n')
+ print('\n This data ${search_data} does not exist ... PRIORITY QUEUE problem\n')
exit(1) // panic(s string)
}
} // end for
@@ -98,7 +98,7 @@ fn all_adjacents(g [][]T, v int) []int {
fn print_solution(dist []T) {
print('Vertex \tDistance from Source')
for node in 0 .. (dist.len) {
- print('\n $node ==> \t ${dist[node]}')
+ print('\n ${node} ==> \t ${dist[node]}')
}
}
@@ -107,7 +107,7 @@ fn print_paths_dist(path []T, dist []T) {
print('\n Read the nodes from right to left (a path): \n')
for node in 1 .. (path.len) {
- print('\n $node ')
+ print('\n ${node} ')
mut i := node
for path[i] != -1 {
print(' <= ${path[i]} ')
@@ -231,7 +231,7 @@ fn main() {
graph = g_value.clone() // graphs_sample[g].clone() // choice your SAMPLE
// allways starting by node 0
start_node := 0
- println('\n\n Graph ${index + 1} using Dijkstra algorithm (source node: $start_node)')
+ println('\n\n Graph ${index + 1} using Dijkstra algorithm (source node: ${start_node})')
dijkstra(graph, start_node)
}
diff --git a/examples/graphs/minimal_spann_tree_prim.v b/examples/graphs/minimal_spann_tree_prim.v
index 60c0c47b5b..416400ebf5 100644
--- a/examples/graphs/minimal_spann_tree_prim.v
+++ b/examples/graphs/minimal_spann_tree_prim.v
@@ -100,13 +100,13 @@ fn print_solution(path []int, g [][]int) {
mut sum := 0
for node in 0 .. (path.len) {
if path[node] == -1 {
- print('\n $node <== reference or start node')
+ print('\n ${node} <== reference or start node')
} else {
- print('\n $node <--> ${path[node]} \t${g[node][path[node]]}')
+ print('\n ${node} <--> ${path[node]} \t${g[node][path[node]]}')
sum += g[node][path[node]]
}
}
- print('\n Minimum Cost Spanning Tree: $sum\n\n')
+ print('\n Minimum Cost Spanning Tree: ${sum}\n\n')
}
// check structure from: https://www.geeksforgeeks.org/dijkstras-shortest-path-algorithm-greedy-algo-7/
diff --git a/examples/graphs/topological_sorting_dfs.v b/examples/graphs/topological_sorting_dfs.v
index 659e483815..3b1db4de1b 100644
--- a/examples/graphs/topological_sorting_dfs.v
+++ b/examples/graphs/topological_sorting_dfs.v
@@ -9,7 +9,7 @@
// THE DFS RECURSIVE .... classical searchig for leaves nodes
// the arguments are used in the function to avoid global variables....
fn dfs_recursive(u string, mut visited map[string]bool, graph map[string][]string, mut top_sorting []string) {
- print(' Visiting: $u -> ')
+ print(' Visiting: ${u} -> ')
visited[u] = true
for v in graph[u] {
@@ -67,7 +67,7 @@ fn main() {
mut graph := map[string][]string{} // the graph: adjacency matrix
for index, g_value in [graph_01, graph_02, graph_03] {
- println('Topological sorting for the graph $index using a DFS recursive')
+ println('Topological sorting for the graph ${index} using a DFS recursive')
graph = g_value.clone() // graphs_sample[g].clone() // choice your SAMPLE
// mut n_nodes := graph.len
@@ -82,7 +82,7 @@ fn main() {
}
}
- print('\n A topological sorting of graph $index : ')
+ print('\n A topological sorting of graph ${index} : ')
// println(g_value)
println(top_sorting.reverse())
println('')
diff --git a/examples/graphs/topological_sorting_greedy.v b/examples/graphs/topological_sorting_greedy.v
index 868ec30e33..cac940679c 100644
--- a/examples/graphs/topological_sorting_greedy.v
+++ b/examples/graphs/topological_sorting_greedy.v
@@ -25,7 +25,7 @@ fn topog_sort_greedy(graph map[string][]string) []string {
Maybe it seems the Kahn's algorithm
*/
mut v_degree := in_degree(graph) // return: map [string] int
- print('V Degree $v_degree')
+ print('V Degree ${v_degree}')
mut small_degree := min_degree(v_degree)
mut new_graph := remove_node_from_graph(small_degree, graph)
top_order << small_degree
@@ -33,7 +33,7 @@ fn topog_sort_greedy(graph map[string][]string) []string {
for (count < n_nodes) {
v_degree = in_degree(new_graph) // return: map [string] int
- print('\nV Degree $v_degree')
+ print('\nV Degree ${v_degree}')
small_degree = min_degree(v_degree)
new_graph = remove_node_from_graph(small_degree, new_graph)
diff --git a/examples/hanoi.v b/examples/hanoi.v
index 0851ba0dd3..70575ac342 100644
--- a/examples/hanoi.v
+++ b/examples/hanoi.v
@@ -8,7 +8,7 @@ fn main() {
}
fn move(n int, a string, b string) {
- println('Disc $n from $a to ${b}...')
+ println('Disc ${n} from ${a} to ${b}...')
}
fn hanoi(n int, a string, b string, c string) {
diff --git a/examples/hello_v_js.v b/examples/hello_v_js.v
index 2286719546..2a85abfe9c 100644
--- a/examples/hello_v_js.v
+++ b/examples/hello_v_js.v
@@ -1,5 +1,5 @@
fn main() {
for i in 0 .. 3 {
- println('Hello from V.js ($i)')
+ println('Hello from V.js (${i})')
}
}
diff --git a/examples/js_dom_draw_bechmark_chart/README.md b/examples/js_dom_draw_bechmark_chart/README.md
index d92e93e8cb..c942169ce0 100644
--- a/examples/js_dom_draw_bechmark_chart/README.md
+++ b/examples/js_dom_draw_bechmark_chart/README.md
@@ -77,7 +77,7 @@ Create a service to request the benchmarks data by http
Decode the info to `FrameworkBenchmarkResponse`
```v ignore
fn typescript_sqlite_memory() ?FrameworkBenchmarkResponse {
- url := 'http://localhost:3000/sqlite-memory/$benchmark_loop_length'
+ url := 'http://localhost:3000/sqlite-memory/${benchmark_loop_length}'
res := http.get(url) or { panic(err) }
framework_benchmark_response := json.decode(FrameworkBenchmarkResponse, res.body)!
return framework_benchmark_response
@@ -89,7 +89,7 @@ Create a service to request the benchmarks data by http
Decode the info to `FrameworkBenchmarkResponse`
```v ignore
fn typescript_sqlite_memory() ?FrameworkBenchmarkResponse {
- url := 'http://localhost:3000/sqlite-memory/$benchmark_loop_length'
+ url := 'http://localhost:3000/sqlite-memory/${benchmark_loop_length}'
res := http.get(url) or { panic(err) }
framework_benchmark_response := json.decode(FrameworkBenchmarkResponse, res.body)!
return framework_benchmark_response
diff --git a/examples/js_dom_draw_bechmark_chart/chart/main.v b/examples/js_dom_draw_bechmark_chart/chart/main.v
index 08a5539d7c..6076c4845a 100644
--- a/examples/js_dom_draw_bechmark_chart/chart/main.v
+++ b/examples/js_dom_draw_bechmark_chart/chart/main.v
@@ -146,14 +146,14 @@ fn update_framework_benchmark_times() !FrameworkPlatform {
}
fn typescript_sqlite_memory() !FrameworkBenchmarkResponse {
- url := 'http://localhost:3000/sqlite-memory/$benchmark_loop_length'
+ url := 'http://localhost:3000/sqlite-memory/${benchmark_loop_length}'
res := http.get(url) or { panic(err) }
framework_benchmark_response := json.decode(FrameworkBenchmarkResponse, res.body)!
return framework_benchmark_response
}
fn v_sqlite_memory() !FrameworkBenchmarkResponse {
- url := 'http://localhost:4000/sqlite-memory/$benchmark_loop_length'
+ url := 'http://localhost:4000/sqlite-memory/${benchmark_loop_length}'
res := http.get(url) or { panic(err) }
framework_benchmark_response := json.decode(FrameworkBenchmarkResponse, res.body)!
return framework_benchmark_response
diff --git a/examples/json.v b/examples/json.v
index 841d9e22cf..a30b0bfc30 100644
--- a/examples/json.v
+++ b/examples/json.v
@@ -14,16 +14,16 @@ fn main() {
return
}
for user in users {
- println('$user.name: $user.age')
+ println('${user.name}: ${user.age}')
}
println('')
for i, user in users {
- println('$i) $user.name')
+ println('${i}) ${user.name}')
if !user.can_register() {
- println('Cannot register $user.name, they are too young')
+ println('Cannot register ${user.name}, they are too young')
} else {
users[i].register()
- println('$user.name is registered')
+ println('${user.name} is registered')
}
}
// Let's encode users again just for fun
diff --git a/examples/lander.v b/examples/lander.v
index ad2600d5c4..de3af8bb2e 100644
--- a/examples/lander.v
+++ b/examples/lander.v
@@ -26,7 +26,7 @@ fn (l Lander) deorbit() {
}
fn (l Lander) open_parachutes(n int) {
- println('opening $n parachutes')
+ println('opening ${n} parachutes')
}
fn wait() {
diff --git a/examples/log.v b/examples/log.v
index c7ed77dfe4..e6426603ed 100644
--- a/examples/log.v
+++ b/examples/log.v
@@ -6,7 +6,7 @@ fn main() {
// Make a new file called info.log in the current folder
l.set_full_logpath('./info.log')
l.log_to_console_too()
- println('Please check the file: $l.output_file_name after this example crashes.')
+ println('Please check the file: ${l.output_file_name} after this example crashes.')
l.info('info')
l.warn('warn')
diff --git a/examples/macos_tray/tray.v b/examples/macos_tray/tray.v
index e1fd99d327..1a5a8bbdbb 100644
--- a/examples/macos_tray/tray.v
+++ b/examples/macos_tray/tray.v
@@ -39,7 +39,7 @@ mut:
}
fn (app &MyApp) on_menu_item_click(item TrayMenuItem) {
- println('click $item.id')
+ println('click ${item.id}')
if item.id == 'quit' {
C.tray_app_exit(app.tray_info)
}
diff --git a/examples/mini_calculator.v b/examples/mini_calculator.v
index 9aefa4be97..43d5e83885 100644
--- a/examples/mini_calculator.v
+++ b/examples/mini_calculator.v
@@ -48,7 +48,7 @@ fn expr_to_rev_pol(expr string) ![]string {
stack.delete(stack.len - 1)
}
else {
- return error('err: invalid character `$op`')
+ return error('err: invalid character `${op}`')
}
}
pos++
@@ -115,7 +115,7 @@ fn main() {
mut expr_count := 0
for {
expr_count++
- expr := os.input_opt('[$expr_count] ') or {
+ expr := os.input_opt('[${expr_count}] ') or {
println('')
break
}.trim_space()
diff --git a/examples/net_failconnect.v b/examples/net_failconnect.v
index 8f971f8e24..0da15b6473 100644
--- a/examples/net_failconnect.v
+++ b/examples/net_failconnect.v
@@ -2,4 +2,4 @@ import net
conn := net.dial_tcp('[::1]:57000')!
peer_addr := conn.peer_addr()!
-println('$peer_addr')
+println('${peer_addr}')
diff --git a/examples/net_peer_ip.v b/examples/net_peer_ip.v
index b27f32e6ce..00cfd265c7 100644
--- a/examples/net_peer_ip.v
+++ b/examples/net_peer_ip.v
@@ -2,4 +2,4 @@ import net
conn := net.dial_tcp('google.com:80')!
peer_addr := conn.peer_addr()!
-println('$peer_addr')
+println('${peer_addr}')
diff --git a/examples/net_resolve.v b/examples/net_resolve.v
index ddc058d04f..cc57246846 100644
--- a/examples/net_resolve.v
+++ b/examples/net_resolve.v
@@ -6,7 +6,7 @@ for addr in [
'steampowered.com:80',
'api.steampowered.com:80',
] {
- println('$addr')
+ println('${addr}')
for @type in [net.SocketType.tcp, .udp] {
family := net.AddrFamily.unspec
@@ -18,7 +18,7 @@ for addr in [
for a in addrs {
f := a.family()
- println('> $a $f ${@type}')
+ println('> ${a} ${f} ${@type}')
}
}
}
diff --git a/examples/net_udp_server_and_client.v b/examples/net_udp_server_and_client.v
index cb808fa342..426b8c3048 100644
--- a/examples/net_udp_server_and_client.v
+++ b/examples/net_udp_server_and_client.v
@@ -11,11 +11,11 @@ fn main() {
port := cmdline.option(os.args, '-p', '40001').int()
mut buf := []u8{len: 100}
if is_server {
- println('UDP echo server, listening for udp packets on port: $port')
- mut c := net.listen_udp(':$port')!
+ println('UDP echo server, listening for udp packets on port: ${port}')
+ mut c := net.listen_udp(':${port}')!
for {
read, addr := c.read(mut buf) or { continue }
- println('received $read bytes from $addr')
+ println('received ${read} bytes from ${addr}')
c.write_to(addr, buf[..read]) or {
println('Server: connection dropped')
continue
@@ -23,7 +23,7 @@ fn main() {
}
} else {
println('UDP client, sending packets to port: ${port}.\nType `exit` to exit.')
- mut c := net.dial_udp('localhost:$port')!
+ mut c := net.dial_udp('localhost:${port}')!
for {
mut line := os.input('client > ')
match line {
diff --git a/examples/news_fetcher.v b/examples/news_fetcher.v
index 4feef351a4..6c90f0e3a6 100644
--- a/examples/news_fetcher.v
+++ b/examples/news_fetcher.v
@@ -20,7 +20,7 @@ fn worker_fetch(mut p pool.PoolProcessor, cursor int, worker_id int) voidptr {
println('failed to decode a story')
return pool.no_result
}
- println('# $cursor) $story.title | $story.url')
+ println('# ${cursor}) ${story.title} | ${story.url}')
return pool.no_result
}
diff --git a/examples/password/password.v b/examples/password/password.v
index 32fd0d0680..e538ff573b 100644
--- a/examples/password/password.v
+++ b/examples/password/password.v
@@ -7,7 +7,7 @@ fn main() {
repeated_password := os.input_password('Confirm password : ')!
if original_password == repeated_password {
- println('Password confirmed! You entered: $original_password .')
+ println('Password confirmed! You entered: ${original_password} .')
} else {
println('Passwords do not match .')
}
diff --git a/examples/path_tracing.v b/examples/path_tracing.v
index 2d52cd4c39..3702a0faac 100644
--- a/examples/path_tracing.v
+++ b/examples/path_tracing.v
@@ -101,13 +101,13 @@ fn (image Image) save_as_ppm(file_name string) {
npixels := image.width * image.height
mut f_out := os.create(file_name) or { panic(err) }
f_out.writeln('P3') or { panic(err) }
- f_out.writeln('$image.width $image.height') or { panic(err) }
+ f_out.writeln('${image.width} ${image.height}') or { panic(err) }
f_out.writeln('255') or { panic(err) }
for i in 0 .. npixels {
c_r := to_int(unsafe { image.data[i] }.x)
c_g := to_int(unsafe { image.data[i] }.y)
c_b := to_int(unsafe { image.data[i] }.z)
- f_out.write_string('$c_r $c_g $c_b ') or { panic(err) }
+ f_out.write_string('${c_r} ${c_g} ${c_b} ') or { panic(err) }
}
f_out.close()
}
@@ -390,7 +390,7 @@ const (
//****************** main function for the radiance calculation *************
fn radiance(r Ray, depthi int, scene_id int) Vec {
if depthi > 1024 {
- eprintln('depthi: $depthi')
+ eprintln('depthi: ${depthi}')
eprintln('')
return Vec{}
}
@@ -595,7 +595,7 @@ fn main() {
t1 := time.ticks()
- eprintln('Path tracing samples: $samples, file_name: $file_name, scene_id: $scene_id, width: $width, height: $height')
+ eprintln('Path tracing samples: ${samples}, file_name: ${file_name}, scene_id: ${scene_id}, width: ${width}, height: ${height}')
eprintln('')
image := ray_trace(width, height, samples, file_name, scene_id)
t2 := time.ticks()
@@ -605,5 +605,5 @@ fn main() {
image.save_as_ppm(file_name)
t3 := time.ticks()
- eprintln('Image saved as [$file_name]. Took: ${(t3 - t2):5}ms')
+ eprintln('Image saved as [${file_name}]. Took: ${(t3 - t2):5}ms')
}
diff --git a/examples/pendulum-simulation/modules/sim/args/parser.v b/examples/pendulum-simulation/modules/sim/args/parser.v
index f7f8ede12f..02faf3928b 100644
--- a/examples/pendulum-simulation/modules/sim/args/parser.v
+++ b/examples/pendulum-simulation/modules/sim/args/parser.v
@@ -50,17 +50,17 @@ fn parse_sequential_args() !SequentialArgs {
fp.skip_executable()
// output parameters
- width := fp.int('width', `w`, sim.default_width, 'width of the image output. Defaults to $sim.default_width')
- height := fp.int('height', `h`, sim.default_height, 'height of the image output. Defaults to $sim.default_height')
+ width := fp.int('width', `w`, sim.default_width, 'width of the image output. Defaults to ${sim.default_width}')
+ height := fp.int('height', `h`, sim.default_height, 'height of the image output. Defaults to ${sim.default_height}')
filename := fp.string('output', `o`, 'out.ppm', 'name of the image output. Defaults to out.ppm')
// simulation parameters
- rope_length := fp.float('rope-length', 0, sim.default_rope_length, 'rope length to use on simulation. Defaults to $sim.default_rope_length')
- bearing_mass := fp.float('bearing-mass', 0, sim.default_bearing_mass, 'bearing mass to use on simulation. Defaults to $sim.default_bearing_mass')
- magnet_spacing := fp.float('magnet-spacing', 0, sim.default_magnet_spacing, 'magnet spacing to use on simulation. Defaults to $sim.default_magnet_spacing')
- magnet_height := fp.float('magnet-height', 0, sim.default_magnet_height, 'magnet height to use on simulation. Defaults to $sim.default_magnet_height')
- magnet_strength := fp.float('magnet-strength', 0, sim.default_magnet_strength, 'magnet strength to use on simulation. Defaults to $sim.default_magnet_strength')
- gravity := fp.float('gravity', 0, sim.default_gravity, 'gravity to use on simulation. Defaults to $sim.default_gravity')
+ rope_length := fp.float('rope-length', 0, sim.default_rope_length, 'rope length to use on simulation. Defaults to ${sim.default_rope_length}')
+ bearing_mass := fp.float('bearing-mass', 0, sim.default_bearing_mass, 'bearing mass to use on simulation. Defaults to ${sim.default_bearing_mass}')
+ magnet_spacing := fp.float('magnet-spacing', 0, sim.default_magnet_spacing, 'magnet spacing to use on simulation. Defaults to ${sim.default_magnet_spacing}')
+ magnet_height := fp.float('magnet-height', 0, sim.default_magnet_height, 'magnet height to use on simulation. Defaults to ${sim.default_magnet_height}')
+ magnet_strength := fp.float('magnet-strength', 0, sim.default_magnet_strength, 'magnet strength to use on simulation. Defaults to ${sim.default_magnet_strength}')
+ gravity := fp.float('gravity', 0, sim.default_gravity, 'gravity to use on simulation. Defaults to ${sim.default_gravity}')
fp.finalize() or {
println(fp.usage())
@@ -87,7 +87,7 @@ fn parse_sequential_args() !SequentialArgs {
grid: grid
}
- sim.log('$args')
+ sim.log('${args}')
return args
}
@@ -100,20 +100,20 @@ fn parse_parallel_args(extra_workers int) !ParallelArgs {
fp.description('This is a pendulum simulation written in pure V')
fp.skip_executable()
- workers := fp.int('workers', 0, args.max_parallel_workers, 'amount of workers to use on simulation. Defaults to $args.max_parallel_workers')
+ workers := fp.int('workers', 0, args.max_parallel_workers, 'amount of workers to use on simulation. Defaults to ${args.max_parallel_workers}')
// output parameters
- width := fp.int('width', `w`, sim.default_width, 'width of the image output. Defaults to $sim.default_width')
- height := fp.int('height', `h`, sim.default_height, 'height of the image output. Defaults to $sim.default_height')
+ width := fp.int('width', `w`, sim.default_width, 'width of the image output. Defaults to ${sim.default_width}')
+ height := fp.int('height', `h`, sim.default_height, 'height of the image output. Defaults to ${sim.default_height}')
filename := fp.string('output', `o`, 'out.ppm', 'name of the image output. Defaults to out.ppm')
// simulation parameters
- rope_length := fp.float('rope-length', 0, sim.default_rope_length, 'rope length to use on simulation. Defaults to $sim.default_rope_length')
- bearing_mass := fp.float('bearing-mass', 0, sim.default_bearing_mass, 'bearing mass to use on simulation. Defaults to $sim.default_bearing_mass')
- magnet_spacing := fp.float('magnet-spacing', 0, sim.default_magnet_spacing, 'magnet spacing to use on simulation. Defaults to $sim.default_magnet_spacing')
- magnet_height := fp.float('magnet-height', 0, sim.default_magnet_height, 'magnet height to use on simulation. Defaults to $sim.default_magnet_height')
- magnet_strength := fp.float('magnet-strength', 0, sim.default_magnet_strength, 'magnet strength to use on simulation. Defaults to $sim.default_magnet_strength')
- gravity := fp.float('gravity', 0, sim.default_gravity, 'gravity to use on simulation. Defaults to $sim.default_gravity')
+ rope_length := fp.float('rope-length', 0, sim.default_rope_length, 'rope length to use on simulation. Defaults to ${sim.default_rope_length}')
+ bearing_mass := fp.float('bearing-mass', 0, sim.default_bearing_mass, 'bearing mass to use on simulation. Defaults to ${sim.default_bearing_mass}')
+ magnet_spacing := fp.float('magnet-spacing', 0, sim.default_magnet_spacing, 'magnet spacing to use on simulation. Defaults to ${sim.default_magnet_spacing}')
+ magnet_height := fp.float('magnet-height', 0, sim.default_magnet_height, 'magnet height to use on simulation. Defaults to ${sim.default_magnet_height}')
+ magnet_strength := fp.float('magnet-strength', 0, sim.default_magnet_strength, 'magnet strength to use on simulation. Defaults to ${sim.default_magnet_strength}')
+ gravity := fp.float('gravity', 0, sim.default_gravity, 'gravity to use on simulation. Defaults to ${sim.default_gravity}')
fp.finalize() or {
println(fp.usage())
@@ -141,7 +141,7 @@ fn parse_parallel_args(extra_workers int) !ParallelArgs {
workers: get_workers(workers, extra_workers)
}
- sim.log('$args')
+ sim.log('${args}')
return args
}
diff --git a/examples/pendulum-simulation/modules/sim/img/ppm.v b/examples/pendulum-simulation/modules/sim/img/ppm.v
index 2b8b73dde9..f7bc291f4a 100644
--- a/examples/pendulum-simulation/modules/sim/img/ppm.v
+++ b/examples/pendulum-simulation/modules/sim/img/ppm.v
@@ -50,7 +50,7 @@ pub fn ppm_writer_for_fname(fname string, settings ImageSettings) !&PPMWriter {
pub fn (mut writer PPMWriter) start_for_file(fname string, settings ImageSettings) ! {
writer.file = os.create(fname)!
- writer.file.writeln('P6 $settings.width $settings.height 255')!
+ writer.file.writeln('P6 ${settings.width} ${settings.height} 255')!
}
pub fn (mut writer PPMWriter) handle_pixel(p gx.Color) ! {
diff --git a/examples/pendulum-simulation/modules/sim/img/worker.v b/examples/pendulum-simulation/modules/sim/img/worker.v
index 0b335700de..00359691ab 100644
--- a/examples/pendulum-simulation/modules/sim/img/worker.v
+++ b/examples/pendulum-simulation/modules/sim/img/worker.v
@@ -26,7 +26,7 @@ pub fn image_worker(mut writer PPMWriter, result_chan chan &sim.SimResult, setti
bmark.step()
writer.handle_pixel(pixel_buf[current_index].Color) or {
bmark.fail()
- sim.log(@MOD + '.' + @FN + ': pixel handler failed. Error $err')
+ sim.log(@MOD + '.' + @FN + ': pixel handler failed. Error ${err}')
break
}
bmark.ok()
diff --git a/examples/pendulum-simulation/modules/sim/img/writer.v b/examples/pendulum-simulation/modules/sim/img/writer.v
index f0a215453a..0354abc18f 100644
--- a/examples/pendulum-simulation/modules/sim/img/writer.v
+++ b/examples/pendulum-simulation/modules/sim/img/writer.v
@@ -38,7 +38,7 @@ pub fn (mut iw ImageWritter) handle(result sim.SimResult) !int {
for iw.current_index < total_pixels && iw.buffer[iw.current_index].valid {
iw.writer.handle_pixel(iw.buffer[iw.current_index].Color) or {
- sim.log(@MOD + '.' + @FN + ': pixel handler failed. Error $err')
+ sim.log(@MOD + '.' + @FN + ': pixel handler failed. Error ${err}')
break
}
iw.current_index++
diff --git a/examples/pendulum-simulation/modules/sim/runner.v b/examples/pendulum-simulation/modules/sim/runner.v
index 54ab9a9371..fc2a1ac984 100644
--- a/examples/pendulum-simulation/modules/sim/runner.v
+++ b/examples/pendulum-simulation/modules/sim/runner.v
@@ -42,7 +42,7 @@ pub fn run(params SimParams, settings RunnerSettings) {
if !isnil(settings.on_start) {
settings.on_start() or {
- log(@MOD + '.' + @FN + ': Simulation start handler failed. Error $err')
+ log(@MOD + '.' + @FN + ': Simulation start handler failed. Error ${err}')
}
}
@@ -77,7 +77,7 @@ pub fn run(params SimParams, settings RunnerSettings) {
params: params
}
settings.on_request(request) or {
- log(@MOD + '.' + @FN + ': request handler failed. Error $err')
+ log(@MOD + '.' + @FN + ': request handler failed. Error ${err}')
bmark.fail()
break
}
@@ -90,7 +90,7 @@ pub fn run(params SimParams, settings RunnerSettings) {
if !isnil(settings.on_finish) {
settings.on_finish() or {
- log(@MOD + '.' + @FN + ': Simulation stop handler failed. Error $err')
+ log(@MOD + '.' + @FN + ': Simulation stop handler failed. Error ${err}')
}
}
}
diff --git a/examples/pendulum-simulation/modules/sim/worker.v b/examples/pendulum-simulation/modules/sim/worker.v
index 2b0e22a57e..d4b026d1d7 100644
--- a/examples/pendulum-simulation/modules/sim/worker.v
+++ b/examples/pendulum-simulation/modules/sim/worker.v
@@ -36,7 +36,7 @@ pub fn sim_worker(id int, request_chan chan &SimRequest, result_channels []chan
bmark.ok()
}
bmark.stop()
- println(bmark.total_message(@FN + ': worker $id'))
+ println(bmark.total_message(@FN + ': worker ${id}'))
}
pub fn compute_result(request SimRequest) &SimResult {
diff --git a/examples/pico/pico.v b/examples/pico/pico.v
index 8a8a636d63..1725268539 100644
--- a/examples/pico/pico.v
+++ b/examples/pico/pico.v
@@ -47,6 +47,6 @@ fn callback(data voidptr, req picohttpparser.Request, mut res picohttpparser.Res
}
fn main() {
- println('Starting webserver on http://127.0.0.1:$port/ ...')
+ println('Starting webserver on http://127.0.0.1:${port}/ ...')
picoev.new(port: port, cb: &callback).serve()
}
diff --git a/examples/process/command.v b/examples/process/command.v
index 718ce96326..c779f0e35b 100644
--- a/examples/process/command.v
+++ b/examples/process/command.v
@@ -28,7 +28,7 @@ fn main() {
exec("bash -c 'find /tmp/'")
out = exec('echo to stdout')
out = exec('echo to stderr 1>&2')
- println("'$out'")
+ println("'${out}'")
// THIS DOES NOT WORK, is error, it goes to stderror of the command I run
assert out == 'to stderr'
}
diff --git a/examples/process/process_script.v b/examples/process/process_script.v
index fa415e7839..75d20a382a 100644
--- a/examples/process/process_script.v
+++ b/examples/process/process_script.v
@@ -20,11 +20,11 @@ fn exec(path string, redirect bool) {
if cmd.is_alive() {
for {
line = cmd.stdout_read()
- println('STDOUT: $line')
+ println('STDOUT: ${line}')
if !redirect {
line_err = cmd.stderr_read()
- println('STDERR: $line_err')
+ println('STDERR: ${line_err}')
}
if !cmd.is_alive() {
diff --git a/examples/process/process_stdin_trick.v b/examples/process/process_stdin_trick.v
index 79edaf8d44..b5476c82fc 100644
--- a/examples/process/process_stdin_trick.v
+++ b/examples/process/process_stdin_trick.v
@@ -17,7 +17,7 @@ fn exec(cmd string) (string, int) {
p.set_redirect_stdio()
p.run()
- p.stdin_write('$cmd2 && echo **OK**')
+ p.stdin_write('${cmd2} && echo **OK**')
os.fd_close(p.stdio_fd[0]) // important: close stdin so cmd can end by itself
for p.is_alive() {
diff --git a/examples/quadtree_demo/quadtree_demo.v b/examples/quadtree_demo/quadtree_demo.v
index a209a0557b..71f382e4f4 100644
--- a/examples/quadtree_demo/quadtree_demo.v
+++ b/examples/quadtree_demo/quadtree_demo.v
@@ -177,8 +177,8 @@ fn (app &App) display() {
app.gg.draw_rect_filled(f32(retrieved.x + 1), f32(retrieved.y + 1), f32(retrieved.width - 2),
f32(retrieved.height - 2), gx.green)
}
- app.gg.draw_text(1200, 25, 'Nodes: $app.nodes.len', font_small)
- app.gg.draw_text(1200, 50, 'Particles: $app.particles.len', font_small)
+ app.gg.draw_text(1200, 25, 'Nodes: ${app.nodes.len}', font_small)
+ app.gg.draw_text(1200, 50, 'Particles: ${app.particles.len}', font_small)
}
fn (app &App) draw() {
diff --git a/examples/quick_sort.v b/examples/quick_sort.v
index ebfb9c317f..8455f642dc 100644
--- a/examples/quick_sort.v
+++ b/examples/quick_sort.v
@@ -10,7 +10,7 @@ fn main() {
for _ in 0 .. gen_len {
arr << rand.intn(gen_max) or { 0 }
}
- println('length of random array is $arr.len')
+ println('length of random array is ${arr.len}')
println('before quick sort whether array is sorted: ${is_sorted(arr)}')
quick_sort(mut arr, 0, arr.len - 1)
println('after quick sort whether array is sorted: ${is_sorted(arr)}')
diff --git a/examples/regex/regex_example.v b/examples/regex/regex_example.v
index 138c7da2be..e0f68cd320 100644
--- a/examples/regex/regex_example.v
+++ b/examples/regex/regex_example.v
@@ -23,18 +23,18 @@ fn convert_html_rgb(in_col string) u32 {
// NOTE: if you want use escaped code you must use the r"" (raw) strings,
// *** please remember that V interpoaltion doesn't work on raw strings. ***
- query := '#([a-fA-F0-9]{$n_digit})([a-fA-F0-9]{$n_digit})([a-fA-F0-9]{$n_digit})'
+ query := '#([a-fA-F0-9]{${n_digit}})([a-fA-F0-9]{${n_digit}})([a-fA-F0-9]{${n_digit}})'
mut re := regex.regex_opt(query) or { panic(err) }
start, end := re.match_string(in_col)
- println('start: $start, end: $end')
+ println('start: ${start}, end: ${end}')
mut res := u32(0)
if start >= 0 {
group_list := re.get_group_list()
r := ('0x' + in_col[group_list[0].start..group_list[0].end]).int() << col_mul
g := ('0x' + in_col[group_list[1].start..group_list[1].end]).int() << col_mul
b := ('0x' + in_col[group_list[2].start..group_list[2].end]).int() << col_mul
- println('r: $r g: $g b: $b')
+ println('r: ${r} g: ${g} b: ${b}')
res = u32(r) << 16 | u32(g) << 8 | u32(b)
}
return res
@@ -47,11 +47,11 @@ fn convert_html_rgb_n(in_col string) u32 {
mut n_digit := if in_col.len == 4 { 1 } else { 2 }
mut col_mul := if in_col.len == 4 { 4 } else { 0 }
- query := '#(?P[a-fA-F0-9]{$n_digit})(?P[a-fA-F0-9]{$n_digit})(?P[a-fA-F0-9]{$n_digit})'
+ query := '#(?P[a-fA-F0-9]{${n_digit}})(?P[a-fA-F0-9]{${n_digit}})(?P[a-fA-F0-9]{${n_digit}})'
mut re := regex.regex_opt(query) or { panic(err) }
start, end := re.match_string(in_col)
- println('start: $start, end: $end')
+ println('start: ${start}, end: ${end}')
mut res := u32(0)
if start >= 0 {
red_s, red_e := re.get_group_bounds_by_name('red')
@@ -63,7 +63,7 @@ fn convert_html_rgb_n(in_col string) u32 {
blue_s, blue_e := re.get_group_bounds_by_name('blue')
b := ('0x' + in_col[blue_s..blue_e]).int() << col_mul
- println('r: $r g: $g b: $b')
+ println('r: ${r} g: ${g} b: ${b}')
res = u32(r) << 16 | u32(g) << 8 | u32(b)
}
return res
diff --git a/examples/regex/regex_with_memoization.v b/examples/regex/regex_with_memoization.v
index 28e346b511..c6609acb17 100644
--- a/examples/regex/regex_with_memoization.v
+++ b/examples/regex/regex_with_memoization.v
@@ -114,14 +114,14 @@ fn main() {
println('enter `exit` to quit\n')
for {
cnt++
- src := os.input('[$cnt] enter source string: ')
+ src := os.input('[${cnt}] enter source string: ')
if src == 'exit' {
break
}
- pat := os.input('[$cnt] enter pattern string: ')
+ pat := os.input('[${cnt}] enter pattern string: ')
if pat == 'exit' {
break
}
- println('[$cnt] whether `$src` matches `$pat`: ${regex_match(src, pat)}')
+ println('[${cnt}] whether `${src}` matches `${pat}`: ${regex_match(src, pat)}')
}
}
diff --git a/examples/snek/snek.js.v b/examples/snek/snek.js.v
index 0f64aca12c..30636ff130 100644
--- a/examples/snek/snek.js.v
+++ b/examples/snek/snek.js.v
@@ -153,7 +153,7 @@ fn on_frame(mut app App) {
// drawing top
app.gg.draw_rect(0, 0, canvas_size, top_height, gx.black)
- app.gg.draw_text(350, top_height / 2, 'Score: $app.score', gx.TextCfg{
+ app.gg.draw_text(350, top_height / 2, 'Score: ${app.score}', gx.TextCfg{
color: gx.white
align: .center
vertical_align: .middle
diff --git a/examples/snek/snek.v b/examples/snek/snek.v
index 367ecdb130..f624f57047 100644
--- a/examples/snek/snek.v
+++ b/examples/snek/snek.v
@@ -167,13 +167,13 @@ fn on_frame(mut app App) {
// drawing top
app.gg.draw_rect_filled(0, 0, canvas_size, top_height, gx.black)
- app.gg.draw_text(150, top_height / 2, 'Score: $app.score', gx.TextCfg{
+ app.gg.draw_text(150, top_height / 2, 'Score: ${app.score}', gx.TextCfg{
color: gx.white
align: .center
vertical_align: .middle
size: 65
})
- app.gg.draw_text(canvas_size - 150, top_height / 2, 'Best: $app.best', gx.TextCfg{
+ app.gg.draw_text(canvas_size - 150, top_height / 2, 'Best: ${app.best}', gx.TextCfg{
color: gx.white
align: .center
vertical_align: .middle
diff --git a/examples/sokol/06_obj_viewer/modules/obj/obj.v b/examples/sokol/06_obj_viewer/modules/obj/obj.v
index e01bf900a4..9c4a815fbc 100644
--- a/examples/sokol/06_obj_viewer/modules/obj/obj.v
+++ b/examples/sokol/06_obj_viewer/modules/obj/obj.v
@@ -291,10 +291,10 @@ pub fn (mut obj_part ObjPart) parse_obj_buffer(rows []string, single_material bo
}
// if c == 2 { break }
if c % 100000 == 0 && c > 0 {
- println('$c rows parsed')
+ println('${c} rows parsed')
}
}
- println('$row_count .obj Rows parsed')
+ println('${row_count} .obj Rows parsed')
// remove default part if empty
if obj_part.part.len > 1 && obj_part.part[0].faces.len == 0 {
obj_part.part = obj_part.part[1..]
@@ -304,7 +304,7 @@ pub fn (mut obj_part ObjPart) parse_obj_buffer(rows []string, single_material bo
// load the materials if found the .mtl file
fn (mut obj_part ObjPart) load_materials() {
rows := read_lines_from_file(obj_part.material_file)
- println('Material file [$obj_part.material_file] $rows.len Rows.')
+ println('Material file [${obj_part.material_file}] ${rows.len} Rows.')
for row in rows {
// println("$row")
mut i := 0
@@ -479,7 +479,7 @@ pub fn (mut obj_part ObjPart) get_buffer(in_part_list []int) Skl_buffer {
v_index := face[vertex_index][0] // vertex index
n_index := face[vertex_index][1] // normal index
t_index := face[vertex_index][2] // uv texture index
- key := '${v_index}_${n_index}_$t_index'
+ key := '${v_index}_${n_index}_${t_index}'
if key !in cache {
cache[key] = v_count_index
mut pnct := Vertex_pnct{
@@ -529,10 +529,10 @@ pub fn (mut obj_part ObjPart) get_buffer(in_part_list []int) Skl_buffer {
// print on the console the summary of the .obj model loaded
pub fn (obj_part ObjPart) summary() {
println('---- Stats ----')
- println('vertices: $obj_part.v.len')
- println('normals : $obj_part.vn.len')
- println('uv : $obj_part.vt.len')
- println('parts : $obj_part.part.len')
+ println('vertices: ${obj_part.v.len}')
+ println('normals : ${obj_part.vn.len}')
+ println('uv : ${obj_part.vt.len}')
+ println('parts : ${obj_part.part.len}')
// Parts
println('---- Parts ----')
for c, x in obj_part.part {
@@ -540,17 +540,17 @@ pub fn (obj_part ObjPart) summary() {
}
// Materials
println('---- Materials ----')
- println('Material dict: $obj_part.mat_map.keys()')
+ println('Material dict: ${obj_part.mat_map.keys()}')
for c, mat in obj_part.mat {
println('${c:3} [${mat.name:-16}]')
for k, v in mat.ks {
- print('$k = $v')
+ print('${k} = ${v}')
}
for k, v in mat.ns {
- println('$k = $v')
+ println('${k} = ${v}')
}
for k, v in mat.maps {
- println('$k = $v')
+ println('${k} = ${v}')
}
}
}
diff --git a/examples/sokol/06_obj_viewer/modules/obj/rend.v b/examples/sokol/06_obj_viewer/modules/obj/rend.v
index d6dc8902b5..c4a2eeef72 100644
--- a/examples/sokol/06_obj_viewer/modules/obj/rend.v
+++ b/examples/sokol/06_obj_viewer/modules/obj/rend.v
@@ -50,7 +50,7 @@ pub fn load_texture(file_name string) gfx.Image {
buffer := read_bytes_from_file(file_name)
stbi.set_flip_vertically_on_load(true)
img := stbi.load_from_memory(buffer.data, buffer.len) or {
- eprintln('Texure file: [$file_name] ERROR!')
+ eprintln('Texure file: [${file_name}] ERROR!')
exit(0)
}
res := create_texture(int(img.width), int(img.height), img.data)
diff --git a/examples/sokol/06_obj_viewer/modules/obj/util.v b/examples/sokol/06_obj_viewer/modules/obj/util.v
index 3726e65c42..94d8b64be9 100644
--- a/examples/sokol/06_obj_viewer/modules/obj/util.v
+++ b/examples/sokol/06_obj_viewer/modules/obj/util.v
@@ -9,14 +9,14 @@ pub fn read_lines_from_file(file_path string) []string {
$if android {
path = 'models/' + file_path
bts := os.read_apk_asset(path) or {
- eprintln('File [$path] NOT FOUND!')
+ eprintln('File [${path}] NOT FOUND!')
return rows
}
rows = bts.bytestr().split_into_lines()
} $else {
path = os.resource_abs_path('assets/models/' + file_path)
rows = os.read_lines(path) or {
- eprintln('File [$path] NOT FOUND! file_path: $file_path')
+ eprintln('File [${path}] NOT FOUND! file_path: ${file_path}')
return rows
}
}
@@ -30,13 +30,13 @@ pub fn read_bytes_from_file(file_path string) []u8 {
$if android {
path = 'models/' + file_path
buffer = os.read_apk_asset(path) or {
- eprintln('Texure file: [$path] NOT FOUND!')
+ eprintln('Texure file: [${path}] NOT FOUND!')
exit(0)
}
} $else {
path = os.resource_abs_path('assets/models/' + file_path)
buffer = os.read_bytes(path) or {
- eprintln('Texure file: [$path] NOT FOUND!')
+ eprintln('Texure file: [${path}] NOT FOUND!')
exit(0)
}
}
diff --git a/examples/sokol/06_obj_viewer/show_obj.v b/examples/sokol/06_obj_viewer/show_obj.v
index 394a4551f1..d4647a7ce7 100644
--- a/examples/sokol/06_obj_viewer/show_obj.v
+++ b/examples/sokol/06_obj_viewer/show_obj.v
@@ -296,8 +296,8 @@ fn main() {
if os.args.len >= 3 {
app.single_material_flag = os.args[2].bool()
}
- println('Loading model: $app.file_name')
- println('Using single material: $app.single_material_flag')
+ println('Loading model: ${app.file_name}')
+ println('Using single material: ${app.single_material_flag}')
}
app.gg = gg.new_context(
diff --git a/examples/sokol/fonts.v b/examples/sokol/fonts.v
index e8048b75dc..2487e8137d 100644
--- a/examples/sokol/fonts.v
+++ b/examples/sokol/fonts.v
@@ -50,7 +50,7 @@ fn init(mut state AppState) {
if bytes := os.read_bytes(os.resource_abs_path(os.join_path('..', 'assets', 'fonts',
'RobotoMono-Regular.ttf')))
{
- println('loaded font: $bytes.len')
+ println('loaded font: ${bytes.len}')
state.font_normal = state.font_context.add_font_mem('sans', bytes, false)
}
}
diff --git a/examples/sokol/freetype_raven.v b/examples/sokol/freetype_raven.v
index 7c68642a38..b7aedbc08f 100644
--- a/examples/sokol/freetype_raven.v
+++ b/examples/sokol/freetype_raven.v
@@ -102,7 +102,7 @@ fn init(user_data voidptr) {
if bytes := os.read_bytes(os.resource_abs_path(os.join_path('..', 'assets', 'fonts',
'RobotoMono-Regular.ttf')))
{
- println('loaded font: $bytes.len')
+ println('loaded font: ${bytes.len}')
state.font_normal = state.fons.add_font_mem('sans', bytes, false)
}
}
diff --git a/examples/sokol/sounds/wav_player.v b/examples/sokol/sounds/wav_player.v
index 575b8b3b35..4ce73cd69d 100644
--- a/examples/sokol/sounds/wav_player.v
+++ b/examples/sokol/sounds/wav_player.v
@@ -23,12 +23,12 @@ fn play_sounds(files []string) ! {
player.init()
for f in files {
if !os.exists(f) || os.is_dir(f) {
- eprintln('skipping "$f" (does not exist)')
+ eprintln('skipping "${f}" (does not exist)')
continue
}
fext := os.file_ext(f).to_lower()
if fext != '.wav' {
- eprintln('skipping "$f" (not a .wav file)')
+ eprintln('skipping "${f}" (not a .wav file)')
continue
}
player.play_wav_file(f)!
@@ -66,7 +66,7 @@ fn (mut p Player) stop() {
}
fn (mut p Player) play_wav_file(fpath string) ! {
- println('> play_wav_file: $fpath')
+ println('> play_wav_file: ${fpath}')
samples := read_wav_file_samples(fpath)!
p.finished = true
p.samples << samples
diff --git a/examples/tcp_echo_server.v b/examples/tcp_echo_server.v
index 26ca7691d2..dfd8cddbe2 100644
--- a/examples/tcp_echo_server.v
+++ b/examples/tcp_echo_server.v
@@ -11,7 +11,7 @@ import net
fn main() {
mut server := net.listen_tcp(.ip6, ':12345')!
laddr := server.addr()!
- eprintln('Listen on $laddr ...')
+ eprintln('Listen on ${laddr} ...')
for {
mut socket := server.accept()!
spawn handle_client(mut socket)
@@ -23,7 +23,7 @@ fn handle_client(mut socket net.TcpConn) {
socket.close() or { panic(err) }
}
client_addr := socket.peer_addr() or { return }
- eprintln('> new client: $client_addr')
+ eprintln('> new client: ${client_addr}')
mut reader := io.new_buffered_reader(reader: socket)
defer {
unsafe {
@@ -36,7 +36,7 @@ fn handle_client(mut socket net.TcpConn) {
if received_line == '' {
return
}
- println('client $client_addr: $received_line')
- socket.write_string('server: $received_line\n') or { return }
+ println('client ${client_addr}: ${received_line}')
+ socket.write_string('server: ${received_line}\n') or { return }
}
}
diff --git a/examples/tcp_notify_echo_server.v b/examples/tcp_notify_echo_server.v
index d97ecb30fd..4360235917 100644
--- a/examples/tcp_notify_echo_server.v
+++ b/examples/tcp_notify_echo_server.v
@@ -19,7 +19,7 @@ fn main() {
listener.close() or {}
}
addr := listener.addr()!
- eprintln('Listening on $addr')
+ eprintln('Listening on ${addr}')
eprintln('Type `stop` to stop the server')
// create file descriptor notifier
@@ -40,10 +40,10 @@ fn main() {
if _ := notifier.add(conn.sock.handle, .read | .peer_hangup) {
eprintln('connected')
} else {
- eprintln('error adding to notifier: $err')
+ eprintln('error adding to notifier: ${err}')
}
} else {
- eprintln('unable to accept: $err')
+ eprintln('unable to accept: ${err}')
}
}
0 {
@@ -60,7 +60,7 @@ fn main() {
if _ := notifier.remove(event.fd) {
eprintln('remote disconnected')
} else {
- eprintln('error removing from notifier: $err')
+ eprintln('error removing from notifier: ${err}')
}
} else {
s, _ := os.fd_read(event.fd, 10)
diff --git a/examples/term.ui/event_viewer.v b/examples/term.ui/event_viewer.v
index 28a900c86c..77304e935d 100644
--- a/examples/term.ui/event_viewer.v
+++ b/examples/term.ui/event_viewer.v
@@ -10,10 +10,10 @@ fn event(e &tui.Event, x voidptr) {
app.tui.clear()
app.tui.set_cursor_position(0, 0)
app.tui.write('V term.input event viewer (press `esc` to exit)\n\n')
- app.tui.write('$e')
- app.tui.write('\n\nRaw event bytes: "$e.utf8.bytes().hex()" = $e.utf8.bytes()')
+ app.tui.write('${e}')
+ app.tui.write('\n\nRaw event bytes: "${e.utf8.bytes().hex()}" = ${e.utf8.bytes()}')
if !e.modifiers.is_empty() {
- app.tui.write('\nModifiers: $e.modifiers = ')
+ app.tui.write('\nModifiers: ${e.modifiers} = ')
if e.modifiers.has(.ctrl) {
app.tui.write('ctrl. ')
}
diff --git a/examples/term.ui/term_drawing.v b/examples/term.ui/term_drawing.v
index c4d31c57b5..55a65e4ebf 100644
--- a/examples/term.ui/term_drawing.v
+++ b/examples/term.ui/term_drawing.v
@@ -319,7 +319,7 @@ fn (mut app App) select_color(primary bool, idx int) {
app.secondary_color = color
}
c_str := if primary { 'primary' } else { 'secondary' }
- app.show_msg('set $c_str color idx: $idx', 1)
+ app.show_msg('set ${c_str} color idx: ${idx}', 1)
}
fn (mut app App) set_pixel(x_ int, y_ int, c ui.Color) {
@@ -420,11 +420,11 @@ fn (mut app App) draw_header() {
g: 220
b: 220
)
- app.ui.draw_text(0, 0, ' $app.msg ')
+ app.ui.draw_text(0, 0, ' ${app.msg} ')
app.ui.reset()
}
//'tick: $app.ui.frame_count | ' +
- app.ui.draw_text(3, 2, 'terminal size: ($app.ui.window_width, $app.ui.window_height) | primary color: $app.primary_color.hex() | secondary color: $app.secondary_color.hex()')
+ app.ui.draw_text(3, 2, 'terminal size: (${app.ui.window_width}, ${app.ui.window_height}) | primary color: ${app.primary_color.hex()} | secondary color: ${app.secondary_color.hex()}')
app.ui.horizontal_separator(3)
}
@@ -452,7 +452,7 @@ fn (mut app App) draw_footer() {
app.ui.reset_bg_color()
app.ui.draw_text(3, wh - 3, select_color)
app.ui.bold()
- app.ui.draw_text(3, wh - 1, '$select_size $app.size')
+ app.ui.draw_text(3, wh - 1, '${select_size} ${app.size}')
app.ui.reset()
// TODO: help button
@@ -468,7 +468,7 @@ fn (mut app App) inc_size() {
if app.size < 30 {
app.size++
}
- app.show_msg('inc. size: $app.size', 1)
+ app.show_msg('inc. size: ${app.size}', 1)
}
[inline]
@@ -476,7 +476,7 @@ fn (mut app App) dec_size() {
if app.size > 1 {
app.size--
}
- app.show_msg('dec. size: $app.size', 1)
+ app.show_msg('dec. size: ${app.size}', 1)
}
fn (mut app App) footer_click(event &ui.Event) {
diff --git a/examples/term.ui/text_editor.v b/examples/term.ui/text_editor.v
index db243bfe4b..7b999f2f54 100644
--- a/examples/term.ui/text_editor.v
+++ b/examples/term.ui/text_editor.v
@@ -119,7 +119,7 @@ fn (mut a App) footer() {
finfo := if a.cfile().len > 0 { ' (' + os.file_name(a.cfile()) + ')' } else { '' }
mut status := a.status
a.tui.draw_text(0, h - 1, '─'.repeat(w))
- footer := '$finfo Line ${b.cursor.pos_y + 1:4}/${b.lines.len:-4}, Column ${b.cursor.pos_x + 1:3}/${b.cur_line().len:-3} index: ${b.cursor_index():5} (ESC = quit, Ctrl+s = save)'
+ footer := '${finfo} Line ${b.cursor.pos_y + 1:4}/${b.lines.len:-4}, Column ${b.cursor.pos_x + 1:3}/${b.cur_line().len:-3} index: ${b.cursor_index():5} (ESC = quit, Ctrl+s = save)'
if footer.len < w {
a.tui.draw_text((w - footer.len) / 2, h, footer)
} else if footer.len == w {
@@ -140,7 +140,7 @@ fn (mut a App) footer() {
g: 0
b: 0
)
- a.tui.draw_text((w + 4 - status.len) / 2, h - 1, ' $status ')
+ a.tui.draw_text((w + 4 - status.len) / 2, h - 1, ' ${status} ')
a.tui.reset()
a.t -= 33
}
@@ -243,7 +243,7 @@ fn (mut b Buffer) put(s string) {
}
$if debug {
flat := s.replace('\n', r'\n')
- eprintln(@MOD + '.' + @STRUCT + '::' + @FN + ' "$flat"')
+ eprintln(@MOD + '.' + @STRUCT + '::' + @FN + ' "${flat}"')
}
}
@@ -349,7 +349,7 @@ fn (mut b Buffer) del(amount int) string {
}
$if debug {
flat := removed.replace('\n', r'\n')
- eprintln(@MOD + '.' + @STRUCT + '::' + @FN + ' "$flat"')
+ eprintln(@MOD + '.' + @STRUCT + '::' + @FN + ' "${flat}"')
}
return removed
}
diff --git a/examples/term.ui/vyper.v b/examples/term.ui/vyper.v
index 0cf8ab3daf..4f5da524cb 100644
--- a/examples/term.ui/vyper.v
+++ b/examples/term.ui/vyper.v
@@ -436,7 +436,7 @@ fn (mut a App) draw_debug() {
snake := a.snake
a.termui.draw_text(block_size, 1 * block_size, 'Display_width: ${a.width:04d} Display_height: ${a.height:04d}')
a.termui.draw_text(block_size, 2 * block_size, 'Vx: ${snake.velocity.x:+02d} Vy: ${snake.velocity.y:+02d}')
- a.termui.draw_text(block_size, 3 * block_size, 'F: $snake.direction')
+ a.termui.draw_text(block_size, 3 * block_size, 'F: ${snake.direction}')
snake_head := snake.get_head()
rat := a.rat
a.termui.draw_text(block_size, 4 * block_size, 'Sx: ${snake_head.pos.x:+03d} Sy: ${snake_head.pos.y:+03d}')
diff --git a/examples/toml.v b/examples/toml.v
index a245cf3510..c0b645f4cb 100644
--- a/examples/toml.v
+++ b/examples/toml.v
@@ -40,9 +40,9 @@ hosts = [
fn main() {
doc := toml.parse_text(toml_text) or { panic(err) }
title := doc.value('title').string()
- println('title: "$title"')
+ println('title: "${title}"')
ip := doc.value('servers.alpha.ip').string()
- println('Server IP: "$ip"')
+ println('Server IP: "${ip}"')
toml_json := to.json(doc)
println(toml_json)
diff --git a/examples/tree_of_nodes.v b/examples/tree_of_nodes.v
index e9bb47d394..b0a45e2f04 100644
--- a/examples/tree_of_nodes.v
+++ b/examples/tree_of_nodes.v
@@ -22,6 +22,6 @@ fn main() {
node1 := Node{30, Empty{}, Empty{}}
node2 := Node{20, Empty{}, Empty{}}
tree := Node{10, node1, node2}
- println('tree structure:\n $tree')
+ println('tree structure:\n ${tree}')
println('tree size: ${size(tree)}')
}
diff --git a/examples/ttf_font/example_ttf.v b/examples/ttf_font/example_ttf.v
index 78f1a5ced6..8d7c0a53e9 100644
--- a/examples/ttf_font/example_ttf.v
+++ b/examples/ttf_font/example_ttf.v
@@ -36,7 +36,7 @@ fn my_init(mut app App_data) {
}
fn draw_frame(mut app App_data) {
- cframe_txt := 'Current Frame: $app.frame_c'
+ cframe_txt := 'Current Frame: ${app.frame_c}'
app.gg.begin()
sgl.defaults()
sgl.matrix_mode_projection()
@@ -68,7 +68,7 @@ fn draw_frame(mut app App_data) {
// block test
block_txt := "Today it is a good day!
Tommorow I'm not so sure :(
-Frame: $app.frame_c
+Frame: ${app.frame_c}
But Vwill prevail for sure, V is the way!!
òàèì@ò!£$%&
"
@@ -96,7 +96,7 @@ But Vwill prevail for sure, V is the way!!
if app.mouse_x >= 0 {
txt1 = unsafe { &app.ttf_render[2] }
txt1.destroy_texture()
- txt1.create_text('$app.mouse_x,$app.mouse_y', 25)
+ txt1.create_text('${app.mouse_x},${app.mouse_y}', 25)
txt1.create_texture()
r := app.mouse_x % 255
g := app.mouse_y % 255
@@ -135,9 +135,9 @@ fn main() {
for font_path in font_paths {
mut tf := ttf.TTF_File{}
tf.buf = os.read_bytes(font_path) or { panic(err) }
- println('TrueTypeFont file [$font_path] len: $tf.buf.len')
+ println('TrueTypeFont file [${font_path}] len: ${tf.buf.len}')
tf.init()
- println('Unit per EM: $tf.units_per_em')
+ println('Unit per EM: ${tf.units_per_em}')
app.tf << tf
}
// TTF render 0 Frame counter
diff --git a/examples/vcasino/vcasino.v b/examples/vcasino/vcasino.v
index 46f25aba90..53394e2aa9 100644
--- a/examples/vcasino/vcasino.v
+++ b/examples/vcasino/vcasino.v
@@ -52,12 +52,12 @@ fn get_bet_nbr() int {
continue
}
if !str_is_nbr(line) {
- println('error: $line is not a number.')
+ println('error: ${line} is not a number.')
continue
}
bet_nbr = line.int()
if bet_nbr < 0 || bet_nbr > 49 {
- println('error: $line is not between 0 and 49.')
+ println('error: ${line} is not between 0 and 49.')
bet_nbr = -1
continue
}
@@ -68,22 +68,22 @@ fn get_bet_nbr() int {
fn get_bet(money int) int {
mut bet := -1
for bet <= 0 || bet > money {
- println('You have $money V. Type in the amount of your bet:')
+ println('You have ${money} V. Type in the amount of your bet:')
line := os.get_line().trim_space()
if line.len < 1 {
println('error: empty line.')
continue
}
if !str_is_nbr(line) {
- println('error: $line is not a number.')
+ println('error: ${line} is not a number.')
continue
}
bet = line.int()
if bet <= 0 {
- println('error: $line is not higher than 1.')
+ println('error: ${line} is not higher than 1.')
continue
} else if bet > money {
- println('error: $line is more money than you have.')
+ println('error: ${line} is more money than you have.')
}
}
return bet
@@ -92,7 +92,7 @@ fn get_bet(money int) int {
fn run_wheel(bet_nbr int, _bet int) int {
mut bet := _bet
winning_nbr := rand.intn(50) or { 0 }
- print('Roulette Wheel spinning... and stops on the number $winning_nbr which is a ')
+ print('Roulette Wheel spinning... and stops on the number ${winning_nbr} which is a ')
if winning_nbr % 2 == 1 {
println(odd)
} else {
@@ -100,12 +100,12 @@ fn run_wheel(bet_nbr int, _bet int) int {
}
if winning_nbr == bet_nbr {
bet *= 3
- println('Congratulations! You get $bet V!')
+ println('Congratulations! You get ${bet} V!')
} else if winning_nbr % 2 == bet_nbr % 2 {
bet /= 2
- println('You bet the right color. You get $bet V!')
+ println('You bet the right color. You get ${bet} V!')
} else {
- println('Sorry buddy. You lost $bet V!')
+ println('Sorry buddy. You lost ${bet} V!')
bet *= -1
}
return bet
@@ -117,7 +117,7 @@ fn is_broke(money int) bool {
return false
}
quit := Options{'yes', 'y'}
- println('You have $money V. Do you want to quit the casino with your winnings? (y/n)')
+ println('You have ${money} V. Do you want to quit the casino with your winnings? (y/n)')
line := os.get_line().trim_space().to_lower()
if line == quit.long_opt || line == quit.short_opt {
return false
@@ -129,7 +129,7 @@ fn game_loop() {
mut can_play := true
mut money := 1000
println(g_desc)
- println('You start the game with $money V.\n')
+ println('You start the game with ${money} V.\n')
for can_play {
bet_nbr := get_bet_nbr()
bet := get_bet(money)
diff --git a/examples/viewer/file_scan.v b/examples/viewer/file_scan.v
index a37e57c1e1..8af503025b 100644
--- a/examples/viewer/file_scan.v
+++ b/examples/viewer/file_scan.v
@@ -140,7 +140,7 @@ fn (item_list Item_list) get_file_path() string {
return ''
}
if item_list.lst[item_list.item_index].path.len > 0 {
- return '${item_list.lst[item_list.item_index].path}$item_list.path_sep${item_list.lst[item_list.item_index].name}'
+ return '${item_list.lst[item_list.item_index].path}${item_list.path_sep}${item_list.lst[item_list.item_index].name}'
}
return item_list.lst[item_list.item_index].name
}
@@ -151,13 +151,13 @@ fn (item_list Item_list) get_file_path() string {
*
******************************************************************************/
fn (mut item_list Item_list) scan_folder(path string, in_index int) ! {
- println('Scanning [$path]')
+ println('Scanning [${path}]')
mut folder_list := []string{}
lst := os.ls(path)!
// manage the single files
for c, x in lst {
- pt := '$path$item_list.path_sep$x'
+ pt := '${path}${item_list.path_sep}${x}'
mut item := Item{
path: path
name: x
@@ -187,7 +187,7 @@ fn (mut item_list Item_list) scan_folder(path string, in_index int) ! {
// manage the folders
for x in folder_list {
- pt := '$path$item_list.path_sep$x'
+ pt := '${path}${item_list.path_sep}${x}'
item := Item{
path: path
name: x
@@ -209,15 +209,15 @@ fn (item_list Item_list) print_list() {
if x.i_type == .zip {
print('[ZIP]')
}
- println('$x.path => $x.container_index $x.container_item_index $x.name ne:$x.need_extract')
+ println('${x.path} => ${x.container_index} ${x.container_item_index} ${x.name} ne:${x.need_extract}')
}
- println('n_item: $item_list.n_item index: $item_list.item_index')
+ println('n_item: ${item_list.n_item} index: ${item_list.item_index}')
println('================================')
}
fn (mut item_list Item_list) get_items_list(args []string) {
item_list.loaded = false
- println('Args: $args')
+ println('Args: ${args}')
item_list.path_sep = $if windows { '\\' } $else { '/' }
for x in args {
@@ -231,7 +231,7 @@ fn (mut item_list Item_list) get_items_list(args []string) {
}
item_list.lst << item
item_list.scan_folder(x, item_list.lst.len - 1) or {
- eprintln('ERROR: scanning folder [$x]!')
+ eprintln('ERROR: scanning folder [${x}]!')
continue
}
} else {
@@ -246,7 +246,7 @@ fn (mut item_list Item_list) get_items_list(args []string) {
item.i_type = .zip
item_list.lst << item
item_list.scan_zip(x, item_list.lst.len - 1) or {
- eprintln('ERROR: scanning zip [$x]!')
+ eprintln('ERROR: scanning zip [${x}]!')
continue
}
continue
@@ -265,7 +265,7 @@ fn (mut item_list Item_list) get_items_list(args []string) {
// debug call for list all the loaded items
// item_list.print_list()
- println('Items: $item_list.n_item')
+ println('Items: ${item_list.n_item}')
println('Scanning done.')
item_list.get_next_item(1)
diff --git a/examples/viewer/view.v b/examples/viewer/view.v
index f4cb2a934f..e450b95d27 100644
--- a/examples/viewer/view.v
+++ b/examples/viewer/view.v
@@ -163,7 +163,7 @@ fn update_text_texture(sg_img gfx.Image, w int, h int, buf &u8) {
fn (mut app App) resize_buf_if_needed(in_size int) {
// manage the memory buffer
if app.mem_buf_size < in_size {
- println('Managing FILE memory buffer, allocated [$in_size]Bytes')
+ println('Managing FILE memory buffer, allocated [${in_size}]Bytes')
// free previous buffer if any exist
if app.mem_buf_size > 0 {
unsafe {
@@ -187,7 +187,7 @@ fn (mut app App) resize_buf_if_needed(in_size int) {
[manualfree]
fn (mut app App) read_bytes(path string) bool {
mut fp := os.vfopen(path, 'rb') or {
- eprintln('ERROR: Can not open the file [$path].')
+ eprintln('ERROR: Can not open the file [${path}].')
return false
}
defer {
@@ -195,12 +195,12 @@ fn (mut app App) read_bytes(path string) bool {
}
cseek := C.fseek(fp, 0, C.SEEK_END)
if cseek != 0 {
- eprintln('ERROR: Can not seek in the file [$path].')
+ eprintln('ERROR: Can not seek in the file [${path}].')
return false
}
fsize := C.ftell(fp)
if fsize < 0 {
- eprintln('ERROR: File [$path] has size is 0.')
+ eprintln('ERROR: File [${path}] has size is 0.')
return false
}
C.rewind(fp)
@@ -209,7 +209,7 @@ fn (mut app App) read_bytes(path string) bool {
nr_read_elements := int(C.fread(app.mem_buf, fsize, 1, fp))
if nr_read_elements == 0 && fsize > 0 {
- eprintln('ERROR: Can not read the file [$path] in the memory buffer.')
+ eprintln('ERROR: Can not read the file [${path}] in the memory buffer.')
return false
}
return true
@@ -219,7 +219,7 @@ fn (mut app App) read_bytes(path string) bool {
pub fn read_bytes_from_file(file_path string) []u8 {
mut buffer := []u8{}
buffer = os.read_bytes(file_path) or {
- eprintln('ERROR: Texure file: [$file_path] NOT FOUND.')
+ eprintln('ERROR: Texure file: [${file_path}] NOT FOUND.')
exit(0)
}
return buffer
@@ -531,10 +531,10 @@ fn frame(mut app App) {
rotation_angle := 90 * rotation
scale_str := '${app.scale:.2}'
app.bl.clear()
- app.bl.write_string('$num/$of_num')
- app.bl.write_string(' [${app.img_w}x$app.img_h]=>[${x_screen}x$y_screen]')
+ app.bl.write_string('${num}/${of_num}')
+ app.bl.write_string(' [${app.img_w}x${app.img_h}]=>[${x_screen}x${y_screen}]')
app.bl.write_string(' ${app.item_list.lst[app.item_list.item_index].name}')
- app.bl.write_string(' scale: $scale_str rotation: $rotation_angle')
+ app.bl.write_string(' scale: ${scale_str} rotation: ${rotation_angle}')
draw_text(mut app, app.bl.str(), 10, 10, 20)
} else {
if app.item_list.n_item <= 0 {
@@ -723,7 +723,7 @@ fn my_event_manager(mut ev gg.Event, mut app App) {
// full screen
if ev.key_code == .f {
- println('Full screen state: $sapp.is_fullscreen()')
+ println('Full screen state: ${sapp.is_fullscreen()}')
sapp.toggle_fullscreen()
}
}
@@ -740,7 +740,7 @@ fn my_event_manager(mut ev gg.Event, mut app App) {
for i in 0 .. num {
file_list << sapp.get_dropped_file_path(i)
}
- println('Scanning: $file_list')
+ println('Scanning: ${file_list}')
app.item_list = &Item_list{}
app.item_list.loaded = false
@@ -765,14 +765,14 @@ fn main() {
// mut font_path := os.resource_abs_path(os.join_path('../assets/fonts/', 'RobotoMono-Regular.ttf'))
font_name := 'RobotoMono-Regular.ttf'
font_path := os.join_path(os.temp_dir(), font_name)
- println('Temporary path for the font file: [$font_path]')
+ println('Temporary path for the font file: [${font_path}]')
// if the font doesn't exist create it from the ebedded one
if os.exists(font_path) == false {
- println('Write font [$font_name] in temp folder.')
+ println('Write font [${font_name}] in temp folder.')
embedded_file := $embed_file('../assets/fonts/RobotoMono-Regular.ttf')
os.write_file(font_path, embedded_file.to_string()) or {
- eprintln('ERROR: not able to write font file to [$font_path]')
+ eprintln('ERROR: not able to write font file to [${font_path}]')
exit(1)
}
}
@@ -780,13 +780,13 @@ fn main() {
// logo image
logo_name := 'logo.png'
logo_path := os.join_path(os.temp_dir(), logo_name)
- println('Temporary path for the logo: [$logo_path]')
+ println('Temporary path for the logo: [${logo_path}]')
// if the logo doesn't exist create it from the ebedded one
if os.exists(logo_path) == false {
- println('Write logo [$logo_name] in temp folder.')
+ println('Write logo [${logo_name}] in temp folder.')
embedded_file := $embed_file('../assets/logo.png')
os.write_file(logo_path, embedded_file.to_string()) or {
- eprintln('ERROR: not able to write logo file to [$logo_path]')
+ eprintln('ERROR: not able to write logo file to [${logo_path}]')
exit(1)
}
}
diff --git a/examples/viewer/zip_container.v b/examples/viewer/zip_container.v
index 73e8ce79bc..9cafa76cc4 100644
--- a/examples/viewer/zip_container.v
+++ b/examples/viewer/zip_container.v
@@ -12,7 +12,7 @@ import sokol.gfx
import szip
fn (mut il Item_list) scan_zip(path string, in_index int) ! {
- println('Scanning ZIP [$path]')
+ println('Scanning ZIP [${path}]')
mut zp := szip.open(path, szip.CompressionLevel.no_compression, szip.OpenMode.read_only)!
n_entries := zp.total()!
// println(n_entries)
diff --git a/examples/vmod.v b/examples/vmod.v
index 9e34b32851..54de3d5f09 100644
--- a/examples/vmod.v
+++ b/examples/vmod.v
@@ -4,6 +4,6 @@ import v.vmod
fn main() {
mod := vmod.decode(@VMOD_FILE) or { panic('Error decoding v.mod') }
- println('$mod.name has version $mod.version')
- println('\nThe full mod struct: \n$mod')
+ println('${mod.name} has version ${mod.version}')
+ println('\nThe full mod struct: \n${mod}')
}
diff --git a/examples/vweb/server_sent_events/server.v b/examples/vweb/server_sent_events/server.v
index bb00a897d9..6585e56778 100644
--- a/examples/vweb/server_sent_events/server.v
+++ b/examples/vweb/server_sent_events/server.v
@@ -28,9 +28,9 @@ fn (mut app App) sse() vweb.Result {
session.start() or { return app.server_error(501) }
session.send_message(data: 'ok') or { return app.server_error(501) }
for {
- data := '{"time": "$time.now().str()", "random_id": "$rand.ulid()"}'
+ data := '{"time": "${time.now().str()}", "random_id": "${rand.ulid()}"}'
session.send_message(event: 'ping', data: data) or { return app.server_error(501) }
- println('> sent event: $data')
+ println('> sent event: ${data}')
time.sleep(1 * time.second)
}
return app.server_error(501)
diff --git a/examples/vweb/vweb_example.v b/examples/vweb/vweb_example.v
index 465ce7c1fa..db63e2fd86 100644
--- a/examples/vweb/vweb_example.v
+++ b/examples/vweb/vweb_example.v
@@ -19,7 +19,7 @@ mut:
}
pub fn (app App) before_request() {
- println('[Vweb] $app.Context.req.method $app.Context.req.url')
+ println('[Vweb] ${app.Context.req.method} ${app.Context.req.url}')
}
fn main() {
@@ -51,10 +51,10 @@ pub fn (mut app App) show_text() vweb.Result {
pub fn (mut app App) cookie() vweb.Result {
app.set_cookie(name: 'cookie', value: 'test')
- return app.text('Response Headers\n$app.header')
+ return app.text('Response Headers\n${app.header}')
}
[post]
pub fn (mut app App) post() vweb.Result {
- return app.text('Post body: $app.req.data')
+ return app.text('Post body: ${app.req.data}')
}
diff --git a/examples/vweb_orm_jwt/src/auth_controllers.v b/examples/vweb_orm_jwt/src/auth_controllers.v
index 91635eb45e..8cebfa4298 100644
--- a/examples/vweb_orm_jwt/src/auth_controllers.v
+++ b/examples/vweb_orm_jwt/src/auth_controllers.v
@@ -7,12 +7,12 @@ import json
pub fn (mut app App) controller_auth() vweb.Result {
body := json.decode(AuthRequestDto, app.req.data) or {
app.set_status(400, '')
- return app.text('Failed to decode json, error: $err')
+ return app.text('Failed to decode json, error: ${err}')
}
response := app.service_auth(body.username, body.password) or {
app.set_status(400, '')
- return app.text('error: $err')
+ return app.text('error: ${err}')
}
return app.json(response)
diff --git a/examples/vweb_orm_jwt/src/auth_services.v b/examples/vweb_orm_jwt/src/auth_services.v
index bd0583e4e7..f56d0cfaa9 100644
--- a/examples/vweb_orm_jwt/src/auth_services.v
+++ b/examples/vweb_orm_jwt/src/auth_services.v
@@ -45,7 +45,7 @@ fn (mut app App) service_auth(username string, password string) !string {
db.close()!
bcrypt.compare_hash_and_password(password.bytes(), user.password.bytes()) or {
- return error('Failed to auth user, $err')
+ return error('Failed to auth user, ${err}')
}
token := make_token(user)
@@ -58,17 +58,17 @@ fn make_token(user User) string {
jwt_header := JwtHeader{'HS256', 'JWT'}
jwt_payload := JwtPayload{
- sub: '$user.id'
- name: '$user.username'
+ sub: '${user.id}'
+ name: '${user.username}'
iat: time.now()
}
header := base64.url_encode(json.encode(jwt_header).bytes())
payload := base64.url_encode(json.encode(jwt_payload).bytes())
- signature := base64.url_encode(hmac.new(secret.bytes(), '${header}.$payload'.bytes(),
+ signature := base64.url_encode(hmac.new(secret.bytes(), '${header}.${payload}'.bytes(),
sha256.sum, sha256.block_size).bytestr().bytes())
- jwt := '${header}.${payload}.$signature'
+ jwt := '${header}.${payload}.${signature}'
return jwt
}
diff --git a/examples/vweb_orm_jwt/src/main.v b/examples/vweb_orm_jwt/src/main.v
index ef7dcd40fd..dfa9ac4c3f 100644
--- a/examples/vweb_orm_jwt/src/main.v
+++ b/examples/vweb_orm_jwt/src/main.v
@@ -12,7 +12,7 @@ struct App {
}
pub fn (app App) before_request() {
- println('[Vweb] $app.Context.req.method $app.Context.req.url')
+ println('[Vweb] ${app.Context.req.method} ${app.Context.req.url}')
}
fn main() {
@@ -20,7 +20,7 @@ fn main() {
sql db {
create table User
- } or { panic('error on create table: $err') }
+ } or { panic('error on create table: ${err}') }
db.close() or { panic(err) }
diff --git a/examples/vweb_orm_jwt/src/user_controllers.v b/examples/vweb_orm_jwt/src/user_controllers.v
index ef93f198b0..37fafc333d 100644
--- a/examples/vweb_orm_jwt/src/user_controllers.v
+++ b/examples/vweb_orm_jwt/src/user_controllers.v
@@ -8,7 +8,7 @@ import databases
pub fn (mut app App) controller_get_user_by_id(id int) vweb.Result {
response := app.service_get_user_by_id(id) or {
app.set_status(400, '')
- return app.text('$err')
+ return app.text('${err}')
}
return app.json(response)
}
@@ -17,12 +17,12 @@ pub fn (mut app App) controller_get_user_by_id(id int) vweb.Result {
pub fn (mut app App) controller_create_user() vweb.Result {
body := json.decode(User, app.req.data) or {
app.set_status(400, '')
- return app.text('Failed to decode json, error: $err')
+ return app.text('Failed to decode json, error: ${err}')
}
response := app.service_add_user(body.username, body.password) or {
app.set_status(400, '')
- return app.text('error: $err')
+ return app.text('error: ${err}')
}
return app.json(response)
@@ -39,7 +39,7 @@ pub fn (mut app App) controller_get_all_user() vweb.Result {
response := app.service_get_all_user() or {
app.set_status(400, '')
- return app.text('$err')
+ return app.text('${err}')
}
return app.json(response)
}
@@ -48,7 +48,7 @@ pub fn (mut app App) controller_get_all_user() vweb.Result {
pub fn (mut app App) controller_get_by_username(username string) vweb.Result {
response := app.service_get_by_username(username) or {
app.set_status(400, '')
- return app.text('$err')
+ return app.text('${err}')
}
return app.json(response)
}
@@ -57,7 +57,7 @@ pub fn (mut app App) controller_get_by_username(username string) vweb.Result {
pub fn (mut app App) delete() vweb.Result {
mut db := databases.create_db_connection() or {
app.set_status(400, '')
- return app.text('$err')
+ return app.text('${err}')
}
defer {
diff --git a/examples/web_crawler/web_crawler.v b/examples/web_crawler/web_crawler.v
index 1503f1db56..21e69d4b3b 100644
--- a/examples/web_crawler/web_crawler.v
+++ b/examples/web_crawler/web_crawler.v
@@ -18,8 +18,8 @@ fn main() {
el := tag.children[1].children[0].children[0].children[0]
href := el.attributes['href'] or { panic('key not found') }
title := el.attributes['title'] or { panic('key not found') }
- println('href: $href')
- println('title: $title')
+ println('href: ${href}')
+ println('title: ${title}')
println('')
}
}
diff --git a/examples/websocket/client-server/client.v b/examples/websocket/client-server/client.v
index f7ba20fea3..b952f6c55a 100644
--- a/examples/websocket/client-server/client.v
+++ b/examples/websocket/client-server/client.v
@@ -9,7 +9,7 @@ import term
// to all other connected clients
fn main() {
mut ws := start_client()!
- println(term.green('client $ws.id ready'))
+ println(term.green('client ${ws.id} ready'))
println('Write message and enter to send...')
for {
line := os.get_line()
@@ -18,7 +18,7 @@ fn main() {
}
ws.write_string(line)!
}
- ws.close(1000, 'normal') or { println(term.red('panicing $err')) }
+ ws.close(1000, 'normal') or { println(term.red('panicing ${err}')) }
unsafe {
ws.free()
}
@@ -33,7 +33,7 @@ fn start_client() !&websocket.Client {
})
// use on_error_ref if you want to send any reference object
ws.on_error(fn (mut ws websocket.Client, err string) ! {
- println(term.red('error: $err'))
+ println(term.red('error: ${err}'))
})
// use on_close_ref if you want to send any reference object
ws.on_close(fn (mut ws websocket.Client, code int, reason string) ! {
@@ -43,11 +43,11 @@ fn start_client() !&websocket.Client {
ws.on_message(fn (mut ws websocket.Client, msg &websocket.Message) ! {
if msg.payload.len > 0 {
message := msg.payload.bytestr()
- println(term.blue('$message'))
+ println(term.blue('${message}'))
}
})
- ws.connect() or { println(term.red('error on connect: $err')) }
+ ws.connect() or { println(term.red('error on connect: ${err}')) }
spawn ws.listen() // or { println(term.red('error on listen $err')) }
return ws
diff --git a/examples/websocket/client-server/server.v b/examples/websocket/client-server/server.v
index afc9e23372..013b10bdd7 100644
--- a/examples/websocket/client-server/server.v
+++ b/examples/websocket/client-server/server.v
@@ -34,9 +34,9 @@ fn start_server() ! {
}, s)
s.on_close(fn (mut ws websocket.Client, code int, reason string) ! {
- println(term.green('client ($ws.id) closed connection'))
+ println(term.green('client (${ws.id}) closed connection'))
})
- s.listen() or { println(term.red('error on server listen: $err')) }
+ s.listen() or { println(term.red('error on server listen: ${err}')) }
unsafe {
s.free()
}
diff --git a/examples/websocket/ping.v b/examples/websocket/ping.v
index ef600aa1a3..2a1b1f61a5 100644
--- a/examples/websocket/ping.v
+++ b/examples/websocket/ping.v
@@ -29,7 +29,7 @@ fn start_server() ! {
s.on_close(fn (mut ws websocket.Client, code int, reason string) ! {
// println('client ($ws.id) closed connection')
})
- s.listen() or { println('error on server listen: $err') }
+ s.listen() or { println('error on server listen: ${err}') }
unsafe {
s.free()
}
@@ -46,7 +46,7 @@ fn start_client() ! {
})
// use on_error_ref if you want to send any reference object
ws.on_error(fn (mut ws websocket.Client, err string) ! {
- println('error: $err')
+ println('error: ${err}')
})
// use on_close_ref if you want to send any reference object
ws.on_close(fn (mut ws websocket.Client, code int, reason string) ! {
@@ -56,7 +56,7 @@ fn start_client() ! {
ws.on_message(fn (mut ws websocket.Client, msg &websocket.Message) ! {
if msg.payload.len > 0 {
message := msg.payload.bytestr()
- println('client got type: $msg.opcode payload:\n$message')
+ println('client got type: ${msg.opcode} payload:\n${message}')
}
})
// you can add any pointer reference to use in callback
@@ -64,9 +64,9 @@ fn start_client() ! {
// ws.on_message_ref(fn (mut ws websocket.Client, msg &websocket.Message, r &SomeRef) ? {
// // println('type: $msg.opcode payload:\n$msg.payload ref: $r')
// }, &r)
- ws.connect() or { println('error on connect: $err') }
+ ws.connect() or { println('error on connect: ${err}') }
spawn write_echo(mut ws) // or { println('error on write_echo $err') }
- ws.listen() or { println('error on listen $err') }
+ ws.listen() or { println('error on listen ${err}') }
unsafe {
ws.free()
}
@@ -76,8 +76,8 @@ fn write_echo(mut ws websocket.Client) ! {
message := 'echo this'
for i := 0; i <= 10; i++ {
// Server will send pings every 30 seconds
- ws.write_string(message) or { println('panicing writing $err') }
+ ws.write_string(message) or { println('panicing writing ${err}') }
time.sleep(100 * time.millisecond)
}
- ws.close(1000, 'normal') or { println('panicing $err') }
+ ws.close(1000, 'normal') or { println('panicing ${err}') }
}
diff --git a/examples/word_counter/word_counter.v b/examples/word_counter/word_counter.v
index ee4c635554..2de77c4b69 100644
--- a/examples/word_counter/word_counter.v
+++ b/examples/word_counter/word_counter.v
@@ -7,12 +7,12 @@ fn main() {
mut path := 'cinderella.txt'
if os.args.len != 2 {
println('usage: word_counter [text_file]')
- println('using $path')
+ println('using ${path}')
} else {
path = os.args[1]
}
contents := os.read_file(path.trim_space()) or {
- println('failed to open $path')
+ println('failed to open ${path}')
return
}
mut m := map[string]int{}
@@ -25,7 +25,7 @@ fn main() {
// Print the map
for key in keys {
val := m[key]
- println('$key => $val')
+ println('${key} => ${val}')
}
}
diff --git a/thirdparty/stdatomic/nix/cpp/gen.v b/thirdparty/stdatomic/nix/cpp/gen.v
index 6e0b6868ae..44e0f24fbe 100644
--- a/thirdparty/stdatomic/nix/cpp/gen.v
+++ b/thirdparty/stdatomic/nix/cpp/gen.v
@@ -11,8 +11,8 @@ fn main() {
exit(1)
}
cc_type, cc_version, cc_os := get_cc_info(cc)
- triple := '$cc_type-$cc_version-$cc_os'
- println('compiler: $triple')
+ triple := '${cc_type}-${cc_version}-${cc_os}'
+ println('compiler: ${triple}')
search_paths := get_search_paths(cc)
atomic_path := find_file(search_paths, 'atomic') or {
@@ -40,7 +40,7 @@ fn main() {
}
}
- println('$atomic_path:::$bitsatomicbase_path')
+ println('${atomic_path}:::${bitsatomicbase_path}')
}
fn get_cc_info(cc string) (string, string, string) {
@@ -57,7 +57,7 @@ fn get_cc_info(cc string) (string, string, string) {
lines := os.execute('${os.quoted_path(cc)} -v').output.split('\n')
// gcc and clang both have the same way way to say what version they have and what the host target triple is
- cc_version := lines.filter(it.contains('$cc_type version '))[0].all_after('$cc_type version ').all_before('.')
+ cc_version := lines.filter(it.contains('${cc_type} version '))[0].all_after('${cc_type} version ').all_before('.')
cc_os := lines.filter(it.starts_with('Target: '))[0].all_after('Target: ').split('-')[2]
@@ -81,7 +81,7 @@ fn find_file(search_paths []string, file string) ?string {
return os.join_path(search_path, file)
}
}
- return error('$file not found')
+ return error('${file} not found')
}
fn patch_atomic(outfile string, infile string) ? {
diff --git a/vlib/benchmark/benchmark.v b/vlib/benchmark/benchmark.v
index 704245c688..c89e942244 100644
--- a/vlib/benchmark/benchmark.v
+++ b/vlib/benchmark/benchmark.v
@@ -123,7 +123,7 @@ pub fn start() Benchmark {
pub fn (mut b Benchmark) measure(label string) i64 {
b.ok()
res := b.step_timer.elapsed().microseconds()
- println(b.step_message_with_label(benchmark.b_spent, 'in $label'))
+ println(b.step_message_with_label(benchmark.b_spent, 'in ${label}'))
b.step()
return res
}
@@ -158,9 +158,9 @@ pub fn (b &Benchmark) step_message_with_label_and_duration(label string, msg str
'${b.cstep:4d}/${b.nexpected_steps:4d}'
}
}
- return '${label:-5s} [$sprogress] $timed_line'
+ return '${label:-5s} [${sprogress}] ${timed_line}'
}
- return '${label:-5s}$timed_line'
+ return '${label:-5s}${timed_line}'
}
// step_message_with_label returns a string describing the current step using current time as duration.
@@ -191,15 +191,17 @@ pub fn (b &Benchmark) step_message_skip(msg string) string {
// total_message returns a string with total summary of the benchmark run.
pub fn (b &Benchmark) total_message(msg string) string {
the_label := term.colorize(term.gray, msg)
+ // vfmt off
mut tmsg := '${term.colorize(term.bold, 'Summary for $the_label:')} '
+ // vfmt on
if b.nfail > 0 {
- tmsg += term.colorize(term.bold, term.colorize(term.red, '$b.nfail failed')) + ', '
+ tmsg += term.colorize(term.bold, term.colorize(term.red, '${b.nfail} failed')) + ', '
}
if b.nok > 0 {
- tmsg += term.colorize(term.bold, term.colorize(term.green, '$b.nok passed')) + ', '
+ tmsg += term.colorize(term.bold, term.colorize(term.green, '${b.nok} passed')) + ', '
}
if b.nskip > 0 {
- tmsg += term.colorize(term.bold, term.colorize(term.yellow, '$b.nskip skipped')) + ', '
+ tmsg += term.colorize(term.bold, term.colorize(term.yellow, '${b.nskip} skipped')) + ', '
}
mut njobs_label := ''
if b.njobs > 0 {
@@ -209,7 +211,7 @@ pub fn (b &Benchmark) total_message(msg string) string {
njobs_label = ', on ${term.colorize(term.bold, b.njobs.str())} parallel jobs'
}
}
- tmsg += '$b.ntotal total. ${term.colorize(term.bold, 'Runtime:')} ${b.bench_timer.elapsed().microseconds() / 1000} ms${njobs_label}.\n'
+ tmsg += '${b.ntotal} total. ${term.colorize(term.bold, 'Runtime:')} ${b.bench_timer.elapsed().microseconds() / 1000} ms${njobs_label}.\n'
return tmsg
}
@@ -221,7 +223,7 @@ pub fn (b &Benchmark) total_duration() i64 {
// tdiff_in_ms prefixes `s` with a time difference calculation.
fn (b &Benchmark) tdiff_in_ms(s string, tdiff i64) string {
if b.verbose {
- return '${f64(tdiff) / 1000.0:9.3f} ms $s'
+ return '${f64(tdiff) / 1000.0:9.3f} ms ${s}'
}
return s
}
diff --git a/vlib/builtin/array.v b/vlib/builtin/array.v
index e9dccfb5b4..76073152ff 100644
--- a/vlib/builtin/array.v
+++ b/vlib/builtin/array.v
@@ -169,7 +169,7 @@ pub fn (a array) repeat(count int) array {
[direct_array_access; unsafe]
pub fn (a array) repeat_to_depth(count int, depth int) array {
if count < 0 {
- panic('array.repeat: count is negative: $count')
+ panic('array.repeat: count is negative: ${count}')
}
mut size := u64(count) * u64(a.len) * u64(a.element_size)
if size == 0 {
@@ -221,7 +221,7 @@ pub fn (a array) repeat_to_depth(count int, depth int) array {
pub fn (mut a array) insert(i int, val voidptr) {
$if !no_bounds_checking {
if i < 0 || i > a.len {
- panic('array.insert: index out of range (i == $i, a.len == $a.len)')
+ panic('array.insert: index out of range (i == ${i}, a.len == ${a.len})')
}
}
if a.len >= a.cap {
@@ -240,7 +240,7 @@ pub fn (mut a array) insert(i int, val voidptr) {
fn (mut a array) insert_many(i int, val voidptr, size int) {
$if !no_bounds_checking {
if i < 0 || i > a.len {
- panic('array.insert_many: index out of range (i == $i, a.len == $a.len)')
+ panic('array.insert_many: index out of range (i == ${i}, a.len == ${a.len})')
}
}
a.ensure_cap(a.len + size)
@@ -300,7 +300,7 @@ pub fn (mut a array) delete_many(i int, size int) {
$if !no_bounds_checking {
if i < 0 || i + size > a.len {
endidx := if size > 1 { '..${i + size}' } else { '' }
- panic('array.delete: index out of range (i == $i$endidx, a.len == $a.len)')
+ panic('array.delete: index out of range (i == ${i}${endidx}, a.len == ${a.len})')
}
}
if a.flags.all(.noshrink | .noslices) {
@@ -381,7 +381,7 @@ fn (a array) get_unsafe(i int) voidptr {
fn (a array) get(i int) voidptr {
$if !no_bounds_checking {
if i < 0 || i >= a.len {
- panic('array.get: index out of range (i == $i, a.len == $a.len)')
+ panic('array.get: index out of range (i == ${i}, a.len == ${a.len})')
}
}
unsafe {
@@ -482,13 +482,13 @@ fn (a array) slice(start int, _end int) array {
mut end := _end
$if !no_bounds_checking {
if start > end {
- panic('array.slice: invalid slice index ($start > $end)')
+ panic('array.slice: invalid slice index (${start} > ${end})')
}
if end > a.len {
- panic('array.slice: slice bounds out of range ($end >= $a.len)')
+ panic('array.slice: slice bounds out of range (${end} >= ${a.len})')
}
if start < 0 {
- panic('array.slice: slice bounds out of range ($start < 0)')
+ panic('array.slice: slice bounds out of range (${start} < 0)')
}
}
// TODO: integrate reference counting
@@ -618,7 +618,7 @@ fn (mut a array) set_unsafe(i int, val voidptr) {
fn (mut a array) set(i int, val voidptr) {
$if !no_bounds_checking {
if i < 0 || i >= a.len {
- panic('array.set: index out of range (i == $i, a.len == $a.len)')
+ panic('array.set: index out of range (i == ${i}, a.len == ${a.len})')
}
}
unsafe { vmemcpy(&u8(a.data) + u64(a.element_size) * u64(i), val, a.element_size) }
diff --git a/vlib/builtin/array_d_gcboehm_opt.v b/vlib/builtin/array_d_gcboehm_opt.v
index 1cfa5c6f6f..95cecdab85 100644
--- a/vlib/builtin/array_d_gcboehm_opt.v
+++ b/vlib/builtin/array_d_gcboehm_opt.v
@@ -89,7 +89,7 @@ fn (mut a array) ensure_cap_noscan(required int) {
[unsafe]
fn (a array) repeat_to_depth_noscan(count int, depth int) array {
if count < 0 {
- panic('array.repeat: count is negative: $count')
+ panic('array.repeat: count is negative: ${count}')
}
mut size := u64(count) * u64(a.len) * u64(a.element_size)
if size == 0 {
@@ -124,7 +124,7 @@ fn (a array) repeat_to_depth_noscan(count int, depth int) array {
fn (mut a array) insert_noscan(i int, val voidptr) {
$if !no_bounds_checking {
if i < 0 || i > a.len {
- panic('array.insert: index out of range (i == $i, a.len == $a.len)')
+ panic('array.insert: index out of range (i == ${i}, a.len == ${a.len})')
}
}
a.ensure_cap_noscan(a.len + 1)
@@ -140,7 +140,7 @@ fn (mut a array) insert_noscan(i int, val voidptr) {
fn (mut a array) insert_many_noscan(i int, val voidptr, size int) {
$if !no_bounds_checking {
if i < 0 || i > a.len {
- panic('array.insert_many: index out of range (i == $i, a.len == $a.len)')
+ panic('array.insert_many: index out of range (i == ${i}, a.len == ${a.len})')
}
}
a.ensure_cap_noscan(a.len + size)
diff --git a/vlib/builtin/array_shrinkage_test.v b/vlib/builtin/array_shrinkage_test.v
index 84893fe864..b9919ccdd8 100644
--- a/vlib/builtin/array_shrinkage_test.v
+++ b/vlib/builtin/array_shrinkage_test.v
@@ -1,5 +1,5 @@
fn show_array(name string, a []int) {
- eprintln('${name:10} .flags: ${a.flags:34} | .cap: ${a.cap:2} | .len: ${a.len:2} | .data: $a.data | $a')
+ eprintln('${name:10} .flags: ${a.flags:34} | .cap: ${a.cap:2} | .len: ${a.len:2} | .data: ${a.data} | ${a}')
}
fn trace_delete_elements(name string, mut a []int) int {
@@ -11,7 +11,7 @@ fn trace_delete_elements(name string, mut a []int) int {
show_array(name, a)
a << 77
res := a.cap
- eprintln(' << ${name:10} .cap: $a.cap >>')
+ eprintln(' << ${name:10} .cap: ${a.cap} >>')
show_array(name, a)
a << 88
show_array(name, a)
diff --git a/vlib/builtin/array_test.v b/vlib/builtin/array_test.v
index 3881276ef3..432e73b520 100644
--- a/vlib/builtin/array_test.v
+++ b/vlib/builtin/array_test.v
@@ -498,11 +498,11 @@ fn (ta []Test2) str() string {
}
fn (t Test2) str() string {
- return '{$t.one $t.two}'
+ return '{${t.one} ${t.two}}'
}
fn (t Test) str() string {
- return '{$t.a $t.b}'
+ return '{${t.a} ${t.b}}'
}
fn test_struct_print() {
@@ -671,7 +671,7 @@ fn test_map() {
// type switch
assert nums.map(it * 10) == [10, 20, 30, 40, 50, 60]
assert nums.map(it * it) == [1, 4, 9, 16, 25, 36]
- assert nums.map('$it') == ['1', '2', '3', '4', '5', '6']
+ assert nums.map('${it}') == ['1', '2', '3', '4', '5', '6']
assert nums.map(it % 2 == 0) == [false, true, false, true, false, true]
assert strs.map(it.to_upper()) == ['V', 'IS', 'AWESOME']
assert strs.map(it == 'awesome') == [false, false, true]
@@ -685,13 +685,13 @@ fn test_map() {
assert []int{len: 0}.map(it * 2) == []
// nested maps (where it is of same type)
assert nums.map(strs.map(int(7)) == [7, 7, 7]) == [true, true, true, true, true, true]
- assert nums.map('$it' + strs.map('a')[0]) == ['1a', '2a', '3a', '4a', '5a', '6a']
+ assert nums.map('${it}' + strs.map('a')[0]) == ['1a', '2a', '3a', '4a', '5a', '6a']
assert nums.map(it + strs.map(int(7))[0]) == [8, 9, 10, 11, 12, 13]
assert nums.map(it + strs.map(it.len)[0]) == [2, 3, 4, 5, 6, 7]
assert strs.map(it.len + strs.map(it.len)[0]) == [2, 3, 8]
// nested (different it types)
assert strs.map(it[nums.map(it - it)[0]]) == [u8(`v`), `i`, `a`]
- assert nums[0..3].map('$it' + strs.map(it)[it - 1]) == ['1v', '2is', '3awesome']
+ assert nums[0..3].map('${it}' + strs.map(it)[it - 1]) == ['1v', '2is', '3awesome']
assert nums.map(map_test_helper_1) == [1, 4, 9, 16, 25, 36]
assert [1, 5, 10].map(map_test_helper_1) == [1, 25, 100]
assert nums == [1, 2, 3, 4, 5, 6]
@@ -1134,10 +1134,10 @@ fn test_array_with_cap() {
fn test_multi_array_index() {
mut a := [][]int{len: 2, init: []int{len: 3, init: 0}}
a[0][0] = 1
- assert '$a' == '[[1, 0, 0], [0, 0, 0]]'
+ assert '${a}' == '[[1, 0, 0], [0, 0, 0]]'
mut b := [[0].repeat(3)].repeat(2)
b[0][0] = 1
- assert '$b' == '[[1, 0, 0], [0, 0, 0]]'
+ assert '${b}' == '[[1, 0, 0], [0, 0, 0]]'
}
fn test_plus_assign_string() {
@@ -1470,7 +1470,7 @@ fn test_array_struct_contains() {
coords << coord_1
exists := coord_1 in coords
not_exists := coord_1 !in coords
- println('`exists`: $exists and `not exists`: $not_exists')
+ println('`exists`: ${exists} and `not exists`: ${not_exists}')
assert exists == true
assert not_exists == false
}
@@ -1505,7 +1505,7 @@ fn test_array_of_array_append() {
println(x) // OK
x[2] << 123 // RTE
println(x)
- assert '$x' == '[[], [], [123], []]'
+ assert '${x}' == '[[], [], [123], []]'
}
fn test_array_of_map_insert() {
@@ -1513,12 +1513,12 @@ fn test_array_of_map_insert() {
println(x) // OK
x[2]['123'] = 123 // RTE
println(x)
- assert '$x' == "[{}, {}, {'123': 123}, {}]"
+ assert '${x}' == "[{}, {}, {'123': 123}, {}]"
}
fn test_multi_fixed_array_init() {
a := [3][3]int{}
- assert '$a' == '[[0, 0, 0], [0, 0, 0], [0, 0, 0]]'
+ assert '${a}' == '[[0, 0, 0], [0, 0, 0], [0, 0, 0]]'
}
struct Numbers {
diff --git a/vlib/builtin/builtin.c.v b/vlib/builtin/builtin.c.v
index 324554f7c9..e99efd1a1f 100644
--- a/vlib/builtin/builtin.c.v
+++ b/vlib/builtin/builtin.c.v
@@ -35,11 +35,11 @@ fn panic_debug(line_no int, file string, mod string, fn_name string, s string) {
bare_panic(s)
} $else {
eprintln('================ V panic ================')
- eprintln(' module: $mod')
+ eprintln(' module: ${mod}')
eprintln(' function: ${fn_name}()')
- eprintln(' message: $s')
- eprintln(' file: $file:$line_no')
- eprintln(' v hash: $vcommithash()')
+ eprintln(' message: ${s}')
+ eprintln(' file: ${file}:${line_no}')
+ eprintln(' v hash: ${vcommithash()}')
eprintln('=========================================')
$if exit_after_panic_message ? {
C.exit(1)
@@ -72,14 +72,14 @@ fn panic_debug(line_no int, file string, mod string, fn_name string, s string) {
// It ends the program with a panic.
[noreturn]
pub fn panic_optional_not_set(s string) {
- panic('optional not set ($s)')
+ panic('optional not set (${s})')
}
// panic_optional_not_set is called by V, when you use result error propagation in your main function
// It ends the program with a panic.
[noreturn]
pub fn panic_result_not_set(s string) {
- panic('result not set ($s)')
+ panic('result not set (${s})')
}
// panic prints a nice error message, then exits the process with exit code of 1.
@@ -91,7 +91,7 @@ pub fn panic(s string) {
} $else {
eprint('V panic: ')
eprintln(s)
- eprintln('v hash: $vcommithash()')
+ eprintln('v hash: ${vcommithash()}')
$if exit_after_panic_message ? {
C.exit(1)
} $else $if no_backtrace ? {
@@ -123,7 +123,7 @@ pub fn panic(s string) {
pub fn c_error_number_str(errnum int) string {
mut err_msg := ''
$if freestanding {
- err_msg = 'error $errnum'
+ err_msg = 'error ${errnum}'
} $else {
$if !vinix {
c_msg := C.strerror(errnum)
@@ -297,7 +297,7 @@ __global total_m = i64(0)
[unsafe]
pub fn malloc(n isize) &u8 {
if n <= 0 {
- panic('malloc($n <= 0)')
+ panic('malloc(${n} <= 0)')
}
$if vplayground ? {
if n > 10000 {
@@ -327,7 +327,7 @@ pub fn malloc(n isize) &u8 {
res = unsafe { C.malloc(n) }
}
if res == 0 {
- panic('malloc($n) failed')
+ panic('malloc(${n}) failed')
}
$if debug_malloc ? {
// Fill in the memory with something != 0 i.e. `M`, so it is easier to spot
@@ -340,7 +340,7 @@ pub fn malloc(n isize) &u8 {
[unsafe]
pub fn malloc_noscan(n isize) &u8 {
if n <= 0 {
- panic('malloc_noscan($n <= 0)')
+ panic('malloc_noscan(${n} <= 0)')
}
$if vplayground ? {
if n > 10000 {
@@ -374,7 +374,7 @@ pub fn malloc_noscan(n isize) &u8 {
res = unsafe { C.malloc(n) }
}
if res == 0 {
- panic('malloc_noscan($n) failed')
+ panic('malloc_noscan(${n}) failed')
}
$if debug_malloc ? {
// Fill in the memory with something != 0 i.e. `M`, so it is easier to spot
@@ -389,7 +389,7 @@ pub fn malloc_noscan(n isize) &u8 {
[unsafe]
pub fn malloc_uncollectable(n isize) &u8 {
if n <= 0 {
- panic('malloc_uncollectable($n <= 0)')
+ panic('malloc_uncollectable(${n} <= 0)')
}
$if vplayground ? {
if n > 10000 {
@@ -417,7 +417,7 @@ pub fn malloc_uncollectable(n isize) &u8 {
res = unsafe { C.malloc(n) }
}
if res == 0 {
- panic('malloc_uncollectable($n) failed')
+ panic('malloc_uncollectable(${n}) failed')
}
$if debug_malloc ? {
// Fill in the memory with something != 0 i.e. `M`, so it is easier to spot
@@ -449,7 +449,7 @@ pub fn v_realloc(b &u8, n isize) &u8 {
new_ptr = unsafe { C.realloc(b, n) }
}
if new_ptr == 0 {
- panic('realloc($n) failed')
+ panic('realloc(${n}) failed')
}
return new_ptr
}
@@ -495,7 +495,7 @@ pub fn realloc_data(old_data &u8, old_size int, new_size int) &u8 {
nptr = unsafe { C.realloc(old_data, new_size) }
}
if nptr == 0 {
- panic('realloc_data($old_data, $old_size, $new_size) failed')
+ panic('realloc_data(${old_data}, ${old_size}, ${new_size}) failed')
}
return nptr
}
@@ -505,7 +505,7 @@ pub fn realloc_data(old_data &u8, old_size int, new_size int) &u8 {
// Unlike `v_calloc` vcalloc checks for negative values given in `n`.
pub fn vcalloc(n isize) &u8 {
if n < 0 {
- panic('calloc($n < 0)')
+ panic('calloc(${n} < 0)')
} else if n == 0 {
return &u8(0)
}
@@ -538,7 +538,7 @@ pub fn vcalloc_noscan(n isize) &u8 {
}
}
if n < 0 {
- panic('calloc_noscan($n < 0)')
+ panic('calloc_noscan(${n} < 0)')
}
return $if gcboehm_opt ? {
unsafe { &u8(C.memset(C.GC_MALLOC_ATOMIC(n), 0, n)) }
@@ -613,7 +613,7 @@ pub fn memdup_uncollectable(src voidptr, sz int) voidptr {
fn v_fixed_index(i int, len int) int {
$if !no_bounds_checking {
if i < 0 || i >= len {
- s := 'fixed array index out of range (index: $i, len: $len)'
+ s := 'fixed array index out of range (index: ${i}, len: ${len})'
panic(s)
}
}
diff --git a/vlib/builtin/builtin.v b/vlib/builtin/builtin.v
index af647b5675..ab1b21f3c6 100644
--- a/vlib/builtin/builtin.v
+++ b/vlib/builtin/builtin.v
@@ -36,7 +36,7 @@ fn __as_cast(obj voidptr, obj_type int, expected_type int) voidptr {
expected_name = x.tname.clone()
}
}
- panic('as cast: cannot cast `$obj_name` to `$expected_name`')
+ panic('as cast: cannot cast `${obj_name}` to `${expected_name}`')
}
return obj
}
@@ -76,16 +76,16 @@ pub fn (ami &VAssertMetaInfo) free() {
}
fn __print_assert_failure(i &VAssertMetaInfo) {
- eprintln('$i.fpath:${i.line_nr + 1}: FAIL: fn $i.fn_name: assert $i.src')
+ eprintln('${i.fpath}:${i.line_nr + 1}: FAIL: fn ${i.fn_name}: assert ${i.src}')
if i.op.len > 0 && i.op != 'call' {
- eprintln(' left value: $i.llabel = $i.lvalue')
+ eprintln(' left value: ${i.llabel} = ${i.lvalue}')
if i.rlabel == i.rvalue {
- eprintln(' right value: $i.rlabel')
+ eprintln(' right value: ${i.rlabel}')
} else {
- eprintln(' right value: $i.rlabel = $i.rvalue')
+ eprintln(' right value: ${i.rlabel} = ${i.rvalue}')
}
if i.has_msg {
- eprintln(' message: $i.message')
+ eprintln(' message: ${i.message}')
}
}
}
diff --git a/vlib/builtin/builtin_d_use_libbacktrace.c.v b/vlib/builtin/builtin_d_use_libbacktrace.c.v
index 90f2688b42..ee2dd21b40 100644
--- a/vlib/builtin/builtin_d_use_libbacktrace.c.v
+++ b/vlib/builtin/builtin_d_use_libbacktrace.c.v
@@ -50,7 +50,7 @@ fn bt_print_callback(data &BacktraceOptions, pc voidptr, filename_ptr &char, lin
}
// keep it for later
// pc_64 := u64(pc)
- bt_str := '$filename:$line: by $fn_name'
+ bt_str := '${filename}:${line}: by ${fn_name}'
if data.stdin {
println(bt_str)
} else {
@@ -66,7 +66,7 @@ fn bt_error_callback(data voidptr, msg_ptr &char, errnum int) {
// }
msg := unsafe { msg_ptr.vstring() }
- eprint('libbacktrace: $msg')
+ eprint('libbacktrace: ${msg}')
if errnum > 0 {
eprint(': ${C.strerror(errnum)}')
}
diff --git a/vlib/builtin/builtin_nix.c.v b/vlib/builtin/builtin_nix.c.v
index 871dc84428..786b0b75a0 100644
--- a/vlib/builtin/builtin_nix.c.v
+++ b/vlib/builtin/builtin_nix.c.v
@@ -32,7 +32,7 @@ fn print_backtrace_skipping_top_frames(xskipframes int) bool {
} $else $if linux {
return print_backtrace_skipping_top_frames_linux(skipframes)
} $else {
- println('print_backtrace_skipping_top_frames is not implemented. skipframes: $skipframes')
+ println('print_backtrace_skipping_top_frames is not implemented. skipframes: ${skipframes}')
}
}
return false
@@ -93,7 +93,7 @@ fn print_backtrace_skipping_top_frames_linux(skipframes int) bool {
executable := sframe.all_before('(')
addr := sframe.all_after('[').all_before(']')
beforeaddr := sframe.all_before('[')
- cmd := 'addr2line -e $executable $addr'
+ cmd := 'addr2line -e ${executable} ${addr}'
// taken from os, to avoid depending on the os module inside builtin.v
f := C.popen(&char(cmd.str), c'r')
if f == unsafe { nil } {
@@ -120,7 +120,7 @@ fn print_backtrace_skipping_top_frames_linux(skipframes int) bool {
// Note: it is shortened here to just d. , just so that it fits, and so
// that the common error file:lineno: line format is enforced.
output = output.replace(' (discriminator', ': (d.')
- eprintln('${output:-55s} | ${addr:14s} | $beforeaddr')
+ eprintln('${output:-55s} | ${addr:14s} | ${beforeaddr}')
}
}
}
diff --git a/vlib/builtin/builtin_windows.c.v b/vlib/builtin/builtin_windows.c.v
index 1de0c09836..54d5ca9e63 100644
--- a/vlib/builtin/builtin_windows.c.v
+++ b/vlib/builtin/builtin_windows.c.v
@@ -156,23 +156,23 @@ fn print_backtrace_skipping_top_frames_msvc(skipframes int) bool {
if C.SymGetLineFromAddr64(handle, frame_addr, &offset, &sline64) == 1 {
file_name := unsafe { tos3(sline64.f_file_name) }
lnumber := sline64.f_line_number
- lineinfo = '$file_name:$lnumber'
+ lineinfo = '${file_name}:${lnumber}'
} else {
// addr:
lineinfo = '?? : address = 0x${(&frame_addr):x}'
}
sfunc := unsafe { tos3(fname) }
- eprintln('${nframe:-2d}: ${sfunc:-25s} $lineinfo')
+ eprintln('${nframe:-2d}: ${sfunc:-25s} ${lineinfo}')
} else {
// https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes
cerr := int(C.GetLastError())
if cerr == 87 {
- eprintln('SymFromAddr failure: $cerr = The parameter is incorrect)')
+ eprintln('SymFromAddr failure: ${cerr} = The parameter is incorrect)')
} else if cerr == 487 {
// probably caused because the .pdb isn't in the executable folder
- eprintln('SymFromAddr failure: $cerr = Attempt to access invalid address (Verify that you have the .pdb file in the right folder.)')
+ eprintln('SymFromAddr failure: ${cerr} = Attempt to access invalid address (Verify that you have the .pdb file in the right folder.)')
} else {
- eprintln('SymFromAddr failure: $cerr (see https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes)')
+ eprintln('SymFromAddr failure: ${cerr} (see https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes)')
}
}
}
@@ -289,7 +289,7 @@ pub fn winapi_lasterr_str() string {
C.NULL, err_msg_id, C.MAKELANGID(C.LANG_NEUTRAL, C.SUBLANG_DEFAULT), &msgbuf,
0, C.NULL)
err_msg := if res == 0 {
- 'Win-API error $err_msg_id'
+ 'Win-API error ${err_msg_id}'
} else {
unsafe { string_from_wide(msgbuf) }
}
diff --git a/vlib/builtin/float_test.v b/vlib/builtin/float_test.v
index d2514c5a38..b6991bf371 100644
--- a/vlib/builtin/float_test.v
+++ b/vlib/builtin/float_test.v
@@ -233,7 +233,7 @@ fn test_float_zero_str() {
f1 := f32(0.0)
f2 := 0.0
assert f1.str() == '0.0'
- assert '$f1' == '0.0'
+ assert '${f1}' == '0.0'
assert f2.str() == '0.0'
- assert '$f2' == '0.0'
+ assert '${f2}' == '0.0'
}
diff --git a/vlib/builtin/int.v b/vlib/builtin/int.v
index 9f308b31f4..480570d726 100644
--- a/vlib/builtin/int.v
+++ b/vlib/builtin/int.v
@@ -570,7 +570,7 @@ pub fn (b []u8) byterune() ?rune {
// repeat returns a new string with `count` number of copies of the byte it was called on.
pub fn (b u8) repeat(count int) string {
if count < 0 {
- panic('byte.repeat: count is negative: $count')
+ panic('byte.repeat: count is negative: ${count}')
} else if count == 0 {
return ''
} else if count == 1 {
diff --git a/vlib/builtin/js/array.js.v b/vlib/builtin/js/array.js.v
index 0b00bfcbe7..516c14e325 100644
--- a/vlib/builtin/js/array.js.v
+++ b/vlib/builtin/js/array.js.v
@@ -108,7 +108,7 @@ pub fn (mut a array) trim(index int) {
[unsafe]
pub fn (a array) repeat_to_depth(count int, depth int) array {
if count < 0 {
- panic('array.repeat: count is negative: $count')
+ panic('array.repeat: count is negative: ${count}')
}
mut arr := empty_array()
diff --git a/vlib/builtin/js/array_test.js.v b/vlib/builtin/js/array_test.js.v
index d8520f0041..6a81a56159 100644
--- a/vlib/builtin/js/array_test.js.v
+++ b/vlib/builtin/js/array_test.js.v
@@ -486,11 +486,11 @@ fn (ta []Test2) str() string {
}
fn (t Test2) str() string {
- return '{$t.one $t.two}'
+ return '{${t.one} ${t.two}}'
}
fn (t Test) str() string {
- return '{$t.a $t.b}'
+ return '{${t.a} ${t.b}}'
}
fn test_struct_print() {
@@ -659,7 +659,7 @@ fn test_map() {
// type switch
assert nums.map(it * 10) == [10, 20, 30, 40, 50, 60]
assert nums.map(it * it) == [1, 4, 9, 16, 25, 36]
- assert nums.map('$it') == ['1', '2', '3', '4', '5', '6']
+ assert nums.map('${it}') == ['1', '2', '3', '4', '5', '6']
assert nums.map(it % 2 == 0) == [false, true, false, true, false, true]
assert strs.map(it.to_upper()) == ['V', 'IS', 'AWESOME']
assert strs.map(it == 'awesome') == [false, false, true]
@@ -673,13 +673,13 @@ fn test_map() {
assert []int{len: 0}.map(it * 2) == []
// nested maps (where it is of same type)
assert nums.map(strs.map(int(7)) == [7, 7, 7]) == [true, true, true, true, true, true]
- assert nums.map('$it' + strs.map('a')[0]) == ['1a', '2a', '3a', '4a', '5a', '6a']
+ assert nums.map('${it}' + strs.map('a')[0]) == ['1a', '2a', '3a', '4a', '5a', '6a']
assert nums.map(it + strs.map(int(7))[0]) == [8, 9, 10, 11, 12, 13]
assert nums.map(it + strs.map(it.len)[0]) == [2, 3, 4, 5, 6, 7]
assert strs.map(it.len + strs.map(it.len)[0]) == [2, 3, 8]
// nested (different it types)
assert strs.map(it[nums.map(it - it)[0]]) == [u8(`v`), `i`, `a`]
- assert nums[0..3].map('$it' + strs.map(it)[it - 1]) == ['1v', '2is', '3awesome']
+ assert nums[0..3].map('${it}' + strs.map(it)[it - 1]) == ['1v', '2is', '3awesome']
assert nums.map(map_test_helper_1) == [1, 4, 9, 16, 25, 36]
assert [1, 5, 10].map(map_test_helper_1) == [1, 25, 100]
assert nums == [1, 2, 3, 4, 5, 6]
@@ -1076,7 +1076,7 @@ fn test_array_with_cap() {
fn test_multi_array_index() {
mut a := [][]int{len: 2, init: []int{len: 3, init: 0}}
a[0][0] = 1
- assert '$a' == '[[1, 0, 0], [0, 0, 0]]'
+ assert '${a}' == '[[1, 0, 0], [0, 0, 0]]'
// mut b := [[0].repeat(3)].repeat(2)
// b[0][0] = 1
// assert '$b' == '[[1, 0, 0], [0, 0, 0]]'
@@ -1381,7 +1381,7 @@ fn test_array_struct_contains() {
coords << coord_1
exists := coord_1 in coords
not_exists := coord_1 !in coords
- println('`exists`: $exists and `not exists`: $not_exists')
+ println('`exists`: ${exists} and `not exists`: ${not_exists}')
assert exists == true
assert not_exists == false
}
@@ -1416,7 +1416,7 @@ fn test_array_of_array_append() {
println(x) // OK
x[2] << 123 // RTE
println(x)
- assert '$x' == '[[], [], [123], []]'
+ assert '${x}' == '[[], [], [123], []]'
}
fn test_array_of_map_insert() {
@@ -1430,7 +1430,7 @@ fn test_array_of_map_insert() {
fn test_multi_fixed_array_init() {
a := [3][3]int{}
- assert '$a' == '[[0, 0, 0], [0, 0, 0], [0, 0, 0]]'
+ assert '${a}' == '[[0, 0, 0], [0, 0, 0], [0, 0, 0]]'
}
struct Numbers {
diff --git a/vlib/builtin/js/builtin.js.v b/vlib/builtin/js/builtin.js.v
index 5feef9b72a..818c25e0ec 100644
--- a/vlib/builtin/js/builtin.js.v
+++ b/vlib/builtin/js/builtin.js.v
@@ -60,7 +60,7 @@ pub fn eprint(s string) {
[noreturn]
pub fn exit(c int) {
JS.process.exit(c)
- js_throw('exit($c)')
+ js_throw('exit(${c})')
}
fn opt_ok(data voidptr, option Option) {
diff --git a/vlib/builtin/js/builtin.v b/vlib/builtin/js/builtin.v
index 91580e98e3..b54c87719d 100644
--- a/vlib/builtin/js/builtin.v
+++ b/vlib/builtin/js/builtin.v
@@ -8,7 +8,7 @@ fn (a any) toString()
[noreturn]
pub fn panic(s string) {
- eprintln('V panic: $s\n$js_stacktrace()')
+ eprintln('V panic: ${s}\n${js_stacktrace()}')
exit(1)
}
@@ -39,10 +39,10 @@ pub fn (err IError) str() string {
// TODO: remove once deprecation period for `IError` methods has ended
old_error_style := unsafe { voidptr(&err.msg) != voidptr(&err.code) } // if fields are not defined (new style) they don't have an offset between them
if old_error_style {
- '$err.type_name(): $err.msg'
+ '${err.type_name()}: ${err.msg}'
} else {
// <<
- '$err.type_name(): $err.msg()'
+ '${err.type_name()}: ${err.msg()}'
}
}
}
@@ -101,7 +101,7 @@ pub fn (o Option) str() string {
if o.state == 1 {
return 'Option{ none }'
}
- return 'Option{ error: "$o.err" }'
+ return 'Option{ error: "${o.err}" }'
}
pub struct _option {
@@ -117,12 +117,12 @@ pub fn (o _option) str() string {
if o.state == 1 {
return 'Option{ none }'
}
- return 'Option{ error: "$o.err" }'
+ return 'Option{ error: "${o.err}" }'
}
// trace_error prints to stderr a string and a backtrace of the error
fn trace_error(x string) {
- eprintln('> ${@FN} | $x')
+ eprintln('> ${@FN} | ${x}')
}
// error returns a default error instance containing the error given in `message`.
diff --git a/vlib/builtin/js/map_test.js.v b/vlib/builtin/js/map_test.js.v
index da6efd436f..c486506056 100644
--- a/vlib/builtin/js/map_test.js.v
+++ b/vlib/builtin/js/map_test.js.v
@@ -502,10 +502,10 @@ fn test_map_str_after_delete() {
'second': 2
'third': 3
}
- osm := '$m'
+ osm := '${m}'
m.delete('second')
- nsm := '$m'
- println('m: $m')
+ nsm := '${m}'
+ println('m: ${m}')
assert osm == "{'first': 1, 'second': 2, 'third': 3}"
assert nsm == "{'first': 1, 'third': 3}"
}
@@ -655,7 +655,7 @@ fn test_rune_keys() {
m[`@`] = 7
assert m.len == 3
println(m)
- assert '$m' == '{`!`: 2, `%`: 3, `@`: 7}'
+ assert '${m}' == '{`!`: 2, `%`: 3, `@`: 7}'
/*
mut a := []rune{}
@@ -752,7 +752,7 @@ fn test_map_assign_empty_map_init() {
a = {}
println(a)
assert a == map[string]int{}
- assert '$a' == '{}'
+ assert '${a}' == '{}'
}
fn test_in_map_literal() {
@@ -947,11 +947,11 @@ fn test_map_set_fixed_array_variable() {
mut m := map[string][2]f64{}
m['A'] = [1.1, 2.2]!
println(m)
- assert '$m' == "{'A': [1.1, 2.2]}"
+ assert '${m}' == "{'A': [1.1, 2.2]}"
mut m2 := map[string][2]f64{}
arr := [1.1, 2.2]!
m2['A'] = arr
println(m2)
- assert '$m2' == "{'A': [1.1, 2.2]}"
+ assert '${m2}' == "{'A': [1.1, 2.2]}"
}
diff --git a/vlib/builtin/js/rune.js.v b/vlib/builtin/js/rune.js.v
index 22dc3d2fdc..8780d17781 100644
--- a/vlib/builtin/js/rune.js.v
+++ b/vlib/builtin/js/rune.js.v
@@ -11,7 +11,7 @@ pub fn (ra []rune) string() string {
pub fn (c rune) repeat(count int) string {
if count < 0 {
- panic('rune.repeat: count is negative: $count')
+ panic('rune.repeat: count is negative: ${count}')
} else if count == 0 {
return ''
} else if count == 1 {
diff --git a/vlib/builtin/js/string_test.js.v b/vlib/builtin/js/string_test.js.v
index 5b0d048229..042fca10e0 100644
--- a/vlib/builtin/js/string_test.js.v
+++ b/vlib/builtin/js/string_test.js.v
@@ -713,7 +713,7 @@ fn test_raw() {
lines := raw.split('\n')
println(lines)
assert lines.len == 1
- println('raw string: "$raw"')
+ println('raw string: "${raw}"')
raw2 := r'Hello V\0'
assert raw2[7] == `\\`
@@ -736,7 +736,7 @@ fn test_raw_with_quotes() {
fn test_escape() {
a := 10
- println("\"$a")
+ println("\"${a}")
// assert "\"$a" == '"10'
}
@@ -761,9 +761,9 @@ fn test_raw_inter() {
fn test_c_r() {
// This used to break because of r'' and c''
c := 42
- println('$c')
+ println('${c}')
r := 50
- println('$r')
+ println('${r}')
}
fn test_inter_before_comptime_if() {
@@ -778,9 +778,9 @@ fn test_inter_before_comptime_if() {
fn test_double_quote_inter() {
a := 1
b := 2
- println('$a $b')
- assert '$a $b' == '1 2'
- assert '$a $b' == '1 2'
+ println('${a} ${b}')
+ assert '${a} ${b}' == '1 2'
+ assert '${a} ${b}' == '1 2'
}
fn foo(b u8) u8 {
diff --git a/vlib/builtin/linux_bare/old/syscallwrapper_test.v b/vlib/builtin/linux_bare/old/syscallwrapper_test.v
index 1bd79c9f1d..f3c369d973 100644
--- a/vlib/builtin/linux_bare/old/syscallwrapper_test.v
+++ b/vlib/builtin/linux_bare/old/syscallwrapper_test.v
@@ -15,7 +15,7 @@ fn test_syscallwrappers() {
os.chdir(dot_checks) or {}
checks_v := 'checks.v'
assert os.exists(checks_v)
- rc := os.execute_or_exit('v run $checks_v')
+ rc := os.execute_or_exit('v run ${checks_v}')
assert rc.exit_code == 0
assert !rc.output.contains('V panic: An assertion failed.')
assert !rc.output.contains('failed')
diff --git a/vlib/builtin/map_of_floats_test.v b/vlib/builtin/map_of_floats_test.v
index 7f21a34cd8..986de75675 100644
--- a/vlib/builtin/map_of_floats_test.v
+++ b/vlib/builtin/map_of_floats_test.v
@@ -2,7 +2,7 @@ fn test_map_of_f32() {
mut m32 := map[f32]string{}
m32[1.0] = 'one'
println(m32)
- assert '$m32' == r"{1.0: 'one'}"
+ assert '${m32}' == r"{1.0: 'one'}"
for k, v in m32 {
assert typeof(k).name == 'f32'
assert typeof(v).name == 'string'
@@ -17,7 +17,7 @@ fn test_map_of_f64() {
}
m64[1.0] = 'one'
println(m64)
- assert '$m64' == r"{3.14: 'pi', 1.0: 'one'}"
+ assert '${m64}' == r"{3.14: 'pi', 1.0: 'one'}"
for k, v in m64 {
assert typeof(k).name == 'f64'
assert typeof(v).name == 'string'
diff --git a/vlib/builtin/map_test.v b/vlib/builtin/map_test.v
index de3ba7e705..54f105be23 100644
--- a/vlib/builtin/map_test.v
+++ b/vlib/builtin/map_test.v
@@ -519,10 +519,10 @@ fn test_map_str_after_delete() {
'second': 2
'third': 3
}
- osm := '$m'
+ osm := '${m}'
m.delete('second')
- nsm := '$m'
- println('m: $m')
+ nsm := '${m}'
+ println('m: ${m}')
assert osm == "{'first': 1, 'second': 2, 'third': 3}"
assert nsm == "{'first': 1, 'third': 3}"
}
@@ -683,7 +683,7 @@ fn test_rune_keys() {
m[`@`] = 7
assert m.len == 3
println(m)
- assert '$m' == '{`!`: 2, `%`: 3, `@`: 7}'
+ assert '${m}' == '{`!`: 2, `%`: 3, `@`: 7}'
mut a := []rune{}
for k, v in m {
@@ -777,7 +777,7 @@ fn test_map_assign_empty_map_init() {
a = {}
println(a)
assert a == map[string]int{}
- assert '$a' == '{}'
+ assert '${a}' == '{}'
}
fn test_in_map_literal() {
@@ -972,13 +972,13 @@ fn test_map_set_fixed_array_variable() {
mut m := map[string][2]f64{}
m['A'] = [1.1, 2.2]!
println(m)
- assert '$m' == "{'A': [1.1, 2.2]}"
+ assert '${m}' == "{'A': [1.1, 2.2]}"
mut m2 := map[string][2]f64{}
arr := [1.1, 2.2]!
m2['A'] = arr
println(m2)
- assert '$m2' == "{'A': [1.1, 2.2]}"
+ assert '${m2}' == "{'A': [1.1, 2.2]}"
}
type Map = map[int]int
diff --git a/vlib/builtin/option.v b/vlib/builtin/option.v
index 8b684fa90c..22fb665923 100644
--- a/vlib/builtin/option.v
+++ b/vlib/builtin/option.v
@@ -30,7 +30,7 @@ pub fn (err IError) str() string {
// TODO: remove once deprecation period for `IError` methods has ended
// old_error_style := unsafe { voidptr(&err.msg) != voidptr(&err.code) } // if fields are not defined (new style) they don't have an offset between
// <<
- '$err.type_name(): $err.msg()'
+ '${err.type_name()}: ${err.msg()}'
}
}
}
@@ -56,7 +56,7 @@ pub:
// msg returns the message of MessageError
pub fn (err MessageError) msg() string {
if err.code > 0 {
- return '$err.msg; code: $err.code'
+ return '${err.msg}; code: ${err.code}'
}
return err.msg
}
@@ -83,7 +83,7 @@ fn (_ None__) str() string {
[if trace_error ?]
fn trace_error(x string) {
- eprintln('> ${@FN} | $x')
+ eprintln('> ${@FN} | ${x}')
}
// error returns a default error instance containing the error given in `message`.
@@ -100,7 +100,7 @@ pub fn error(message string) IError {
// Example: if ouch { return error_with_code('an error occurred', 1) }
[inline]
pub fn error_with_code(message string, code int) IError {
- trace_error('$message | code: $code')
+ trace_error('${message} | code: ${code}')
return &MessageError{
msg: message
code: code
diff --git a/vlib/builtin/prealloc.c.v b/vlib/builtin/prealloc.c.v
index 9c399a2b7d..0586f85936 100644
--- a/vlib/builtin/prealloc.c.v
+++ b/vlib/builtin/prealloc.c.v
@@ -82,10 +82,10 @@ fn prealloc_vcleanup() {
mut mb := g_memory_block
for unsafe { mb != 0 } {
nr_mallocs += mb.mallocs
- eprintln('> freeing mb.id: ${mb.id:3} | cap: ${mb.cap:7} | rem: ${mb.remaining:7} | start: ${voidptr(mb.start)} | current: ${voidptr(mb.current)} | diff: ${u64(mb.current) - u64(mb.start):7} bytes | mallocs: $mb.mallocs')
+ eprintln('> freeing mb.id: ${mb.id:3} | cap: ${mb.cap:7} | rem: ${mb.remaining:7} | start: ${voidptr(mb.start)} | current: ${voidptr(mb.current)} | diff: ${u64(mb.current) - u64(mb.start):7} bytes | mallocs: ${mb.mallocs}')
mb = mb.previous
}
- eprintln('> nr_mallocs: $nr_mallocs')
+ eprintln('> nr_mallocs: ${nr_mallocs}')
}
unsafe {
for g_memory_block != 0 {
diff --git a/vlib/builtin/rune.v b/vlib/builtin/rune.v
index 4fdb538285..4afcbe6ccc 100644
--- a/vlib/builtin/rune.v
+++ b/vlib/builtin/rune.v
@@ -43,7 +43,7 @@ pub fn (ra []rune) string() string {
// repeat returns a new string with `count` number of copies of the rune it was called on.
pub fn (c rune) repeat(count int) string {
if count < 0 {
- panic('rune.repeat: count is negative: $count')
+ panic('rune.repeat: count is negative: ${count}')
} else if count == 0 {
return ''
} else if count == 1 {
diff --git a/vlib/builtin/sorting_test.v b/vlib/builtin/sorting_test.v
index 08a6c2bcb7..26415ac635 100644
--- a/vlib/builtin/sorting_test.v
+++ b/vlib/builtin/sorting_test.v
@@ -7,14 +7,14 @@ const (
fn test_sorting_simple() {
mut a := unsorted.clone()
a.sort()
- println(' a: $a')
+ println(' a: ${a}')
assert a == sorted_asc
}
fn test_sorting_with_condition_expression() {
mut a := unsorted.clone()
a.sort(a > b)
- println(' a: $a')
+ println(' a: ${a}')
assert a == sorted_desc
}
@@ -44,7 +44,7 @@ fn mysort(mut a []int) {
fn test_sorting_by_passing_a_mut_array_to_a_function() {
mut a := unsorted.clone()
mysort(mut a)
- println(' a: $a')
+ println(' a: ${a}')
assert a == sorted_asc
}
@@ -59,10 +59,10 @@ fn test_sorting_by_passing_an_anonymous_sorting_function() {
fn test_sorting_u64s() {
mut a := [u64(3), 2, 1, 9, 0, 8]
a.sort()
- println(' a: $a')
+ println(' a: ${a}')
assert a == [u64(0), 1, 2, 3, 8, 9]
a.sort(a > b)
- println(' a: $a')
+ println(' a: ${a}')
assert a == [u64(9), 8, 3, 2, 1, 0]
}
diff --git a/vlib/builtin/string.v b/vlib/builtin/string.v
index 3e81a4f774..452267708e 100644
--- a/vlib/builtin/string.v
+++ b/vlib/builtin/string.v
@@ -875,7 +875,7 @@ fn (s string) substr2(start int, _end int, end_max bool) string {
pub fn (s string) substr(start int, end int) string {
$if !no_bounds_checking {
if start > end || start > s.len || end > s.len || start < 0 || end < 0 {
- panic('substr($start, $end) out of bounds (len=$s.len)')
+ panic('substr(${start}, ${end}) out of bounds (len=${s.len})')
}
}
len := end - start
@@ -902,7 +902,7 @@ pub fn (s string) substr(start int, end int) string {
[direct_array_access]
pub fn (s string) substr_with_check(start int, end int) ?string {
if start > end || start > s.len || end > s.len || start < 0 || end < 0 {
- return error('substr($start, $end) out of bounds (len=$s.len)')
+ return error('substr(${start}, ${end}) out of bounds (len=${s.len})')
}
len := end - start
if len == s.len {
@@ -1584,7 +1584,7 @@ pub fn (s string) str() string {
fn (s string) at(idx int) byte {
$if !no_bounds_checking {
if idx < 0 || idx >= s.len {
- panic('string index out of range: $idx / $s.len')
+ panic('string index out of range: ${idx} / ${s.len}')
}
}
unsafe {
@@ -1881,7 +1881,7 @@ pub fn (s string) bytes() []u8 {
[direct_array_access]
pub fn (s string) repeat(count int) string {
if count < 0 {
- panic('string.repeat: count is negative: $count')
+ panic('string.repeat: count is negative: ${count}')
} else if count == 0 {
return ''
} else if count == 1 {
diff --git a/vlib/builtin/string_interpolation.v b/vlib/builtin/string_interpolation.v
index 5db89a8eef..00995d1750 100644
--- a/vlib/builtin/string_interpolation.v
+++ b/vlib/builtin/string_interpolation.v
@@ -687,22 +687,22 @@ pub const (
[inline]
pub fn str_intp_sq(in_str string) string {
- return 'str_intp(2, _MOV((StrIntpData[]){{_SLIT("\'"), $si_s_code, {.d_s = $in_str}},{_SLIT("\'"), 0, {.d_c = 0 }}}))'
+ return 'str_intp(2, _MOV((StrIntpData[]){{_SLIT("\'"), ${si_s_code}, {.d_s = ${in_str}}},{_SLIT("\'"), 0, {.d_c = 0 }}}))'
}
[inline]
pub fn str_intp_rune(in_str string) string {
- return 'str_intp(2, _MOV((StrIntpData[]){{_SLIT("\`"), $si_s_code, {.d_s = $in_str}},{_SLIT("\`"), 0, {.d_c = 0 }}}))'
+ return 'str_intp(2, _MOV((StrIntpData[]){{_SLIT("\`"), ${si_s_code}, {.d_s = ${in_str}}},{_SLIT("\`"), 0, {.d_c = 0 }}}))'
}
[inline]
pub fn str_intp_g32(in_str string) string {
- return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, $si_g32_code, {.d_f32 = $in_str }}}))'
+ return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, ${si_g32_code}, {.d_f32 = ${in_str} }}}))'
}
[inline]
pub fn str_intp_g64(in_str string) string {
- return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, $si_g64_code, {.d_f64 = $in_str }}}))'
+ return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, ${si_g64_code}, {.d_f64 = ${in_str} }}}))'
}
// replace %% with the in_str
@@ -718,12 +718,12 @@ pub fn str_intp_sub(base_str string, in_str string) string {
st_str := base_str[..index]
if index + 2 < base_str.len {
en_str := base_str[index + 2..]
- res_str := 'str_intp(2, _MOV((StrIntpData[]){{_SLIT("$st_str"), $si_s_code, {.d_s = $in_str }},{_SLIT("$en_str"), 0, {.d_c = 0}}}))'
+ res_str := 'str_intp(2, _MOV((StrIntpData[]){{_SLIT("${st_str}"), ${si_s_code}, {.d_s = ${in_str} }},{_SLIT("${en_str}"), 0, {.d_c = 0}}}))'
st_str.free()
en_str.free()
return res_str
}
- res2_str := 'str_intp(1, _MOV((StrIntpData[]){{_SLIT("$st_str"), $si_s_code, {.d_s = $in_str }}}))'
+ res2_str := 'str_intp(1, _MOV((StrIntpData[]){{_SLIT("${st_str}"), ${si_s_code}, {.d_s = ${in_str} }}}))'
st_str.free()
return res2_str
}
diff --git a/vlib/builtin/string_test.v b/vlib/builtin/string_test.v
index 04dbbc12fa..6697d53463 100644
--- a/vlib/builtin/string_test.v
+++ b/vlib/builtin/string_test.v
@@ -805,7 +805,7 @@ fn test_raw() {
lines := raw.split('\n')
println(lines)
assert lines.len == 1
- println('raw string: "$raw"')
+ println('raw string: "${raw}"')
raw2 := r'Hello V\0'
assert raw2[7] == `\\`
@@ -827,8 +827,8 @@ fn test_raw_with_quotes() {
fn test_escape() {
a := 10
- println("\"$a")
- assert "\"$a" == '"10'
+ println("\"${a}")
+ assert "\"${a}" == '"10'
}
fn test_atoi() {
@@ -852,9 +852,9 @@ fn test_raw_inter() {
fn test_c_r() {
// This used to break because of r'' and c''
c := 42
- println('$c')
+ println('${c}')
r := 50
- println('$r')
+ println('${r}')
}
fn test_inter_before_comptime_if() {
@@ -869,9 +869,9 @@ fn test_inter_before_comptime_if() {
fn test_double_quote_inter() {
a := 1
b := 2
- println('$a $b')
- assert '$a $b' == '1 2'
- assert '$a $b' == '1 2'
+ println('${a} ${b}')
+ assert '${a} ${b}' == '1 2'
+ assert '${a} ${b}' == '1 2'
}
fn foo(b u8) u8 {
@@ -884,7 +884,7 @@ fn filter(b u8) bool {
fn test_split_into_lines() {
line_content := 'Line'
- text_crlf := '$line_content\r\n$line_content\r\n$line_content'
+ text_crlf := '${line_content}\r\n${line_content}\r\n${line_content}'
lines_crlf := text_crlf.split_into_lines()
assert lines_crlf.len == 3
@@ -892,7 +892,7 @@ fn test_split_into_lines() {
assert line == line_content
}
- text_lf := '$line_content\n$line_content\n$line_content'
+ text_lf := '${line_content}\n${line_content}\n${line_content}'
lines_lf := text_lf.split_into_lines()
assert lines_lf.len == 3
@@ -965,24 +965,24 @@ fn test_interpolation_after_quoted_variable_still_works() {
tt := 'xyz'
// Basic interpolation, no internal quotes
- yy := 'Replacing $rr with $tt'
+ yy := 'Replacing ${rr} with ${tt}'
assert yy == 'Replacing abc with xyz'
// Interpolation after quoted variable ending with 'r'quote
// that may be mistaken with the start of a raw string,
// ensure that it is not.
- ss := 'Replacing "$rr" with "$tt"'
+ ss := 'Replacing "${rr}" with "${tt}"'
assert ss == 'Replacing "abc" with "xyz"'
- zz := "Replacing '$rr' with '$tt'"
+ zz := "Replacing '${rr}' with '${tt}'"
assert zz == "Replacing 'abc' with 'xyz'"
// Interpolation after quoted variable ending with 'c'quote
// may be mistaken with the start of a c string, so
// check it is not.
cc := 'abc'
- ccc := "Replacing '$cc' with '$tt'"
+ ccc := "Replacing '${cc}' with '${tt}'"
assert ccc == "Replacing 'abc' with 'xyz'"
- cccq := 'Replacing "$cc" with "$tt"'
+ cccq := 'Replacing "${cc}" with "${tt}"'
assert cccq == 'Replacing "abc" with "xyz"'
}
diff --git a/vlib/builtin/utf8_test.v b/vlib/builtin/utf8_test.v
index 769b4818ca..d961800b93 100644
--- a/vlib/builtin/utf8_test.v
+++ b/vlib/builtin/utf8_test.v
@@ -12,10 +12,10 @@ fn test_utf8_wide_char() {
}
r := `✔`
s := '✔'
- println('r: $r')
- println('s: $s')
+ println('r: ${r}')
+ println('s: ${s}')
rstr := r.str()
- println('rstr: $rstr')
+ println('rstr: ${rstr}')
assert utf8_char_len(r) == 1
assert utf8_char_len(s[0]) == 3
assert s == rstr
diff --git a/vlib/cli/command.v b/vlib/cli/command.v
index 877e7f26bc..cdd760e893 100644
--- a/vlib/cli/command.v
+++ b/vlib/cli/command.v
@@ -39,29 +39,29 @@ pub mut:
pub fn (cmd Command) str() string {
mut res := []string{}
res << 'Command{'
- res << ' name: "$cmd.name"'
- res << ' usage: "$cmd.usage"'
- res << ' version: "$cmd.version"'
- res << ' description: "$cmd.description"'
- res << ' man_description: "$cmd.man_description"'
- res << ' disable_help: $cmd.disable_help'
- res << ' disable_man: $cmd.disable_man'
- res << ' disable_flags: $cmd.disable_flags'
- res << ' disable_version: $cmd.disable_version'
- res << ' sort_flags: $cmd.sort_flags'
- res << ' sort_commands: $cmd.sort_commands'
- res << ' cb execute: $cmd.execute'
- res << ' cb pre_execute: $cmd.pre_execute'
- res << ' cb post_execute: $cmd.post_execute'
+ res << ' name: "${cmd.name}"'
+ res << ' usage: "${cmd.usage}"'
+ res << ' version: "${cmd.version}"'
+ res << ' description: "${cmd.description}"'
+ res << ' man_description: "${cmd.man_description}"'
+ res << ' disable_help: ${cmd.disable_help}'
+ res << ' disable_man: ${cmd.disable_man}'
+ res << ' disable_flags: ${cmd.disable_flags}'
+ res << ' disable_version: ${cmd.disable_version}'
+ res << ' sort_flags: ${cmd.sort_flags}'
+ res << ' sort_commands: ${cmd.sort_commands}'
+ res << ' cb execute: ${cmd.execute}'
+ res << ' cb pre_execute: ${cmd.pre_execute}'
+ res << ' cb post_execute: ${cmd.post_execute}'
if unsafe { cmd.parent == 0 } {
res << ' parent: &Command(0)'
} else {
- res << ' parent: &Command{$cmd.parent.name ...}'
+ res << ' parent: &Command{${cmd.parent.name} ...}'
}
- res << ' commands: $cmd.commands'
- res << ' flags: $cmd.flags'
- res << ' required_args: $cmd.required_args'
- res << ' args: $cmd.args'
+ res << ' commands: ${cmd.commands}'
+ res << ' flags: ${cmd.flags}'
+ res << ' required_args: ${cmd.required_args}'
+ res << ' args: ${cmd.args}'
res << '}'
return res.join('\n')
}
@@ -84,7 +84,7 @@ pub fn (cmd Command) full_name() string {
if cmd.is_root() {
return cmd.name
}
- return cmd.parent.full_name() + ' $cmd.name'
+ return cmd.parent.full_name() + ' ${cmd.name}'
}
// add_commands adds the `commands` array of `Command`s as sub-commands.
@@ -98,7 +98,7 @@ pub fn (mut cmd Command) add_commands(commands []Command) {
pub fn (mut cmd Command) add_command(command Command) {
mut subcmd := command
if cmd.commands.contains(subcmd.name) {
- eprintln_exit('Command with the name `$subcmd.name` already exists')
+ eprintln_exit('Command with the name `${subcmd.name}` already exists')
}
subcmd.parent = unsafe { cmd }
cmd.commands << subcmd
@@ -124,7 +124,7 @@ pub fn (mut cmd Command) add_flags(flags []Flag) {
// add_flag adds `flag` to this `Command`.
pub fn (mut cmd Command) add_flag(flag Flag) {
if cmd.flags.contains(flag.name) {
- eprintln_exit('Flag with the name `$flag.name` already exists')
+ eprintln_exit('Flag with the name `${flag.name}` already exists')
}
cmd.flags << flag
}
@@ -191,14 +191,14 @@ fn (mut cmd Command) parse_flags() {
found = true
flag.found = true
cmd.args = flag.parse(cmd.args, cmd.posix_mode) or {
- eprintln_exit('Failed to parse flag `${cmd.args[0]}`: $err')
+ eprintln_exit('Failed to parse flag `${cmd.args[0]}`: ${err}')
}
break
}
}
}
if !found {
- eprintln_exit('Command `$cmd.name` has no flag `${cmd.args[0]}`')
+ eprintln_exit('Command `${cmd.name}` has no flag `${cmd.args[0]}`')
}
}
}
@@ -230,7 +230,7 @@ fn (mut cmd Command) parse_commands() {
// if no further command was found, execute current command
if cmd.required_args > 0 {
if cmd.required_args > cmd.args.len {
- eprintln_exit('Command `$cmd.name` needs at least $cmd.required_args arguments')
+ eprintln_exit('Command `${cmd.name}` needs at least ${cmd.required_args} arguments')
}
}
cmd.check_required_flags()
@@ -243,8 +243,8 @@ fn (mut cmd Command) parse_commands() {
fn (mut cmd Command) handle_cb(cb FnCommandCallback, label string) {
if !isnil(cb) {
cb(*cmd) or {
- label_message := term.ecolorize(term.bright_red, 'cli $label error:')
- eprintln_exit('$label_message $err')
+ label_message := term.ecolorize(term.bright_red, 'cli ${label} error:')
+ eprintln_exit('${label_message} ${err}')
}
}
}
@@ -284,7 +284,7 @@ fn (cmd Command) check_required_flags() {
for flag in cmd.flags {
if flag.required && flag.value.len == 0 {
full_name := cmd.full_name()
- eprintln_exit('Flag `$flag.name` is required by `$full_name`')
+ eprintln_exit('Flag `${flag.name}` is required by `${full_name}`')
}
}
}
@@ -317,7 +317,7 @@ fn (cmds []Command) get(name string) !Command {
return cmd
}
}
- return error('Command `$name` not found in $cmds')
+ return error('Command `${name}` not found in ${cmds}')
}
fn (cmds []Command) contains(name string) bool {
diff --git a/vlib/cli/flag.v b/vlib/cli/flag.v
index 4570443f44..1419353a74 100644
--- a/vlib/cli/flag.v
+++ b/vlib/cli/flag.v
@@ -44,7 +44,7 @@ pub fn (flags []Flag) get_all_found() []Flag {
// get_bool returns an error if the `FlagType` is not boolean.
pub fn (flag Flag) get_bool() !bool {
if flag.flag != .bool {
- return error('$flag.name: Invalid flag type `$flag.flag`, expected `bool`')
+ return error('${flag.name}: Invalid flag type `${flag.flag}`, expected `bool`')
}
val := flag.get_value_or_default_value()
@@ -63,7 +63,7 @@ pub fn (flags []Flag) get_bool(name string) !bool {
// get_int returns an error if the `FlagType` is not integer.
pub fn (flag Flag) get_int() !int {
if flag.flag != .int {
- return error('$flag.name: Invalid flag type `$flag.flag`, expected `int`')
+ return error('${flag.name}: Invalid flag type `${flag.flag}`, expected `int`')
}
val := flag.get_value_or_default_value()
@@ -79,7 +79,7 @@ pub fn (flag Flag) get_int() !int {
// get_ints returns an error if the `FlagType` is not integer.
pub fn (flag Flag) get_ints() ![]int {
if flag.flag != .int_array {
- return error('$flag.name: Invalid flag type `$flag.flag`, expected `int_array`')
+ return error('${flag.name}: Invalid flag type `${flag.flag}`, expected `int_array`')
}
val := flag.get_value_or_default_value()
@@ -115,7 +115,7 @@ pub fn (flags []Flag) get_ints(name string) ![]int {
// get_float returns an error if the `FlagType` is not floating point.
pub fn (flag Flag) get_float() !f64 {
if flag.flag != .float {
- return error('$flag.name: Invalid flag type `$flag.flag`, expected `float`')
+ return error('${flag.name}: Invalid flag type `${flag.flag}`, expected `float`')
}
val := flag.get_value_or_default_value()
@@ -131,7 +131,7 @@ pub fn (flag Flag) get_float() !f64 {
// get_floats returns an error if the `FlagType` is not floating point.
pub fn (flag Flag) get_floats() ![]f64 {
if flag.flag != .float_array {
- return error('$flag.name: Invalid flag type `$flag.flag`, expected `float_array`')
+ return error('${flag.name}: Invalid flag type `${flag.flag}`, expected `float_array`')
}
val := flag.get_value_or_default_value()
@@ -167,7 +167,7 @@ pub fn (flags []Flag) get_floats(name string) ![]f64 {
// get_string returns an error if the `FlagType` is not string.
pub fn (flag Flag) get_string() !string {
if flag.flag != .string {
- return error('$flag.name: Invalid flag type `$flag.flag`, expected `string`')
+ return error('${flag.name}: Invalid flag type `${flag.flag}`, expected `string`')
}
val := flag.get_value_or_default_value()
@@ -183,7 +183,7 @@ pub fn (flag Flag) get_string() !string {
// get_strings returns an error if the `FlagType` is not string.
pub fn (flag Flag) get_strings() ![]string {
if flag.flag != .string_array {
- return error('$flag.name: Invalid flag type `$flag.flag`, expected `string_array`')
+ return error('${flag.name}: Invalid flag type `${flag.flag}`, expected `string_array`')
}
val := flag.get_value_or_default_value()
@@ -219,7 +219,7 @@ fn (mut flag Flag) parse(args []string, posix_mode bool) ![]string {
} else {
if flag.value.len > 0 && flag.flag != .int_array && flag.flag != .float_array
&& flag.flag != .string_array {
- return error('The argument `$flag.name` accept only one value!')
+ return error('The argument `${flag.name}` accept only one value!')
}
new_args := flag.parse_raw(args)!
@@ -233,10 +233,10 @@ fn (mut flag Flag) parse(args []string, posix_mode bool) ![]string {
// matches returns `true` if first arg in `args` matches this flag.
fn (mut flag Flag) matches(args []string, posix_mode bool) bool {
prefix := if posix_mode { '--' } else { '-' }
- return (flag.name != '' && args[0] == '$prefix$flag.name')
- || (flag.name != '' && args[0].starts_with('$prefix$flag.name='))
- || (flag.abbrev != '' && args[0] == '-$flag.abbrev')
- || (flag.abbrev != '' && args[0].starts_with('-$flag.abbrev='))
+ return (flag.name != '' && args[0] == '${prefix}${flag.name}')
+ || (flag.name != '' && args[0].starts_with('${prefix}${flag.name}='))
+ || (flag.abbrev != '' && args[0] == '-${flag.abbrev}')
+ || (flag.abbrev != '' && args[0].starts_with('-${flag.abbrev}='))
}
fn (mut flag Flag) parse_raw(args []string) ![]string {
@@ -247,7 +247,7 @@ fn (mut flag Flag) parse_raw(args []string) ![]string {
flag.value << args[1]
return args[2..]
}
- return error('Missing argument for `$flag.name`')
+ return error('Missing argument for `${flag.name}`')
}
fn (mut flag Flag) parse_bool(args []string) ![]string {
@@ -273,7 +273,7 @@ fn (flags []Flag) get(name string) !Flag {
return flag
}
}
- return error('Flag `$name` not found in $flags')
+ return error('Flag `${name}` not found in ${flags}')
}
fn (flags []Flag) contains(name string) bool {
diff --git a/vlib/cli/help.v b/vlib/cli/help.v
index e6437ca620..a41aa7891a 100644
--- a/vlib/cli/help.v
+++ b/vlib/cli/help.v
@@ -43,7 +43,7 @@ pub fn print_help_for_command(help_cmd Command) ! {
}
if !found {
args := help_cmd.args.join(' ')
- println('Invalid command: $args')
+ println('Invalid command: ${args}')
return
}
}
@@ -58,7 +58,7 @@ pub fn print_help_for_command(help_cmd Command) ! {
// help_message returns a generated help message as a `string` for the `Command`.
pub fn (cmd Command) help_message() string {
mut help := ''
- help += 'Usage: $cmd.full_name()'
+ help += 'Usage: ${cmd.full_name()}'
if cmd.flags.len > 0 {
help += ' [flags]'
}
@@ -66,15 +66,15 @@ pub fn (cmd Command) help_message() string {
help += ' [commands]'
}
if cmd.usage.len > 0 {
- help += ' $cmd.usage'
+ help += ' ${cmd.usage}'
} else {
for i in 0 .. cmd.required_args {
- help += ' '
+ help += ' '
}
}
help += '\n'
if cmd.description != '' {
- help += '\n$cmd.description\n'
+ help += '\n${cmd.description}\n'
}
mut abbrev_len := 0
mut name_len := cli.min_description_indent_len
@@ -106,10 +106,10 @@ pub fn (cmd Command) help_message() string {
prefix := if cmd.posix_mode { '--' } else { '-' }
if flag.abbrev != '' {
abbrev_indent := ' '.repeat(abbrev_len - flag.abbrev.len - 1) // - 1 for '-' in front
- flag_name = '-$flag.abbrev$abbrev_indent$prefix$flag.name'
+ flag_name = '-${flag.abbrev}${abbrev_indent}${prefix}${flag.name}'
} else {
abbrev_indent := ' '.repeat(abbrev_len)
- flag_name = '$abbrev_indent$prefix$flag.name'
+ flag_name = '${abbrev_indent}${prefix}${flag.name}'
}
mut required := ''
if flag.required {
@@ -117,7 +117,7 @@ pub fn (cmd Command) help_message() string {
}
base_indent := ' '.repeat(cli.base_indent_len)
description_indent := ' '.repeat(name_len - flag_name.len)
- help += '$base_indent$flag_name$description_indent' +
+ help += '${base_indent}${flag_name}${description_indent}' +
pretty_description(flag.description + required, cli.base_indent_len + name_len) +
'\n'
}
@@ -127,7 +127,7 @@ pub fn (cmd Command) help_message() string {
for command in cmd.commands {
base_indent := ' '.repeat(cli.base_indent_len)
description_indent := ' '.repeat(name_len - command.name.len)
- help += '$base_indent$command.name$description_indent' +
+ help += '${base_indent}${command.name}${description_indent}' +
pretty_description(command.description, name_len) + '\n'
}
}
@@ -148,7 +148,7 @@ fn pretty_description(s string, indent_len int) string {
mut acc := strings.new_builder(((s.len / chars_per_line) + 1) * (width + 1))
for k, line in s.split('\n') {
if k != 0 {
- acc.write_string('\n$indent')
+ acc.write_string('\n${indent}')
}
mut i := chars_per_line - 2
mut j := 0
diff --git a/vlib/cli/man.v b/vlib/cli/man.v
index ad490adea3..8f2a834303 100644
--- a/vlib/cli/man.v
+++ b/vlib/cli/man.v
@@ -35,7 +35,7 @@ pub fn print_manpage_for_command(man_cmd Command) ! {
}
if !found {
args := man_cmd.args.join(' ')
- println('Invalid command: $args')
+ println('Invalid command: ${args}')
return
}
}
@@ -52,9 +52,9 @@ pub fn print_manpage_for_command(man_cmd Command) ! {
pub fn (cmd Command) manpage() string {
mut mdoc := '.Dd ${time.now().strftime('%B %d, %Y')}\n'
mdoc += '.Dt ${cmd.full_name().replace(' ', '-').to_upper()} 1\n'
- mdoc += '.Os\n.Sh NAME\n.Nm ${cmd.full_name().replace(' ', '-')}\n.Nd $cmd.description\n'
+ mdoc += '.Os\n.Sh NAME\n.Nm ${cmd.full_name().replace(' ', '-')}\n.Nd ${cmd.description}\n'
mdoc += '.Sh SYNOPSIS\n'
- mdoc += '.Nm $cmd.root().name\n'
+ mdoc += '.Nm ${cmd.root().name}\n'
if unsafe { cmd.parent != 0 } {
mut parents := []Command{}
if !cmd.parent.is_root() {
@@ -68,20 +68,20 @@ pub fn (cmd Command) manpage() string {
}
}
for c in parents {
- mdoc += '.Ar $c.name\n'
+ mdoc += '.Ar ${c.name}\n'
}
}
- mdoc += '.Ar $cmd.name\n'
+ mdoc += '.Ar ${cmd.name}\n'
}
for flag in cmd.flags {
mdoc += '.Op'
if flag.abbrev != '' {
- mdoc += ' Fl $flag.abbrev'
+ mdoc += ' Fl ${flag.abbrev}'
} else {
if cmd.posix_mode {
- mdoc += ' Fl -$flag.name'
+ mdoc += ' Fl -${flag.name}'
} else {
- mdoc += ' Fl $flag.name'
+ mdoc += ' Fl ${flag.name}'
}
}
match flag.flag {
@@ -92,10 +92,10 @@ pub fn (cmd Command) manpage() string {
mdoc += '\n'
}
for i in 0 .. cmd.required_args {
- mdoc += '.Ar arg$i\n'
+ mdoc += '.Ar arg${i}\n'
}
if cmd.commands.len > 0 {
- mdoc += '.Nm $cmd.root().name\n'
+ mdoc += '.Nm ${cmd.root().name}\n'
if unsafe { cmd.parent != 0 } {
mut parents := []Command{}
if !cmd.parent.is_root() {
@@ -109,19 +109,19 @@ pub fn (cmd Command) manpage() string {
}
}
for c in parents {
- mdoc += '.Ar $c.name\n'
+ mdoc += '.Ar ${c.name}\n'
}
}
- mdoc += '.Ar $cmd.name\n'
+ mdoc += '.Ar ${cmd.name}\n'
}
mdoc += '.Ar subcommand\n'
}
mdoc += '.Sh DESCRIPTION\n'
if cmd.man_description != '' {
- mdoc += '$cmd.man_description\n'
+ mdoc += '${cmd.man_description}\n'
} else if cmd.description != '' {
- mdoc += '$cmd.description\n'
+ mdoc += '${cmd.description}\n'
}
if cmd.flags.len > 0 {
mdoc += '.Pp\nThe options are as follows:\n'
@@ -129,16 +129,16 @@ pub fn (cmd Command) manpage() string {
for flag in cmd.flags {
mdoc += '.It'
if flag.abbrev != '' {
- mdoc += ' Fl $flag.abbrev'
+ mdoc += ' Fl ${flag.abbrev}'
}
if cmd.posix_mode {
- mdoc += ' Fl -$flag.name'
+ mdoc += ' Fl -${flag.name}'
} else {
- mdoc += ' Fl $flag.name'
+ mdoc += ' Fl ${flag.name}'
}
mdoc += '\n'
if flag.description != '' {
- mdoc += '$flag.description\n'
+ mdoc += '${flag.description}\n'
}
}
mdoc += '.El\n'
@@ -147,9 +147,9 @@ pub fn (cmd Command) manpage() string {
mdoc += '.Pp\nThe subcommands are as follows:\n'
mdoc += '.Bl -tag -width indent\n'
for c in cmd.commands {
- mdoc += '.It Cm $c.name\n'
+ mdoc += '.It Cm ${c.name}\n'
if c.description != '' {
- mdoc += '$c.description\n'
+ mdoc += '${c.description}\n'
}
}
mdoc += '.El\n'
@@ -167,7 +167,7 @@ pub fn (cmd Command) manpage() string {
cmds.sort()
mut i := 1
for c in cmds {
- mdoc += '.Xr $c 1'
+ mdoc += '.Xr ${c} 1'
if i == cmds.len {
mdoc += '\n'
} else {
diff --git a/vlib/cli/version.v b/vlib/cli/version.v
index b6a7c62eb5..579bead89b 100644
--- a/vlib/cli/version.v
+++ b/vlib/cli/version.v
@@ -20,6 +20,6 @@ fn version_cmd() Command {
fn version_func(version_cmd Command) ! {
cmd := version_cmd.parent
- version := '$cmd.name version $cmd.version'
+ version := '${cmd.name} version ${cmd.version}'
println(version)
}
diff --git a/vlib/compress/compress.v b/vlib/compress/compress.v
index 78e13b4b9d..aa7351701b 100644
--- a/vlib/compress/compress.v
+++ b/vlib/compress/compress.v
@@ -13,7 +13,7 @@ fn C.tinfl_decompress_mem_to_heap(source_buf voidptr, source_buf_len usize, out_
[manualfree]
pub fn compress(data []u8, flags int) ![]u8 {
if u64(data.len) > compress.max_size {
- return error('data too large ($data.len > $compress.max_size)')
+ return error('data too large (${data.len} > ${compress.max_size})')
}
mut out_len := usize(0)
@@ -22,7 +22,7 @@ pub fn compress(data []u8, flags int) ![]u8 {
return error('compression failed')
}
if u64(out_len) > compress.max_size {
- return error('compressed data is too large ($out_len > $compress.max_size)')
+ return error('compressed data is too large (${out_len} > ${compress.max_size})')
}
return unsafe { address.vbytes(int(out_len)) }
}
@@ -38,7 +38,7 @@ pub fn decompress(data []u8, flags int) ![]u8 {
return error('decompression failed')
}
if u64(out_len) > compress.max_size {
- return error('decompressed data is too large ($out_len > $compress.max_size)')
+ return error('decompressed data is too large (${out_len} > ${compress.max_size})')
}
return unsafe { address.vbytes(int(out_len)) }
}
diff --git a/vlib/compress/gzip/gzip.v b/vlib/compress/gzip/gzip.v
index cc1fabc8a9..c8cfd25c04 100644
--- a/vlib/compress/gzip/gzip.v
+++ b/vlib/compress/gzip/gzip.v
@@ -110,7 +110,7 @@ pub fn decompress(data []u8, params DecompressParams) ![]u8 {
decompressed := compress.decompress(data[header_length..data.len - 8], 0)!
length_expected := (u32(data[data.len - 4]) << 24) | (u32(data[data.len - 3]) << 16) | (u32(data[data.len - 2]) << 8) | data[data.len - 1]
if params.verify_length && decompressed.len != length_expected {
- return error('length verification failed, got $decompressed.len, expected $length_expected')
+ return error('length verification failed, got ${decompressed.len}, expected ${length_expected}')
}
checksum := crc32.sum(decompressed)
checksum_expected := (u32(data[data.len - 8]) << 24) | (u32(data[data.len - 7]) << 16) | (u32(data[data.len - 6]) << 8) | data[data.len - 5]
diff --git a/vlib/context/README.md b/vlib/context/README.md
index 548eab3c60..f3ae603d43 100644
--- a/vlib/context/README.md
+++ b/vlib/context/README.md
@@ -73,7 +73,7 @@ fn main() {
ch := gen(mut ctx2)
for i in 0 .. 5 {
v := <-ch
- println('> received value: $v')
+ println('> received value: ${v}')
assert i == v
}
println('> main is finished here')
@@ -187,4 +187,4 @@ fn main() {
assert value == dump(f(ctx, key))
assert not_found_value == dump(f(ctx, 'color'))
}
-```
\ No newline at end of file
+```
diff --git a/vlib/crypto/README.md b/vlib/crypto/README.md
index 610f83fe54..32e8894352 100644
--- a/vlib/crypto/README.md
+++ b/vlib/crypto/README.md
@@ -23,7 +23,7 @@ import crypto.rand
fn main() {
// remember to save this key somewhere if you ever want to decrypt your data
key := rand.bytes(32)!
- println('KEY: $key')
+ println('KEY: ${key}')
// this data is one block (16 bytes) big
mut data := 'THIS IS THE DATA'.bytes()
@@ -70,17 +70,17 @@ fn main() {
token := make_token(secret)
ok := auth_verify(secret, token)
dt := sw.elapsed().microseconds()
- println('token: $token')
- println('auth_verify(secret, token): $ok')
- println('Elapsed time: $dt uS')
+ println('token: ${token}')
+ println('auth_verify(secret, token): ${ok}')
+ println('Elapsed time: ${dt} uS')
}
fn make_token(secret string) string {
header := base64.url_encode(json.encode(JwtHeader{'HS256', 'JWT'}).bytes())
payload := base64.url_encode(json.encode(JwtPayload{'1234567890', 'John Doe', 1516239022}).bytes())
- signature := base64.url_encode(hmac.new(secret.bytes(), '${header}.$payload'.bytes(),
+ signature := base64.url_encode(hmac.new(secret.bytes(), '${header}.${payload}'.bytes(),
sha256.sum, sha256.block_size))
- jwt := '${header}.${payload}.$signature'
+ jwt := '${header}.${payload}.${signature}'
return jwt
}
diff --git a/vlib/crypto/bcrypt/bcrypt.v b/vlib/crypto/bcrypt/bcrypt.v
index 82ad2875c9..519dc0229f 100644
--- a/vlib/crypto/bcrypt/bcrypt.v
+++ b/vlib/crypto/bcrypt/bcrypt.v
@@ -32,14 +32,14 @@ const magic_cipher_data = [u8(0x4f), 0x72, 0x70, 0x68, 0x65, 0x61, 0x6e, 0x42, 0
// generate_from_password return a bcrypt string from Hashed struct.
pub fn generate_from_password(password []u8, cost int) ?string {
- mut p := new_from_password(password, cost) or { return error('Error: $err') }
+ mut p := new_from_password(password, cost) or { return error('Error: ${err}') }
x := p.hash_u8()
return x.bytestr()
}
// compare_hash_and_password compares a bcrypt hashed password with its possible hashed version.
pub fn compare_hash_and_password(password []u8, hashed_password []u8) ? {
- mut p := new_from_hash(hashed_password) or { return error('Error: $err') }
+ mut p := new_from_hash(hashed_password) or { return error('Error: ${err}') }
p.salt << `=`
p.salt << `=`
other_hash := bcrypt(password, p.cost, p.salt) or { return error('err') }
@@ -169,7 +169,7 @@ fn (mut h Hashed) decode_version(sbytes []u8) ?int {
return error("bcrypt hashes must start with '$'")
}
if sbytes[1] != bcrypt.major_version[0] {
- return error('bcrypt algorithm version $bcrypt.major_version')
+ return error('bcrypt algorithm version ${bcrypt.major_version}')
}
h.major = sbytes[1].ascii_str()
mut n := 3
diff --git a/vlib/crypto/ed25519/ed25519.v b/vlib/crypto/ed25519/ed25519.v
index ba966705da..85943890e0 100644
--- a/vlib/crypto/ed25519/ed25519.v
+++ b/vlib/crypto/ed25519/ed25519.v
@@ -68,7 +68,7 @@ pub fn sign(privatekey PrivateKey, message []u8) ![]u8 {
fn sign_generic(mut signature []u8, privatekey []u8, message []u8) ! {
if privatekey.len != ed25519.private_key_size {
- panic('ed25519: bad private key length: $privatekey.len')
+ panic('ed25519: bad private key length: ${privatekey.len}')
}
seed, publickey := privatekey[..ed25519.seed_size], privatekey[ed25519.seed_size..]
@@ -110,7 +110,7 @@ fn sign_generic(mut signature []u8, privatekey []u8, message []u8) ! {
// verify reports whether sig is a valid signature of message by publickey.
pub fn verify(publickey PublicKey, message []u8, sig []u8) !bool {
if publickey.len != ed25519.public_key_size {
- return error('ed25519: bad public key length: $publickey.len')
+ return error('ed25519: bad public key length: ${publickey.len}')
}
if sig.len != ed25519.signature_size || sig[63] & 224 != 0 {
@@ -165,7 +165,7 @@ pub fn new_key_from_seed(seed []u8) PrivateKey {
fn new_key_from_seed_generic(mut privatekey []u8, seed []u8) {
if seed.len != ed25519.seed_size {
- panic('ed25519: bad seed length: $seed.len')
+ panic('ed25519: bad seed length: ${seed.len}')
}
mut h := sha512.sum512(seed)
diff --git a/vlib/crypto/ed25519/examples/example.v b/vlib/crypto/ed25519/examples/example.v
index 1f6d852c40..0343d64cb1 100644
--- a/vlib/crypto/ed25519/examples/example.v
+++ b/vlib/crypto/ed25519/examples/example.v
@@ -15,14 +15,14 @@ fn main() {
sig := ed25519.sign(priv, m)!
println('=== Message ===')
- println('Msg: $msg \nHash: $m')
+ println('Msg: ${msg} \nHash: ${m}')
println('=== Public key ===')
println('Public key (Hex): ${hex.encode(publ)}')
println(' Public key (Base64): ${base64.encode(publ)}')
println('=== Private key ===')
- println('Private key: $priv.seed().hex()') // priv[0:32]
+ println('Private key: ${priv.seed().hex()}') // priv[0:32]
println(' Private key (Base64): ${base64.encode(priv.seed())}') // priv[0:32]
println(' Private key (Base64) Full key: ${base64.encode(priv)}')
println(' Private key (Full key in Hex): ${hex.encode(priv)}')
@@ -34,7 +34,7 @@ fn main() {
rtn := ed25519.verify(publ, m, sig)!
if rtn {
- println('Signature verified :$rtn')
+ println('Signature verified :${rtn}')
} else {
println('signature does not verify :${!rtn}')
}
diff --git a/vlib/crypto/rand/rand_linux.c.v b/vlib/crypto/rand/rand_linux.c.v
index fafacd6094..e6393931b9 100644
--- a/vlib/crypto/rand/rand_linux.c.v
+++ b/vlib/crypto/rand/rand_linux.c.v
@@ -33,7 +33,7 @@ pub fn read(bytes_needed int) ![]u8 {
fn getrandom(bytes_needed int, buffer voidptr) int {
if bytes_needed > rand.read_batch_size {
- panic('getrandom() dont request more than $rand.read_batch_size bytes at once.')
+ panic('getrandom() dont request more than ${rand.read_batch_size} bytes at once.')
}
return unsafe { C.syscall(C.SYS_getrandom, buffer, bytes_needed, 0) }
}
diff --git a/vlib/crypto/rand/rand_solaris.c.v b/vlib/crypto/rand/rand_solaris.c.v
index 31bf6bf991..53ab02cb0f 100644
--- a/vlib/crypto/rand/rand_solaris.c.v
+++ b/vlib/crypto/rand/rand_solaris.c.v
@@ -36,7 +36,7 @@ pub fn read(bytes_needed int) ![]u8 {
fn v_getrandom(bytes_needed int, buffer voidptr) int {
if bytes_needed > rand.read_batch_size {
- panic('getrandom() dont request more than $rand.read_batch_size bytes at once.')
+ panic('getrandom() dont request more than ${rand.read_batch_size} bytes at once.')
}
return C.getrandom(buffer, bytes_needed, 0)
}
diff --git a/vlib/datatypes/fsm/fsm.v b/vlib/datatypes/fsm/fsm.v
index 10506b4612..700e50d84a 100644
--- a/vlib/datatypes/fsm/fsm.v
+++ b/vlib/datatypes/fsm/fsm.v
@@ -32,7 +32,7 @@ pub fn (mut s StateMachine) set_state(name string) ? {
if name in s.states {
s.current_state = name
}
- return error('unknown state: $name')
+ return error('unknown state: ${name}')
}
pub fn (mut s StateMachine) get_state() string {
diff --git a/vlib/datatypes/fsm/tools/fsm_graph.v b/vlib/datatypes/fsm/tools/fsm_graph.v
index 63ffc17f1f..897ae9a4c8 100644
--- a/vlib/datatypes/fsm/tools/fsm_graph.v
+++ b/vlib/datatypes/fsm/tools/fsm_graph.v
@@ -4,7 +4,8 @@ import flag
pub fn read_file(file string) ?[]string {
if os.is_file(file) {
text := os.read_lines(file) or {
- return error(@MOD + '.' + @STRUCT + '.' + @FN + ' Could not read "$file": "$err.msg()"')
+ return error(@MOD + '.' + @STRUCT + '.' + @FN +
+ ' Could not read "${file}": "${err.msg()}"')
}
return text
}
diff --git a/vlib/dl/dl.v b/vlib/dl/dl.v
index 3035fe8cdc..307a5bb051 100644
--- a/vlib/dl/dl.v
+++ b/vlib/dl/dl.v
@@ -23,7 +23,7 @@ pub fn get_shared_library_extension() string {
// shared libraries.
[inline]
pub fn get_libname(libname string) string {
- return '$libname$dl.dl_ext'
+ return '${libname}${dl.dl_ext}'
}
// open_opt - loads the dynamic shared object.
diff --git a/vlib/dl/dl_windows.c.v b/vlib/dl/dl_windows.c.v
index 8f3156fbcb..0a396387d9 100644
--- a/vlib/dl/dl_windows.c.v
+++ b/vlib/dl/dl_windows.c.v
@@ -34,5 +34,5 @@ pub fn dlerror() string {
// https://docs.microsoft.com/en-us/windows/win32/api/errhandlingapi/nf-errhandlingapi-getlasterror
// Unlike dlerror(), GetLastError returns just an error code, that is function specific.
cerr := int(C.GetLastError())
- return 'error code $cerr'
+ return 'error code ${cerr}'
}
diff --git a/vlib/encoding/base32/base32.v b/vlib/encoding/base32/base32.v
index de3d17caad..69113ecd96 100644
--- a/vlib/encoding/base32/base32.v
+++ b/vlib/encoding/base32/base32.v
@@ -331,5 +331,5 @@ fn strip_newlines(src []u8) []u8 {
fn corrupt_input_error_msg(e int) string {
// return error('illegal base32 data at input byte ' + strconv.FormatInt(int64(e), 10)
- return 'illegal base32 data at input byte $e'
+ return 'illegal base32 data at input byte ${e}'
}
diff --git a/vlib/encoding/base32/base32_test.v b/vlib/encoding/base32/base32_test.v
index f29d8760dd..c79f8a2e31 100644
--- a/vlib/encoding/base32/base32_test.v
+++ b/vlib/encoding/base32/base32_test.v
@@ -8,7 +8,7 @@ fn test_encode_and_decode() {
encoded := base32.encode_string_to_string(input)
assert encoded == 'NBSWY3DPEB3A===='
- decoded := base32.decode_string_to_string(encoded) or { panic('error decoding: $err') }
+ decoded := base32.decode_string_to_string(encoded) or { panic('error decoding: ${err}') }
assert decoded == input
encoder_no_padding := base32.new_std_encoding_with_padding(base32.no_padding)
@@ -16,7 +16,7 @@ fn test_encode_and_decode() {
assert encoded2 == 'NBSWY3DPEB3A'
decoded2 := encoder_no_padding.decode_string_to_string(encoded2) or {
- panic('error decoding: $err')
+ panic('error decoding: ${err}')
}
assert decoded2 == input
}
diff --git a/vlib/encoding/base58/base58.v b/vlib/encoding/base58/base58.v
index a37d8e3994..b0a05ec4c1 100644
--- a/vlib/encoding/base58/base58.v
+++ b/vlib/encoding/base58/base58.v
@@ -164,10 +164,10 @@ pub fn decode_walpha_bytes(input []u8, alphabet Alphabet) ![]u8 {
for _, r in input {
if r > 127 {
panic(@MOD + '.' + @FN +
- ': high-bit set on invalid digit; outside of ascii range ($r). This should never happen.')
+ ': high-bit set on invalid digit; outside of ascii range (${r}). This should never happen.')
}
if alphabet.decode[r] == -1 {
- return error(@MOD + '.' + @FN + ': invalid base58 digit ($r)')
+ return error(@MOD + '.' + @FN + ': invalid base58 digit (${r})')
}
c = u64(alphabet.decode[r])
diff --git a/vlib/encoding/base58/base58_test.v b/vlib/encoding/base58/base58_test.v
index 23fde835c6..a70851cd8e 100644
--- a/vlib/encoding/base58/base58_test.v
+++ b/vlib/encoding/base58/base58_test.v
@@ -57,26 +57,26 @@ fn test_fails() ! {
a := -238
b := 0
if z := encode_int(a) {
- return error(@MOD + '.' + @FN + ': expected encode_int to fail, got $z')
+ return error(@MOD + '.' + @FN + ': expected encode_int to fail, got ${z}')
}
if z := encode_int(b) {
- return error(@MOD + '.' + @FN + ': expected encode_int to fail, got $z')
+ return error(@MOD + '.' + @FN + ': expected encode_int to fail, got ${z}')
}
c := '!'
if z := decode_int(c) {
- return error(@MOD + '.' + @FN + ': expected decode_int to fail, got $z')
+ return error(@MOD + '.' + @FN + ': expected decode_int to fail, got ${z}')
}
if z := decode(c) {
- return error(@MOD + '.' + @FN + ': expected decode to fail, got $z')
+ return error(@MOD + '.' + @FN + ': expected decode to fail, got ${z}')
}
// repeating character
if abc := new_alphabet('aaaaafghij\$lmnopqrstuvwxyz0123456789_ABCDEFGHIJLMNOPQRSTUV') {
- return error(@MOD + '.' + @FN + ': expected new_alphabet to fail, got $abc')
+ return error(@MOD + '.' + @FN + ': expected new_alphabet to fail, got ${abc}')
}
// more than 58 characters long
if abc := new_alphabet('abcdefghij\$lmnopqrstuvwxyz0123456789_ABCDEFGHIJLMNOPQRSTUVWXYZ') {
- return error(@MOD + '.' + @FN + ': expected new_alphabet to fail, got $abc')
+ return error(@MOD + '.' + @FN + ': expected new_alphabet to fail, got ${abc}')
}
}
diff --git a/vlib/encoding/base58/base58_usage_test.v b/vlib/encoding/base58/base58_usage_test.v
index 14af47a09a..3df0a8c811 100644
--- a/vlib/encoding/base58/base58_usage_test.v
+++ b/vlib/encoding/base58/base58_usage_test.v
@@ -9,7 +9,7 @@ fn test_encode() {
'\x00\x00hello world': '11StV1DL6CwTryKyV'
} {
output := base58.encode(input)
- println('> input: `$input` | $input.bytes().hex() | => output: `$output`')
+ println('> input: `${input}` | ${input.bytes().hex()} | => output: `${output}`')
assert output == expected
}
}
@@ -22,7 +22,7 @@ fn test_encode_int() {
0x636363: 'aPEr'
} {
output := base58.encode_int(input)!
- println('> input: 0x${input:x} | => output: `$output`')
+ println('> input: 0x${input:x} | => output: `${output}`')
assert output == expected
}
}
@@ -36,7 +36,7 @@ fn test_decode() {
'3vQB7B6MrGQZaxCuFg4oh': hex.decode('68656c6c6f20776f726c64bc62d4b8')?.bytestr()
} {
input := base58.decode(output)!
- println('> output: `$output` | decoded input: `$input` | bytes: $input.bytes().hex()')
+ println('> output: `${output}` | decoded input: `${input}` | bytes: ${input.bytes().hex()}')
assert input.bytes().hex() == expected.bytes().hex()
}
}
diff --git a/vlib/encoding/base64/base64_memory_test.v b/vlib/encoding/base64/base64_memory_test.v
index a6058706a2..959ab249ed 100644
--- a/vlib/encoding/base64/base64_memory_test.v
+++ b/vlib/encoding/base64/base64_memory_test.v
@@ -54,6 +54,6 @@ fn test_long_encoding() {
assert resultsize == s_decoded.len
}
- println('Final s: $s')
+ println('Final s: ${s}')
// assert s == 39147008
}
diff --git a/vlib/encoding/base64/base64_test.v b/vlib/encoding/base64/base64_test.v
index bb0bbd2705..19f50483ce 100644
--- a/vlib/encoding/base64/base64_test.v
+++ b/vlib/encoding/base64/base64_test.v
@@ -44,7 +44,7 @@ fn test_decode() {
for i, p in pairs {
got := base64.decode(p.encoded)
if got != p.decoded.bytes() {
- eprintln('pairs[$i]: expected = $p.decoded, got = $got')
+ eprintln('pairs[${i}]: expected = ${p.decoded}, got = ${got}')
assert false
}
}
@@ -61,7 +61,7 @@ fn test_decode_str() {
for i, p in pairs {
got := base64.decode_str(p.encoded)
if got != p.decoded {
- eprintln('pairs[$i]: expected = $p.decoded, got = $got')
+ eprintln('pairs[${i}]: expected = ${p.decoded}, got = ${got}')
assert false
}
}
@@ -73,7 +73,7 @@ fn test_encode() {
for i, p in pairs {
got := base64.encode(p.decoded.bytes())
if got != p.encoded {
- eprintln('pairs[$i]: expected = $p.encoded, got = $got')
+ eprintln('pairs[${i}]: expected = ${p.encoded}, got = ${got}')
assert false
}
}
@@ -85,7 +85,7 @@ fn test_encode_str() {
for i, p in pairs {
got := base64.encode_str(p.decoded)
if got != p.encoded {
- eprintln('pairs[$i]: expected = $p.encoded, got = $got')
+ eprintln('pairs[${i}]: expected = ${p.encoded}, got = ${got}')
assert false
}
}
diff --git a/vlib/encoding/hex/hex.v b/vlib/encoding/hex/hex.v
index 943b95860d..9040f8bc8e 100644
--- a/vlib/encoding/hex/hex.v
+++ b/vlib/encoding/hex/hex.v
@@ -57,6 +57,6 @@ fn char2nibble(b u8) !u8 {
`0`...`9` { return b - u8(`0`) }
`A`...`F` { return b - u8(`A`) + 10 }
`a`...`f` { return b - u8(`a`) + 10 }
- else { return error('invalid hex char $b.ascii_str()') }
+ else { return error('invalid hex char ${b.ascii_str()}') }
}
}
diff --git a/vlib/encoding/hex/hex_test.v b/vlib/encoding/hex/hex_test.v
index 769a36199c..b33240f97f 100644
--- a/vlib/encoding/hex/hex_test.v
+++ b/vlib/encoding/hex/hex_test.v
@@ -15,19 +15,19 @@ fn test_decode() {
fn test_decode_fails() ! {
if x := decode('foo') {
- return error('expected decode to fail, got $x')
+ return error('expected decode to fail, got ${x}')
}
if x := decode('g') {
- return error('expected decode to fail, got $x')
+ return error('expected decode to fail, got ${x}')
}
if x := decode('000000000g') {
- return error('expected decode to fail, got $x')
+ return error('expected decode to fail, got ${x}')
}
if x := decode('_') {
- return error('expected decode to fail, got $x')
+ return error('expected decode to fail, got ${x}')
}
if x := decode('!') {
- return error('expected decode to fail, got $x')
+ return error('expected decode to fail, got ${x}')
}
}
diff --git a/vlib/eventbus/README.md b/vlib/eventbus/README.md
index 93047db536..f71ea93731 100644
--- a/vlib/eventbus/README.md
+++ b/vlib/eventbus/README.md
@@ -76,7 +76,7 @@ fn main() {
// the event handler
fn on_error(receiver voidptr, e &Error, work &Work) {
- println('error occured on ${work.hours}. Error: $e.message')
+ println('error occured on ${work.hours}. Error: ${e.message}')
}
```
diff --git a/vlib/flag/README.md b/vlib/flag/README.md
index 1f221245ec..444cbf60de 100644
--- a/vlib/flag/README.md
+++ b/vlib/flag/README.md
@@ -37,7 +37,7 @@ fn main() {
println(fp.usage())
return
}
- println('an_int: $an_int | a_bool: $a_bool | a_float: $a_float | a_string: "$a_string" ')
+ println('an_int: ${an_int} | a_bool: ${a_bool} | a_float: ${a_float} | a_string: "${a_string}" ')
println(additional_args.join_lines())
}
```
diff --git a/vlib/flag/default_flag_options_test.v b/vlib/flag/default_flag_options_test.v
index 6efcec929b..d6c953528e 100644
--- a/vlib/flag/default_flag_options_test.v
+++ b/vlib/flag/default_flag_options_test.v
@@ -19,7 +19,7 @@ fn testsuite_end() {
fn check_program(opts string, extension string) {
result := source.replace('.v', extension)
- res := os.execute('${os.quoted_path(simple_flag_app_executable)} $opts')
+ res := os.execute('${os.quoted_path(simple_flag_app_executable)} ${opts}')
lines := os.read_lines(result) or { panic(err) }
assert res.exit_code == 0
assert res.output.split_into_lines() == lines
diff --git a/vlib/flag/flag.v b/vlib/flag/flag.v
index 967d43c529..22b85009cb 100644
--- a/vlib/flag/flag.v
+++ b/vlib/flag/flag.v
@@ -16,7 +16,7 @@ struct UnkownFlagError {
}
fn (err UnkownFlagError) msg() string {
- return 'Unknown flag `$err.flag`'
+ return 'Unknown flag `${err.flag}`'
}
struct ArgsCountError {
@@ -27,11 +27,11 @@ struct ArgsCountError {
fn (err ArgsCountError) msg() string {
if err.want == 0 {
- return 'Expected no arguments, but got $err.got'
+ return 'Expected no arguments, but got ${err.got}'
} else if err.got > err.want {
- return 'Expected at most $err.want arguments, but got $err.got'
+ return 'Expected at most ${err.want} arguments, but got ${err.got}'
} else {
- return 'Expected at least $err.want arguments, but got $err.got'
+ return 'Expected at least ${err.want} arguments, but got ${err.got}'
}
}
@@ -51,9 +51,9 @@ fn (mut f Flag) free() {
// str returns a string representation of the given Flag
pub fn (f Flag) str() string {
- return '' + ' flag:\n' + ' name: $f.name\n' +
- ' abbr: `$f.abbr.ascii_str()`\n' + ' usag: $f.usage\n' +
- ' desc: $f.val_desc'
+ return '' + ' flag:\n' + ' name: ${f.name}\n' +
+ ' abbr: `${f.abbr.ascii_str()}`\n' + ' usag: ${f.usage}\n' +
+ ' desc: ${f.val_desc}'
}
// str returns a string representation of the given array of Flags
@@ -181,7 +181,7 @@ pub fn (mut fs FlagParser) description(desc string) {
if fs.application_description.len == 0 {
fs.application_description = desc
} else {
- fs.application_description += '\n$desc'
+ fs.application_description += '\n${desc}'
}
}
@@ -220,7 +220,7 @@ fn (mut fs FlagParser) add_flag(name string, abbr u8, usage string, desc string)
// - found arguments and corresponding values are removed from args list
[manualfree]
fn (mut fs FlagParser) parse_value(longhand string, shorthand u8) []string {
- full := '--$longhand'
+ full := '--${longhand}'
defer {
unsafe { full.free() }
}
@@ -259,7 +259,7 @@ fn (mut fs FlagParser) parse_value(longhand string, shorthand u8) []string {
should_skip_one = true
continue
}
- if arg.len > full.len + 1 && arg[..full.len + 1] == '$full=' {
+ if arg.len > full.len + 1 && arg[..full.len + 1] == '${full}=' {
found_entries << arg[full.len + 1..]
to_delete << i
continue
@@ -280,7 +280,7 @@ fn (mut fs FlagParser) parse_value(longhand string, shorthand u8) []string {
// -> '--flag' is equal to '--flag=true'
fn (mut fs FlagParser) parse_bool_value(longhand string, shorthand u8) !string {
{
- full := '--$longhand'
+ full := '--${longhand}'
for i, arg in fs.args {
if arg.len == 0 {
continue
@@ -299,7 +299,7 @@ fn (mut fs FlagParser) parse_bool_value(longhand string, shorthand u8) !string {
return 'true'
}
}
- if arg.len > full.len + 1 && arg[..full.len + 1] == '$full=' {
+ if arg.len > full.len + 1 && arg[..full.len + 1] == '${full}=' {
// Flag abc=true
val := arg[full.len + 1..]
fs.args.delete(i)
@@ -311,7 +311,7 @@ fn (mut fs FlagParser) parse_bool_value(longhand string, shorthand u8) !string {
}
}
}
- return error("parameter '$longhand' not found")
+ return error("parameter '${longhand}' not found")
}
// bool_opt returns an option with the bool value of the given command line flag, named `name`.
@@ -322,7 +322,7 @@ pub fn (mut fs FlagParser) bool_opt(name string, abbr u8, usage string) !bool {
{
fs.add_flag(name, abbr, usage, '')
parsed := fs.parse_bool_value(name, abbr) or {
- return error("parameter '$name' not provided")
+ return error("parameter '${name}' not provided")
}
res = parsed == 'true'
}
@@ -360,7 +360,7 @@ pub fn (mut fs FlagParser) int_opt(name string, abbr u8, usage string) !int {
fs.add_flag(name, abbr, usage, '')
parsed := fs.parse_value(name, abbr)
if parsed.len == 0 {
- return error("parameter '$name' not provided")
+ return error("parameter '${name}' not provided")
}
parsed0 := parsed[0]
res = parsed0.int()
@@ -399,7 +399,7 @@ pub fn (mut fs FlagParser) float_opt(name string, abbr u8, usage string) !f64 {
fs.add_flag(name, abbr, usage, '')
parsed := fs.parse_value(name, abbr)
if parsed.len == 0 {
- return error("parameter '$name' not provided")
+ return error("parameter '${name}' not provided")
}
res = parsed[0].f64()
}
@@ -432,7 +432,7 @@ pub fn (mut fs FlagParser) string_opt(name string, abbr u8, usage string) !strin
fs.add_flag(name, abbr, usage, '')
parsed := fs.parse_value(name, abbr)
if parsed.len == 0 {
- return error("parameter '$name' not provided")
+ return error("parameter '${name}' not provided")
}
res = parsed[0]
}
@@ -453,7 +453,7 @@ pub fn (mut fs FlagParser) string(name string, abbr u8, sdefault string, usage s
// the parser will return an error.
pub fn (mut fs FlagParser) limit_free_args_to_at_least(n int) ! {
if n > flag.max_args_number {
- return error('flag.limit_free_args_to_at_least expect n to be smaller than $flag.max_args_number')
+ return error('flag.limit_free_args_to_at_least expect n to be smaller than ${flag.max_args_number}')
}
if n <= 0 {
return error('flag.limit_free_args_to_at_least expect n to be a positive number')
@@ -466,7 +466,7 @@ pub fn (mut fs FlagParser) limit_free_args_to_at_least(n int) ! {
// the parser will return an error.
pub fn (mut fs FlagParser) limit_free_args_to_exactly(n int) ! {
if n > flag.max_args_number {
- return error('flag.limit_free_args_to_exactly expect n to be smaller than $flag.max_args_number')
+ return error('flag.limit_free_args_to_exactly expect n to be smaller than ${flag.max_args_number}')
}
if n < 0 {
return error('flag.limit_free_args_to_exactly expect n to be a non negative number')
@@ -480,7 +480,7 @@ pub fn (mut fs FlagParser) limit_free_args_to_exactly(n int) ! {
// the parser will return an error.
pub fn (mut fs FlagParser) limit_free_args(min int, max int) ! {
if min > max {
- return error('flag.limit_free_args expect min < max, got $min >= $max')
+ return error('flag.limit_free_args expect min < max, got ${min} >= ${max}')
}
fs.min_free_args = min
fs.max_free_args = max
@@ -505,23 +505,23 @@ pub fn (fs FlagParser) usage() string {
}
mut use := []string{}
if fs.application_version != '' {
- use << '$fs.application_name $fs.application_version'
- use << '$flag.underline'
+ use << '${fs.application_name} ${fs.application_version}'
+ use << '${flag.underline}'
}
if fs.usage_examples.len == 0 {
- use << 'Usage: $fs.application_name [options] $adesc'
+ use << 'Usage: ${fs.application_name} [options] ${adesc}'
} else {
for i, example in fs.usage_examples {
if i == 0 {
- use << 'Usage: $fs.application_name $example'
+ use << 'Usage: ${fs.application_name} ${example}'
} else {
- use << ' or: $fs.application_name $example'
+ use << ' or: ${fs.application_name} ${example}'
}
}
}
use << ''
if fs.application_description != '' {
- use << 'Description: $fs.application_description'
+ use << 'Description: ${fs.application_description}'
use << ''
}
// show a message about the [ARGS]:
@@ -532,16 +532,16 @@ pub fn (fs FlagParser) usage() string {
} else {
mut s := []string{}
if positive_min_arg {
- s << 'at least $fs.min_free_args'
+ s << 'at least ${fs.min_free_args}'
}
if positive_max_arg {
- s << 'at most $fs.max_free_args'
+ s << 'at most ${fs.max_free_args}'
}
if positive_min_arg && positive_max_arg && fs.min_free_args == fs.max_free_args {
- s = ['exactly $fs.min_free_args']
+ s = ['exactly ${fs.min_free_args}']
}
sargs := s.join(' and ')
- use << 'The arguments should be $sargs in number.'
+ use << 'The arguments should be ${sargs} in number.'
use << ''
}
}
@@ -550,23 +550,23 @@ pub fn (fs FlagParser) usage() string {
for f in fs.flags {
mut onames := []string{}
if f.abbr != 0 {
- onames << '-$f.abbr.ascii_str()'
+ onames << '-${f.abbr.ascii_str()}'
}
if f.name != '' {
if !f.val_desc.contains('') {
- onames << '--$f.name $f.val_desc'
+ onames << '--${f.name} ${f.val_desc}'
} else {
- onames << '--$f.name'
+ onames << '--${f.name}'
}
}
option_names := ' ' + onames.join(', ')
mut xspace := ''
if option_names.len > flag.space.len - 2 {
- xspace = '\n$flag.space'
+ xspace = '\n${flag.space}'
} else {
xspace = flag.space[option_names.len..]
}
- fdesc := '$option_names$xspace$f.usage'
+ fdesc := '${option_names}${xspace}${f.usage}'
use << fdesc
}
}
@@ -604,7 +604,7 @@ fn (mut fs FlagParser) handle_builtin_options() {
exit(0)
}
if show_version {
- println('$fs.application_name $fs.application_version')
+ println('${fs.application_name} ${fs.application_version}')
exit(0)
}
}
diff --git a/vlib/flag/flag_test.v b/vlib/flag/flag_test.v
index 1c3ac7af26..4f060567c5 100644
--- a/vlib/flag/flag_test.v
+++ b/vlib/flag/flag_test.v
@@ -172,7 +172,7 @@ fn test_allow_to_build_usage_message() {
'The arguments should be at least 1 and at most 4 in number.', 'Usage', 'Options:',
'Description:', 'some short information about this tool'] {
if !usage.contains(s) {
- eprintln(" missing '$s' in usage message")
+ eprintln(" missing '${s}' in usage message")
all_strings_found = false
}
}
diff --git a/vlib/flag/usage_example_test.v b/vlib/flag/usage_example_test.v
index 5e63e6a846..4e7651d7a4 100644
--- a/vlib/flag/usage_example_test.v
+++ b/vlib/flag/usage_example_test.v
@@ -21,7 +21,7 @@ fn normalise_lines(lines []string) string {
fn check_program(opts string, extension string) {
result := the_source.replace('.v', extension)
- res := os.execute('${os.quoted_path(the_executable)} $opts')
+ res := os.execute('${os.quoted_path(the_executable)} ${opts}')
assert res.exit_code == 0
assert normalise_lines(res.output.split_into_lines()) == normalise_lines(os.read_lines(result) or {
panic(err)
diff --git a/vlib/gg/draw_fns_api_test.v b/vlib/gg/draw_fns_api_test.v
index 69d76d48a7..1ab2948614 100644
--- a/vlib/gg/draw_fns_api_test.v
+++ b/vlib/gg/draw_fns_api_test.v
@@ -3,22 +3,22 @@ import os
fn test_all_samples_can_be_compiled() {
vexe := @VEXE
vroot := os.dir(vexe)
- samples := os.walk_ext('$vroot/vlib/gg/testdata', '.vv')
+ samples := os.walk_ext('${vroot}/vlib/gg/testdata', '.vv')
mut fails := []string{}
for program_source in samples {
compile_cmd := '${os.quoted_path(vexe)} ${os.quoted_path(program_source)}'
res := os.execute(compile_cmd)
if res.exit_code != 0 {
- eprintln('>>> FAIL $compile_cmd')
+ eprintln('>>> FAIL ${compile_cmd}')
eprintln(res.output)
fails << compile_cmd
}
- println('OK $compile_cmd')
+ println('OK ${compile_cmd}')
}
if fails.len > 0 {
eprintln('> Failed summary:')
for f in fails {
- eprintln(' failed cmd: $f')
+ eprintln(' failed cmd: ${f}')
}
assert false
}
diff --git a/vlib/gg/gg.c.v b/vlib/gg/gg.c.v
index ad8f05870e..9f3d2d14e7 100644
--- a/vlib/gg/gg.c.v
+++ b/vlib/gg/gg.c.v
@@ -240,7 +240,7 @@ fn gg_init_sokol_window(user_data voidptr) {
} else {
sfont := font.default()
if ctx.config.font_path != '' {
- eprintln('font file "$ctx.config.font_path" does not exist, the system font ($sfont) was used instead.')
+ eprintln('font file "${ctx.config.font_path}" does not exist, the system font (${sfont}) was used instead.')
}
ctx.ft = new_ft(
@@ -441,7 +441,7 @@ fn gg_fail_fn(msg &char, user_data voidptr) {
if ctx.config.fail_fn != unsafe { nil } {
ctx.config.fail_fn(vmsg, ctx.config.user_data)
} else {
- eprintln('gg error: $vmsg')
+ eprintln('gg error: ${vmsg}')
}
}
diff --git a/vlib/gg/image.c.v b/vlib/gg/image.c.v
index 39db8f0b6b..ac6cc2dd0a 100644
--- a/vlib/gg/image.c.v
+++ b/vlib/gg/image.c.v
@@ -98,7 +98,7 @@ pub fn (mut img Image) init_sokol_image() &Image {
pub fn (ctx &Context) draw_image(x f32, y f32, width f32, height f32, img_ &Image) {
$if macos {
if img_.id >= ctx.image_cache.len {
- eprintln('gg: draw_image() bad img id $img_.id (img cache len = $ctx.image_cache.len)')
+ eprintln('gg: draw_image() bad img id ${img_.id} (img cache len = ${ctx.image_cache.len})')
return
}
if ctx.native_rendering {
@@ -203,7 +203,7 @@ pub fn (mut ctx Context) create_image_with_size(file string, width int, height i
// TODO remove this
fn create_image(file string) Image {
if !os.exists(file) {
- println('gg.create_image(): file not found: $file')
+ println('gg.create_image(): file not found: ${file}')
return Image{} // none
}
stb_img := stbi.load(file) or { return Image{} }
@@ -262,7 +262,7 @@ pub struct StreamingImageConfig {
pub fn (ctx &Context) draw_image_with_config(config DrawImageConfig) {
id := if !isnil(config.img) { config.img.id } else { config.img_id }
if id >= ctx.image_cache.len {
- eprintln('gg: draw_image() bad img id $id (img cache len = $ctx.image_cache.len)')
+ eprintln('gg: draw_image() bad img id ${id} (img cache len = ${ctx.image_cache.len})')
return
}
diff --git a/vlib/gg/recorder.c.v b/vlib/gg/recorder.c.v
index 1d5cff595c..0b086d6d1e 100644
--- a/vlib/gg/recorder.c.v
+++ b/vlib/gg/recorder.c.v
@@ -8,16 +8,16 @@ import os
[if gg_record ?]
pub fn (mut ctx Context) record_frame() {
if ctx.frame in gg.recorder_settings.screenshot_frames {
- screenshot_file_path := '$gg.recorder_settings.screenshot_prefix${ctx.frame}.png'
+ screenshot_file_path := '${gg.recorder_settings.screenshot_prefix}${ctx.frame}.png'
$if gg_record_trace ? {
- eprintln('>>> screenshoting $screenshot_file_path')
+ eprintln('>>> screenshoting ${screenshot_file_path}')
}
sapp.screenshot_png(screenshot_file_path) or { panic(err) }
}
if ctx.frame == gg.recorder_settings.stop_at_frame {
$if gg_record_trace ? {
- eprintln('>>> exiting at frame $ctx.frame')
+ eprintln('>>> exiting at frame ${ctx.frame}')
}
exit(0)
}
diff --git a/vlib/gg/text_rendering.c.v b/vlib/gg/text_rendering.c.v
index 406f54bae6..01cf4666b2 100644
--- a/vlib/gg/text_rendering.c.v
+++ b/vlib/gg/text_rendering.c.v
@@ -58,7 +58,7 @@ fn new_ft(c FTConfig) ?&FT {
if c.font_path == '' || !os.exists(c.font_path) {
$if !android {
- println('failed to load font "$c.font_path"')
+ println('failed to load font "${c.font_path}"')
return none
}
}
@@ -71,13 +71,13 @@ fn new_ft(c FTConfig) ?&FT {
if bytes.len == 0 {
// ... then try the APK asset path
bytes = os.read_apk_asset(c.font_path) or {
- println('failed to load font "$c.font_path"')
+ println('failed to load font "${c.font_path}"')
return none
}
}
} $else {
bytes = os.read_bytes(c.font_path) or {
- println('failed to load font "$c.font_path"')
+ println('failed to load font "${c.font_path}"')
return none
}
}
@@ -87,27 +87,27 @@ fn new_ft(c FTConfig) ?&FT {
font.get_path_variant(c.font_path, .bold)
}
bytes_bold := os.read_bytes(bold_path) or {
- debug_font_println('failed to load font "$bold_path"')
+ debug_font_println('failed to load font "${bold_path}"')
bold_path = c.font_path
bytes
}
mut mono_path := font.get_path_variant(c.font_path, .mono)
bytes_mono := os.read_bytes(mono_path) or {
- debug_font_println('failed to load font "$mono_path"')
+ debug_font_println('failed to load font "${mono_path}"')
mono_path = c.font_path
bytes
}
mut italic_path := font.get_path_variant(c.font_path, .italic)
bytes_italic := os.read_bytes(italic_path) or {
- debug_font_println('failed to load font "$italic_path"')
+ debug_font_println('failed to load font "${italic_path}"')
italic_path = c.font_path
bytes
}
fons := sfons.create(512, 512, 1)
- debug_font_println('Font used for font_normal : $normal_path')
- debug_font_println('Font used for font_bold : $bold_path')
- debug_font_println('Font used for font_mono : $mono_path')
- debug_font_println('Font used for font_italic : $italic_path')
+ debug_font_println('Font used for font_normal : ${normal_path}')
+ debug_font_println('Font used for font_bold : ${bold_path}')
+ debug_font_println('Font used for font_mono : ${mono_path}')
+ debug_font_println('Font used for font_italic : ${italic_path}')
return &FT{
fons: fons
font_normal: fons.add_font_mem('sans', bytes, false)
diff --git a/vlib/gx/color.v b/vlib/gx/color.v
index a2fa9e7f8a..783fa5c50d 100644
--- a/vlib/gx/color.v
+++ b/vlib/gx/color.v
@@ -248,7 +248,7 @@ pub fn (c Color) eq(c2 Color) bool {
// str returns a string representation of the Color `c`
pub fn (c Color) str() string {
- return 'Color{$c.r, $c.g, $c.b, $c.a}'
+ return 'Color{${c.r}, ${c.g}, ${c.b}, ${c.a}}'
}
// rgba8 - convert a color value to an int in the RGBA8 order.
@@ -305,5 +305,5 @@ pub fn color_from_string(s string) Color {
// to_css_string returns a CSS compatible string e.g. `rgba(10,11,12,13)` of the color `c`.
pub fn (c Color) to_css_string() string {
- return 'rgba($c.r,$c.g,$c.b,$c.a)'
+ return 'rgba(${c.r},${c.g},${c.b},${c.a})'
}
diff --git a/vlib/gx/text.v b/vlib/gx/text.v
index 7ae7a65821..f32d28d8d9 100644
--- a/vlib/gx/text.v
+++ b/vlib/gx/text.v
@@ -31,5 +31,5 @@ pub fn (cfg TextCfg) to_css_string() string {
if cfg.italic {
font_style += 'italic '
}
- return '$font_style ${cfg.size}px $cfg.family'
+ return '${font_style} ${cfg.size}px ${cfg.family}'
}
diff --git a/vlib/io/custom_string_reading_test.v b/vlib/io/custom_string_reading_test.v
index 983980c3e1..eb215d81e4 100644
--- a/vlib/io/custom_string_reading_test.v
+++ b/vlib/io/custom_string_reading_test.v
@@ -12,7 +12,7 @@ fn imin(a int, b int) int {
fn (mut s StringReader) read(mut buf []u8) !int {
$if debug {
- eprintln('>>>> StringReader.read output buf.len: $buf.len')
+ eprintln('>>>> StringReader.read output buf.len: ${buf.len}')
}
if s.place > s.text.len + 1 {
return io.Eof{}
@@ -37,7 +37,7 @@ fn read_from_string(text string, capacity int) []u8 {
z := stream.read(mut buf) or { break }
res << buf
$if debug {
- println('capacity: $capacity, i: $i, buf: $buf | z: $z')
+ println('capacity: ${capacity}, i: ${i}, buf: ${buf} | z: ${z}')
}
i++
}
diff --git a/vlib/io/util/util.v b/vlib/io/util/util.v
index 6b4f830861..bace828397 100644
--- a/vlib/io/util/util.v
+++ b/vlib/io/util/util.v
@@ -21,10 +21,10 @@ pub fn temp_file(tfo TempFileOptions) !(os.File, string) {
}
os.ensure_folder_is_writable(d) or {
return error(@FN +
- ' could not create temporary file in "$d". Please ensure write permissions.')
+ ' could not create temporary file in "${d}". Please ensure write permissions.')
}
d = d.trim_right(os.path_separator)
- prefix, suffix := prefix_and_suffix(tfo.pattern) or { return error(@FN + ' $err.msg()') }
+ prefix, suffix := prefix_and_suffix(tfo.pattern) or { return error(@FN + ' ${err.msg()}') }
for retry := 0; retry < util.retries; retry++ {
path := os.join_path(d, prefix + random_number() + suffix)
mut mode := 'rw+'
@@ -37,7 +37,7 @@ pub fn temp_file(tfo TempFileOptions) !(os.File, string) {
}
}
return error(@FN +
- ' could not create temporary file in "$d". Retry limit ($util.retries) exhausted. Please ensure write permissions.')
+ ' could not create temporary file in "${d}". Retry limit (${util.retries}) exhausted. Please ensure write permissions.')
}
[params]
@@ -47,7 +47,7 @@ pub struct TempDirOptions {
}
fn error_for_temporary_folder(fn_name string, d string) !string {
- return error('$fn_name could not create temporary directory "$d". Please ensure you have write permissions for it.')
+ return error('${fn_name} could not create temporary directory "${d}". Please ensure you have write permissions for it.')
}
// temp_dir returns an uniquely named, writable, directory path
@@ -58,7 +58,7 @@ pub fn temp_dir(tdo TempFileOptions) !string {
}
os.ensure_folder_is_writable(d) or { return error_for_temporary_folder(@FN, d) }
d = d.trim_right(os.path_separator)
- prefix, suffix := prefix_and_suffix(tdo.pattern) or { return error(@FN + ' $err.msg()') }
+ prefix, suffix := prefix_and_suffix(tdo.pattern) or { return error(@FN + ' ${err.msg()}') }
for retry := 0; retry < util.retries; retry++ {
path := os.join_path(d, prefix + random_number() + suffix)
os.mkdir_all(path) or { continue }
@@ -67,7 +67,7 @@ pub fn temp_dir(tdo TempFileOptions) !string {
return path
}
}
- return error('${@FN} could not create temporary directory "$d". Retry limit ($util.retries) exhausted.')
+ return error('${@FN} could not create temporary directory "${d}". Retry limit (${util.retries}) exhausted.')
}
// * Utility functions
@@ -79,7 +79,7 @@ fn random_number() string {
fn prefix_and_suffix(pattern string) ?(string, string) {
mut pat := pattern
if pat.contains(os.path_separator) {
- return error('pattern cannot contain path separators ($os.path_separator).')
+ return error('pattern cannot contain path separators (${os.path_separator}).')
}
pos := pat.last_index('*') or { -1 }
mut prefix := ''
diff --git a/vlib/io/util/util_test.v b/vlib/io/util/util_test.v
index df9fdc063a..3ece4fa7f3 100644
--- a/vlib/io/util/util_test.v
+++ b/vlib/io/util/util_test.v
@@ -9,7 +9,7 @@ const (
)
fn testsuite_begin() {
- eprintln('testsuite_begin, tfolder = $tfolder')
+ eprintln('testsuite_begin, tfolder = ${tfolder}')
os.rmdir_all(tfolder) or {}
assert !os.is_dir(tfolder)
os.mkdir_all(tfolder) or { panic(err) }
diff --git a/vlib/json/README.md b/vlib/json/README.md
index 07684bf4a6..47d5130a0a 100644
--- a/vlib/json/README.md
+++ b/vlib/json/README.md
@@ -25,7 +25,7 @@ fn main() {
println(x)
//
s := json.encode(x)
- println('Employee x: $s')
+ println('Employee x: ${s}')
assert s == '{"name":"Peter","age":28,"salary":95000.5,"title":2}'
//
y := json.decode(Employee, s)!
diff --git a/vlib/json/json_decode_with_optional_arg_test.v b/vlib/json/json_decode_with_optional_arg_test.v
index 2e2599954c..bc84c92d1c 100644
--- a/vlib/json/json_decode_with_optional_arg_test.v
+++ b/vlib/json/json_decode_with_optional_arg_test.v
@@ -15,5 +15,5 @@ fn test_json_decode_with_optional_arg() {
fn print_info() !string {
dbconf := json.decode(DbConfig, os.read_file('dbconf.json')!)!
println(dbconf)
- return '$dbconf'
+ return '${dbconf}'
}
diff --git a/vlib/json/json_test.v b/vlib/json/json_test.v
index 4d5134f6d2..60fef96d85 100644
--- a/vlib/json/json_test.v
+++ b/vlib/json/json_test.v
@@ -95,7 +95,7 @@ fn test_encode_decode_sumtype() {
enc := json.encode(game)
// eprintln('Encoded Game: $enc')
- assert enc == '{"title":"Super Mega Game","player":{"name":"Monke","_type":"Human"},"other":[{"tag":"Pen","_type":"Item"},{"tag":"Cookie","_type":"Item"},1,"Stool",{"_type":"Time","value":$t.unix_time()}]}'
+ assert enc == '{"title":"Super Mega Game","player":{"name":"Monke","_type":"Human"},"other":[{"tag":"Pen","_type":"Item"},{"tag":"Cookie","_type":"Item"},1,"Stool",{"_type":"Time","value":${t.unix_time()}}]}'
dec := json.decode(SomeGame, enc)!
// eprintln('Decoded Game: $dec')
@@ -421,15 +421,15 @@ struct Info {
fn test_decode_null_object() {
info := json.decode(Info, '{"id": 22, "items": null, "maps": null}')!
assert info.id == 22
- assert '$info.items' == '[]'
- assert '$info.maps' == '{}'
+ assert '${info.items}' == '[]'
+ assert '${info.maps}' == '{}'
}
fn test_decode_missing_maps_field() {
info := json.decode(Info, '{"id": 22, "items": null}')!
assert info.id == 22
- assert '$info.items' == '[]'
- assert '$info.maps' == '{}'
+ assert '${info.items}' == '[]'
+ assert '${info.maps}' == '{}'
}
struct Foo2 {
diff --git a/vlib/log/log.v b/vlib/log/log.v
index bf3abd7760..f7e8b4f6f3 100644
--- a/vlib/log/log.v
+++ b/vlib/log/log.v
@@ -128,7 +128,7 @@ pub fn (mut l Log) set_output_path(output_file_path string) {
l.output_target = .file
l.output_file_name = os.join_path(os.real_path(output_file_path), l.output_label)
ofile := os.open_append(l.output_file_name) or {
- panic('error while opening log file $l.output_file_name for appending')
+ panic('error while opening log file ${l.output_file_name} for appending')
}
l.ofile = ofile
}
@@ -156,14 +156,14 @@ pub fn (mut l Log) close() {
fn (mut l Log) log_file(s string, level Level) {
timestamp := time.now().format_ss()
e := tag_to_file(level)
- l.ofile.writeln('$timestamp [$e] $s') or { panic(err) }
+ l.ofile.writeln('${timestamp} [${e}] ${s}') or { panic(err) }
}
// log_cli writes log line `s` with `level` to stdout.
fn (l &Log) log_cli(s string, level Level) {
timestamp := time.now().format_ss()
e := tag_to_cli(level)
- println('$timestamp [$e] $s')
+ println('${timestamp} [${e}] ${s}')
}
// send_output writes log line `s` with `level` to either the log file or the console
@@ -185,7 +185,7 @@ pub fn (mut l Log) fatal(s string) {
l.send_output(s, .fatal)
l.ofile.close()
}
- panic('$l.output_label: $s')
+ panic('${l.output_label}: ${s}')
}
// error logs line `s` via `send_output` if `Log.level` is greater than or equal to the `Level.error` category.
diff --git a/vlib/maps/maps_test.v b/vlib/maps/maps_test.v
index e93ec9ff4f..90091f2983 100644
--- a/vlib/maps/maps_test.v
+++ b/vlib/maps/maps_test.v
@@ -32,7 +32,7 @@ fn test_to_array() {
`g`: 'hi'
}
assert to_array(m1, fn (k rune, v string) string {
- return '$k$v'
+ return '${k}${v}'
}) == ['abc', 'def', 'ghi']
}
diff --git a/vlib/math/big/integer.v b/vlib/math/big/integer.v
index 19bc3d7ab5..e56855a73c 100644
--- a/vlib/math/big/integer.v
+++ b/vlib/math/big/integer.v
@@ -189,10 +189,10 @@ fn validate_string(characters string, radix u32) ! {
value := big.digit_array.index(digit)
if value == -1 {
- return error('Invalid character $digit')
+ return error('Invalid character ${digit}')
}
if value >= radix {
- return error('Invalid character $digit for base $radix')
+ return error('Invalid character ${digit} for base ${radix}')
}
}
}
diff --git a/vlib/math/big/large_number_power_and_string_conversion_test.v b/vlib/math/big/large_number_power_and_string_conversion_test.v
index de0b456975..7ed1a9e717 100644
--- a/vlib/math/big/large_number_power_and_string_conversion_test.v
+++ b/vlib/math/big/large_number_power_and_string_conversion_test.v
@@ -11,7 +11,7 @@ fn timed_println(msg string) {
}
fn timed_println_extended(t time.StopWatch, msg string) {
- println('${t.elapsed().microseconds():12} | $msg')
+ println('${t.elapsed().microseconds():12} | ${msg}')
}
fn f(x big.Integer, y int) big.Integer {
@@ -40,7 +40,7 @@ fn calculate_and_measure(calc_label string, cb fn () big.Integer) string {
sw := time.new_stopwatch()
timed_println_extended(sw, 'start')
a := cb()
- timed_println_extended(sw, 'done $calc_label')
+ timed_println_extended(sw, 'done ${calc_label}')
timed_println_extended(sw, 'a.bit_len(): ${a.bit_len():12}')
timed_println_extended(sw, 'before a.str()')
diff --git a/vlib/math/floor.v b/vlib/math/floor.v
index bfaf6607e2..4e930fa572 100644
--- a/vlib/math/floor.v
+++ b/vlib/math/floor.v
@@ -78,7 +78,7 @@ pub fn round(x f64) f64 {
// Returns the rounded float, with sig_digits of precision.
// i.e `assert round_sig(4.3239437319748394,6) == 4.323944`
pub fn round_sig(x f64, sig_digits int) f64 {
- mut ret_str := '$x'
+ mut ret_str := '${x}'
match sig_digits {
0 { ret_str = '${x:0.0f}' }
@@ -98,7 +98,7 @@ pub fn round_sig(x f64, sig_digits int) f64 {
14 { ret_str = '${x:0.14f}' }
15 { ret_str = '${x:0.15f}' }
16 { ret_str = '${x:0.16f}' }
- else { ret_str = '$x' }
+ else { ret_str = '${x}' }
}
return ret_str.f64()
diff --git a/vlib/math/fractions/fraction.v b/vlib/math/fractions/fraction.v
index eabe60964e..015de84def 100644
--- a/vlib/math/fractions/fraction.v
+++ b/vlib/math/fractions/fraction.v
@@ -42,7 +42,7 @@ pub fn fraction(n i64, d i64) Fraction {
// To String method
pub fn (f Fraction) str() string {
- return '$f.n/$f.d'
+ return '${f.n}/${f.d}'
}
//
diff --git a/vlib/math/math.v b/vlib/math/math.v
index 5c404d0a4e..4686b64d2b 100644
--- a/vlib/math/math.v
+++ b/vlib/math/math.v
@@ -69,7 +69,7 @@ pub fn digits(num i64, params DigitParams) []int {
// set base to 10 initially and change only if base is explicitly set.
mut b := params.base
if b < 2 {
- panic('digits: Cannot find digits of n with base $b')
+ panic('digits: Cannot find digits of n with base ${b}')
}
mut n := num
mut sign := 1
diff --git a/vlib/math/math_test.v b/vlib/math/math_test.v
index 41e41bb6d2..5636173c9f 100644
--- a/vlib/math/math_test.v
+++ b/vlib/math/math_test.v
@@ -421,11 +421,11 @@ fn test_abs() {
fn test_abs_zero() {
ret1 := abs(0)
println(ret1)
- assert '$ret1' == '0'
+ assert '${ret1}' == '0'
ret2 := abs(0.0)
println(ret2)
- assert '$ret2' == '0.0'
+ assert '${ret2}' == '0.0'
}
fn test_floor() {
diff --git a/vlib/math/unsigned/uint128.v b/vlib/math/unsigned/uint128.v
index 9c2884adda..b08852feba 100644
--- a/vlib/math/unsigned/uint128.v
+++ b/vlib/math/unsigned/uint128.v
@@ -422,7 +422,7 @@ pub fn uint128_from_dec_str(value string) ?Uint128 {
for b_ in value.bytes() {
b := b_ - '0'.bytes()[0]
if b > 9 {
- return error('invalid character "$b"')
+ return error('invalid character "${b}"')
}
r, overflow := res.overflowing_mul_64(10)
diff --git a/vlib/math/unsigned/uint256.v b/vlib/math/unsigned/uint256.v
index 37645b6717..5d49a0ed2a 100644
--- a/vlib/math/unsigned/uint256.v
+++ b/vlib/math/unsigned/uint256.v
@@ -387,7 +387,7 @@ pub fn uint256_from_dec_str(value string) ?Uint256 {
for b_ in value.bytes() {
b := b_ - '0'.bytes()[0]
if b > 9 {
- return error('invalid character "$b"')
+ return error('invalid character "${b}"')
}
r := res.mul_128(uint128_from_64(10))
diff --git a/vlib/mssql/config.v b/vlib/mssql/config.v
index 7f26d6f91d..5f02645466 100644
--- a/vlib/mssql/config.v
+++ b/vlib/mssql/config.v
@@ -12,9 +12,9 @@ pub:
}
pub fn (cfg Config) get_conn_str() string {
- mut str := 'Driver=$cfg.driver;Server=$cfg.server;UID=$cfg.uid;PWD=$cfg.pwd'
+ mut str := 'Driver=${cfg.driver};Server=${cfg.server};UID=${cfg.uid};PWD=${cfg.pwd}'
if cfg.dbname != '' {
- str += ';Database=$cfg.dbname'
+ str += ';Database=${cfg.dbname}'
}
return str
}
diff --git a/vlib/mssql/mssql.v b/vlib/mssql/mssql.v
index 1dbe221a18..f0d9ea3c64 100644
--- a/vlib/mssql/mssql.v
+++ b/vlib/mssql/mssql.v
@@ -117,7 +117,7 @@ fn extract_error(fnName string, handle C.SQLHANDLE, tp C.SQLSMALLINT) string {
state_str := (&sql_state[0]).vstring()
native_error_code := int(native_error)
txt_str := (&message_text[0]).vstring()
- err_str += '\n\todbc=$state_str:$i:$native_error_code:$txt_str'
+ err_str += '\n\todbc=${state_str}:${i}:${native_error_code}:${txt_str}'
}
}
}
diff --git a/vlib/mysql/orm.v b/vlib/mysql/orm.v
index c7c3a406e5..9808e66f44 100644
--- a/vlib/mysql/orm.v
+++ b/vlib/mysql/orm.v
@@ -167,7 +167,7 @@ pub fn (db Connection) create(table string, fields []orm.TableField) ! {
}
pub fn (db Connection) drop(table string) ! {
- query := 'DROP TABLE `$table`;'
+ query := 'DROP TABLE `${table}`;'
mysql_stmt_worker(db, query, orm.QueryData{}, orm.QueryData{})!
}
@@ -336,7 +336,7 @@ fn mysql_type_from_v(typ int) !string {
}
}
if str == '' {
- return error('Unknown type $typ')
+ return error('Unknown type ${typ}')
}
return str
}
@@ -364,7 +364,7 @@ fn (db Connection) factory_orm_primitive_converted_from_sql(table string, data o
}
fn (db Connection) get_table_data_type_map(table string) !map[string]string {
- data_type_querys := "SELECT COLUMN_NAME, DATA_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '$table'"
+ data_type_querys := "SELECT COLUMN_NAME, DATA_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '${table}'"
mut map_val := map[string]string{}
results := db.query(data_type_querys)!
diff --git a/vlib/mysql/result.v b/vlib/mysql/result.v
index c1e7abb8ec..0969c75c20 100644
--- a/vlib/mysql/result.v
+++ b/vlib/mysql/result.v
@@ -122,26 +122,26 @@ pub fn (r Result) fields() []Field {
pub fn (f Field) str() string {
return '
{
- name: "$f.name"
- org_name: "$f.org_name"
- table: "$f.table"
- org_table: "$f.org_table"
- db: "$f.db"
- catalog: "$f.catalog"
- def: "$f.def"
- length: $f.length
- max_length: $f.max_length
- name_length: $f.name_length
- org_name_length: $f.org_name_length
- table_length: $f.table_length
- org_table_length: $f.org_table_length
- db_length: $f.db_length
- catalog_length: $f.catalog_length
- def_length: $f.def_length
- flags: $f.flags
- decimals: $f.decimals
- charsetnr: $f.charsetnr
- type: $f.type_.str()
+ name: "${f.name}"
+ org_name: "${f.org_name}"
+ table: "${f.table}"
+ org_table: "${f.org_table}"
+ db: "${f.db}"
+ catalog: "${f.catalog}"
+ def: "${f.def}"
+ length: ${f.length}
+ max_length: ${f.max_length}
+ name_length: ${f.name_length}
+ org_name_length: ${f.org_name_length}
+ table_length: ${f.table_length}
+ org_table_length: ${f.org_table_length}
+ db_length: ${f.db_length}
+ catalog_length: ${f.catalog_length}
+ def_length: ${f.def_length}
+ flags: ${f.flags}
+ decimals: ${f.decimals}
+ charsetnr: ${f.charsetnr}
+ type: ${f.type_.str()}
}
'
}
diff --git a/vlib/mysql/stmt.c.v b/vlib/mysql/stmt.c.v
index 487ac9ea43..f54dc457f5 100644
--- a/vlib/mysql/stmt.c.v
+++ b/vlib/mysql/stmt.c.v
@@ -140,7 +140,7 @@ fn (stmt Stmt) get_error_msg() string {
pub fn (stmt Stmt) error(code int) IError {
msg := stmt.get_error_msg()
return &SQLError{
- msg: '$msg ($code) ($stmt.query)'
+ msg: '${msg} (${code}) (${stmt.query})'
code: code
}
}
diff --git a/vlib/net/address.v b/vlib/net/address.v
index 370b97b31b..fc7ce5a2ef 100644
--- a/vlib/net/address.v
+++ b/vlib/net/address.v
@@ -77,7 +77,7 @@ fn (a Ip) str() string {
saddr := unsafe { cstring_to_vstring(&buf[0]) }
port := C.ntohs(a.port)
- return '$saddr:$port'
+ return '${saddr}:${port}'
}
fn (a Ip6) str() string {
@@ -92,7 +92,7 @@ fn (a Ip6) str() string {
saddr := unsafe { cstring_to_vstring(&buf[0]) }
port := C.ntohs(a.port)
- return '[$saddr]:$port'
+ return '[${saddr}]:${port}'
}
const aoffset = __offsetof(Addr, addr)
@@ -187,7 +187,7 @@ pub fn resolve_ipaddrs(addr string, family AddrFamily, typ SocketType) ![]Addr {
results := &C.addrinfo(0)
- sport := '$port'
+ sport := '${port}'
// This might look silly but is recommended by MSDN
$if windows {
@@ -230,7 +230,7 @@ pub fn resolve_ipaddrs(addr string, family AddrFamily, typ SocketType) ![]Addr {
addresses << new_addr
}
else {
- panic('Unexpected address family $result.ai_family')
+ panic('Unexpected address family ${result.ai_family}')
}
}
}
diff --git a/vlib/net/errors.v b/vlib/net/errors.v
index 9a89263fed..a03b058ce7 100644
--- a/vlib/net/errors.v
+++ b/vlib/net/errors.v
@@ -19,7 +19,7 @@ pub const (
)
pub fn socket_error_message(potential_code int, s string) !int {
- return socket_error(potential_code) or { return error('$err.msg(); $s') }
+ return socket_error(potential_code) or { return error('${err.msg()}; ${s}') }
}
pub fn socket_error(potential_code int) !int {
@@ -27,13 +27,13 @@ pub fn socket_error(potential_code int) !int {
if potential_code < 0 {
last_error_int := C.WSAGetLastError()
last_error := wsa_error(last_error_int)
- return error_with_code('net: socket error: ($last_error_int) $last_error',
+ return error_with_code('net: socket error: (${last_error_int}) ${last_error}',
int(last_error))
}
} $else {
if potential_code < 0 {
last_error := error_code()
- return error_with_code('net: socket error: $last_error', last_error)
+ return error_with_code('net: socket error: ${last_error}', last_error)
}
}
@@ -46,9 +46,9 @@ pub fn wrap_error(error_code int) ! {
}
$if windows {
enum_error := wsa_error(error_code)
- return error_with_code('net: socket error: $enum_error', error_code)
+ return error_with_code('net: socket error: ${enum_error}', error_code)
} $else {
- return error_with_code('net: socket error: $error_code', error_code)
+ return error_with_code('net: socket error: ${error_code}', error_code)
}
}
diff --git a/vlib/net/ftp/ftp.v b/vlib/net/ftp/ftp.v
index 219bca2505..fe73fc4403 100644
--- a/vlib/net/ftp/ftp.v
+++ b/vlib/net/ftp/ftp.v
@@ -77,15 +77,15 @@ pub fn new() FTP {
fn (mut zftp FTP) write(data string) !int {
$if debug {
- println('FTP.v >>> $data')
+ println('FTP.v >>> ${data}')
}
- return zftp.conn.write('$data\r\n'.bytes())
+ return zftp.conn.write('${data}\r\n'.bytes())
}
fn (mut zftp FTP) read() !(int, string) {
mut data := zftp.reader.read_line()!
$if debug {
- println('FTP.v <<< $data')
+ println('FTP.v <<< ${data}')
}
if data.len < 5 {
return 0, ''
@@ -104,7 +104,7 @@ fn (mut zftp FTP) read() !(int, string) {
// connect establishes an FTP connection to the host at `ip` port 21.
pub fn (mut zftp FTP) connect(ip string) !bool {
- zftp.conn = net.dial_tcp('$ip:21')!
+ zftp.conn = net.dial_tcp('${ip}:21')!
zftp.reader = io.new_buffered_reader(reader: zftp.conn)
code, _ := zftp.read()!
if code == ftp.connected {
@@ -115,7 +115,7 @@ pub fn (mut zftp FTP) connect(ip string) !bool {
// login sends the "USER `user`" and "PASS `passwd`" commands to the remote host.
pub fn (mut zftp FTP) login(user string, passwd string) !bool {
- zftp.write('USER $user') or {
+ zftp.write('USER ${user}') or {
$if debug {
println('ERROR sending user')
}
@@ -128,7 +128,7 @@ pub fn (mut zftp FTP) login(user string, passwd string) !bool {
if code != ftp.specify_password {
return false
}
- zftp.write('PASS $passwd') or {
+ zftp.write('PASS ${passwd}') or {
$if debug {
println('ERROR sending password')
}
@@ -160,12 +160,12 @@ pub fn (mut zftp FTP) pwd() !string {
// cd changes the current working directory to the specified remote directory `dir`.
pub fn (mut zftp FTP) cd(dir string) ! {
- zftp.write('CWD $dir') or { return }
+ zftp.write('CWD ${dir}') or { return }
mut code, mut data := zftp.read()!
match int(code) {
ftp.denied {
$if debug {
- println('CD $dir denied!')
+ println('CD ${dir} denied!')
}
}
ftp.complete {
@@ -174,7 +174,7 @@ pub fn (mut zftp FTP) cd(dir string) ! {
else {}
}
$if debug {
- println('CD $data')
+ println('CD ${data}')
}
}
@@ -188,7 +188,7 @@ fn new_dtp(msg string) !&DTP {
port: port
conn: 0
}
- conn := net.dial_tcp('$ip:$port') or { return error('Cannot connect to the data channel') }
+ conn := net.dial_tcp('${ip}:${port}') or { return error('Cannot connect to the data channel') }
dtp.conn = conn
dtp.reader = io.new_buffered_reader(reader: dtp.conn)
return dtp
@@ -198,7 +198,7 @@ fn (mut zftp FTP) pasv() !&DTP {
zftp.write('PASV')!
code, data := zftp.read()!
$if debug {
- println('pass: $data')
+ println('pass: ${data}')
}
if code != ftp.passive_mode {
return error('pasive mode not allowed')
@@ -237,7 +237,7 @@ pub fn (mut zftp FTP) dir() ![]string {
// get retrieves `file` from the remote host.
pub fn (mut zftp FTP) get(file string) ![]u8 {
mut dtp := zftp.pasv() or { return error('Cannot stablish data connection') }
- zftp.write('RETR $file')!
+ zftp.write('RETR ${file}')!
code, _ := zftp.read()!
if code == ftp.denied {
return error('Permission denied')
diff --git a/vlib/net/html/dom.v b/vlib/net/html/dom.v
index 6c4444a982..7571f3f462 100644
--- a/vlib/net/html/dom.v
+++ b/vlib/net/html/dom.v
@@ -141,8 +141,8 @@ fn (mut dom DocumentObjectModel) construct(tag_list []&Tag) {
dom.print_debug("Added ${tag.name} as child of '" + tag_list[temp_int].name +
"' which now has ${dom.btree.get_children().len} childrens")
*/
- dom.print_debug("Added $tag.name as child of '" + temp_tag.name +
- "' which now has $temp_tag.children.len childrens")
+ dom.print_debug("Added ${tag.name} as child of '" + temp_tag.name +
+ "' which now has ${temp_tag.children.len} childrens")
} else { // dom.new_root(tag)
stack.push(root_index)
}
diff --git a/vlib/net/html/dom_test.v b/vlib/net/html/dom_test.v
index 3d37be0c4d..18dd613a1a 100644
--- a/vlib/net/html/dom_test.v
+++ b/vlib/net/html/dom_test.v
@@ -6,8 +6,8 @@ fn generate_temp_html() string {
mut temp_html := strings.new_builder(200)
temp_html.write_string('Giant String')
for counter := 0; counter < 4; counter++ {
- temp_html.write_string("Look at $counter
")
+ temp_html.write_string("Look at ${counter}
")
}
temp_html.write_string('')
return temp_html.str()
diff --git a/vlib/net/html/parser.v b/vlib/net/html/parser.v
index 350af89170..2dd17adee4 100644
--- a/vlib/net/html/parser.v
+++ b/vlib/net/html/parser.v
@@ -123,7 +123,7 @@ pub fn (mut parser Parser) split_parse(data string) {
} else if is_quote {
parser.lexical_attributes.open_string = string_code
} else if chr == `>` { // only execute verification if is a > // here will verify < to know if code tag is finished
- name_close_tag := '$parser.lexical_attributes.opened_code_type>'
+ name_close_tag := '${parser.lexical_attributes.opened_code_type}>'
if parser.builder_str().to_lower().ends_with(name_close_tag) {
parser.lexical_attributes.open_code = false
// need to modify lexeme_builder to add script text as a content in next loop (not gave error in dom)
diff --git a/vlib/net/html/parser_test.v b/vlib/net/html/parser_test.v
index 274a47c2ab..6d0d1d5f2b 100644
--- a/vlib/net/html/parser_test.v
+++ b/vlib/net/html/parser_test.v
@@ -25,7 +25,7 @@ fn test_giant_string() {
mut parser := Parser{}
temp_html.write_string('Giant String')
for counter := 0; counter < 2000; counter++ {
- temp_html.write_string("Look at $counter
")
+ temp_html.write_string("Look at ${counter}
")
}
temp_html.write_string('')
parser.parse_html(temp_html.str())
@@ -35,7 +35,7 @@ fn test_giant_string() {
fn test_script_tag() {
mut parser := Parser{}
script_content := "\nvar googletag = googletag || {};\ngoogletag.cmd = googletag.cmd || [];if(3 > 5) {console.log('Birl');}\n"
- temp_html := ''
+ temp_html := ''
parser.parse_html(temp_html)
assert parser.tags[2].content.len == script_content.replace('\n', '').len
}
diff --git a/vlib/net/html/tag.v b/vlib/net/html/tag.v
index c704cce3b5..dde3a46efd 100644
--- a/vlib/net/html/tag.v
+++ b/vlib/net/html/tag.v
@@ -47,11 +47,11 @@ pub fn (tag Tag) text() string {
pub fn (tag &Tag) str() string {
mut html_str := strings.new_builder(200)
- html_str.write_string('<$tag.name')
+ html_str.write_string('<${tag.name}')
for key, value in tag.attributes {
- html_str.write_string(' $key')
+ html_str.write_string(' ${key}')
if value.len > 0 {
- html_str.write_string('="$value"')
+ html_str.write_string('="${value}"')
}
}
html_str.write_string(if tag.closed && tag.close_type == .in_name { '/>' } else { '>' })
@@ -62,7 +62,7 @@ pub fn (tag &Tag) str() string {
}
}
if !tag.closed || tag.close_type == .new_tag {
- html_str.write_string('$tag.name>')
+ html_str.write_string('${tag.name}>')
}
return html_str.str()
}
diff --git a/vlib/net/http/backend_nix.c.v b/vlib/net/http/backend_nix.c.v
index abf2b08741..84cd059f13 100644
--- a/vlib/net/http/backend_nix.c.v
+++ b/vlib/net/http/backend_nix.c.v
@@ -18,7 +18,7 @@ fn (req &Request) ssl_do(port int, method Method, host_name string, path string)
req_headers := req.build_request_headers(method, host_name, path)
$if trace_http_request ? {
- eprintln('> $req_headers')
+ eprintln('> ${req_headers}')
}
// println(req_headers)
ssl_conn.write_string(req_headers) or { return err }
@@ -31,7 +31,7 @@ fn (req &Request) ssl_do(port int, method Method, host_name string, path string)
readcounter++
len := ssl_conn.socket_read_into_ptr(bp, bufsize) or { break }
$if debug_http ? {
- eprintln('ssl_do, read ${readcounter:4d} | len: $len')
+ eprintln('ssl_do, read ${readcounter:4d} | len: ${len}')
eprintln('-'.repeat(20))
eprintln(unsafe { tos(bp, len) })
eprintln('-'.repeat(20))
@@ -41,7 +41,7 @@ fn (req &Request) ssl_do(port int, method Method, host_name string, path string)
ssl_conn.shutdown()!
response_text := content.str()
$if trace_http_response ? {
- eprintln('< $response_text')
+ eprintln('< ${response_text}')
}
return parse_response(response_text)
}
diff --git a/vlib/net/http/backend_windows.c.v b/vlib/net/http/backend_windows.c.v
index 6371c7fafd..be13651b18 100644
--- a/vlib/net/http/backend_windows.c.v
+++ b/vlib/net/http/backend_windows.c.v
@@ -20,13 +20,13 @@ fn (req &Request) ssl_do(port int, method Method, host_name string, path string)
addr := host_name
sdata := req.build_request_headers(method, host_name, path)
$if trace_http_request ? {
- eprintln('> $sdata')
+ eprintln('> ${sdata}')
}
length := C.request(&ctx, port, addr.to_wide(), sdata.str, &buff)
C.vschannel_cleanup(&ctx)
response_text := unsafe { buff.vstring_with_len(length) }
$if trace_http_response ? {
- eprintln('< $response_text')
+ eprintln('< ${response_text}')
}
return parse_response(response_text)
}
diff --git a/vlib/net/http/cookie.v b/vlib/net/http/cookie.v
index af8e3c1a28..41a2bc2d35 100644
--- a/vlib/net/http/cookie.v
+++ b/vlib/net/http/cookie.v
@@ -143,7 +143,7 @@ pub fn (c &Cookie) str() string {
}
if c.expires.year > 1600 {
e := c.expires
- time_str := '$e.weekday_str(), $e.day.str() $e.smonth() $e.year $e.hhmmss() GMT'
+ time_str := '${e.weekday_str()}, ${e.day.str()} ${e.smonth()} ${e.year} ${e.hhmmss()} GMT'
b.write_string('; expires=')
b.write_string(time_str)
}
@@ -214,7 +214,7 @@ pub fn sanitize_cookie_value(v string) string {
}
// Check for the existence of a space or comma
if val.starts_with(' ') || val.ends_with(' ') || val.starts_with(',') || val.ends_with(',') {
- return '"$v"'
+ return '"${v}"'
}
return v
}
diff --git a/vlib/net/http/download.v b/vlib/net/http/download.v
index d750c7718b..d365f36ac5 100644
--- a/vlib/net/http/download.v
+++ b/vlib/net/http/download.v
@@ -9,14 +9,14 @@ import os
// and saves it in the output file path `out_file_path`.
pub fn download_file(url string, out_file_path string) ! {
$if debug_http ? {
- println('http.download_file url=$url out_file_path=$out_file_path')
+ println('http.download_file url=${url} out_file_path=${out_file_path}')
}
s := get(url) or { return err }
if s.status() != .ok {
- return error('received http code $s.status_code')
+ return error('received http code ${s.status_code}')
}
$if debug_http ? {
- println('http.download_file saving $s.body.len bytes')
+ println('http.download_file saving ${s.body.len} bytes')
}
os.write_file(out_file_path, s.body)!
}
diff --git a/vlib/net/http/header.v b/vlib/net/http/header.v
index db296e1d6c..ac82813338 100644
--- a/vlib/net/http/header.v
+++ b/vlib/net/http/header.v
@@ -598,7 +598,7 @@ pub fn (h Header) join(other Header) Header {
for v in other.custom_values(k, exact: true) {
combined.add_custom(k, v) or {
// panic because this should never fail
- panic('unexpected error: $err')
+ panic('unexpected error: ${err}')
}
}
}
@@ -634,7 +634,7 @@ struct HeaderKeyError {
}
pub fn (err HeaderKeyError) msg() string {
- return "Invalid header key: '$err.header'"
+ return "Invalid header key: '${err.header}'"
}
pub fn (err HeaderKeyError) code() int {
diff --git a/vlib/net/http/header_test.v b/vlib/net/http/header_test.v
index 57d491214e..0013d36fa9 100644
--- a/vlib/net/http/header_test.v
+++ b/vlib/net/http/header_test.v
@@ -374,7 +374,7 @@ fn test_parse_headers() ? {
assert parse_headers('foo: bar\r\nfoo:baz')?.custom_values('foo') == ['bar', 'baz']
if x := parse_headers(' oops: oh no') {
- return error('should have errored, but got $x')
+ return error('should have errored, but got ${x}')
}
}
diff --git a/vlib/net/http/http.v b/vlib/net/http/http.v
index f9f2242125..80a01f2250 100644
--- a/vlib/net/http/http.v
+++ b/vlib/net/http/http.v
@@ -93,7 +93,7 @@ pub mut:
pub fn post_multipart_form(url string, conf PostMultipartFormConfig) !Response {
body, boundary := multipart_form_body(conf.form, conf.files)
mut header := conf.header
- header.set(.content_type, 'multipart/form-data; boundary="$boundary"')
+ header.set(.content_type, 'multipart/form-data; boundary="${boundary}"')
return fetch(
method: .post
url: url
@@ -137,7 +137,7 @@ pub fn fetch(config FetchConfig) !Response {
if config.url == '' {
return error('http.fetch: empty url')
}
- url := build_url_from_fetch(config) or { return error('http.fetch: invalid url $config.url') }
+ url := build_url_from_fetch(config) or { return error('http.fetch: invalid url ${config.url}') }
req := Request{
method: config.method
url: url
@@ -170,7 +170,7 @@ pub fn url_encode_form_data(data map[string]string) string {
for key_, value_ in data {
key := urllib.query_escape(key_)
value := urllib.query_escape(value_)
- pieces << '$key=$value'
+ pieces << '${key}=${value}'
}
return pieces.join('&')
}
@@ -189,7 +189,7 @@ fn build_url_from_fetch(config FetchConfig) !string {
}
mut pieces := []string{cap: config.params.len}
for key, val in config.params {
- pieces << '$key=$val'
+ pieces << '${key}=${val}'
}
mut query := pieces.join('&')
if url.raw_query.len > 1 {
diff --git a/vlib/net/http/http_test.v b/vlib/net/http/http_test.v
index 2ce5d19abf..950ffa1a93 100644
--- a/vlib/net/http/http_test.v
+++ b/vlib/net/http/http_test.v
@@ -14,12 +14,12 @@ fn test_http_get_from_vlang_utc_now() {
}
urls := ['http://vlang.io/utc_now', 'https://vlang.io/utc_now']
for url in urls {
- println('Test getting current time from $url by http.get')
+ println('Test getting current time from ${url} by http.get')
res := http.get(url) or { panic(err) }
assert res.status() == .ok
assert res.body.len > 0
assert res.body.int() > 1566403696
- println('Current time is: $res.body.int()')
+ println('Current time is: ${res.body.int()}')
}
}
@@ -36,7 +36,7 @@ fn test_public_servers() {
// 'https://yahoo.com/robots.txt',
]
for url in urls {
- println('Testing http.get on public url: $url ')
+ println('Testing http.get on public url: ${url} ')
res := http.get(url) or { panic(err) }
assert res.status() == .ok
assert res.body.len > 0
diff --git a/vlib/net/http/mime/build.vsh b/vlib/net/http/mime/build.vsh
index 2251714af6..a30e9e7958 100755
--- a/vlib/net/http/mime/build.vsh
+++ b/vlib/net/http/mime/build.vsh
@@ -25,8 +25,8 @@ fn main() {
// FILE AUTOGENERATED BY `build.vsh` - DO NOT MANUALLY EDIT
const (
- db = $mt_map
- ext_to_mt_str = $ext_to_mt_str
+ db = ${mt_map}
+ ext_to_mt_str = ${ext_to_mt_str}
)
')!
execute('${@VEXE} fmt -w db.v')
diff --git a/vlib/net/http/mime/mime.v b/vlib/net/http/mime/mime.v
index 4aa02ff947..7f150919fb 100644
--- a/vlib/net/http/mime/mime.v
+++ b/vlib/net/http/mime/mime.v
@@ -21,7 +21,7 @@ pub fn get_mime_type(ext string) string {
pub fn get_content_type(mt string) string {
mt_struct := db[mt]
charset := if mt_struct.charset.len > 0 { mt_struct.charset.to_lower() } else { 'utf-8' }
- return '$mt; charset=$charset'
+ return '${mt}; charset=${charset}'
}
// returns the default extension for the given MIME type
diff --git a/vlib/net/http/request.v b/vlib/net/http/request.v
index 0e6fcf30a8..1a57e80237 100644
--- a/vlib/net/http/request.v
+++ b/vlib/net/http/request.v
@@ -54,13 +54,13 @@ pub fn (mut req Request) add_custom_header(key string, val string) ! {
// do will send the HTTP request and returns `http.Response` as soon as the response is recevied
pub fn (req &Request) do() !Response {
- mut url := urllib.parse(req.url) or { return error('http.Request.do: invalid url $req.url') }
+ mut url := urllib.parse(req.url) or { return error('http.Request.do: invalid url ${req.url}') }
mut rurl := url
mut resp := Response{}
mut no_redirects := 0
for {
if no_redirects == max_redirects {
- return error('http.request.do: maximum number of redirects reached ($max_redirects)')
+ return error('http.request.do: maximum number of redirects reached (${max_redirects})')
}
qresp := req.method_and_url_to_response(req.method, rurl)!
resp = qresp
@@ -75,12 +75,12 @@ pub fn (req &Request) do() !Response {
mut redirect_url := resp.header.get(.location) or { '' }
if redirect_url.len > 0 && redirect_url[0] == `/` {
url.set_path(redirect_url) or {
- return error('http.request.do: invalid path in redirect: "$redirect_url"')
+ return error('http.request.do: invalid path in redirect: "${redirect_url}"')
}
redirect_url = url.str()
}
qrurl := urllib.parse(redirect_url) or {
- return error('http.request.do: invalid URL in redirect "$redirect_url"')
+ return error('http.request.do: invalid URL in redirect "${redirect_url}"')
}
rurl = qrurl
no_redirects++
@@ -92,7 +92,7 @@ fn (req &Request) method_and_url_to_response(method Method, url urllib.URL) !Res
host_name := url.hostname()
scheme := url.scheme
p := url.escaped_path().trim_left('/')
- path := if url.query().len > 0 { '/$p?$url.query().encode()' } else { '/$p' }
+ path := if url.query().len > 0 { '/${p}?${url.query().encode()}' } else { '/${p}' }
mut nport := url.port().int()
if nport == 0 {
if scheme == 'http' {
@@ -109,34 +109,35 @@ fn (req &Request) method_and_url_to_response(method Method, url urllib.URL) !Res
return res
} else if scheme == 'http' {
// println('http_do( $nport, $method, $host_name, $path )')
- res := req.http_do('$host_name:$nport', method, path)!
+ res := req.http_do('${host_name}:${nport}', method, path)!
return res
}
- return error('http.request.method_and_url_to_response: unsupported scheme: "$scheme"')
+ return error('http.request.method_and_url_to_response: unsupported scheme: "${scheme}"')
}
fn (req &Request) build_request_headers(method Method, host_name string, path string) string {
ua := req.user_agent
mut uheaders := []string{}
if !req.header.contains(.host) {
- uheaders << 'Host: $host_name\r\n'
+ uheaders << 'Host: ${host_name}\r\n'
}
if !req.header.contains(.user_agent) {
- uheaders << 'User-Agent: $ua\r\n'
+ uheaders << 'User-Agent: ${ua}\r\n'
}
if req.data.len > 0 && !req.header.contains(.content_length) {
- uheaders << 'Content-Length: $req.data.len\r\n'
+ uheaders << 'Content-Length: ${req.data.len}\r\n'
}
for key in req.header.keys() {
if key == CommonHeader.cookie.str() {
continue
}
val := req.header.custom_values(key).join('; ')
- uheaders << '$key: $val\r\n'
+ uheaders << '${key}: ${val}\r\n'
}
uheaders << req.build_request_cookies_header()
version := if req.version == .unknown { Version.v1_1 } else { req.version }
- return '$method $path $version\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' + req.data
+ return '${method} ${path} ${version}\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' +
+ req.data
}
fn (req &Request) build_request_cookies_header() string {
@@ -145,7 +146,7 @@ fn (req &Request) build_request_cookies_header() string {
}
mut cookie := []string{}
for key, val in req.cookies {
- cookie << '$key=$val'
+ cookie << '${key}=${val}'
}
cookie << req.header.values(.cookie)
return 'Cookie: ' + cookie.join('; ') + '\r\n'
@@ -160,13 +161,13 @@ fn (req &Request) http_do(host string, method Method, path string) !Response {
// TODO this really needs to be exposed somehow
client.write(s.bytes())!
$if trace_http_request ? {
- eprintln('> $s')
+ eprintln('> ${s}')
}
mut bytes := io.read_all(reader: client)!
client.close()!
response_text := bytes.bytestr()
$if trace_http_response ? {
- eprintln('< $response_text')
+ eprintln('< ${response_text}')
}
return parse_response(response_text)
}
@@ -286,7 +287,7 @@ pub struct UnexpectedExtraAttributeError {
}
pub fn (err UnexpectedExtraAttributeError) msg() string {
- return 'Encountered unexpected extra attributes: $err.attributes'
+ return 'Encountered unexpected extra attributes: ${err.attributes}'
}
pub struct MultiplePathAttributesError {
diff --git a/vlib/net/http/request_test.v b/vlib/net/http/request_test.v
index 2b503a2373..d62828b807 100644
--- a/vlib/net/http/request_test.v
+++ b/vlib/net/http/request_test.v
@@ -35,7 +35,7 @@ fn test_parse_request_not_http() {
fn test_parse_request_no_headers() {
mut reader_ := reader('GET / HTTP/1.1\r\n\r\n')
- req := parse_request(mut reader_) or { panic('did not parse: $err') }
+ req := parse_request(mut reader_) or { panic('did not parse: ${err}') }
assert req.method == .get
assert req.url == '/'
assert req.version == .v1_1
@@ -43,27 +43,27 @@ fn test_parse_request_no_headers() {
fn test_parse_request_two_headers() {
mut reader_ := reader('GET / HTTP/1.1\r\nTest1: a\r\nTest2: B\r\n\r\n')
- req := parse_request(mut reader_) or { panic('did not parse: $err') }
+ req := parse_request(mut reader_) or { panic('did not parse: ${err}') }
assert req.header.custom_values('Test1') == ['a']
assert req.header.custom_values('Test2') == ['B']
}
fn test_parse_request_two_header_values() {
mut reader_ := reader('GET / HTTP/1.1\r\nTest1: a; b\r\nTest2: c\r\nTest2: d\r\n\r\n')
- req := parse_request(mut reader_) or { panic('did not parse: $err') }
+ req := parse_request(mut reader_) or { panic('did not parse: ${err}') }
assert req.header.custom_values('Test1') == ['a; b']
assert req.header.custom_values('Test2') == ['c', 'd']
}
fn test_parse_request_body() {
mut reader_ := reader('GET / HTTP/1.1\r\nTest1: a\r\nTest2: b\r\nContent-Length: 4\r\n\r\nbodyabc')
- req := parse_request(mut reader_) or { panic('did not parse: $err') }
+ req := parse_request(mut reader_) or { panic('did not parse: ${err}') }
assert req.data == 'body'
}
fn test_parse_request_line() {
method, target, version := parse_request_line('GET /target HTTP/1.1') or {
- panic('did not parse: $err')
+ panic('did not parse: ${err}')
}
assert method == .get
assert target.str() == '/target'
@@ -127,16 +127,16 @@ fn test_parse_multipart_form() {
file := 'bar.v'
ct := 'application/octet-stream'
contents := ['baz', 'buzz']
- data := "--$boundary
-Content-Disposition: form-data; name=\"${names[0]}\"; filename=\"$file\"\r
-Content-Type: $ct\r
+ data := "--${boundary}
+Content-Disposition: form-data; name=\"${names[0]}\"; filename=\"${file}\"\r
+Content-Type: ${ct}\r
\r
${contents[0]}\r
---$boundary\r
+--${boundary}\r
Content-Disposition: form-data; name=\"${names[1]}\"\r
\r
${contents[1]}\r
---$boundary--\r
+--${boundary}--\r
"
form, files := parse_multipart_form(data, boundary)
assert files == {
@@ -176,7 +176,7 @@ fn test_multipart_form_body() {
fn test_parse_large_body() {
body := 'A'.repeat(101) // greater than max_bytes
- req := 'GET / HTTP/1.1\r\nContent-Length: $body.len\r\n\r\n$body'
+ req := 'GET / HTTP/1.1\r\nContent-Length: ${body.len}\r\n\r\n${body}'
mut reader_ := reader(req)
result := parse_request(mut reader_)!
assert result.data.len == body.len
diff --git a/vlib/net/http/response.v b/vlib/net/http/response.v
index 33682d1848..6bc737b776 100644
--- a/vlib/net/http/response.v
+++ b/vlib/net/http/response.v
@@ -29,7 +29,7 @@ pub fn (resp Response) bytes() []u8 {
// Formats resp to a string suitable for HTTP response transmission
pub fn (resp Response) bytestr() string {
- return 'HTTP/$resp.http_version $resp.status_code $resp.status_msg\r\n' + '${resp.header.render(
+ return 'HTTP/${resp.http_version} ${resp.status_code} ${resp.status_msg}\r\n' + '${resp.header.render(
version: resp.version()
)}\r\n' + resp.body
}
@@ -98,7 +98,7 @@ pub fn (mut r Response) set_status(s Status) {
// version parses the version
pub fn (r Response) version() Version {
- return version_from_str('HTTP/$r.http_version')
+ return version_from_str('HTTP/${r.http_version}')
}
// set_version sets the http_version string of the response
@@ -108,7 +108,7 @@ pub fn (mut r Response) set_version(v Version) {
return
}
maj, min := v.protos()
- r.http_version = '${maj}.$min'
+ r.http_version = '${maj}.${min}'
}
pub struct ResponseConfig {
diff --git a/vlib/net/http/response_test.v b/vlib/net/http/response_test.v
index 732e7b9d98..0a582b3ef5 100644
--- a/vlib/net/http/response_test.v
+++ b/vlib/net/http/response_test.v
@@ -30,7 +30,7 @@ fn check_headers(expected []string, found []string) ! {
assert expected.len == found.len
for header in expected {
if !found.contains(header) {
- return error('expected header "$header" not in $found')
+ return error('expected header "${header}" not in ${found}')
}
}
}
diff --git a/vlib/net/http/server.v b/vlib/net/http/server.v
index aa3da31687..8c556282e9 100644
--- a/vlib/net/http/server.v
+++ b/vlib/net/http/server.v
@@ -38,12 +38,12 @@ pub fn (mut s Server) listen_and_serve() {
if s.handler is DebugHandler {
eprintln('Server handler not set, using debug handler')
}
- s.listener = net.listen_tcp(.ip6, ':$s.port') or {
- eprintln('Listening on :$s.port failed')
+ s.listener = net.listen_tcp(.ip6, ':${s.port}') or {
+ eprintln('Listening on :${s.port} failed')
return
}
s.listener.set_accept_timeout(s.accept_timeout)
- eprintln('Listening on :$s.port')
+ eprintln('Listening on :${s.port}')
s.state = .running
for {
// break if we have a stop signal
@@ -55,7 +55,7 @@ pub fn (mut s Server) listen_and_serve() {
// just skip network timeouts, they are normal
continue
}
- eprintln('accept() failed, reason: $err; skipping')
+ eprintln('accept() failed, reason: ${err}; skipping')
continue
}
conn.set_read_timeout(s.read_timeout)
@@ -88,7 +88,7 @@ pub fn (s &Server) status() ServerStatus {
fn (mut s Server) parse_and_respond(mut conn net.TcpConn) {
defer {
- conn.close() or { eprintln('close() failed: $err') }
+ conn.close() or { eprintln('close() failed: ${err}') }
}
mut reader := io.new_buffered_reader(reader: conn)
@@ -100,7 +100,7 @@ fn (mut s Server) parse_and_respond(mut conn net.TcpConn) {
req := parse_request(mut reader) or {
$if debug {
// only show in debug mode to prevent abuse
- eprintln('error parsing request: $err')
+ eprintln('error parsing request: ${err}')
}
return
}
@@ -108,7 +108,7 @@ fn (mut s Server) parse_and_respond(mut conn net.TcpConn) {
if resp.version() == .unknown {
resp.set_version(req.version)
}
- conn.write(resp.bytes()) or { eprintln('error sending response: $err') }
+ conn.write(resp.bytes()) or { eprintln('error sending response: ${err}') }
}
// DebugHandler implements the Handler interface by echoing the request
@@ -117,9 +117,9 @@ struct DebugHandler {}
fn (d DebugHandler) handle(req Request) Response {
$if debug {
- eprintln('[$time.now()] $req.method $req.url\n\r$req.header\n\r$req.data - 200 OK')
+ eprintln('[${time.now()}] ${req.method} ${req.url}\n\r${req.header}\n\r${req.data} - 200 OK')
} $else {
- eprintln('[$time.now()] $req.method $req.url - 200')
+ eprintln('[${time.now()}] ${req.method} ${req.url} - 200')
}
mut r := Response{
body: req.data
diff --git a/vlib/net/http/server_test.v b/vlib/net/http/server_test.v
index 99ae5fdd22..a85b9cda75 100644
--- a/vlib/net/http/server_test.v
+++ b/vlib/net/http/server_test.v
@@ -41,7 +41,7 @@ fn (mut handler MyHttpHandler) handle(req http.Request) http.Response {
handler.counter++
// eprintln('$time.now() | counter: $handler.counter | $req.method $req.url\n$req.header\n$req.data - 200 OK\n')
mut r := http.Response{
- body: req.data + ', $req.url'
+ body: req.data + ', ${req.url}'
header: req.header
}
match req.url.all_before('?') {
@@ -71,17 +71,17 @@ fn test_server_custom_handler() {
for server.status() != .running {
time.sleep(10 * time.millisecond)
}
- x := http.fetch(url: 'http://localhost:$cport/endpoint?abc=xyz', data: 'my data')!
+ x := http.fetch(url: 'http://localhost:${cport}/endpoint?abc=xyz', data: 'my data')!
assert x.body == 'my data, /endpoint?abc=xyz'
assert x.status_code == 200
assert x.http_version == '1.1'
- y := http.fetch(url: 'http://localhost:$cport/another/endpoint', data: 'abcde')!
+ y := http.fetch(url: 'http://localhost:${cport}/another/endpoint', data: 'abcde')!
assert y.body == 'abcde, /another/endpoint'
assert y.status_code == 200
assert y.status() == .ok
assert y.http_version == '1.1'
//
- http.fetch(url: 'http://localhost:$cport/something/else')!
+ http.fetch(url: 'http://localhost:${cport}/something/else')!
server.stop()
t.wait()
assert handler.counter == 3
diff --git a/vlib/net/mbedtls/ssl_connection.v b/vlib/net/mbedtls/ssl_connection.v
index 8fa50bfe71..cca0455e2e 100644
--- a/vlib/net/mbedtls/ssl_connection.v
+++ b/vlib/net/mbedtls/ssl_connection.v
@@ -16,7 +16,7 @@ fn init() {
0, 0)
if ret != 0 {
C.mbedtls_ctr_drbg_free(&mbedtls.ctr_drbg)
- panic('Failed to seed ssl context: $ret')
+ panic('Failed to seed ssl context: ${ret}')
}
}
diff --git a/vlib/net/openssl/openssl.v b/vlib/net/openssl/openssl.v
index 6aa96b79ff..af5f15a159 100644
--- a/vlib/net/openssl/openssl.v
+++ b/vlib/net/openssl/openssl.v
@@ -5,10 +5,10 @@ fn ssl_error(ret int, ssl voidptr) !SSLError {
res := C.SSL_get_error(ssl, ret)
match unsafe { SSLError(res) } {
.ssl_error_syscall {
- return error_with_code('unrecoverable syscall ($res)', res)
+ return error_with_code('unrecoverable syscall (${res})', res)
}
.ssl_error_ssl {
- return error_with_code('unrecoverable ssl protocol error ($res)', res)
+ return error_with_code('unrecoverable ssl protocol error (${res})', res)
}
else {
return unsafe { SSLError(res) }
diff --git a/vlib/net/openssl/ssl_connection.v b/vlib/net/openssl/ssl_connection.v
index d38709cb68..7c9e291355 100644
--- a/vlib/net/openssl/ssl_connection.v
+++ b/vlib/net/openssl/ssl_connection.v
@@ -77,7 +77,7 @@ pub fn (mut s SSLConn) shutdown() ! {
if s.sslctx != 0 {
C.SSL_CTX_free(s.sslctx)
}
- return error('unexepedted ssl error $err_res')
+ return error('unexpected ssl error ${err_res}')
}
if s.ssl != 0 {
unsafe { C.SSL_free(voidptr(s.ssl)) }
@@ -85,7 +85,7 @@ pub fn (mut s SSLConn) shutdown() ! {
if s.sslctx != 0 {
C.SSL_CTX_free(s.sslctx)
}
- return error('Could not connect using SSL. ($err_res),err')
+ return error('Could not connect using SSL. (${err_res}),err')
} else if res == 0 {
continue
} else if res == 1 {
@@ -155,14 +155,14 @@ fn (mut s SSLConn) init() ! {
if s.config.cert != '' {
res = C.SSL_CTX_use_certificate_file(voidptr(s.sslctx), &char(cert.str), C.SSL_FILETYPE_PEM)
if s.config.validate && res != 1 {
- return error('http: openssl: SSL_CTX_use_certificate_file failed, res: $res')
+ return error('http: openssl: SSL_CTX_use_certificate_file failed, res: ${res}')
}
}
if s.config.cert_key != '' {
res = C.SSL_CTX_use_PrivateKey_file(voidptr(s.sslctx), &char(cert_key.str),
C.SSL_FILETYPE_PEM)
if s.config.validate && res != 1 {
- return error('http: openssl: SSL_CTX_use_PrivateKey_file failed, res: $res')
+ return error('http: openssl: SSL_CTX_use_PrivateKey_file failed, res: ${res}')
}
}
@@ -194,7 +194,7 @@ pub fn (mut s SSLConn) connect(mut tcp_conn net.TcpConn, hostname string) ! {
// dial opens an ssl connection on hostname:port
pub fn (mut s SSLConn) dial(hostname string, port int) ! {
s.owns_socket = true
- mut tcp_conn := net.dial_tcp('$hostname:$port') or { return err }
+ mut tcp_conn := net.dial_tcp('${hostname}:${port}') or { return err }
$if macos {
tcp_conn.set_blocking(true) or { return err }
}
@@ -223,7 +223,7 @@ fn (mut s SSLConn) complete_connect() ! {
}
continue
}
- return error('Could not connect using SSL. ($err_res),err')
+ return error('Could not connect using SSL. (${err_res}),err')
}
break
}
@@ -250,7 +250,7 @@ fn (mut s SSLConn) complete_connect() ! {
}
continue
}
- return error('Could not validate SSL certificate. ($err_res),err')
+ return error('Could not validate SSL certificate. (${err_res}),err')
}
break
}
@@ -294,7 +294,7 @@ pub fn (mut s SSLConn) socket_read_into_ptr(buf_ptr &u8, len int) !int {
return 0
}
else {
- return error('Could not read using SSL. ($err_res)')
+ return error('Could not read using SSL. (${err_res})')
}
}
}
@@ -336,7 +336,7 @@ pub fn (mut s SSLConn) write_ptr(bytes &u8, len int) !int {
} else if err_res == .ssl_error_zero_return {
return error('ssl write on closed connection') // Todo error_with_code close
}
- return error_with_code('Could not write SSL. ($err_res),err', int(err_res))
+ return error_with_code('Could not write SSL. (${err_res}),err', int(err_res))
}
total_sent += sent
}
diff --git a/vlib/net/smtp/smtp.v b/vlib/net/smtp/smtp.v
index fb1c5a6ad9..5724583c18 100644
--- a/vlib/net/smtp/smtp.v
+++ b/vlib/net/smtp/smtp.v
@@ -77,7 +77,7 @@ pub fn (mut c Client) reconnect() ! {
return error('Already connected to server')
}
- conn := net.dial_tcp('$c.server:$c.port') or { return error('Connecting to server failed') }
+ conn := net.dial_tcp('${c.server}:${c.port}') or { return error('Connecting to server failed') }
c.conn = conn
if c.ssl {
@@ -128,7 +128,7 @@ pub fn (mut c Client) quit() ! {
fn (mut c Client) connect_ssl() ! {
c.ssl_conn = ssl.new_ssl_conn()!
c.ssl_conn.connect(mut c.conn, c.server) or {
- return error('Connecting to server using OpenSSL failed: $err')
+ return error('Connecting to server using OpenSSL failed: ${err}')
}
c.reader = io.new_buffered_reader(reader: c.ssl_conn)
@@ -141,7 +141,7 @@ fn (mut c Client) expect_reply(expected ReplyCode) ! {
for {
str = c.reader.read_line()!
if str.len < 4 {
- return error('Invalid SMTP response: $str')
+ return error('Invalid SMTP response: ${str}')
}
if str.runes()[3] == `-` {
@@ -159,10 +159,10 @@ fn (mut c Client) expect_reply(expected ReplyCode) ! {
if str.len >= 3 {
status := str[..3].int()
if unsafe { ReplyCode(status) } != expected {
- return error('Received unexpected status code $status, expecting $expected')
+ return error('Received unexpected status code ${status}, expecting ${expected}')
}
} else {
- return error('Recieved unexpected SMTP data: $str')
+ return error('Received unexpected SMTP data: ${str}')
}
}
@@ -183,7 +183,7 @@ fn (mut c Client) send_str(s string) ! {
[inline]
fn (mut c Client) send_ehlo() ! {
- c.send_str('EHLO $c.server\r\n')!
+ c.send_str('EHLO ${c.server}\r\n')!
c.expect_reply(.action_ok)!
}
@@ -211,13 +211,13 @@ fn (mut c Client) send_auth() ! {
}
fn (mut c Client) send_mailfrom(from string) ! {
- c.send_str('MAIL FROM: <$from>\r\n')!
+ c.send_str('MAIL FROM: <${from}>\r\n')!
c.expect_reply(.action_ok)!
}
fn (mut c Client) send_mailto(to string) ! {
for rcpt in to.split(';') {
- c.send_str('RCPT TO: <$rcpt>\r\n')!
+ c.send_str('RCPT TO: <${rcpt}>\r\n')!
c.expect_reply(.action_ok)!
}
}
@@ -232,16 +232,16 @@ fn (mut c Client) send_body(cfg Mail) ! {
date := cfg.date.custom_format('ddd, D MMM YYYY HH:mm ZZ')
nonascii_subject := cfg.subject.bytes().any(it < u8(` `) || it > u8(`~`))
mut sb := strings.new_builder(200)
- sb.write_string('From: $cfg.from\r\n')
+ sb.write_string('From: ${cfg.from}\r\n')
sb.write_string('To: <${cfg.to.split(';').join('>; <')}>\r\n')
sb.write_string('Cc: <${cfg.cc.split(';').join('>; <')}>\r\n')
sb.write_string('Bcc: <${cfg.bcc.split(';').join('>; <')}>\r\n')
- sb.write_string('Date: $date\r\n')
+ sb.write_string('Date: ${date}\r\n')
if nonascii_subject {
// handle UTF-8 subjects according RFC 1342
sb.write_string('Subject: =?utf-8?B?' + base64.encode_str(cfg.subject) + '?=\r\n')
} else {
- sb.write_string('Subject: $cfg.subject\r\n')
+ sb.write_string('Subject: ${cfg.subject}\r\n')
}
if is_html {
diff --git a/vlib/net/tcp.v b/vlib/net/tcp.v
index cb27dd319c..0bc97f1961 100644
--- a/vlib/net/tcp.v
+++ b/vlib/net/tcp.v
@@ -23,7 +23,7 @@ mut:
pub fn dial_tcp(address string) !&TcpConn {
addrs := resolve_addrs_fuzzy(address, .tcp) or {
- return error('$err.msg(); could not resolve address $address in dial_tcp')
+ return error('${err.msg()}; could not resolve address ${address} in dial_tcp')
}
// Keep track of dialing errors that take place
@@ -32,7 +32,7 @@ pub fn dial_tcp(address string) !&TcpConn {
// Very simple dialer
for addr in addrs {
mut s := new_tcp_socket(addr.family()) or {
- return error('$err.msg(); could not create new tcp socket in dial_tcp')
+ return error('${err.msg()}; could not create new tcp socket in dial_tcp')
}
s.connect(addr) or {
errs << err
@@ -51,12 +51,12 @@ pub fn dial_tcp(address string) !&TcpConn {
// Once we've failed now try and explain why we failed to connect
// to any of these addresses
mut err_builder := strings.new_builder(1024)
- err_builder.write_string('dial_tcp failed for address $address\n')
+ err_builder.write_string('dial_tcp failed for address ${address}\n')
err_builder.write_string('tried addrs:\n')
for i := 0; i < errs.len; i++ {
addr := addrs[i]
why := errs[i]
- err_builder.write_string('\t$addr: $why\n')
+ err_builder.write_string('\t${addr}: ${why}\n')
}
// failed
@@ -66,13 +66,13 @@ pub fn dial_tcp(address string) !&TcpConn {
// bind local address and dial.
pub fn dial_tcp_with_bind(saddr string, laddr string) !&TcpConn {
addrs := resolve_addrs_fuzzy(saddr, .tcp) or {
- return error('$err.msg(); could not resolve address $saddr in dial_tcp_with_bind')
+ return error('${err.msg()}; could not resolve address ${saddr} in dial_tcp_with_bind')
}
// Very simple dialer
for addr in addrs {
mut s := new_tcp_socket(addr.family()) or {
- return error('$err.msg(); could not create new tcp socket in dial_tcp_with_bind')
+ return error('${err.msg()}; could not create new tcp socket in dial_tcp_with_bind')
}
s.bind(laddr) or {
s.close() or { continue }
@@ -91,7 +91,7 @@ pub fn dial_tcp_with_bind(saddr string, laddr string) !&TcpConn {
}
}
// failed
- return error('dial_tcp_with_bind failed for address $saddr')
+ return error('dial_tcp_with_bind failed for address ${saddr}')
}
pub fn (mut c TcpConn) close() ! {
@@ -104,7 +104,7 @@ pub fn (mut c TcpConn) close() ! {
pub fn (c TcpConn) read_ptr(buf_ptr &u8, len int) !int {
mut res := wrap_read_result(C.recv(c.sock.handle, voidptr(buf_ptr), len, 0))!
$if trace_tcp ? {
- eprintln('<<< TcpConn.read_ptr | c.sock.handle: $c.sock.handle | buf_ptr: ${ptr_str(buf_ptr)} len: $len | res: $res')
+ eprintln('<<< TcpConn.read_ptr | c.sock.handle: ${c.sock.handle} | buf_ptr: ${ptr_str(buf_ptr)} len: ${len} | res: ${res}')
}
if res > 0 {
$if trace_tcp_data_read ? {
@@ -118,7 +118,7 @@ pub fn (c TcpConn) read_ptr(buf_ptr &u8, len int) !int {
c.wait_for_read()!
res = wrap_read_result(C.recv(c.sock.handle, voidptr(buf_ptr), len, 0))!
$if trace_tcp ? {
- eprintln('<<< TcpConn.read_ptr | c.sock.handle: $c.sock.handle | buf_ptr: ${ptr_str(buf_ptr)} len: $len | res: $res')
+ eprintln('<<< TcpConn.read_ptr | c.sock.handle: ${c.sock.handle} | buf_ptr: ${ptr_str(buf_ptr)} len: ${len} | res: ${res}')
}
$if trace_tcp_data_read ? {
if res > 0 {
@@ -153,7 +153,7 @@ pub fn (mut c TcpConn) read_deadline() !time.Time {
pub fn (mut c TcpConn) write_ptr(b &u8, len int) !int {
$if trace_tcp ? {
eprintln(
- '>>> TcpConn.write_ptr | c.sock.handle: $c.sock.handle | b: ${ptr_str(b)} len: $len |\n' +
+ '>>> TcpConn.write_ptr | c.sock.handle: ${c.sock.handle} | b: ${ptr_str(b)} len: ${len} |\n' +
unsafe { b.vstring_with_len(len) })
}
$if trace_tcp_data_write ? {
@@ -257,7 +257,7 @@ pub fn (c &TcpConn) addr() !Addr {
pub fn (c TcpConn) str() string {
s := c.sock.str().replace('\n', ' ').replace(' ', ' ')
- return 'TcpConn{ write_deadline: $c.write_deadline, read_deadline: $c.read_deadline, read_timeout: $c.read_timeout, write_timeout: $c.write_timeout, sock: $s }'
+ return 'TcpConn{ write_deadline: ${c.write_deadline}, read_deadline: ${c.read_deadline}, read_timeout: ${c.read_timeout}, write_timeout: ${c.write_timeout}, sock: ${s} }'
}
pub struct TcpListener {
@@ -269,10 +269,10 @@ mut:
}
pub fn listen_tcp(family AddrFamily, saddr string) !&TcpListener {
- s := new_tcp_socket(family) or { return error('$err.msg(); could not create new socket') }
+ s := new_tcp_socket(family) or { return error('${err.msg()}; could not create new socket') }
addrs := resolve_addrs(saddr, family, .tcp) or {
- return error('$err.msg(); could not resolve address $saddr')
+ return error('${err.msg()}; could not resolve address ${saddr}')
}
// TODO(logic to pick here)
@@ -280,8 +280,8 @@ pub fn listen_tcp(family AddrFamily, saddr string) !&TcpListener {
// cast to the correct type
alen := addr.len()
- socket_error_message(C.bind(s.handle, voidptr(&addr), alen), 'binding to $saddr failed')!
- socket_error_message(C.listen(s.handle, 128), 'listening on $saddr failed')!
+ socket_error_message(C.bind(s.handle, voidptr(&addr), alen), 'binding to ${saddr} failed')!
+ socket_error_message(C.listen(s.handle, 128), 'listening on ${saddr} failed')!
return &TcpListener{
sock: s
accept_deadline: no_deadline
@@ -434,7 +434,7 @@ pub fn (mut s TcpSocket) set_option_int(opt SocketOption, value int) ! {
// bind a local rddress for TcpSocket
pub fn (mut s TcpSocket) bind(addr string) ! {
addrs := resolve_addrs(addr, AddrFamily.ip, .tcp) or {
- return error('$err.msg(); could not resolve address $addr')
+ return error('${err.msg()}; could not resolve address ${addr}')
}
// TODO(logic to pick here)
@@ -442,7 +442,7 @@ pub fn (mut s TcpSocket) bind(addr string) ! {
// cast to the correct type
alen := a.len()
- socket_error_message(C.bind(s.handle, voidptr(&a), alen), 'binding to $addr failed') or {
+ socket_error_message(C.bind(s.handle, voidptr(&a), alen), 'binding to ${addr} failed') or {
return err
}
}
diff --git a/vlib/net/tcp_self_dial_from_many_clients_test.v b/vlib/net/tcp_self_dial_from_many_clients_test.v
index 43cb5f0f96..40109e5393 100644
--- a/vlib/net/tcp_self_dial_from_many_clients_test.v
+++ b/vlib/net/tcp_self_dial_from_many_clients_test.v
@@ -23,7 +23,7 @@ mut:
}
fn elog(msg string) {
- eprintln('$time.now().format_ss_micro() | $msg')
+ eprintln('${time.now().format_ss_micro()} | ${msg}')
}
fn receive_data(mut con net.TcpConn, shared ctx Context) {
@@ -52,16 +52,16 @@ fn receive_data(mut con net.TcpConn, shared ctx Context) {
fn start_server(schannel chan int, shared ctx Context) {
elog('server: start_server')
- mut tcp_listener := net.listen_tcp(net.AddrFamily.ip, ':$xport') or {
- elog('server: start server error $err')
+ mut tcp_listener := net.listen_tcp(net.AddrFamily.ip, ':${xport}') or {
+ elog('server: start server error ${err}')
return
}
- elog('server: server started listening at port :$xport')
+ elog('server: server started listening at port :${xport}')
schannel <- 0
for {
mut tcp_con := tcp_listener.accept() or {
- elog('server: accept error: $err')
+ elog('server: accept error: ${err}')
lock ctx {
ctx.fail_server_accepts++
}
@@ -71,15 +71,15 @@ fn start_server(schannel chan int, shared ctx Context) {
lock ctx {
ctx.ok_server_accepts++
}
- elog('server: new tcp connection con.sock.handle: $tcp_con.sock.handle')
+ elog('server: new tcp connection con.sock.handle: ${tcp_con.sock.handle}')
continue
}
}
fn start_client(i int, shared ctx Context) {
- elog('client [$i]: start')
- mut tcp_con := net.dial_tcp('127.0.0.1:$xport') or {
- elog('client [$i]: net.dial_tcp err $err')
+ elog('client [${i}]: start')
+ mut tcp_con := net.dial_tcp('127.0.0.1:${xport}') or {
+ elog('client [${i}]: net.dial_tcp err ${err}')
lock ctx {
ctx.fail_client_dials++
}
@@ -88,12 +88,12 @@ fn start_client(i int, shared ctx Context) {
lock ctx {
ctx.ok_client_dials++
}
- elog('client [$i]: conn is connected, con.sock.handle: $tcp_con.sock.handle')
- tcp_con.write([u8(i)]) or { elog('client [$i]: write failed, err: $err') }
+ elog('client [${i}]: conn is connected, con.sock.handle: ${tcp_con.sock.handle}')
+ tcp_con.write([u8(i)]) or { elog('client [${i}]: write failed, err: ${err}') }
time.sleep(1 * time.second)
- elog('client [$i]: closing connection...')
+ elog('client [${i}]: closing connection...')
tcp_con.close() or {
- elog('client [$i]: close failed, err: $err')
+ elog('client [${i}]: close failed, err: ${err}')
lock ctx {
ctx.fail_client_close++
}
@@ -114,7 +114,7 @@ fn test_tcp_self_dialing() {
elog('>>> server was started: ${svalue}. Starting clients:')
for i := int(0); i < 20; i++ {
spawn start_client(i, shared ctx)
- elog('>>> started client $i')
+ elog('>>> started client ${i}')
// time.sleep(2 * time.millisecond)
}
max_dt := 5 * time.second
@@ -122,12 +122,12 @@ fn test_tcp_self_dialing() {
t := time.now()
dt := t - start_time
if dt > max_dt {
- elog('>>> exiting after $dt.milliseconds() ms ...')
+ elog('>>> exiting after ${dt.milliseconds()} ms ...')
lock ctx {
// TODO: fix `dump(ctx)`, when `shared ctx := Type{}`
final_value_for_ctx := ctx // make a value copy as a temporary workaround. TODO: remove when dump(ctx) works.
dump(final_value_for_ctx)
- assert ctx.fail_client_dials < 2, 'allowed failed client dials, from $ctx.ok_server_accepts connections'
+ assert ctx.fail_client_dials < 2, 'allowed failed client dials, from ${ctx.ok_server_accepts} connections'
assert ctx.received.len > ctx.ok_server_accepts / 2, 'at least half the clients sent some data, that was later received by the server'
}
elog('>>> goodbye')
diff --git a/vlib/net/tcp_simple_client_server_test.v b/vlib/net/tcp_simple_client_server_test.v
index 2125ee1fc5..a19ed36026 100644
--- a/vlib/net/tcp_simple_client_server_test.v
+++ b/vlib/net/tcp_simple_client_server_test.v
@@ -15,12 +15,12 @@ fn setup() (&net.TcpListener, &net.TcpConn, &net.TcpConn) {
c := chan &net.TcpConn{}
spawn accept(mut server, c)
- mut client := net.dial_tcp('localhost$server_port') or { panic(err) }
+ mut client := net.dial_tcp('localhost${server_port}') or { panic(err) }
socket := <-c
$if debug_peer_ip ? {
- eprintln('$server.addr()\n$client.peer_addr(), $client.addr()\n$socket.peer_addr(), $socket.addr()')
+ eprintln('${server.addr()}\n${client.peer_addr()}, ${client.addr()}\n${socket.peer_addr()}, ${socket.addr()}')
}
assert true
return server, client, socket
@@ -44,10 +44,10 @@ fn test_socket() {
}
assert true
$if debug {
- println('message send: $message')
+ println('message send: ${message}')
}
$if debug {
- println('send socket: $socket.sock.handle')
+ println('send socket: ${socket.sock.handle}')
}
mut buf := []u8{len: 1024}
nbytes := client.read(mut buf) or {
@@ -56,10 +56,10 @@ fn test_socket() {
}
received := buf[0..nbytes].bytestr()
$if debug {
- println('message received: $received')
+ println('message received: ${received}')
}
$if debug {
- println('client: $client.sock.handle')
+ println('client: ${client.sock.handle}')
}
assert message == received
}
@@ -87,7 +87,7 @@ fn test_socket_read_line() {
cleanup(mut server, mut client, mut socket)
}
message1, message2 := 'message1', 'message2'
- message := '$message1\n$message2\n'
+ message := '${message1}\n${message2}\n'
socket.write_string(message) or { assert false }
assert true
//
@@ -124,7 +124,7 @@ fn test_socket_write_fail_without_panic() {
// TODO: fix segfaulting on Solaris and FreeBSD
for i := 0; i < 3; i++ {
socket.write_string(message2) or {
- println('write to a socket without a recipient should produce an option fail: $err | $message2')
+ println('write to a socket without a recipient should produce an option fail: ${err} | ${message2}')
assert true
}
}
diff --git a/vlib/net/tcp_test.v b/vlib/net/tcp_test.v
index 2c6c9e2f5d..044fb00116 100644
--- a/vlib/net/tcp_test.v
+++ b/vlib/net/tcp_test.v
@@ -25,7 +25,7 @@ fn one_shot_echo_server(mut l net.TcpListener, ch_started chan int) ? {
eprintln('> one_shot_echo_server')
ch_started <- 1
mut new_conn := l.accept() or { return error('could not accept') }
- eprintln(' > new_conn: $new_conn')
+ eprintln(' > new_conn: ${new_conn}')
handle_conn(mut new_conn)
new_conn.close() or {}
}
@@ -47,13 +47,13 @@ fn echo(address string) ? {
for i := 0; i < read; i++ {
assert buf[i] == data[i]
}
- println('Got "$buf.bytestr()"')
+ println('Got "${buf.bytestr()}"')
}
fn test_tcp_ip6() {
eprintln('\n>>> ${@FN}')
- address := 'localhost:$test_port'
- mut l := net.listen_tcp(.ip6, ':$test_port') or { panic(err) }
+ address := 'localhost:${test_port}'
+ mut l := net.listen_tcp(.ip6, ':${test_port}') or { panic(err) }
dump(l)
start_echo_server(mut l)
echo(address) or { panic(err) }
@@ -70,7 +70,7 @@ fn start_echo_server(mut l net.TcpListener) {
fn test_tcp_ip() {
eprintln('\n>>> ${@FN}')
- address := 'localhost:$test_port'
+ address := 'localhost:${test_port}'
mut l := net.listen_tcp(.ip, address) or { panic(err) }
dump(l)
start_echo_server(mut l)
@@ -86,7 +86,7 @@ fn test_tcp_unix() {
$if !windows {
address := os.real_path('tcp-test.sock')
// address := 'tcp-test.sock'
- println('$address')
+ println('${address}')
mut l := net.listen_tcp(.unix, address) or { panic(err) }
start_echo_server(mut l)
diff --git a/vlib/net/udp_test.v b/vlib/net/udp_test.v
index a86ce5ddb0..4bc7220eac 100644
--- a/vlib/net/udp_test.v
+++ b/vlib/net/udp_test.v
@@ -4,10 +4,10 @@ import time
fn echo_server(mut c net.UdpConn) {
mut count := 0
for {
- eprintln('> echo_server loop count: $count')
+ eprintln('> echo_server loop count: ${count}')
mut buf := []u8{len: 100}
read, addr := c.read(mut buf) or { continue }
- eprintln('Server got addr $addr, read: $read | buf: $buf')
+ eprintln('Server got addr ${addr}, read: ${read} | buf: ${buf}')
c.write_to(addr, buf[..read]) or {
println('Server: connection dropped')
return
@@ -25,19 +25,19 @@ fn echo_server(mut c net.UdpConn) {
const server_addr = '127.0.0.1:40003'
fn echo() ! {
- mut c := net.dial_udp(server_addr) or { panic('could not net.dial_udp: $err') }
+ mut c := net.dial_udp(server_addr) or { panic('could not net.dial_udp: ${err}') }
defer {
c.close() or {}
}
data := 'Hello from vlib/net!'
- c.write_string(data) or { panic('could not write_string: $err') }
+ c.write_string(data) or { panic('could not write_string: ${err}') }
mut buf := []u8{len: 100, init: 0}
- read, addr := c.read(mut buf) or { panic('could not read: $err') }
+ read, addr := c.read(mut buf) or { panic('could not read: ${err}') }
assert read == data.len
- println('Got address $addr')
+ println('Got address ${addr}')
// Can't test this here because loopback addresses
// are mapped to other addresses
// assert addr.str() == '127.0.0.1:30001'
@@ -46,16 +46,16 @@ fn echo() ! {
assert buf[i] == data[i]
}
- println('Got "$buf.bytestr()"')
+ println('Got "${buf.bytestr()}"')
c.close()!
}
fn test_udp() {
- mut l := net.listen_udp(server_addr) or { panic('could not listen_udp: $err') }
+ mut l := net.listen_udp(server_addr) or { panic('could not listen_udp: ${err}') }
spawn echo_server(mut l)
- echo() or { panic('could not echo: $err') }
+ echo() or { panic('could not echo: ${err}') }
l.close() or {}
}
diff --git a/vlib/net/unix/stream_nix.v b/vlib/net/unix/stream_nix.v
index d13b0c7bd7..f924332d5c 100644
--- a/vlib/net/unix/stream_nix.v
+++ b/vlib/net/unix/stream_nix.v
@@ -173,7 +173,7 @@ pub fn (mut c StreamConn) close() ! {
pub fn (mut c StreamConn) write_ptr(b &u8, len int) !int {
$if trace_unix ? {
eprintln(
- '>>> StreamConn.write_ptr | c.sock.handle: $c.sock.handle | b: ${ptr_str(b)} len: $len |\n' +
+ '>>> StreamConn.write_ptr | c.sock.handle: ${c.sock.handle} | b: ${ptr_str(b)} len: ${len} |\n' +
unsafe { b.vstring_with_len(len) })
}
unsafe {
@@ -211,7 +211,7 @@ pub fn (mut c StreamConn) write_string(s string) !int {
pub fn (mut c StreamConn) read_ptr(buf_ptr &u8, len int) !int {
mut res := wrap_read_result(C.recv(c.sock.handle, voidptr(buf_ptr), len, 0))!
$if trace_unix ? {
- eprintln('<<< StreamConn.read_ptr | c.sock.handle: $c.sock.handle | buf_ptr: ${ptr_str(buf_ptr)} len: $len | res: $res')
+ eprintln('<<< StreamConn.read_ptr | c.sock.handle: ${c.sock.handle} | buf_ptr: ${ptr_str(buf_ptr)} len: ${len} | res: ${res}')
}
if res > 0 {
return res
@@ -221,7 +221,7 @@ pub fn (mut c StreamConn) read_ptr(buf_ptr &u8, len int) !int {
c.wait_for_read()!
res = wrap_read_result(C.recv(c.sock.handle, voidptr(buf_ptr), len, 0))!
$if trace_unix ? {
- eprintln('<<< StreamConn.read_ptr | c.sock.handle: $c.sock.handle | buf_ptr: ${ptr_str(buf_ptr)} len: $len | res: $res')
+ eprintln('<<< StreamConn.read_ptr | c.sock.handle: ${c.sock.handle} | buf_ptr: ${ptr_str(buf_ptr)} len: ${len} | res: ${res}')
}
return net.socket_error(res)
} else {
@@ -284,5 +284,5 @@ pub fn (mut c StreamConn) wait_for_write() ! {
pub fn (c StreamConn) str() string {
s := c.sock.str().replace('\n', ' ').replace(' ', ' ')
- return 'StreamConn{ write_deadline: $c.write_deadline, read_deadline: $c.read_deadline, read_timeout: $c.read_timeout, write_timeout: $c.write_timeout, sock: $s }'
+ return 'StreamConn{ write_deadline: ${c.write_deadline}, read_deadline: ${c.read_deadline}, read_timeout: ${c.read_timeout}, write_timeout: ${c.write_timeout}, sock: ${s} }'
}
diff --git a/vlib/net/unix/unix_test.v b/vlib/net/unix/unix_test.v
index 7526a1469c..f41247b7c7 100644
--- a/vlib/net/unix/unix_test.v
+++ b/vlib/net/unix/unix_test.v
@@ -47,7 +47,7 @@ fn echo() ! {
for i := 0; i < read; i++ {
assert buf[i] == data[i]
}
- println('Got "$buf.bytestr()"')
+ println('Got "${buf.bytestr()}"')
return
}
diff --git a/vlib/net/unix/use_net_and_net_unix_together_test.v b/vlib/net/unix/use_net_and_net_unix_together_test.v
index 358ae799b9..2a835a62e2 100644
--- a/vlib/net/unix/use_net_and_net_unix_together_test.v
+++ b/vlib/net/unix/use_net_and_net_unix_together_test.v
@@ -45,7 +45,7 @@ fn handle_conn(mut c unix.StreamConn) ! {
for {
mut buf := []u8{len: 100, init: 0}
read := c.read(mut buf) or { return perror('Server: connection dropped') }
- eprintln('> server read ${read:3}, buf: |$buf.bytestr()|')
+ eprintln('> server read ${read:3}, buf: |${buf.bytestr()}|')
c.write(buf[..read]) or { return perror('Server: connection dropped') }
}
}
diff --git a/vlib/net/urllib/urllib.v b/vlib/net/urllib/urllib.v
index 1b322a24be..46eb58267e 100644
--- a/vlib/net/urllib/urllib.v
+++ b/vlib/net/urllib/urllib.v
@@ -26,9 +26,9 @@ const (
)
fn error_msg(message string, val string) string {
- mut msg := 'net.urllib.$message'
+ mut msg := 'net.urllib.${message}'
if val != '' {
- msg = '$msg ($val)'
+ msg = '${msg} (${val})'
}
return msg
}
@@ -211,7 +211,7 @@ fn unescape(s_ string, mode EncodingMode) !string {
}
}
if n == 0 && !has_plus {
- return '$s' // TODO `return s` once an autofree bug is fixed
+ return '${s}' // TODO `return s` once an autofree bug is fixed
}
if s.len < 2 * n {
return error(error_msg('unescape: invalid escape sequence', ''))
@@ -331,7 +331,7 @@ pub mut:
// debug returns a string representation of *ALL* the fields of the given URL
pub fn (url &URL) debug() string {
- return 'URL{\n scheme: $url.scheme\n opaque: $url.opaque\n user: $url.user\n host: $url.host\n path: $url.path\n raw_path: $url.raw_path\n force_query: $url.force_query\n raw_query: $url.raw_query\n fragment: $url.fragment\n}'
+ return 'URL{\n scheme: ${url.scheme}\n opaque: ${url.opaque}\n user: ${url.user}\n host: ${url.host}\n path: ${url.path}\n raw_path: ${url.raw_path}\n force_query: ${url.force_query}\n raw_query: ${url.raw_query}\n fragment: ${url.fragment}\n}'
}
// user returns a Userinfo containing the provided username
@@ -584,7 +584,7 @@ fn parse_host(host string) !string {
}
mut colon_port := host[i + 1..]
if !valid_optional_port(colon_port) {
- return error(error_msg('parse_host: invalid port $colon_port after host ',
+ return error(error_msg('parse_host: invalid port ${colon_port} after host ',
''))
}
// RFC 6874 defines that %25 (%-encoded percent) introduces
@@ -602,7 +602,7 @@ fn parse_host(host string) !string {
} else if i := host.last_index(':') {
colon_port := host[i..]
if !valid_optional_port(colon_port) {
- return error(error_msg('parse_host: invalid port $colon_port after host ',
+ return error(error_msg('parse_host: invalid port ${colon_port} after host ',
''))
}
}
diff --git a/vlib/net/websocket/events.v b/vlib/net/websocket/events.v
index 6f04809750..688298dc67 100644
--- a/vlib/net/websocket/events.v
+++ b/vlib/net/websocket/events.v
@@ -171,10 +171,12 @@ fn (mut ws Client) send_message_event(msg &Message) {
ws.debug_log('sending on_message event')
for ev_handler in ws.message_callbacks {
if !ev_handler.is_ref {
- ev_handler.handler(mut ws, msg) or { ws.logger.error('send_message_event error: $err') }
+ ev_handler.handler(mut ws, msg) or {
+ ws.logger.error('send_message_event error: ${err}')
+ }
} else {
ev_handler.handler2(mut ws, msg, ev_handler.ref) or {
- ws.logger.error('send_message_event error: $err')
+ ws.logger.error('send_message_event error: ${err}')
}
}
}
@@ -186,11 +188,11 @@ fn (mut ws Client) send_error_event(error string) {
for ev_handler in ws.error_callbacks {
if !ev_handler.is_ref {
ev_handler.handler(mut ws, error) or {
- ws.logger.error('send_error_event error: $error, err: $err')
+ ws.logger.error('send_error_event error: ${error}, err: ${err}')
}
} else {
ev_handler.handler2(mut ws, error, ev_handler.ref) or {
- ws.logger.error('send_error_event error: $error, err: $err')
+ ws.logger.error('send_error_event error: ${error}, err: ${err}')
}
}
}
@@ -202,11 +204,11 @@ fn (mut ws Client) send_close_event(code int, reason string) {
for ev_handler in ws.close_callbacks {
if !ev_handler.is_ref {
ev_handler.handler(mut ws, code, reason) or {
- ws.logger.error('send_close_event error: $err')
+ ws.logger.error('send_close_event error: ${err}')
}
} else {
ev_handler.handler2(mut ws, code, reason, ev_handler.ref) or {
- ws.logger.error('send_close_event error: $err')
+ ws.logger.error('send_close_event error: ${err}')
}
}
}
@@ -217,10 +219,10 @@ fn (mut ws Client) send_open_event() {
ws.debug_log('sending on_open event')
for ev_handler in ws.open_callbacks {
if !ev_handler.is_ref {
- ev_handler.handler(mut ws) or { ws.logger.error('send_open_event error: $err') }
+ ev_handler.handler(mut ws) or { ws.logger.error('send_open_event error: ${err}') }
} else {
ev_handler.handler2(mut ws, ev_handler.ref) or {
- ws.logger.error('send_open_event error: $err')
+ ws.logger.error('send_open_event error: ${err}')
}
}
}
diff --git a/vlib/net/websocket/handshake.v b/vlib/net/websocket/handshake.v
index 82d05b731d..8734b8b4ab 100644
--- a/vlib/net/websocket/handshake.v
+++ b/vlib/net/websocket/handshake.v
@@ -25,7 +25,7 @@ fn (mut ws Client) handshake() ! {
sb.write_string('\r\nSec-WebSocket-Version: 13')
for key in ws.header.keys() {
val := ws.header.custom_values(key).join(',')
- sb.write_string('\r\n$key:$val')
+ sb.write_string('\r\n${key}:${val}')
}
sb.write_string('\r\n\r\n')
handshake := sb.str()
@@ -33,7 +33,7 @@ fn (mut ws Client) handshake() ! {
unsafe { handshake.free() }
}
handshake_bytes := handshake.bytes()
- ws.debug_log('sending handshake: $handshake')
+ ws.debug_log('sending handshake: ${handshake}')
ws.socket_write(handshake_bytes)!
ws.read_handshake(seckey)!
unsafe { handshake_bytes.free() }
@@ -49,18 +49,18 @@ fn (mut s Server) handle_server_handshake(mut c Client) !(string, &ServerClient)
// parse_client_handshake parses result from handshake process
fn (mut s Server) parse_client_handshake(client_handshake string, mut c Client) !(string, &ServerClient) {
- s.logger.debug('server-> client handshake:\n$client_handshake')
+ s.logger.debug('server-> client handshake:\n${client_handshake}')
lines := client_handshake.split_into_lines()
get_tokens := lines[0].split(' ')
if get_tokens.len < 3 {
- return error_with_code('unexpected get operation, $get_tokens', 1)
+ return error_with_code('unexpected get operation, ${get_tokens}', 1)
}
if get_tokens[0].trim_space() != 'GET' {
return error_with_code("unexpected request '${get_tokens[0]}', expected 'GET'",
2)
}
if get_tokens[2].trim_space() != 'HTTP/1.1' {
- return error_with_code("unexpected request $get_tokens, expected 'HTTP/1.1'",
+ return error_with_code("unexpected request ${get_tokens}, expected 'HTTP/1.1'",
3)
}
mut seckey := ''
@@ -80,9 +80,9 @@ fn (mut s Server) parse_client_handshake(client_handshake string, mut c Client)
}
'Sec-WebSocket-Key', 'sec-websocket-key' {
key = keys[1].trim_space()
- s.logger.debug('server-> got key: $key')
+ s.logger.debug('server-> got key: ${key}')
seckey = create_key_challenge_response(key)!
- s.logger.debug('server-> challenge: $seckey, response: ${keys[1]}')
+ s.logger.debug('server-> challenge: ${seckey}, response: ${keys[1]}')
flags << .has_accept
}
else {
@@ -92,9 +92,9 @@ fn (mut s Server) parse_client_handshake(client_handshake string, mut c Client)
unsafe { keys.free() }
}
if flags.len < 3 {
- return error_with_code('invalid client handshake, $client_handshake', 4)
+ return error_with_code('invalid client handshake, ${client_handshake}', 4)
}
- server_handshake := 'HTTP/1.1 101 Switching Protocols\r\nUpgrade: websocket\r\nConnection: Upgrade\r\nSec-WebSocket-Accept: $seckey\r\n\r\n'
+ server_handshake := 'HTTP/1.1 101 Switching Protocols\r\nUpgrade: websocket\r\nConnection: Upgrade\r\nSec-WebSocket-Accept: ${seckey}\r\n\r\n'
server_client := &ServerClient{
resource_name: get_tokens[1]
client_key: key
@@ -143,11 +143,11 @@ fn (mut ws Client) read_handshake(seckey string) ! {
// check_handshake_response checks the response from handshake and returns
// the response and secure key provided by the websocket client
fn (mut ws Client) check_handshake_response(handshake_response string, seckey string) ! {
- ws.debug_log('handshake response:\n$handshake_response')
+ ws.debug_log('handshake response:\n${handshake_response}')
lines := handshake_response.split_into_lines()
header := lines[0]
if !header.starts_with('HTTP/1.1 101') && !header.starts_with('HTTP/1.0 101') {
- return error_with_code('handshake_handler: invalid HTTP status response code, $header',
+ return error_with_code('handshake_handler: invalid HTTP status response code, ${header}',
6)
}
for i in 1 .. lines.len {
@@ -163,9 +163,9 @@ fn (mut ws Client) check_handshake_response(handshake_response string, seckey st
ws.flags << .has_connection
}
'Sec-WebSocket-Accept', 'sec-websocket-accept' {
- ws.debug_log('seckey: $seckey')
+ ws.debug_log('seckey: ${seckey}')
challenge := create_key_challenge_response(seckey)!
- ws.debug_log('challenge: $challenge, response: ${keys[1]}')
+ ws.debug_log('challenge: ${challenge}, response: ${keys[1]}')
if keys[1].trim_space() != challenge {
return error_with_code('handshake_handler: Sec-WebSocket-Accept header does not match computed sha1/base64 response.',
7)
diff --git a/vlib/net/websocket/io.v b/vlib/net/websocket/io.v
index ac2b56ae7f..6d456aa8ff 100644
--- a/vlib/net/websocket/io.v
+++ b/vlib/net/websocket/io.v
@@ -72,7 +72,7 @@ fn (mut ws Client) shutdown_socket() ! {
// dial_socket connects tcp socket and initializes default configurations
fn (mut ws Client) dial_socket() !&net.TcpConn {
- tcp_address := '$ws.uri.hostname:$ws.uri.port'
+ tcp_address := '${ws.uri.hostname}:${ws.uri.port}'
mut t := net.dial_tcp(tcp_address)!
optval := int(1)
t.sock.set_option_int(.keep_alive, optval)!
diff --git a/vlib/net/websocket/message.v b/vlib/net/websocket/message.v
index 2f90f48bee..6ba9aec70a 100644
--- a/vlib/net/websocket/message.v
+++ b/vlib/net/websocket/message.v
@@ -64,7 +64,7 @@ pub fn (mut ws Client) validate_frame(frame &Frame) ! {
}
}
if frame.fin == false && ws.fragments.len == 0 && frame.opcode == .continuation {
- err_msg := 'unexecpected continuation, there are no frames to continue, $frame'
+ err_msg := 'unexecpected continuation, there are no frames to continue, ${frame}'
ws.close(1002, err_msg)!
return error(err_msg)
}
@@ -111,7 +111,7 @@ fn (mut ws Client) read_payload(frame &Frame) ![]u8 {
// - Future implementation needs to support fail fast utf errors for strict autobahn conformance
fn (mut ws Client) validate_utf_8(opcode OPCode, payload []u8) ! {
if opcode in [.text_frame, .close] && !utf8.validate(payload.data, payload.len) {
- ws.logger.error('malformed utf8 payload, payload len: ($payload.len)')
+ ws.logger.error('malformed utf8 payload, payload len: (${payload.len})')
ws.send_error_event('Recieved malformed utf8.')
ws.close(1007, 'malformed utf8 payload')!
return error('malformed utf8 payload')
@@ -146,8 +146,8 @@ pub fn (mut ws Client) read_next_message() !Message {
}
if ws.fragments.len == 0 {
ws.validate_utf_8(frame.opcode, frame_payload) or {
- ws.logger.error('UTF8 validation error: $err, len of payload($frame_payload.len)')
- ws.send_error_event('UTF8 validation error: $err, len of payload($frame_payload.len)')
+ ws.logger.error('UTF8 validation error: ${err}, len of payload(${frame_payload.len})')
+ ws.send_error_event('UTF8 validation error: ${err}, len of payload(${frame_payload.len})')
return err
}
msg := Message{
diff --git a/vlib/net/websocket/tests/autobahn/autobahn_client.v b/vlib/net/websocket/tests/autobahn/autobahn_client.v
index 739f42b812..352df94aa3 100644
--- a/vlib/net/websocket/tests/autobahn/autobahn_client.v
+++ b/vlib/net/websocket/tests/autobahn/autobahn_client.v
@@ -5,8 +5,8 @@ import net.websocket
fn main() {
for i in 1 .. 304 {
- println('\ncase: $i')
- handle_case(i) or { println('error should be ok: $err') }
+ println('\ncase: ${i}')
+ handle_case(i) or { println('error should be ok: ${err}') }
}
// update the reports
uri := 'ws://autobahn_server:9001/updateReports?agent=v-client'
@@ -16,7 +16,7 @@ fn main() {
}
fn handle_case(case_nr int) ! {
- uri := 'ws://autobahn_server:9001/runCase?case=$case_nr&agent=v-client'
+ uri := 'ws://autobahn_server:9001/runCase?case=${case_nr}&agent=v-client'
mut ws := websocket.new_client(uri)!
ws.on_message(on_message)
ws.connect()!
diff --git a/vlib/net/websocket/tests/autobahn/autobahn_client_wss.v b/vlib/net/websocket/tests/autobahn/autobahn_client_wss.v
index 887ea80bd3..4a03f83da4 100644
--- a/vlib/net/websocket/tests/autobahn/autobahn_client_wss.v
+++ b/vlib/net/websocket/tests/autobahn/autobahn_client_wss.v
@@ -5,8 +5,8 @@ import net.websocket
fn main() {
for i in 1 .. 304 {
- println('\ncase: $i')
- handle_case(i) or { println('error should be ok: $err') }
+ println('\ncase: ${i}')
+ handle_case(i) or { println('error should be ok: ${err}') }
}
// update the reports
// uri := 'wss://localhost:9002/updateReports?agent=v-client'
@@ -17,7 +17,7 @@ fn main() {
}
fn handle_case(case_nr int) ! {
- uri := 'wss://autobahn_server_wss:9002/runCase?case=$case_nr&agent=v-client'
+ uri := 'wss://autobahn_server_wss:9002/runCase?case=${case_nr}&agent=v-client'
// uri := 'wss://localhost:9002/runCase?case=$case_nr&agent=v-client'
mut ws := websocket.new_client(uri)!
ws.on_message(on_message)
diff --git a/vlib/net/websocket/tests/autobahn/autobahn_server.v b/vlib/net/websocket/tests/autobahn/autobahn_server.v
index 96cfc1669f..b9d6fa1247 100644
--- a/vlib/net/websocket/tests/autobahn/autobahn_server.v
+++ b/vlib/net/websocket/tests/autobahn/autobahn_server.v
@@ -10,7 +10,7 @@ fn main() {
}
fn handle_case(case_nr int) ! {
- uri := 'ws://localhost:9002/runCase?case=$case_nr&agent=v-client'
+ uri := 'ws://localhost:9002/runCase?case=${case_nr}&agent=v-client'
mut ws := websocket.new_client(uri)!
ws.on_message(on_message)
ws.connect()!
diff --git a/vlib/net/websocket/tests/autobahn/local_run/autobahn_client.v b/vlib/net/websocket/tests/autobahn/local_run/autobahn_client.v
index 5e78aa9512..f10eaee7cd 100644
--- a/vlib/net/websocket/tests/autobahn/local_run/autobahn_client.v
+++ b/vlib/net/websocket/tests/autobahn/local_run/autobahn_client.v
@@ -5,8 +5,8 @@ import net.websocket
fn main() {
for i in 1 .. 304 {
- println('\ncase: $i')
- handle_case(i) or { println('error should be ok: $err') }
+ println('\ncase: ${i}')
+ handle_case(i) or { println('error should be ok: ${err}') }
}
// update the reports
uri := 'ws://localhost:9001/updateReports?agent=v-client'
@@ -16,7 +16,7 @@ fn main() {
}
fn handle_case(case_nr int) ! {
- uri := 'ws://localhost:9001/runCase?case=$case_nr&agent=v-client'
+ uri := 'ws://localhost:9001/runCase?case=${case_nr}&agent=v-client'
mut ws := websocket.new_client(uri)!
ws.on_message(on_message)
ws.connect()!
diff --git a/vlib/net/websocket/tests/autobahn/local_run/autobahn_client_wss.v b/vlib/net/websocket/tests/autobahn/local_run/autobahn_client_wss.v
index 887ea80bd3..4a03f83da4 100644
--- a/vlib/net/websocket/tests/autobahn/local_run/autobahn_client_wss.v
+++ b/vlib/net/websocket/tests/autobahn/local_run/autobahn_client_wss.v
@@ -5,8 +5,8 @@ import net.websocket
fn main() {
for i in 1 .. 304 {
- println('\ncase: $i')
- handle_case(i) or { println('error should be ok: $err') }
+ println('\ncase: ${i}')
+ handle_case(i) or { println('error should be ok: ${err}') }
}
// update the reports
// uri := 'wss://localhost:9002/updateReports?agent=v-client'
@@ -17,7 +17,7 @@ fn main() {
}
fn handle_case(case_nr int) ! {
- uri := 'wss://autobahn_server_wss:9002/runCase?case=$case_nr&agent=v-client'
+ uri := 'wss://autobahn_server_wss:9002/runCase?case=${case_nr}&agent=v-client'
// uri := 'wss://localhost:9002/runCase?case=$case_nr&agent=v-client'
mut ws := websocket.new_client(uri)!
ws.on_message(on_message)
diff --git a/vlib/net/websocket/websocket_client.v b/vlib/net/websocket/websocket_client.v
index 6c01168592..d7cc1087ea 100644
--- a/vlib/net/websocket/websocket_client.v
+++ b/vlib/net/websocket/websocket_client.v
@@ -109,21 +109,21 @@ pub fn new_client(address string, opt ClientOpt) !&Client {
pub fn (mut ws Client) connect() ! {
ws.assert_not_connected()!
ws.set_state(.connecting)
- ws.logger.info('connecting to host $ws.uri')
+ ws.logger.info('connecting to host ${ws.uri}')
ws.conn = ws.dial_socket()!
ws.handshake()!
ws.set_state(.open)
- ws.logger.info('successfully connected to host $ws.uri')
+ ws.logger.info('successfully connected to host ${ws.uri}')
ws.send_open_event()
}
// listen listens and processes incoming messages
pub fn (mut ws Client) listen() ! {
- mut log := 'Starting client listener, server($ws.is_server)...'
+ mut log := 'Starting client listener, server(${ws.is_server})...'
ws.logger.info(log)
unsafe { log.free() }
defer {
- ws.logger.info('Quit client listener, server($ws.is_server)...')
+ ws.logger.info('Quit client listener, server(${ws.is_server})...')
if ws.state == .open {
ws.close(1000, 'closed by client') or {}
}
@@ -133,14 +133,14 @@ pub fn (mut ws Client) listen() ! {
if ws.state in [.closed, .closing] {
return
}
- ws.debug_log('failed to read next message: $err')
- ws.send_error_event('failed to read next message: $err')
+ ws.debug_log('failed to read next message: ${err}')
+ ws.send_error_event('failed to read next message: ${err}')
return err
}
if ws.state in [.closed, .closing] {
return
}
- ws.debug_log('got message: $msg.opcode')
+ ws.debug_log('got message: ${msg.opcode}')
match msg.opcode {
.text_frame {
log = 'read: text'
@@ -157,8 +157,8 @@ pub fn (mut ws Client) listen() ! {
.ping {
ws.debug_log('read: ping, sending pong')
ws.send_control_frame(.pong, 'PONG', msg.payload) or {
- ws.logger.error('error in message callback sending PONG: $err')
- ws.send_error_event('error in message callback sending PONG: $err')
+ ws.logger.error('error in message callback sending PONG: ${err}')
+ ws.send_error_event('error in message callback sending PONG: ${err}')
if ws.panic_on_callback {
panic(err)
}
@@ -190,8 +190,8 @@ pub fn (mut ws Client) listen() ! {
}
code := u16(msg.payload[0]) << 8 | u16(msg.payload[1])
if code in invalid_close_codes {
- ws.close(1002, 'invalid close code: $code')!
- return error('invalid close code: $code')
+ ws.close(1002, 'invalid close code: ${code}')!
+ return error('invalid close code: ${code}')
}
reason := if msg.payload.len > 2 { msg.payload[2..] } else { []u8{} }
if reason.len > 0 {
@@ -199,7 +199,7 @@ pub fn (mut ws Client) listen() ! {
}
if ws.state !in [.closing, .closed] {
// sending close back according to spec
- ws.debug_log('close with reason, code: $code, reason: $reason')
+ ws.debug_log('close with reason, code: ${code}, reason: ${reason}')
r := reason.bytestr()
ws.close(code, r)!
}
@@ -328,10 +328,10 @@ pub fn (mut ws Client) write_string(str string) !int {
// close closes the websocket connection
pub fn (mut ws Client) close(code int, message string) ! {
- ws.debug_log('sending close, $code, $message')
+ ws.debug_log('sending close, ${code}, ${message}')
if ws.state in [.closed, .closing] || ws.conn.sock.handle <= 1 {
- ws.debug_log('close: Websocket allready closed ($ws.state), $message, $code handle($ws.conn.sock.handle)')
- err_msg := 'Socket allready closed: $code'
+ ws.debug_log('close: Websocket allready closed (${ws.state}), ${message}, ${code} handle(${ws.conn.sock.handle})')
+ err_msg := 'Socket allready closed: ${code}'
return error(err_msg)
}
defer {
@@ -360,7 +360,7 @@ pub fn (mut ws Client) close(code int, message string) ! {
// send_control_frame sends a control frame to the server
fn (mut ws Client) send_control_frame(code OPCode, frame_typ string, payload []u8) ! {
- ws.debug_log('send control frame $code, frame_type: $frame_typ')
+ ws.debug_log('send control frame ${code}, frame_type: ${frame_typ}')
if ws.state !in [.open, .closing] && ws.conn.sock.handle > 1 {
return error('socket is not connected')
}
@@ -414,7 +414,7 @@ fn (mut ws Client) send_control_frame(code OPCode, frame_typ string, payload []u
}
}
ws.socket_write(control_frame) or {
- return error('send_control_frame: error sending $frame_typ control frame.')
+ return error('send_control_frame: error sending ${frame_typ} control frame.')
}
}
@@ -474,9 +474,9 @@ fn (mut ws Client) reset_state() ! {
// debug_log handles debug logging output for client and server
fn (mut ws Client) debug_log(text string) {
if ws.is_server {
- ws.logger.debug('server-> $text')
+ ws.logger.debug('server-> ${text}')
} else {
- ws.logger.debug('client-> $text')
+ ws.logger.debug('client-> ${text}')
}
}
diff --git a/vlib/net/websocket/websocket_server.v b/vlib/net/websocket/websocket_server.v
index 71da1b095c..e8fb444daf 100644
--- a/vlib/net/websocket/websocket_server.v
+++ b/vlib/net/websocket/websocket_server.v
@@ -61,15 +61,15 @@ pub fn (mut s Server) set_ping_interval(seconds int) {
// listen start listen and process to incoming connections from websocket clients
pub fn (mut s Server) listen() ! {
- s.logger.info('websocket server: start listen on port $s.port')
- s.ls = net.listen_tcp(s.family, ':$s.port')!
+ s.logger.info('websocket server: start listen on port ${s.port}')
+ s.ls = net.listen_tcp(s.family, ':${s.port}')!
s.set_state(.open)
spawn s.handle_ping()
for {
mut c := s.accept_new_client() or { continue }
spawn s.serve_client(mut c)
}
- s.logger.info('websocket server: end listen on port $s.port')
+ s.logger.info('websocket server: end listen on port ${s.port}')
}
// Close closes server (not implemented yet)
@@ -111,9 +111,9 @@ fn (mut s Server) handle_ping() {
// serve_client accepts incoming connection and sets up the callbacks
fn (mut s Server) serve_client(mut c Client) ! {
- c.logger.debug('server-> Start serve client ($c.id)')
+ c.logger.debug('server-> Start serve client (${c.id})')
defer {
- c.logger.debug('server-> End serve client ($c.id)')
+ c.logger.debug('server-> End serve client (${c.id})')
}
mut handshake_response, mut server_client := s.handle_server_handshake(mut c)!
accept := s.send_connect_event(mut server_client)!
diff --git a/vlib/net/websocket/websocket_test.v b/vlib/net/websocket/websocket_test.v
index 4cffba4397..b0392c9e83 100644
--- a/vlib/net/websocket/websocket_test.v
+++ b/vlib/net/websocket/websocket_test.v
@@ -26,11 +26,11 @@ fn test_ws_ipv6() {
return
}
port := 30000 + rand.intn(1024) or { 0 }
- eprintln('> port ipv6: $port')
+ eprintln('> port ipv6: ${port}')
spawn start_server(.ip6, port)
time.sleep(1500 * time.millisecond)
- ws_test(.ip6, 'ws://localhost:$port') or {
- eprintln('> error while connecting .ip6, err: $err')
+ ws_test(.ip6, 'ws://localhost:${port}') or {
+ eprintln('> error while connecting .ip6, err: ${err}')
assert false
}
}
@@ -41,11 +41,11 @@ fn test_ws_ipv4() {
return
}
port := 30000 + rand.intn(1024) or { 0 }
- eprintln('> port ipv4: $port')
+ eprintln('> port ipv4: ${port}')
spawn start_server(.ip, port)
time.sleep(1500 * time.millisecond)
- ws_test(.ip, 'ws://localhost:$port') or {
- eprintln('> error while connecting .ip, err: $err')
+ ws_test(.ip, 'ws://localhost:${port}') or {
+ eprintln('> error while connecting .ip, err: ${err}')
assert false
}
}
@@ -74,12 +74,12 @@ fn start_server(family net.AddrFamily, listen_port int) ! {
s.on_close(fn (mut ws websocket.Client, code int, reason string) ! {
// not used
})
- s.listen() or { panic('websocket server could not listen, err: $err') }
+ s.listen() or { panic('websocket server could not listen, err: ${err}') }
}
// ws_test tests connect to the websocket server from websocket client
fn ws_test(family net.AddrFamily, uri string) ! {
- eprintln('connecting to $uri ...')
+ eprintln('connecting to ${uri} ...')
mut test_results := WebsocketTestResults{}
mut ws := websocket.new_client(uri)!
@@ -88,13 +88,13 @@ fn ws_test(family net.AddrFamily, uri string) ! {
assert true
})
ws.on_error(fn (mut ws websocket.Client, err string) ! {
- println('error: $err')
+ println('error: ${err}')
// this can be thrown by internet connection problems
assert false
})
ws.on_message_ref(fn (mut ws websocket.Client, msg &websocket.Message, mut res WebsocketTestResults) ! {
- println('client got type: $msg.opcode payload:\n$msg.payload')
+ println('client got type: ${msg.opcode} payload:\n${msg.payload}')
if msg.opcode == .text_frame {
smessage := msg.payload.bytestr()
match smessage {
@@ -109,14 +109,14 @@ fn ws_test(family net.AddrFamily, uri string) ! {
}
}
} else {
- println('Binary message: $msg')
+ println('Binary message: ${msg}')
}
}, test_results)
- ws.connect() or { panic('fail to connect, err: $err') }
+ ws.connect() or { panic('fail to connect, err: ${err}') }
spawn ws.listen()
text := ['a'].repeat(2)
for msg in text {
- ws.write(msg.bytes(), .text_frame) or { panic('fail to write to websocket, err: $err') }
+ ws.write(msg.bytes(), .text_frame) or { panic('fail to write to websocket, err: ${err}') }
// sleep to give time to recieve response before send a new one
time.sleep(100 * time.millisecond)
}
diff --git a/vlib/orm/orm.v b/vlib/orm/orm.v
index 0b56d9e882..bcffc14d57 100644
--- a/vlib/orm/orm.v
+++ b/vlib/orm/orm.v
@@ -212,22 +212,22 @@ pub fn orm_stmt_gen(table string, q string, kind StmtKind, num bool, qm string,
}
data_data << data.data[i]
}
- select_fields << '$q${data.fields[i]}$q'
+ select_fields << '${q}${data.fields[i]}${q}'
values << factory_insert_qm_value(num, qm, c)
data_fields << data.fields[i]
c++
}
- str += 'INSERT INTO $q$table$q ('
+ str += 'INSERT INTO ${q}${table}${q} ('
str += select_fields.join(', ')
str += ') VALUES ('
str += values.join(', ')
str += ')'
}
.update {
- str += 'UPDATE $q$table$q SET '
+ str += 'UPDATE ${q}${table}${q} SET '
for i, field in data.fields {
- str += '$q$field$q = '
+ str += '${q}${field}${q} = '
if data.data.len > i {
d := data.data[i]
if d is InfixType {
@@ -245,15 +245,15 @@ pub fn orm_stmt_gen(table string, q string, kind StmtKind, num bool, qm string,
'/'
}
}
- str += '$d.name $op $qm'
+ str += '${d.name} ${op} ${qm}'
} else {
- str += '$qm'
+ str += '${qm}'
}
} else {
- str += '$qm'
+ str += '${qm}'
}
if num {
- str += '$c'
+ str += '${c}'
c++
}
if i < data.fields.len - 1 {
@@ -263,7 +263,7 @@ pub fn orm_stmt_gen(table string, q string, kind StmtKind, num bool, qm string,
str += ' WHERE '
}
.delete {
- str += 'DELETE FROM $q$table$q WHERE '
+ str += 'DELETE FROM ${q}${table}${q} WHERE '
}
}
if kind == .update || kind == .delete {
@@ -279,9 +279,9 @@ pub fn orm_stmt_gen(table string, q string, kind StmtKind, num bool, qm string,
if pre_par {
str += '('
}
- str += '$q$field$q ${where.kinds[i].to_str()} $qm'
+ str += '${q}${field}${q} ${where.kinds[i].to_str()} ${qm}'
if num {
- str += '$c'
+ str += '${c}'
c++
}
if post_par {
@@ -313,14 +313,14 @@ pub fn orm_select_gen(orm SelectConfig, q string, num bool, qm string, start_pos
str += 'COUNT(*)'
} else {
for i, field in orm.fields {
- str += '$q$field$q'
+ str += '${q}${field}${q}'
if i < orm.fields.len - 1 {
str += ', '
}
}
}
- str += ' FROM $q$orm.table$q'
+ str += ' FROM ${q}${orm.table}${q}'
mut c := start_pos
@@ -338,9 +338,9 @@ pub fn orm_select_gen(orm SelectConfig, q string, num bool, qm string, start_pos
if pre_par {
str += '('
}
- str += '$q$field$q ${where.kinds[i].to_str()} $qm'
+ str += '${q}${field}${q} ${where.kinds[i].to_str()} ${qm}'
if num {
- str += '$c'
+ str += '${c}'
c++
}
if post_par {
@@ -360,22 +360,22 @@ pub fn orm_select_gen(orm SelectConfig, q string, num bool, qm string, start_pos
// ordering is *slow*, especially if there are no indexes!
if orm.has_order {
str += ' ORDER BY '
- str += '$q$orm.order$q '
+ str += '${q}${orm.order}${q} '
str += orm.order_type.to_str()
}
if orm.has_limit {
- str += ' LIMIT $qm'
+ str += ' LIMIT ${qm}'
if num {
- str += '$c'
+ str += '${c}'
c++
}
}
if orm.has_offset {
- str += ' OFFSET $qm'
+ str += ' OFFSET ${qm}'
if num {
- str += '$c'
+ str += '${c}'
c++
}
}
@@ -393,10 +393,10 @@ pub fn orm_select_gen(orm SelectConfig, q string, num bool, qm string, start_pos
// sql_from_v - Function which maps type indices to sql type names
// alternative - Needed for msdb
pub fn orm_table_gen(table string, q string, defaults bool, def_unique_len int, fields []TableField, sql_from_v fn (int) !string, alternative bool) !string {
- mut str := 'CREATE TABLE IF NOT EXISTS $q$table$q ('
+ mut str := 'CREATE TABLE IF NOT EXISTS ${q}${table}${q} ('
if alternative {
- str = 'IF NOT EXISTS (SELECT * FROM sysobjects WHERE name=$q$table$q and xtype=${q}U$q) CREATE TABLE $q$table$q ('
+ str = 'IF NOT EXISTS (SELECT * FROM sysobjects WHERE name=${q}${table}${q} and xtype=${q}U${q}) CREATE TABLE ${q}${table}${q} ('
}
mut fs := []string{}
@@ -444,13 +444,13 @@ pub fn orm_table_gen(table string, q string, defaults bool, def_unique_len int,
}
'sql_type' {
if attr.kind != .string {
- return error("sql_type attribute need be string. Try [sql_type: '$attr.arg'] instead of [sql_type: $attr.arg]")
+ return error("sql_type attribute need be string. Try [sql_type: '${attr.arg}'] instead of [sql_type: ${attr.arg}]")
}
ctyp = attr.arg
}
'default' {
if attr.kind != .string {
- return error("default attribute need be string. Try [default: '$attr.arg'] instead of [default: $attr.arg]")
+ return error("default attribute need be string. Try [default: '${attr.arg}'] instead of [default: ${attr.arg}]")
}
if default_val == '' {
default_val = attr.arg
@@ -464,22 +464,22 @@ pub fn orm_table_gen(table string, q string, defaults bool, def_unique_len int,
}
mut stmt := ''
if ctyp == '' {
- return error('Unknown type ($field.typ) for field $field.name in struct $table')
+ return error('Unknown type (${field.typ}) for field ${field.name} in struct ${table}')
}
- stmt = '$q$field_name$q $ctyp'
+ stmt = '${q}${field_name}${q} ${ctyp}'
if defaults && default_val != '' {
- stmt += ' DEFAULT $default_val'
+ stmt += ' DEFAULT ${default_val}'
}
if no_null {
stmt += ' NOT NULL'
}
if is_unique {
- mut f := 'UNIQUE($q$field_name$q'
+ mut f := 'UNIQUE(${q}${field_name}${q}'
if ctyp == 'TEXT' && def_unique_len > 0 {
if unique_len > 0 {
- f += '($unique_len)'
+ f += '(${unique_len})'
} else {
- f += '($def_unique_len)'
+ f += '(${def_unique_len})'
}
}
f += ')'
@@ -488,18 +488,18 @@ pub fn orm_table_gen(table string, q string, defaults bool, def_unique_len int,
fs << stmt
}
if primary == '' {
- return error('A primary key is required for $table')
+ return error('A primary key is required for ${table}')
}
if unique.len > 0 {
for k, v in unique {
mut tmp := []string{}
for f in v {
- tmp << '$q$f$q'
+ tmp << '${q}${f}${q}'
}
- fs << '/* $k */UNIQUE(${tmp.join(', ')})'
+ fs << '/* ${k} */UNIQUE(${tmp.join(', ')})'
}
}
- fs << 'PRIMARY KEY($q$primary$q)'
+ fs << 'PRIMARY KEY(${q}${primary}${q})'
fs << unique_fields
str += fs.join(', ')
str += ');'
@@ -597,8 +597,8 @@ pub fn infix_to_primitive(b InfixType) Primitive {
fn factory_insert_qm_value(num bool, qm string, c int) string {
if num {
- return '$qm$c'
+ return '${qm}${c}'
} else {
- return '$qm'
+ return '${qm}'
}
}
diff --git a/vlib/orm/orm_fn_test.v b/vlib/orm/orm_fn_test.v
index d4021a06d3..91f7548159 100644
--- a/vlib/orm/orm_fn_test.v
+++ b/vlib/orm/orm_fn_test.v
@@ -268,6 +268,6 @@ fn sql_type_from_v(typ int) !string {
} else if typ == -1 {
'SERIAL'
} else {
- error('Unknown type $typ')
+ error('Unknown type ${typ}')
}
}
diff --git a/vlib/orm/orm_sql_or_blocks_test.v b/vlib/orm/orm_sql_or_blocks_test.v
index f0d349e0db..eed6baa43a 100644
--- a/vlib/orm/orm_sql_or_blocks_test.v
+++ b/vlib/orm/orm_sql_or_blocks_test.v
@@ -41,7 +41,7 @@ fn test_sql_or_block_for_insert() {
sql db {
insert user into User
} or {
- println('user should have been inserted, but could not, err: $err')
+ println('user should have been inserted, but could not, err: ${err}')
assert false
}
@@ -50,7 +50,7 @@ fn test_sql_or_block_for_insert() {
insert user into User
} or {
assert true
- println('user could not be inserted, err: $err')
+ println('user could not be inserted, err: ${err}')
}
eprintln('LINE: ${@LINE}')
db.close()!
@@ -63,7 +63,7 @@ fn test_sql_or_block_for_select() {
single := sql db {
select from User where id == 1
} or {
- eprintln('could not select user, err: $err')
+ eprintln('could not select user, err: ${err}')
User{0, ''}
}
eprintln('LINE: ${@LINE}')
@@ -73,7 +73,7 @@ fn test_sql_or_block_for_select() {
failed := sql db {
select from User where id == 0
} or {
- eprintln('could not select user, err: $err')
+ eprintln('could not select user, err: ${err}')
User{0, ''}
}
eprintln('LINE: ${@LINE}')
@@ -86,7 +86,7 @@ fn test_sql_or_block_for_select() {
multiple := sql db {
select from User
} or {
- eprintln('could not users, err: $err')
+ eprintln('could not users, err: ${err}')
[]User{}
}
eprintln('LINE: ${@LINE}')
diff --git a/vlib/orm/orm_test.v b/vlib/orm/orm_test.v
index 50d4f4de97..ce5c8b2f76 100644
--- a/vlib/orm/orm_test.v
+++ b/vlib/orm/orm_test.v
@@ -83,19 +83,19 @@ fn test_orm() {
select count from User
}
assert nr_all_users == 3
- println('nr_all_users=$nr_all_users')
+ println('nr_all_users=${nr_all_users}')
//
nr_users1 := sql db {
select count from User where id == 1
}
assert nr_users1 == 1
- println('nr_users1=$nr_users1')
+ println('nr_users1=${nr_users1}')
//
nr_peters := sql db {
select count from User where id == 2 && name == 'Peter'
}
assert nr_peters == 1
- println('nr_peters=$nr_peters')
+ println('nr_peters=${nr_peters}')
//
nr_peters2 := sql db {
select count from User where id == 2 && name == name
diff --git a/vlib/os/dir_expansions_test.v b/vlib/os/dir_expansions_test.v
index eb6234b97a..878e703ac4 100644
--- a/vlib/os/dir_expansions_test.v
+++ b/vlib/os/dir_expansions_test.v
@@ -16,7 +16,7 @@ fn test_tmpdir() {
fn test_ensure_folder_is_writable() {
tmp := os.temp_dir()
os.ensure_folder_is_writable(tmp) or {
- eprintln('err: $err')
+ eprintln('err: ${err}')
assert false
}
}
@@ -35,7 +35,7 @@ fn test_expand_tilde_to_home() {
fn test_config_dir() {
cdir := os.config_dir()!
assert cdir.len > 0
- adir := '$cdir/test-v-config'
+ adir := '${cdir}/test-v-config'
os.mkdir_all(adir)!
os.rmdir(adir)!
assert os.is_dir(cdir)
diff --git a/vlib/os/environment.c.v b/vlib/os/environment.c.v
index f4889b33d4..a003f2ef92 100644
--- a/vlib/os/environment.c.v
+++ b/vlib/os/environment.c.v
@@ -45,7 +45,7 @@ pub fn getenv_opt(key string) ?string {
// os.setenv sets the value of an environment variable with `name` to `value`.
pub fn setenv(name string, value string, overwrite bool) int {
$if windows {
- format := '$name=$value'
+ format := '${name}=${value}'
if overwrite {
unsafe {
return C._putenv(&char(format.str))
@@ -68,7 +68,7 @@ pub fn setenv(name string, value string, overwrite bool) int {
// os.unsetenv clears an environment variable with `name`.
pub fn unsetenv(name string) int {
$if windows {
- format := '$name='
+ format := '${name}='
return C._putenv(&char(format.str))
} $else {
return C.unsetenv(&char(name.str))
diff --git a/vlib/os/environment_test.v b/vlib/os/environment_test.v
index 0c454bd14e..ade85ce3b7 100644
--- a/vlib/os/environment_test.v
+++ b/vlib/os/environment_test.v
@@ -43,12 +43,12 @@ fn test_environ() {
fn test_setenv_var_not_exists() {
key := time.new_time(time.now()).unix
- os.setenv('foo$key', 'bar', false)
- assert os.getenv('foo$key') == 'bar'
+ os.setenv('foo${key}', 'bar', false)
+ assert os.getenv('foo${key}') == 'bar'
}
fn test_getenv_empty_var() {
key := time.new_time(time.now()).unix
- os.setenv('empty$key', '""', false)
- assert os.getenv('empty$key') == '""'
+ os.setenv('empty${key}', '""', false)
+ assert os.getenv('empty${key}') == '""'
}
diff --git a/vlib/os/file.c.v b/vlib/os/file.c.v
index 8d234c8445..b7b789eb17 100644
--- a/vlib/os/file.c.v
+++ b/vlib/os/file.c.v
@@ -108,7 +108,7 @@ pub fn open_file(path string, mode string, options ...int) !File {
fdopen_mode := mode.replace('b', '')
cfile := C.fdopen(fd, &char(fdopen_mode.str))
if isnil(cfile) {
- return error('Failed to open or create file "$path"')
+ return error('Failed to open or create file "${path}"')
}
if seek_to_end {
// ensure appending will work, even on bsd/macos systems:
@@ -227,7 +227,7 @@ pub fn (mut f File) reopen(path string, mode string) ! {
cfile = C.freopen(&char(p.str), &char(mode.str), f.cfile)
}
if isnil(cfile) {
- return error('Failed to reopen file "$path"')
+ return error('Failed to reopen file "${path}"')
}
f.cfile = cfile
}
diff --git a/vlib/os/filelock/lib_nix.c.v b/vlib/os/filelock/lib_nix.c.v
index 99af905ccf..e8c95e095a 100644
--- a/vlib/os/filelock/lib_nix.c.v
+++ b/vlib/os/filelock/lib_nix.c.v
@@ -23,7 +23,7 @@ pub fn (mut l FileLock) acquire() ! {
}
fd := open_lockfile(l.name)
if fd == -1 {
- return error_with_code('cannot create lock file $l.name', -1)
+ return error_with_code('cannot create lock file ${l.name}', -1)
}
if C.flock(fd, C.LOCK_EX) == -1 {
C.close(fd)
@@ -66,7 +66,7 @@ pub fn (mut l FileLock) try_acquire() bool {
if l.fd != -1 {
return true
}
- fd := open_lockfile('$l.name')
+ fd := open_lockfile('${l.name}')
if fd != -1 {
err := C.flock(fd, C.LOCK_EX | C.LOCK_NB)
if err == -1 {
diff --git a/vlib/os/filelock/lib_windows.c.v b/vlib/os/filelock/lib_windows.c.v
index c81867c532..1ff1c38dbb 100644
--- a/vlib/os/filelock/lib_windows.c.v
+++ b/vlib/os/filelock/lib_windows.c.v
@@ -21,7 +21,7 @@ pub fn (mut l FileLock) acquire() ! {
}
fd := open(l.name)
if fd == -1 {
- return error_with_code('cannot create lock file $l.name', -1)
+ return error_with_code('cannot create lock file ${l.name}', -1)
}
l.fd = fd
}
diff --git a/vlib/os/filepath_test.v b/vlib/os/filepath_test.v
index 52b031a337..1769739de8 100644
--- a/vlib/os/filepath_test.v
+++ b/vlib/os/filepath_test.v
@@ -154,9 +154,9 @@ fn test_existing_path() {
assert existing_path('.') or { '' } == '.'
assert existing_path(wd) or { '' } == wd
assert existing_path('\\') or { '' } == '\\'
- assert existing_path('$wd\\.\\\\does/not/exist\\.\\') or { '' } == '$wd\\.\\\\'
- assert existing_path('$wd\\\\/\\.\\.\\/.') or { '' } == '$wd\\\\/\\.\\.\\/.'
- assert existing_path('$wd\\././/\\/oh') or { '' } == '$wd\\././/\\/'
+ assert existing_path('${wd}\\.\\\\does/not/exist\\.\\') or { '' } == '${wd}\\.\\\\'
+ assert existing_path('${wd}\\\\/\\.\\.\\/.') or { '' } == '${wd}\\\\/\\.\\.\\/.'
+ assert existing_path('${wd}\\././/\\/oh') or { '' } == '${wd}\\././/\\/'
return
}
assert existing_path('') or { '' } == ''
@@ -164,9 +164,9 @@ fn test_existing_path() {
assert existing_path('.') or { '' } == '.'
assert existing_path(wd) or { '' } == wd
assert existing_path('/') or { '' } == '/'
- assert existing_path('$wd/does/.///not/exist///.//') or { '' } == '$wd/'
- assert existing_path('$wd//././/.//') or { '' } == '$wd//././/.//'
- assert existing_path('$wd//././/.//oh') or { '' } == '$wd//././/.//'
+ assert existing_path('${wd}/does/.///not/exist///.//') or { '' } == '${wd}/'
+ assert existing_path('${wd}//././/.//') or { '' } == '${wd}//././/.//'
+ assert existing_path('${wd}//././/.//oh') or { '' } == '${wd}//././/.//'
}
fn test_windows_volume() {
diff --git a/vlib/os/find_abs_path_of_executable_test.v b/vlib/os/find_abs_path_of_executable_test.v
index 334b0fc543..14bf6c899a 100644
--- a/vlib/os/find_abs_path_of_executable_test.v
+++ b/vlib/os/find_abs_path_of_executable_test.v
@@ -41,7 +41,7 @@ fn test_find_abs_path_of_executable() {
os.setenv('PATH', original_path, true)
os.chdir(os.home_dir())! // change to a *completely* different folder, to avoid the original PATH containing `.`
if x := os.find_abs_path_of_executable('myclang') {
- eprintln('> find_abs_path_of_executable should have failed, but instead it found: $x')
+ eprintln('> find_abs_path_of_executable should have failed, but instead it found: ${x}')
assert false
}
}
diff --git a/vlib/os/font/font.v b/vlib/os/font/font.v
index cd0345afee..6f5bf1d064 100644
--- a/vlib/os/font/font.v
+++ b/vlib/os/font/font.v
@@ -36,7 +36,7 @@ pub fn default() string {
'/Library/Fonts/Arial.ttf']
for font in fonts {
if os.is_file(font) {
- debug_font_println('Using font "$font"')
+ debug_font_println('Using font "${font}"')
return font
}
}
@@ -58,7 +58,7 @@ pub fn default() string {
for location in font_locations {
candidate_path := os.join_path(location, candidate_font)
if os.is_file(candidate_path) && os.is_readable(candidate_path) {
- debug_font_println('Using font "$candidate_path"')
+ debug_font_println('Using font "${candidate_path}"')
return candidate_path
}
}
@@ -73,7 +73,7 @@ pub fn default() string {
lines := fm.output.split('\n')
for l in lines {
if !l.contains('.ttc') {
- debug_font_println('Using font "$l"')
+ debug_font_println('Using font "${l}"')
return l
}
}
diff --git a/vlib/os/inode_test.v b/vlib/os/inode_test.v
index 4bb1093b79..ab44d53731 100644
--- a/vlib/os/inode_test.v
+++ b/vlib/os/inode_test.v
@@ -8,7 +8,7 @@ const (
)
fn testsuite_begin() {
- eprintln('testsuite_begin, tfolder = $tfolder')
+ eprintln('testsuite_begin, tfolder = ${tfolder}')
os.rmdir_all(tfolder) or {}
assert !os.is_dir(tfolder)
os.mkdir_all(tfolder) or { panic(err) }
diff --git a/vlib/os/notify/notify_test.v b/vlib/os/notify/notify_test.v
index 1c945968e6..34205397c4 100644
--- a/vlib/os/notify/notify_test.v
+++ b/vlib/os/notify/notify_test.v
@@ -8,7 +8,7 @@ fn make_pipe() !(int, int) {
$if linux {
pipefd := [2]int{}
if C.pipe(&pipefd[0]) != 0 {
- return error('error $C.errno: ' + os.posix_get_error_msg(C.errno))
+ return error('error ${C.errno}: ' + os.posix_get_error_msg(C.errno))
}
return pipefd[0], pipefd[1]
}
diff --git a/vlib/os/open_uri_default.c.v b/vlib/os/open_uri_default.c.v
index 70d25fcd2e..1a51723987 100644
--- a/vlib/os/open_uri_default.c.v
+++ b/vlib/os/open_uri_default.c.v
@@ -23,8 +23,8 @@ pub fn open_uri(uri string) ! {
if vopen_uri_cmd == '' {
return error('unsupported platform')
}
- result := execute('$vopen_uri_cmd "$uri"')
+ result := execute('${vopen_uri_cmd} "${uri}"')
if result.exit_code != 0 {
- return error('unable to open url: $result.output')
+ return error('unable to open url: ${result.output}')
}
}
diff --git a/vlib/os/open_uri_windows.c.v b/vlib/os/open_uri_windows.c.v
index 2f3796b729..f786ba5ce1 100644
--- a/vlib/os/open_uri_windows.c.v
+++ b/vlib/os/open_uri_windows.c.v
@@ -7,9 +7,9 @@ type ShellExecuteWin = fn (voidptr, &u16, &u16, &u16, &u16, int)
pub fn open_uri(uri string) ! {
mut vopen_uri_cmd := getenv('VOPEN_URI_CMD')
if vopen_uri_cmd != '' {
- result := execute('$vopen_uri_cmd "$uri"')
+ result := execute('${vopen_uri_cmd} "${uri}"')
if result.exit_code != 0 {
- return error('unable to open url: $result.output')
+ return error('unable to open url: ${result.output}')
}
return
}
diff --git a/vlib/os/os.c.v b/vlib/os/os.c.v
index a6d3bc3b47..28d16c891b 100644
--- a/vlib/os/os.c.v
+++ b/vlib/os/os.c.v
@@ -71,7 +71,7 @@ fn find_cfile_size(fp &C.FILE) !int {
len := int(raw_fsize)
// For files > 2GB, C.ftell can return values that, when cast to `int`, can result in values below 0.
if i64(len) < raw_fsize {
- return error('int($raw_fsize) cast results in $len')
+ return error('int(${raw_fsize}) cast results in ${len}')
}
C.rewind(fp)
return len
@@ -222,12 +222,12 @@ pub fn mv(src string, dst string) ! {
w_dst := rdst.replace('/', '\\')
ret := C._wrename(w_src.to_wide(), w_dst.to_wide())
if ret != 0 {
- return error_with_code('failed to rename $src to $dst', int(ret))
+ return error_with_code('failed to rename ${src} to ${dst}', int(ret))
}
} $else {
ret := C.rename(&char(src.str), &char(rdst.str))
if ret != 0 {
- return error_with_code('failed to rename $src to $dst', ret)
+ return error_with_code('failed to rename ${src} to ${dst}', ret)
}
}
}
@@ -239,17 +239,17 @@ pub fn cp(src string, dst string) ! {
w_dst := dst.replace('/', '\\')
if C.CopyFile(w_src.to_wide(), w_dst.to_wide(), false) == 0 {
result := C.GetLastError()
- return error_with_code('failed to copy $src to $dst', int(result))
+ return error_with_code('failed to copy ${src} to ${dst}', int(result))
}
} $else {
fp_from := C.open(&char(src.str), C.O_RDONLY, 0)
if fp_from < 0 { // Check if file opened
- return error_with_code('cp: failed to open $src', int(fp_from))
+ return error_with_code('cp: failed to open ${src}', int(fp_from))
}
fp_to := C.open(&char(dst.str), C.O_WRONLY | C.O_CREAT | C.O_TRUNC, C.S_IWUSR | C.S_IRUSR)
if fp_to < 0 { // Check if file opened (permissions problems ...)
C.close(fp_from)
- return error_with_code('cp (permission): failed to write to $dst (fp_to: $fp_to)',
+ return error_with_code('cp (permission): failed to write to ${dst} (fp_to: ${fp_to})',
int(fp_to))
}
// TODO use defer{} to close files in case of error or return.
@@ -264,7 +264,7 @@ pub fn cp(src string, dst string) ! {
if C.write(fp_to, &buf[0], count) < 0 {
C.close(fp_to)
C.close(fp_from)
- return error_with_code('cp: failed to write to $dst', int(-1))
+ return error_with_code('cp: failed to write to ${dst}', int(-1))
}
}
from_attr := C.stat{}
@@ -274,7 +274,7 @@ pub fn cp(src string, dst string) ! {
if C.chmod(&char(dst.str), from_attr.st_mode) < 0 {
C.close(fp_to)
C.close(fp_from)
- return error_with_code('failed to set permissions for $dst', int(-1))
+ return error_with_code('failed to set permissions for ${dst}', int(-1))
}
C.close(fp_to)
C.close(fp_from)
@@ -295,7 +295,7 @@ pub fn vfopen(path string, mode string) !&C.FILE {
fp = C.fopen(&char(path.str), &char(mode.str))
}
if isnil(fp) {
- return error('failed to open file "$path"')
+ return error('failed to open file "${path}"')
} else {
return fp
}
@@ -373,7 +373,7 @@ pub fn system(cmd string) int {
mut ret := 0
$if windows {
// overcome bug in system & _wsystem (cmd) when first char is quote `"`
- wcmd := if cmd.len > 1 && cmd[0] == `"` && cmd[1] != `"` { '"$cmd"' } else { cmd }
+ wcmd := if cmd.len > 1 && cmd[0] == `"` && cmd[1] != `"` { '"${cmd}"' } else { cmd }
unsafe {
ret = C._wsystem(wcmd.to_wide())
}
@@ -487,7 +487,7 @@ pub fn rm(path string) ! {
rc = C.remove(&char(path.str))
}
if rc == -1 {
- return error('Failed to remove "$path": ' + posix_get_error_msg(C.errno))
+ return error('Failed to remove "${path}": ' + posix_get_error_msg(C.errno))
}
// C.unlink(path.cstr())
}
@@ -498,7 +498,7 @@ pub fn rmdir(path string) ! {
rc := C.RemoveDirectory(path.to_wide())
if !rc {
// https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-removedirectorya - 0 == false, is failure
- return error('Failed to remove "$path": ' + posix_get_error_msg(C.errno))
+ return error('Failed to remove "${path}": ' + posix_get_error_msg(C.errno))
}
} $else {
rc := C.rmdir(&char(path.str))
@@ -512,7 +512,7 @@ pub fn rmdir(path string) ! {
fn print_c_errno() {
e := C.errno
se := unsafe { tos_clone(&u8(C.strerror(e))) }
- println('errno=$e err=$se')
+ println('errno=${e} err=${se}')
}
// get_raw_line returns a one-line string from stdin along with '\n' if there is any.
@@ -612,7 +612,7 @@ pub fn read_file_array(path string) []T {
// On some systems C.ftell can return values in the 64-bit range
// that, when cast to `int`, can result in values below 0.
if i64(allocate) < fsize {
- panic('$fsize cast to int results in ${int(fsize)})')
+ panic('${fsize} cast to int results in ${int(fsize)})')
}
buf := unsafe {
malloc_noscan(allocate)
@@ -672,7 +672,7 @@ pub fn executable() string {
pid := C.getpid()
ret := proc_pidpath(pid, &result[0], max_path_len)
if ret <= 0 {
- eprintln('os.executable() failed at calling proc_pidpath with pid: $pid . proc_pidpath returned $ret ')
+ eprintln('os.executable() failed at calling proc_pidpath with pid: ${pid} . proc_pidpath returned ${ret} ')
return executable_fallback()
}
res := unsafe { tos_clone(&result[0]) }
@@ -985,7 +985,7 @@ pub fn open_append(path string) !File {
}
}
if isnil(file.cfile) {
- return error('failed to create(append) file "$path"')
+ return error('failed to create(append) file "${path}"')
}
file.is_opened = true
return file
diff --git a/vlib/os/os.v b/vlib/os/os.v
index d24f524181..7bacfe0800 100644
--- a/vlib/os/os.v
+++ b/vlib/os/os.v
@@ -396,7 +396,7 @@ pub fn user_names() ![]string {
$if windows {
result := execute('wmic useraccount get name')
if result.exit_code != 0 {
- return error('Failed to get user names. Exited with code $result.exit_code: $result.output')
+ return error('Failed to get user names. Exited with code ${result.exit_code}: ${result.output}')
}
mut users := result.output.split_into_lines()
// windows command prints an empty line at the end of output
@@ -662,7 +662,7 @@ pub fn mkdir_all(opath string, params MkdirParams) ! {
if exists(p) && is_dir(p) {
continue
}
- mkdir(p, params) or { return error('folder: $p, error: $err') }
+ mkdir(p, params) or { return error('folder: ${p}, error: ${err}') }
}
}
@@ -727,7 +727,7 @@ pub fn vtmp_dir() string {
return vtmp
}
uid := getuid()
- vtmp = join_path_single(temp_dir(), 'v_$uid')
+ vtmp = join_path_single(temp_dir(), 'v_${uid}')
if !exists(vtmp) || !is_dir(vtmp) {
// create a new directory, that is private to the user:
mkdir_all(vtmp, mode: 0o700) or { panic(err) }
@@ -817,8 +817,8 @@ pub mut:
pub fn execute_or_panic(cmd string) Result {
res := execute(cmd)
if res.exit_code != 0 {
- eprintln('failed cmd: $cmd')
- eprintln('failed code: $res.exit_code')
+ eprintln('failed cmd: ${cmd}')
+ eprintln('failed code: ${res.exit_code}')
panic(res.output)
}
return res
@@ -827,8 +827,8 @@ pub fn execute_or_panic(cmd string) Result {
pub fn execute_or_exit(cmd string) Result {
res := execute(cmd)
if res.exit_code != 0 {
- eprintln('failed cmd: $cmd')
- eprintln('failed code: $res.exit_code')
+ eprintln('failed cmd: ${cmd}')
+ eprintln('failed code: ${res.exit_code}')
eprintln(res.output)
exit(1)
}
@@ -838,9 +838,13 @@ pub fn execute_or_exit(cmd string) Result {
// quoted path - return a quoted version of the path, depending on the platform.
pub fn quoted_path(path string) string {
$if windows {
- return if path.ends_with(path_separator) { '"${path + path_separator}"' } else { '"$path"' }
+ return if path.ends_with(path_separator) {
+ '"${path + path_separator}"'
+ } else {
+ '"${path}"'
+ }
} $else {
- return "'$path'"
+ return "'${path}'"
}
}
diff --git a/vlib/os/os_android_outside_termux.c.v b/vlib/os/os_android_outside_termux.c.v
index d66d28a3ff..c4bd3cf174 100644
--- a/vlib/os/os_android_outside_termux.c.v
+++ b/vlib/os/os_android_outside_termux.c.v
@@ -40,7 +40,7 @@ fn C.AAssetManager_open(&C.AAssetManager, &char, int) &C.AAsset
pub fn (am &AssetManager) open(filename string, mode AssetMode) !&Asset {
asset := C.AAssetManager_open(am, filename.str, int(mode))
if isnil(asset) {
- return error('file `$filename` not found')
+ return error('file `${filename}` not found')
}
return asset
}
diff --git a/vlib/os/os_js.js.v b/vlib/os/os_js.js.v
index 43ec29034e..f4f91df0de 100644
--- a/vlib/os/os_js.js.v
+++ b/vlib/os/os_js.js.v
@@ -57,7 +57,7 @@ pub fn exists(path string) bool {
pub fn ls(path string) ![]string {
if !is_dir(path) {
- return error('ls(): cannot open dir $dir')
+ return error('ls(): cannot open dir ${path}')
}
result := []string{}
diff --git a/vlib/os/os_nix.c.v b/vlib/os/os_nix.c.v
index 9d7333f9e4..47e55c621b 100644
--- a/vlib/os/os_nix.c.v
+++ b/vlib/os/os_nix.c.v
@@ -87,8 +87,8 @@ fn glob_match(dir string, pattern string, next_pattern string, mut matches []str
mode = GlobMatch.any
if next_pattern != pattern && next_pattern != '' {
for file in files {
- if is_dir('$dir/$file') {
- subdirs << '$dir/$file'
+ if is_dir('${dir}/${file}') {
+ subdirs << '${dir}/${file}'
}
}
return subdirs
@@ -115,7 +115,7 @@ fn glob_match(dir string, pattern string, next_pattern string, mut matches []str
pathwalk := file.split(os.path_separator)
pathwalk[pathwalk.len - 1]
} else {
- fpath = if dir == '.' { file } else { '$dir/$file' }
+ fpath = if dir == '.' { file } else { '${dir}/${file}' }
file
}
if f in ['.', '..'] || f == '' {
@@ -146,7 +146,7 @@ fn glob_match(dir string, pattern string, next_pattern string, mut matches []str
if is_dir(fpath) {
subdirs << fpath
if next_pattern == pattern && next_pattern != '' {
- matches << '$fpath$os.path_separator'
+ matches << '${fpath}${os.path_separator}'
}
} else {
matches << fpath
@@ -166,14 +166,14 @@ fn native_glob_pattern(pattern string, mut matches []string) ! {
if step == '' {
continue
}
- if is_dir('$cwd$os.path_separator$step') {
+ if is_dir('${cwd}${os.path_separator}${step}') {
dd := if cwd == '/' {
step
} else {
if cwd == '.' || cwd == '' {
step
} else {
- if step == '.' || step == '/' { cwd } else { '$cwd/$step' }
+ if step == '.' || step == '/' { cwd } else { '${cwd}/${step}' }
}
}
if i + 1 != steps.len {
@@ -190,7 +190,7 @@ fn native_glob_pattern(pattern string, mut matches []string) ! {
if cwd == '.' || cwd == '' {
sd
} else {
- if sd == '.' || sd == '/' { cwd } else { '$cwd/$sd' }
+ if sd == '.' || sd == '/' { cwd } else { '${cwd}/${sd}' }
}
}
subs << glob_match(d.replace('//', '/'), step, step2, mut matches)
@@ -259,7 +259,7 @@ pub fn ls(path string) ![]string {
mut res := []string{cap: 50}
dir := unsafe { C.opendir(&char(path.str)) }
if isnil(dir) {
- return error('ls() couldnt open dir "$path"')
+ return error('ls() couldnt open dir "${path}"')
}
mut ent := &C.dirent(0)
// mut ent := &C.dirent{!}
@@ -299,7 +299,7 @@ pub fn execute(cmd string) Result {
// if cmd.contains(';') || cmd.contains('&&') || cmd.contains('||') || cmd.contains('\n') {
// return Result{ exit_code: -1, output: ';, &&, || and \\n are not allowed in shell commands' }
// }
- pcmd := if cmd.contains('2>') { cmd.clone() } else { '$cmd 2>&1' }
+ pcmd := if cmd.contains('2>') { cmd.clone() } else { '${cmd} 2>&1' }
defer {
unsafe { pcmd.free() }
}
@@ -307,7 +307,7 @@ pub fn execute(cmd string) Result {
if isnil(f) {
return Result{
exit_code: -1
- output: 'exec("$cmd") failed'
+ output: 'exec("${cmd}") failed'
}
}
fd := fileno(f)
@@ -351,7 +351,7 @@ pub fn (mut c Command) start() ! {
}
c.f = vpopen(pcmd)
if isnil(c.f) {
- return error('exec("$c.path") failed')
+ return error('exec("${c.path}") failed')
}
}
@@ -437,10 +437,10 @@ fn C.mkstemp(stemplate &u8) int
[manualfree]
pub fn ensure_folder_is_writable(folder string) ! {
if !exists(folder) {
- return error_with_code('`$folder` does not exist', 1)
+ return error_with_code('`${folder}` does not exist', 1)
}
if !is_dir(folder) {
- return error_with_code('`$folder` is not a folder', 2)
+ return error_with_code('`${folder}` is not a folder', 2)
}
tmp_perm_check := join_path_single(folder, 'XXXXXX')
defer {
@@ -449,7 +449,7 @@ pub fn ensure_folder_is_writable(folder string) ! {
unsafe {
x := C.mkstemp(&char(tmp_perm_check.str))
if -1 == x {
- return error_with_code('folder `$folder` is not writable', 3)
+ return error_with_code('folder `${folder}` is not writable', 3)
}
C.close(x)
}
diff --git a/vlib/os/os_test.v b/vlib/os/os_test.v
index a37bec6214..ea1642f9f6 100644
--- a/vlib/os/os_test.v
+++ b/vlib/os/os_test.v
@@ -13,7 +13,7 @@ const (
const args_at_start = os.args.clone()
fn testsuite_begin() {
- eprintln('testsuite_begin, tfolder = $tfolder')
+ eprintln('testsuite_begin, tfolder = ${tfolder}')
os.rmdir_all(tfolder) or {}
assert !os.is_dir(tfolder)
os.mkdir_all(tfolder) or { panic(err) }
@@ -42,7 +42,7 @@ fn test_open_file() {
file.write_string(hello) or { panic(err) }
file.close()
assert u64(hello.len) == os.file_size(filename)
- read_hello := os.read_file(filename) or { panic('error reading file $filename') }
+ read_hello := os.read_file(filename) or { panic('error reading file ${filename}') }
assert hello == read_hello
os.rm(filename) or { panic(err) }
}
@@ -82,7 +82,7 @@ fn test_open_file_binary() {
unsafe { file.write_ptr(bytes.data, bytes.len) }
file.close()
assert u64(hello.len) == os.file_size(filename)
- read_hello := os.read_bytes(filename) or { panic('error reading file $filename') }
+ read_hello := os.read_bytes(filename) or { panic('error reading file ${filename}') }
assert bytes == read_hello
os.rm(filename) or { panic(err) }
}
@@ -162,7 +162,7 @@ fn test_write_and_read_string_to_file() {
hello := 'hello world!'
os.write_file(filename, hello) or { panic(err) }
assert u64(hello.len) == os.file_size(filename)
- read_hello := os.read_file(filename) or { panic('error reading file $filename') }
+ read_hello := os.read_file(filename) or { panic('error reading file ${filename}') }
assert hello == read_hello
os.rm(filename) or { panic(err) }
}
@@ -173,7 +173,7 @@ fn test_write_and_read_bytes() {
file_name := './byte_reader_writer.tst'
payload := [u8(`I`), `D`, `D`, `Q`, `D`]
mut file_write := os.create(os.real_path(file_name)) or {
- eprintln('failed to create file $file_name')
+ eprintln('failed to create file ${file_name}')
return
}
// We use the standard write_bytes function to write the payload and
@@ -182,7 +182,7 @@ fn test_write_and_read_bytes() {
file_write.close()
assert u64(payload.len) == os.file_size(file_name)
mut file_read := os.open(os.real_path(file_name)) or {
- eprintln('failed to open file $file_name')
+ eprintln('failed to open file ${file_name}')
return
}
// We only need to test read_bytes because this function calls
@@ -323,7 +323,7 @@ fn test_cp() {
old_file_name := 'cp_example.txt'
new_file_name := 'cp_new_example.txt'
os.write_file(old_file_name, 'Test data 1 2 3, V is awesome #$%^[]!~⭐') or { panic(err) }
- os.cp(old_file_name, new_file_name) or { panic('$err') }
+ os.cp(old_file_name, new_file_name) or { panic('${err}') }
old_file := os.read_file(old_file_name) or { panic(err) }
new_file := os.read_file(new_file_name) or { panic(err) }
assert old_file == new_file
diff --git a/vlib/os/os_windows.c.v b/vlib/os/os_windows.c.v
index c1adb9fe5b..8dfbd5aa62 100644
--- a/vlib/os/os_windows.c.v
+++ b/vlib/os/os_windows.c.v
@@ -167,11 +167,11 @@ pub fn ls(path string) ![]string {
// }
// C.FindClose(h_find_dir)
if !is_dir(path) {
- return error('ls() couldnt open dir "$path": directory does not exist')
+ return error('ls() couldnt open dir "${path}": directory does not exist')
}
// NOTE: Should eventually have path struct & os dependant path seperator (eg os.PATH_SEPERATOR)
// we need to add files to path eg. c:\windows\*.dll or :\windows\*
- path_files := '$path\\*'
+ path_files := '${path}\\*'
// NOTE:TODO: once we have a way to convert utf16 wide character to utf8
// we should use FindFirstFileW and FindNextFileW
h_find_files := C.FindFirstFile(path_files.to_wide(), voidptr(&find_file_data))
@@ -196,7 +196,7 @@ pub fn mkdir(path string, params MkdirParams) ! {
}
apath := real_path(path)
if !C.CreateDirectory(apath.to_wide(), 0) {
- return error('mkdir failed for "$apath", because CreateDirectory returned: ' +
+ return error('mkdir failed for "${apath}", because CreateDirectory returned: ' +
get_error_msg(int(C.GetLastError())))
}
}
@@ -311,7 +311,7 @@ pub fn raw_execute(cmd string) Result {
error_msg := get_error_msg(error_num)
return Result{
exit_code: error_num
- output: 'exec failed (CreatePipe): $error_msg'
+ output: 'exec failed (CreatePipe): ${error_msg}'
}
}
set_handle_info_ok := C.SetHandleInformation(child_stdout_read, C.HANDLE_FLAG_INHERIT,
@@ -321,7 +321,7 @@ pub fn raw_execute(cmd string) Result {
error_msg := get_error_msg(error_num)
return Result{
exit_code: error_num
- output: 'exec failed (SetHandleInformation): $error_msg'
+ output: 'exec failed (SetHandleInformation): ${error_msg}'
}
}
proc_info := ProcessInformation{}
@@ -345,7 +345,7 @@ pub fn raw_execute(cmd string) Result {
error_msg := get_error_msg(error_num)
return Result{
exit_code: error_num
- output: 'exec failed (CreateProcess) with code $error_num: $error_msg cmd: $cmd'
+ output: 'exec failed (CreateProcess) with code ${error_num}: ${error_msg} cmd: ${cmd}'
}
}
C.CloseHandle(child_stdin)
@@ -496,7 +496,7 @@ pub fn loginname() string {
// by creating an empty file in it, then deleting it.
pub fn ensure_folder_is_writable(folder string) ! {
if !exists(folder) {
- return error_with_code('`$folder` does not exist', 1)
+ return error_with_code('`${folder}` does not exist', 1)
}
if !is_dir(folder) {
return error_with_code('`folder` is not a folder', 2)
@@ -504,7 +504,7 @@ pub fn ensure_folder_is_writable(folder string) ! {
tmp_folder_name := 'tmp_perm_check_pid_' + getpid().str()
tmp_perm_check := join_path_single(folder, tmp_folder_name)
write_file(tmp_perm_check, 'test') or {
- return error_with_code('cannot write to folder "$folder": $err', 3)
+ return error_with_code('cannot write to folder "${folder}": ${err}', 3)
}
rm(tmp_perm_check)!
}
diff --git a/vlib/os/process.c.v b/vlib/os/process.c.v
index 293f118375..1497840805 100644
--- a/vlib/os/process.c.v
+++ b/vlib/os/process.c.v
@@ -93,7 +93,7 @@ fn (mut p Process) _spawn() int {
p.env = []string{}
current_environment := environ()
for k, v in current_environment {
- p.env << '$k=$v'
+ p.env << '${k}=${v}'
}
}
mut pid := 0
@@ -177,7 +177,7 @@ pub fn (mut p Process) stderr_read() string {
// _check_redirection_call - should be called just by stdxxx methods
fn (mut p Process) _check_redirection_call(fn_name string) {
if !p.use_stdio_ctl {
- panic('Call p.set_redirect_stdio() before calling p.$fn_name')
+ panic('Call p.set_redirect_stdio() before calling p.${fn_name}')
}
if p.status == .not_started {
panic('Call p.${fn_name}() after you have called p.run()')
diff --git a/vlib/os/process.js.v b/vlib/os/process.js.v
index d1b8338d54..24f7d6833f 100644
--- a/vlib/os/process.js.v
+++ b/vlib/os/process.js.v
@@ -114,7 +114,7 @@ pub fn (mut p Process) stdout_slurp() string {
// _check_redirection_call - should be called just by stdxxx methods
fn (mut p Process) check_redirection_call(fn_name string) {
if !p.use_stdio_ctl {
- panic('Call p.set_redirect_stdio() before calling p.$fn_name')
+ panic('Call p.set_redirect_stdio() before calling p.${fn_name}')
}
if p.status == .not_started {
panic('Call p.${fn_name}() after you have called p.run()')
diff --git a/vlib/os/process.v b/vlib/os/process.v
index 9ec776abd5..d4a6e7292d 100644
--- a/vlib/os/process.v
+++ b/vlib/os/process.v
@@ -64,7 +64,7 @@ pub fn (mut p Process) set_environment(envs map[string]string) {
p.env_is_custom = true
p.env = []string{}
for k, v in envs {
- p.env << '$k=$v'
+ p.env << '${k}=${v}'
}
return
}
diff --git a/vlib/os/process_test.v b/vlib/os/process_test.v
index 7c45b75f21..7a066f8d9b 100644
--- a/vlib/os/process_test.v
+++ b/vlib/os/process_test.v
@@ -32,7 +32,7 @@ fn testsuite_end() {
fn test_getpid() {
pid := os.getpid()
- eprintln('current pid: $pid')
+ eprintln('current pid: ${pid}')
assert pid != 0
}
@@ -49,7 +49,7 @@ fn test_run() {
break
}
$if trace_process_output ? {
- os.system('ps -opid= -oppid= -ouser= -onice= -of= -ovsz= -orss= -otime= -oargs= -p $p.pid')
+ os.system('ps -opid= -oppid= -ouser= -onice= -of= -ovsz= -orss= -otime= -oargs= -p ${p.pid}')
}
time.sleep(50 * time.millisecond)
i++
@@ -58,7 +58,7 @@ fn test_run() {
assert p.code == 0
assert p.status == .exited
//
- eprintln('polling iterations: $i')
+ eprintln('polling iterations: ${i}')
assert i < 50
p.close()
}
@@ -86,8 +86,8 @@ fn test_slurping_output() {
p.close()
$if trace_process_output ? {
eprintln('---------------------------')
- eprintln('p output: "$output"')
- eprintln('p errors: "$errors"')
+ eprintln('p output: "${output}"')
+ eprintln('p errors: "${errors}"')
eprintln('---------------------------')
}
// dump(output)
diff --git a/vlib/os/process_windows.c.v b/vlib/os/process_windows.c.v
index 34f12b8ed5..85cb66af76 100644
--- a/vlib/os/process_windows.c.v
+++ b/vlib/os/process_windows.c.v
@@ -25,7 +25,7 @@ fn failed_cfn_report_error(ok bool, label string) {
}
error_num := int(C.GetLastError())
error_msg := get_error_msg(error_num)
- eprintln('failed $label: $error_msg')
+ eprintln('failed ${label}: ${error_msg}')
exit(1)
}
@@ -94,7 +94,7 @@ fn (mut p Process) win_spawn_process() int {
start_info.h_std_error = wdata.child_stderr_write
start_info.dw_flags = u32(C.STARTF_USESTDHANDLES)
}
- cmd := '$p.filename ' + p.args.join(' ')
+ cmd := '${p.filename} ' + p.args.join(' ')
C.ExpandEnvironmentStringsW(cmd.to_wide(), voidptr(&wdata.command_line[0]), 32768)
mut creation_flags := int(C.NORMAL_PRIORITY_CLASS)
@@ -171,7 +171,7 @@ fn (mut p Process) win_is_alive() bool {
///////////////
fn (mut p Process) win_write_string(idx int, s string) {
- panic('Process.write_string $idx is not implemented yet')
+ panic('Process.write_string ${idx} is not implemented yet')
}
fn (mut p Process) win_read_string(idx int, maxbytes int) (string, int) {
diff --git a/vlib/os/signal.js.v b/vlib/os/signal.js.v
index cf6899f90a..85fe8a18c2 100644
--- a/vlib/os/signal.js.v
+++ b/vlib/os/signal.js.v
@@ -2,7 +2,7 @@ module os
fn signal_str(signal Signal) string {
mut result := signal.str().to_upper()
- result = 'SIG$result'
+ result = 'SIG${result}'
return result
}
@@ -103,7 +103,7 @@ fn signal_from_str(str JS.String) Signal {
Signal.sys
}
else {
- panic('unknown signal: $s')
+ panic('unknown signal: ${s}')
}
}
}
diff --git a/vlib/pg/orm.v b/vlib/pg/orm.v
index bbf653d7e5..f8ff566655 100644
--- a/vlib/pg/orm.v
+++ b/vlib/pg/orm.v
@@ -60,7 +60,7 @@ pub fn (db DB) create(table string, fields []orm.TableField) ! {
}
pub fn (db DB) drop(table string) ! {
- query := 'DROP TABLE "$table";'
+ query := 'DROP TABLE "${table}";'
pg_stmt_worker(db, query, orm.QueryData{}, orm.QueryData{})!
}
@@ -219,7 +219,7 @@ fn pg_type_from_v(typ int) !string {
}
}
if str == '' {
- return error('Unknown type $typ')
+ return error('Unknown type ${typ}')
}
return str
}
@@ -288,5 +288,5 @@ fn str_to_primitive(str string, typ int) !orm.Primitive {
}
else {}
}
- return error('Unknown field type $typ')
+ return error('Unknown field type ${typ}')
}
diff --git a/vlib/pg/pg.v b/vlib/pg/pg.v
index 5645126f2e..24d49ef246 100644
--- a/vlib/pg/pg.v
+++ b/vlib/pg/pg.v
@@ -77,7 +77,7 @@ fn C.PQfinish(voidptr)
// the parameters from the `Config` structure, returning
// a connection error when something goes wrong
pub fn connect(config Config) !DB {
- conninfo := 'host=$config.host port=$config.port user=$config.user dbname=$config.dbname password=$config.password'
+ conninfo := 'host=${config.host} port=${config.port} user=${config.user} dbname=${config.dbname} password=${config.password}'
conn := C.PQconnectdb(conninfo.str)
if conn == 0 {
return error('libpq memory allocation error')
@@ -88,9 +88,9 @@ pub fn connect(config Config) !DB {
// error message will be freed by the next `PQfinish`
// call
c_error_msg := unsafe { C.PQerrorMessage(conn).vstring() }
- error_msg := '$c_error_msg'
+ error_msg := '${c_error_msg}'
C.PQfinish(conn)
- return error('Connection to a PG database failed: $error_msg')
+ return error('Connection to a PG database failed: ${error_msg}')
}
return DB{
conn: conn
@@ -128,7 +128,7 @@ pub fn (db DB) close() {
pub fn (db DB) q_int(query string) !int {
rows := db.exec(query)!
if rows.len == 0 {
- return error('q_int "$query" not found')
+ return error('q_int "${query}" not found')
}
row := rows[0]
if row.vals.len == 0 {
@@ -145,7 +145,7 @@ pub fn (db DB) q_int(query string) !int {
pub fn (db DB) q_string(query string) !string {
rows := db.exec(query)!
if rows.len == 0 {
- return error('q_string "$query" not found')
+ return error('q_string "${query}" not found')
}
row := rows[0]
if row.vals.len == 0 {
@@ -180,7 +180,7 @@ pub fn (db DB) exec_one(query string) !Row {
res := C.PQexec(db.conn, query.str)
e := unsafe { C.PQerrorMessage(db.conn).vstring() }
if e != '' {
- return error('pg exec error: "$e"')
+ return error('pg exec error: "${e}"')
}
row := rows_first_or_empty(res_to_rows(res))!
return row
@@ -212,7 +212,7 @@ fn (db DB) handle_error_or_result(res voidptr, elabel string) ![]Row {
e := unsafe { C.PQerrorMessage(db.conn).vstring() }
if e != '' {
C.PQclear(res)
- return error('pg $elabel error:\n$e')
+ return error('pg ${elabel} error:\n${e}')
}
return res_to_rows(res)
}
@@ -229,7 +229,7 @@ pub fn (db DB) copy_expert(query string, mut file io.ReaderWriter) !int {
e := unsafe { C.PQerrorMessage(db.conn).vstring() }
if e != '' {
- return error('pg copy error:\n$e')
+ return error('pg copy error:\n${e}')
}
if status == C.PGRES_COPY_IN {
@@ -246,14 +246,14 @@ pub fn (db DB) copy_expert(query string, mut file io.ReaderWriter) !int {
code := C.PQputCopyData(db.conn, buf.data, n)
if code == -1 {
- return error('pg copy error: Failed to send data, code=$code')
+ return error('pg copy error: Failed to send data, code=${code}')
}
}
code := C.PQputCopyEnd(db.conn, 0)
if code != 1 {
- return error('pg copy error: Failed to finish copy command, code: $code')
+ return error('pg copy error: Failed to finish copy command, code: ${code}')
}
} else if status == C.PGRES_COPY_OUT {
for {
diff --git a/vlib/picoev/picoev.v b/vlib/picoev/picoev.v
index d02a78aeed..993904f65e 100644
--- a/vlib/picoev/picoev.v
+++ b/vlib/picoev/picoev.v
@@ -189,7 +189,7 @@ fn accept_callback(loop &C.picoev_loop, fd int, events int, cb_arg voidptr) {
}
fn default_err_cb(data voidptr, req picohttpparser.Request, mut res picohttpparser.Response, error IError) {
- eprintln('picoev: $error')
+ eprintln('picoev: ${error}')
res.end()
}
diff --git a/vlib/rand/rand.v b/vlib/rand/rand.v
index ceaf90bd7f..5371da6c0a 100644
--- a/vlib/rand/rand.v
+++ b/vlib/rand/rand.v
@@ -277,7 +277,7 @@ pub fn (mut rng PRNG) ascii(len int) string {
// bernoulli returns true with a probability p. Note that 0 <= p <= 1.
pub fn (mut rng PRNG) bernoulli(p f64) !bool {
if p < 0 || p > 1 {
- return error('$p is not a valid probability value.')
+ return error('${p} is not a valid probability value.')
}
return rng.f64() <= p
}
@@ -317,7 +317,7 @@ pub fn (mut rng PRNG) normal_pair(conf config.NormalConfigStruct) !(f64, f64) {
// probability of success for each trial is p.
pub fn (mut rng PRNG) binomial(n int, p f64) !int {
if p < 0 || p > 1 {
- return error('$p is not a valid probability value.')
+ return error('${p} is not a valid probability value.')
}
mut count := 0
for _ in 0 .. n {
@@ -372,7 +372,7 @@ pub fn (mut rng PRNG) shuffle_clone(a []T, config config.ShuffleConfigStruct)
pub fn (mut rng PRNG) choose(array []T, k int) ![]T {
n := array.len
if k > n {
- return error('Cannot choose $k elements without replacement from a $n-element array.')
+ return error('Cannot choose ${k} elements without replacement from a ${n}-element array.')
}
mut results := []T{len: k}
mut indices := []int{len: n, init: it}
diff --git a/vlib/readline/README.md b/vlib/readline/README.md
index 8f57153d58..3f6cec0017 100644
--- a/vlib/readline/README.md
+++ b/vlib/readline/README.md
@@ -22,5 +22,5 @@ or just:
import readline { read_line }
input := read_line('What is your name: ')?
-println('Your name is: $input')
+println('Your name is: ${input}')
```
diff --git a/vlib/regex/README.md b/vlib/regex/README.md
index 34df63a0d9..52a5e5b768 100644
--- a/vlib/regex/README.md
+++ b/vlib/regex/README.md
@@ -250,17 +250,17 @@ fn convert_html_rgb(in_col string) u32 {
// this is the regex query, it use the V string interpolation to customize the regex query
// NOTE: if you want use escaped code you must use the r"" (raw) strings,
// *** please remember that the V interpoaltion doesn't work on raw strings. ***
- query := '#([a-fA-F0-9]{$n_digit})([a-fA-F0-9]{$n_digit})([a-fA-F0-9]{$n_digit})'
+ query := '#([a-fA-F0-9]{${n_digit}})([a-fA-F0-9]{${n_digit}})([a-fA-F0-9]{${n_digit}})'
mut re := regex.regex_opt(query) or { panic(err) }
start, end := re.match_string(in_col)
- println('start: $start, end: $end')
+ println('start: ${start}, end: ${end}')
mut res := u32(0)
if start >= 0 {
group_list := re.get_group_list() // this is the utility function
r := ('0x' + in_col[group_list[0].start..group_list[0].end]).int() << col_mul
g := ('0x' + in_col[group_list[1].start..group_list[1].end]).int() << col_mul
b := ('0x' + in_col[group_list[2].start..group_list[2].end]).int() << col_mul
- println('r: $r g: $g b: $b')
+ println('r: ${r} g: ${g} b: ${b}')
res = u32(r) << 16 | u32(g) << 8 | u32(b)
}
return res
@@ -317,19 +317,19 @@ fn main(){
re.group_csave_flag = true
start, end := re.match_string(txt)
if start >= 0 {
- println("Match ($start, $end) => [${txt[start..end]}]")
+ println("Match (${start}, ${end}) => [${txt[start..end]}]")
} else {
println("No Match")
}
if re.group_csave_flag == true && start >= 0 && re.group_csave.len > 0{
- println("cg: $re.group_csave")
+ println("cg: ${re.group_csave}")
mut cs_i := 1
for cs_i < re.group_csave[0]*3 {
g_id := re.group_csave[cs_i]
st := re.group_csave[cs_i+1]
en := re.group_csave[cs_i+2]
- println("cg[$g_id] $st $en:[${txt[st..en]}]")
+ println("cg[${g_id}] ${st} ${en}:[${txt[st..en]}]")
cs_i += 3
}
}
@@ -381,13 +381,13 @@ fn main(){
re.debug=0 // disable log
start, end := re.match_string(txt)
if start >= 0 {
- println("Match ($start, $end) => [${txt[start..end]}]")
+ println("Match (${start}, ${end}) => [${txt[start..end]}]")
} else {
println("No Match")
}
for name in re.group_map.keys() {
- println("group:'$name' \t=> [${re.get_group_by_name(txt, name)}] \
+ println("group:'${name}' \t=> [${re.get_group_by_name(txt, name)}] \
bounds: ${re.get_group_bounds_by_name(name)}")
}
}
@@ -412,11 +412,11 @@ Here is a more complex example of using them:
fn convert_html_rgb_n(in_col string) u32 {
mut n_digit := if in_col.len == 4 { 1 } else { 2 }
mut col_mul := if in_col.len == 4 { 4 } else { 0 }
- query := '#(?P<red>[a-fA-F0-9]{$n_digit})' + '(?P<green>[a-fA-F0-9]{$n_digit})' +
- '(?P<blue>[a-fA-F0-9]{$n_digit})'
+ query := '#(?P<red>[a-fA-F0-9]{${n_digit}})' + '(?P<green>[a-fA-F0-9]{${n_digit}})' +
+ '(?P<blue>[a-fA-F0-9]{${n_digit}})'
mut re := regex.regex_opt(query) or { panic(err) }
start, end := re.match_string(in_col)
- println('start: $start, end: $end')
+ println('start: ${start}, end: ${end}')
mut res := u32(0)
if start >= 0 {
red_s, red_e := re.get_group_by_name('red')
@@ -425,7 +425,7 @@ fn convert_html_rgb_n(in_col string) u32 {
g := ('0x' + in_col[green_s..green_e]).int() << col_mul
blue_s, blue_e := re.get_group_by_name('blue')
b := ('0x' + in_col[blue_s..blue_e]).int() << col_mul
- println('r: $r g: $g b: $b')
+ println('r: ${r} g: ${g} b: ${b}')
res = u32(r) << 16 | u32(g) << 8 | u32(b)
}
return res
@@ -438,7 +438,7 @@ that return the string of a group using its `name`:
```v ignore
txt := "my used string...."
for name in re.group_map.keys() {
- println("group:'$name' \t=> [${re.get_group_by_name(txt, name)}] \
+ println("group:'${name}' \t=> [${re.get_group_by_name(txt, name)}] \
bounds: ${re.get_group_bounds_by_name(name)}")
}
```
@@ -646,7 +646,7 @@ fn my_repl(re regex.RE, in_txt string, start int, end int) string {
g0 := re.get_group_by_id(in_txt, 0)
g1 := re.get_group_by_id(in_txt, 1)
g2 := re.get_group_by_id(in_txt, 2)
- return "*$g0*$g1*$g2*"
+ return "*${g0}*${g1}*${g2}*"
}
fn main(){
@@ -787,7 +787,7 @@ output function:
```v oksyntax
// custom print function, the input will be the regex debug string
fn custom_print(txt string) {
- println('my log: $txt')
+ println('my log: ${txt}')
}
mut re := new()
@@ -810,13 +810,13 @@ fn main(){
start, end := re.match_string(txt)
if start >= 0 {
- println("Match ($start, $end) => [${txt[start..end]}]")
+ println("Match (${start}, ${end}) => [${txt[start..end]}]")
for g_index := 0; g_index < re.group_count ; g_index++ {
println("#${g_index} [${re.get_group_by_id(txt, g_index)}] \
bounds: ${re.get_group_bounds_by_id(g_index)}")
}
for name in re.group_map.keys() {
- println("group:'$name' \t=> [${re.get_group_by_name(txt, name)}] \
+ println("group:'${name}' \t=> [${re.get_group_by_name(txt, name)}] \
bounds: ${re.get_group_bounds_by_name(name)}")
}
} else {
@@ -851,33 +851,33 @@ fn main(){
start, end := re.match_string(txt)
if start >= 0 {
- println("Match ($start, $end) => [${txt[start..end]}]")
+ println("Match (${start}, ${end}) => [${txt[start..end]}]")
} else {
println("No Match")
}
// show results for continuos group saving
if re.group_csave_flag == true && start >= 0 && re.group_csave.len > 0{
- println("cg: $re.group_csave")
+ println("cg: ${re.group_csave}")
mut cs_i := 1
for cs_i < re.group_csave[0]*3 {
g_id := re.group_csave[cs_i]
st := re.group_csave[cs_i+1]
en := re.group_csave[cs_i+2]
- println("cg[$g_id] $st $en:[${txt[st..en]}]")
+ println("cg[${g_id}] ${st} ${en}:[${txt[st..en]}]")
cs_i += 3
}
}
// show results for captured groups
if start >= 0 {
- println("Match ($start, $end) => [${txt[start..end]}]")
+ println("Match (${start}, ${end}) => [${txt[start..end]}]")
for g_index := 0; g_index < re.group_count ; g_index++ {
println("#${g_index} [${re.get_group_by_id(txt, g_index)}] \
bounds: ${re.get_group_bounds_by_id(g_index)}")
}
for name in re.group_map.keys() {
- println("group:'$name' \t=> [${re.get_group_by_name(txt, name)}] \
+ println("group:'${name}' \t=> [${re.get_group_by_name(txt, name)}] \
bounds: ${re.get_group_bounds_by_name(name)}")
}
} else {
diff --git a/vlib/regex/regex.v b/vlib/regex/regex.v
index 6f600cb371..780e3e0bd1 100644
--- a/vlib/regex/regex.v
+++ b/vlib/regex/regex.v
@@ -1444,7 +1444,7 @@ fn (mut re RE) impl_compile(in_txt string) (int, int) {
pub fn (re RE) get_code() string {
mut pc1 := 0
mut res := strings.new_builder(re.cc.len * 2 * re.prog.len)
- res.write_string('========================================\nv RegEx compiler v $regex.v_regex_version output:\n')
+ res.write_string('========================================\nv RegEx compiler v ${regex.v_regex_version} output:\n')
mut stop_flag := false
@@ -1477,24 +1477,24 @@ pub fn (re RE) get_code() string {
res.write_string(' last!')
}
} else if ist == regex.ist_dot_char {
- res.write_string('. DOT_CHAR nx chk: $tk.dot_check_pc')
+ res.write_string('. DOT_CHAR nx chk: ${tk.dot_check_pc}')
if tk.last_dot_flag == true {
res.write_string(' last!')
}
} else if ist == regex.ist_group_start {
- res.write_string('( GROUP_START #:$tk.group_id')
+ res.write_string('( GROUP_START #:${tk.group_id}')
if tk.group_id == -1 {
res.write_string(' ?:')
} else {
for x in re.group_map.keys() {
if re.group_map[x] == (tk.group_id + 1) {
- res.write_string(' ?P<$x>')
+ res.write_string(' ?P<${x}>')
break
}
}
}
} else if ist == regex.ist_group_end {
- res.write_string(') GROUP_END #:$tk.group_id')
+ res.write_string(') GROUP_END #:${tk.group_id}')
} else if ist == regex.ist_simple_char {
res.write_string('[${tk.ch:1c}] query_ch')
}
@@ -1539,7 +1539,7 @@ pub fn (re RE) get_query() string {
// GROUP start
if ch == regex.ist_group_start {
if re.debug > 0 {
- res.write_string('#$tk.group_id')
+ res.write_string('#${tk.group_id}')
}
res.write_string('(')
@@ -1551,7 +1551,7 @@ pub fn (re RE) get_query() string {
for x in re.group_map.keys() {
if re.group_map[x] == (tk.group_id + 1) {
- res.write_string('?P<$x>')
+ res.write_string('?P<${x}>')
break
}
}
@@ -1569,7 +1569,7 @@ pub fn (re RE) get_query() string {
if ch == regex.ist_or_branch {
res.write_string('|')
if re.debug > 0 {
- res.write_string('{$tk.rep_min,$tk.rep_max}')
+ res.write_string('{${tk.rep_min},${tk.rep_max}}')
}
i++
continue
@@ -1613,9 +1613,9 @@ pub fn (re RE) get_query() string {
res.write_string('*')
} else {
if tk.rep_max == regex.max_quantifier {
- res.write_string('{$tk.rep_min,MAX}')
+ res.write_string('{${tk.rep_min},MAX}')
} else {
- res.write_string('{$tk.rep_min,$tk.rep_max}')
+ res.write_string('{${tk.rep_min},${tk.rep_max}}')
}
if tk.greedy == true {
res.write_string('?')
@@ -1778,7 +1778,7 @@ pub fn (mut re RE) match_base(in_txt &u8, in_txt_len int) (int, int) {
buf2.write_string('# ${step_count:3d} s: ${state_str(m_state):12s} PC: ${state.pc:3d}=>')
buf2.write_string('${ist:8x}'.replace(' ', '0'))
- buf2.write_string(" i,ch,len:[${state.i:3d},'${utf8_str(ch)}',$char_len] f.m:[${state.first_match:3d},${state.match_index:3d}] ")
+ buf2.write_string(" i,ch,len:[${state.i:3d},'${utf8_str(ch)}',${char_len}] f.m:[${state.first_match:3d},${state.match_index:3d}] ")
if ist == regex.ist_simple_char {
buf2.write_string('query_ch: [${re.prog[state.pc].ch:1c}]')
@@ -1798,9 +1798,9 @@ pub fn (mut re RE) match_base(in_txt &u8, in_txt_len int) (int, int) {
} else if ist == regex.ist_group_start {
tmp_gi := re.prog[state.pc].group_id
tmp_gr := re.prog[re.prog[state.pc].goto_pc].group_rep
- buf2.write_string('GROUP_START #:$tmp_gi rep:$tmp_gr ')
+ buf2.write_string('GROUP_START #:${tmp_gi} rep:${tmp_gr} ')
} else if ist == regex.ist_group_end {
- buf2.write_string('GROUP_END #:${re.prog[state.pc].group_id} deep:$state.group_index')
+ buf2.write_string('GROUP_END #:${re.prog[state.pc].group_id} deep:${state.group_index}')
}
}
if re.prog[state.pc].rep_max == regex.max_quantifier {
@@ -1811,7 +1811,7 @@ pub fn (mut re RE) match_base(in_txt &u8, in_txt_len int) (int, int) {
if re.prog[state.pc].greedy == true {
buf2.write_string('?')
}
- buf2.write_string(' (#$state.group_index)')
+ buf2.write_string(' (#${state.group_index})')
if ist == regex.ist_dot_char {
buf2.write_string(' last!')
diff --git a/vlib/regex/regex_opt.v b/vlib/regex/regex_opt.v
index e02f563dca..3e366f517e 100644
--- a/vlib/regex/regex_opt.v
+++ b/vlib/regex/regex_opt.v
@@ -8,11 +8,11 @@ pub fn (mut re RE) compile_opt(pattern string) ? {
if re_err != compile_ok {
mut err_msg := strings.new_builder(300)
- err_msg.write_string('\nquery: $pattern\n')
+ err_msg.write_string('\nquery: ${pattern}\n')
line := '-'.repeat(err_pos)
- err_msg.write_string('err : $line^\n')
+ err_msg.write_string('err : ${line}^\n')
err_str := re.get_parse_error_string(re_err)
- err_msg.write_string('ERROR: $err_str\n')
+ err_msg.write_string('ERROR: ${err_str}\n')
return error_with_code(err_msg.str(), re_err)
}
}
diff --git a/vlib/regex/regex_test.v b/vlib/regex/regex_test.v
index 107d131081..abc5e91a11 100644
--- a/vlib/regex/regex_test.v
+++ b/vlib/regex/regex_test.v
@@ -432,11 +432,11 @@ fn test_regex() {
for c, to in cgroups_test_suite {
// debug print
if debug {
- println('$c [$to.src] [q$to.q] ($to.s, $to.e)')
+ println('${c} [${to.src}] [q${to.q}] (${to.s}, ${to.e})')
}
mut re := regex.regex_opt(to.q) or {
- eprintln('err: $err')
+ eprintln('err: ${err}')
assert false
continue
}
@@ -461,7 +461,7 @@ fn test_regex() {
}
if start != to.s || end != to.e {
- println('#$c [$to.src] q[$to.q] res[$tmp_str] base:[$to.s,$to.e] $start, $end')
+ println('#${c} [${to.src}] q[${to.q}] res[${tmp_str}] base:[${to.s},${to.e}] ${start}, ${end}')
eprintln('ERROR!')
assert false
continue
@@ -479,7 +479,7 @@ fn test_regex() {
mut ln := re.group_csave[0] * 3
for ln > 0 {
if re.group_csave[ln] != to.cg[ln] {
- eprintln('Capturing group failed on $ln item!')
+ eprintln('Capturing group failed on ${ln} item!')
assert false
}
ln--
@@ -488,7 +488,7 @@ fn test_regex() {
// check named captured groups
for k in to.cgn.keys() {
if to.cgn[k] != (re.group_map[k] - 1) { // we have -1 because the map not found is 0, in groups we start from 0 and we store using +1
- eprintln('Named capturing group error! [$k]')
+ eprintln('Named capturing group error! [${k}]')
assert false
continue
}
@@ -501,8 +501,8 @@ fn test_regex() {
for ln := 0; ln < re.groups.len; ln++ {
if re.groups[ln] != to.cg[ln] {
eprintln("Capture group doesn't match:")
- eprintln('true ground: $to.cg')
- eprintln('elaborated : $re.groups')
+ eprintln('true ground: ${to.cg}')
+ eprintln('elaborated : ${re.groups}')
assert false
}
}
@@ -513,11 +513,11 @@ fn test_regex() {
for c, to in find_all_test_suite {
// debug print
if debug {
- println('#$c [$to.src] q[$to.q] ($to.res, $to.res_str)')
+ println('#${c} [${to.src}] q[${to.q}] (${to.res}, ${to.res_str})')
}
mut re := regex.regex_opt(to.q) or {
- eprintln('err: $err')
+ eprintln('err: ${err}')
assert false
continue
}
@@ -527,7 +527,7 @@ fn test_regex() {
if res != to.res {
eprintln('err: find_all !!')
if debug {
- println('#$c exp: $to.res calculated: $res')
+ println('#${c} exp: ${to.res} calculated: ${res}')
}
assert false
}
@@ -536,7 +536,7 @@ fn test_regex() {
if res_str != to.res_str {
eprintln('err: find_all_str !!')
if debug {
- println('#$c exp: $to.res_str calculated: $res_str')
+ println('#${c} exp: ${to.res_str} calculated: ${res_str}')
}
assert false
}
@@ -546,11 +546,11 @@ fn test_regex() {
for c, to in split_test_suite {
// debug print
if debug {
- println('#$c [$to.src] q[$to.q] ($to.res)')
+ println('#${c} [${to.src}] q[${to.q}] (${to.res})')
}
mut re := regex.regex_opt(to.q) or {
- eprintln('err: $err')
+ eprintln('err: ${err}')
assert false
continue
}
@@ -560,7 +560,7 @@ fn test_regex() {
if res != to.res {
eprintln('err: split !!')
if debug {
- println('#$c exp: $to.res calculated: $res')
+ println('#${c} exp: ${to.res} calculated: ${res}')
}
assert false
}
@@ -570,11 +570,11 @@ fn test_regex() {
for c, to in match_test_suite_replace {
// debug print
if debug {
- println('#$c [$to.src] q[$to.q] $to.r')
+ println('#${c} [${to.src}] q[${to.q}] ${to.r}')
}
mut re := regex.regex_opt(to.q) or {
- eprintln('err: $err')
+ eprintln('err: ${err}')
assert false
continue
}
@@ -591,11 +591,11 @@ fn test_regex() {
for c, to in match_test_suite_replace_simple {
// debug print
if debug {
- println('#$c [$to.src] q[$to.q] $to.r')
+ println('#${c} [${to.src}] q[${to.q}] ${to.r}')
}
mut re := regex.regex_opt(to.q) or {
- eprintln('err: $err')
+ eprintln('err: ${err}')
assert false
continue
}
@@ -612,13 +612,13 @@ fn test_regex() {
for c, to in match_test_suite {
// debug print
if debug {
- println('#$c [$to.src] q[$to.q] $to.s $to.e')
+ println('#${c} [${to.src}] q[${to.q}] ${to.s} ${to.e}')
}
// test the find
if to.s > 0 {
mut re := regex.regex_opt(to.q) or {
- eprintln('err: $err')
+ eprintln('err: ${err}')
assert false
continue
}
@@ -628,7 +628,7 @@ fn test_regex() {
if start != to.s || end != to.e {
err_str := re.get_parse_error_string(start)
- eprintln('ERROR : $err_str start: $start end: $end')
+ eprintln('ERROR : ${err_str} start: ${start} end: ${end}')
assert false
} else {
// tmp_str := text[start..end]
@@ -643,7 +643,7 @@ fn test_regex() {
// re.debug = true
re.compile_opt(to.q) or {
- eprintln('err: $err')
+ eprintln('err: ${err}')
assert false
continue
}
@@ -656,7 +656,7 @@ fn test_regex() {
}
if start != to.s || end != to.e {
- eprintln('#$c [$to.src] q[$to.q] res[$tmp_str] $start, $end')
+ eprintln('#${c} [${to.src}] q[${to.q}] res[${tmp_str}] ${start}, ${end}')
eprintln('ERROR!')
assert false
continue
@@ -693,7 +693,7 @@ fn test_regex_func() {
start, end := re.match_string(test_str)
assert (start == 0) && (end == 6)
} else {
- eprintln('Error in query string in pos $err_pos')
+ eprintln('Error in query string in pos ${err_pos}')
eprintln('Error: ${re.get_parse_error_string(re_err)}')
assert false
}
@@ -701,7 +701,7 @@ fn test_regex_func() {
fn my_repl_1(re regex.RE, in_txt string, start int, end int) string {
s0 := re.get_group_by_id(in_txt, 0)
- println('[$start, $end] => $s0')
+ println('[${start}, ${end}] => ${s0}')
return 'a' + s0.to_upper()
}
@@ -718,7 +718,7 @@ fn my_repl(re regex.RE, in_txt string, start int, end int) string {
s0 := re.get_group_by_id(in_txt, 0)[0..1] + 'X'
s1 := re.get_group_by_id(in_txt, 1)[0..1] + 'X'
s2 := re.get_group_by_id(in_txt, 2)[0..1] + 'X'
- return '$s0$s1$s2'
+ return '${s0}${s1}${s2}'
}
// test regex replace function
@@ -781,7 +781,7 @@ fn test_quantifier_sequences() {
for pattern in test_quantifier_sequences_list {
re, re_err, err_pos := regex.regex_base(pattern)
if re_err != regex.err_syntax_error {
- eprintln('pattern: $pattern => $re_err')
+ eprintln('pattern: ${pattern} => ${re_err}')
}
assert re_err == regex.err_syntax_error
}
diff --git a/vlib/regex/regex_util.v b/vlib/regex/regex_util.v
index 5f7fc1f344..58c32eee2c 100644
--- a/vlib/regex/regex_util.v
+++ b/vlib/regex/regex_util.v
@@ -452,7 +452,7 @@ fn (re RE) parsed_replace_string(in_txt string, repl string) string {
group_id := int(tmp[0] - `0`)
group := re.get_group_by_id(in_txt, group_id)
// println("group: $group_id [$group]")
- res += '$group${tmp[1..]}'
+ res += '${group}${tmp[1..]}'
} else {
res += '\\' + tmp
}
diff --git a/vlib/semver/parse.v b/vlib/semver/parse.v
index fc984cbce9..0b2c6b1527 100644
--- a/vlib/semver/parse.v
+++ b/vlib/semver/parse.v
@@ -56,7 +56,7 @@ fn (ver RawVersion) is_missing(typ int) bool {
fn (raw_ver RawVersion) coerce() ?Version {
ver := raw_ver.complete()
if !is_valid_number(ver.raw_ints[semver.ver_major]) {
- return error('Invalid major version: $ver.raw_ints[ver_major]')
+ return error('Invalid major version: ${ver.raw_ints[semver.ver_major]}')
}
return ver.to_version()
}
diff --git a/vlib/semver/range.v b/vlib/semver/range.v
index 81174fea7b..2d2c398976 100644
--- a/vlib/semver/range.v
+++ b/vlib/semver/range.v
@@ -88,14 +88,14 @@ fn parse_comparator_set(input string) ?ComparatorSet {
raw_comparators := input.split(semver.comparator_sep)
if raw_comparators.len > 2 {
return &InvalidComparatorFormatError{
- msg: 'Invalid format of comparator set for input "$input"'
+ msg: 'Invalid format of comparator set for input "${input}"'
}
}
mut comparators := []Comparator{}
for raw_comp in raw_comparators {
c := parse_comparator(raw_comp) or {
return &InvalidComparatorFormatError{
- msg: 'Invalid comparator "$raw_comp" in input "$input"'
+ msg: 'Invalid comparator "${raw_comp}" in input "${input}"'
}
}
comparators << c
diff --git a/vlib/semver/semver.v b/vlib/semver/semver.v
index 48754df73d..b7bdba123b 100644
--- a/vlib/semver/semver.v
+++ b/vlib/semver/semver.v
@@ -33,7 +33,7 @@ struct InvalidVersionFormatError {
}
pub fn (err InvalidVersionFormatError) msg() string {
- return 'Invalid version format for input "$err.input"'
+ return 'Invalid version format for input "${err.input}"'
}
// * Constructor.
@@ -97,12 +97,12 @@ pub fn (v1 Version) le(v2 Version) bool {
// str returns the `string` representation of the `Version`.
pub fn (ver Version) str() string {
- common_string := '${ver.major}.${ver.minor}.$ver.patch'
+ common_string := '${ver.major}.${ver.minor}.${ver.patch}'
- prerelease_string := if ver.prerelease.len > 0 { '-$ver.prerelease' } else { '' }
- metadata_string := if ver.metadata.len > 0 { '+$ver.metadata' } else { '' }
+ prerelease_string := if ver.prerelease.len > 0 { '-${ver.prerelease}' } else { '' }
+ metadata_string := if ver.metadata.len > 0 { '+${ver.metadata}' } else { '' }
- return '$common_string$prerelease_string$metadata_string'
+ return '${common_string}${prerelease_string}${metadata_string}'
}
// * Utilites.
diff --git a/vlib/semver/util.v b/vlib/semver/util.v
index 142ce196da..ebaa172b82 100644
--- a/vlib/semver/util.v
+++ b/vlib/semver/util.v
@@ -10,7 +10,7 @@ fn is_version_valid(input string) bool {
[inline]
fn coerce_version(input string) ?Version {
raw_ver := parse(input)
- ver := raw_ver.coerce() or { return error('Invalid version for input "$input"') }
+ ver := raw_ver.coerce() or { return error('Invalid version for input "${input}"') }
return ver
}
diff --git a/vlib/sokol/sapp/sapp_structs.c.v b/vlib/sokol/sapp/sapp_structs.c.v
index e21d9a6323..f5f60dcfec 100644
--- a/vlib/sokol/sapp/sapp_structs.c.v
+++ b/vlib/sokol/sapp/sapp_structs.c.v
@@ -108,7 +108,7 @@ pub:
pub type Event = C.sapp_event
pub fn (e &C.sapp_event) str() string {
- return 'evt: frame_count=$e.frame_count, type=${e.@type}'
+ return 'evt: frame_count=${e.frame_count}, type=${e.@type}'
}
[typedef]
diff --git a/vlib/sokol/sapp/sapp_v.c.v b/vlib/sokol/sapp/sapp_v.c.v
index 4de1fa9232..440556aca7 100644
--- a/vlib/sokol/sapp/sapp_v.c.v
+++ b/vlib/sokol/sapp/sapp_v.c.v
@@ -51,7 +51,7 @@ fn write_rgba_to_ppm(path string, w int, h int, components int, pixels &u8) ! {
f_out.close()
}
f_out.writeln('P3')!
- f_out.writeln('$w $h')!
+ f_out.writeln('${w} ${h}')!
f_out.writeln('255')!
for i := h - 1; i >= 0; i-- {
for j := 0; j < w; j++ {
@@ -60,7 +60,7 @@ fn write_rgba_to_ppm(path string, w int, h int, components int, pixels &u8) ! {
r := int(pixels[idx])
g := int(pixels[idx + 1])
b := int(pixels[idx + 2])
- f_out.write_string('$r $g $b ')!
+ f_out.write_string('${r} ${g} ${b} ')!
}
}
}
diff --git a/vlib/sqlite/orm.v b/vlib/sqlite/orm.v
index 81987fa4de..2e164889b7 100644
--- a/vlib/sqlite/orm.v
+++ b/vlib/sqlite/orm.v
@@ -9,7 +9,7 @@ pub fn (db DB) @select(config orm.SelectConfig, data orm.QueryData, where orm.Qu
// 1. Create query and bind necessary data
query := orm.orm_select_gen(config, '`', true, '?', 1, where)
$if trace_sqlite ? {
- eprintln('> @select query: "$query"')
+ eprintln('> @select query: "${query}"')
}
stmt := db.new_init_stmt(query)!
defer {
@@ -83,7 +83,7 @@ pub fn (db DB) create(table string, fields []orm.TableField) ! {
}
pub fn (db DB) drop(table string) ! {
- query := 'DROP TABLE `$table`;'
+ query := 'DROP TABLE `${table}`;'
sqlite_stmt_worker(db, query, orm.QueryData{}, orm.QueryData{})!
}
@@ -92,7 +92,7 @@ pub fn (db DB) drop(table string) ! {
// Executes query and bind prepared statement data directly
fn sqlite_stmt_worker(db DB, query string, data orm.QueryData, where orm.QueryData) ! {
$if trace_sqlite ? {
- eprintln('> sqlite_stmt_worker query: "$query"')
+ eprintln('> sqlite_stmt_worker query: "${query}"')
}
stmt := db.new_init_stmt(query)!
defer {
@@ -158,7 +158,7 @@ fn (stmt Stmt) sqlite_select_column(idx int, typ int) !orm.Primitive {
d := stmt.get_int(idx)
primitive = time.unix(d)
} else {
- return error('Unknown type $typ')
+ return error('Unknown type ${typ}')
}
return primitive
@@ -173,6 +173,6 @@ fn sqlite_type_from_v(typ int) !string {
} else if typ == orm.string {
'TEXT'
} else {
- error('Unknown type $typ')
+ error('Unknown type ${typ}')
}
}
diff --git a/vlib/sqlite/sqlite.v b/vlib/sqlite/sqlite.v
index 48d1d31570..c9eaeae89d 100644
--- a/vlib/sqlite/sqlite.v
+++ b/vlib/sqlite/sqlite.v
@@ -264,7 +264,7 @@ pub fn (db &DB) exec_one(query string) !Row {
[manualfree]
pub fn (db &DB) error_message(code int, query string) IError {
errmsg := unsafe { cstring_to_vstring(&char(C.sqlite3_errmsg(db.conn))) }
- msg := '$errmsg ($code) ($query)'
+ msg := '${errmsg} (${code}) (${query})'
unsafe { errmsg.free() }
return SQLError{
msg: msg
@@ -293,7 +293,7 @@ pub fn (db &DB) exec_param(query string, param string) []Row {
// Creates table named 'table_name', with columns generated from 'columns' array.
// Default columns type will be TEXT.
pub fn (db &DB) create_table(table_name string, columns []string) {
- db.exec('create table if not exists $table_name (' + columns.join(',\n') + ')')
+ db.exec('create table if not exists ${table_name} (' + columns.join(',\n') + ')')
}
// Set a busy timeout in milliseconds.
diff --git a/vlib/sqlite/sqlite_vfs_lowlevel_test.v b/vlib/sqlite/sqlite_vfs_lowlevel_test.v
index f769f237b9..85e1807e61 100644
--- a/vlib/sqlite/sqlite_vfs_lowlevel_test.v
+++ b/vlib/sqlite/sqlite_vfs_lowlevel_test.v
@@ -20,7 +20,7 @@ fn test_vfs_register() {
panic('expected that vfs is not known')
}
- vfs_descr.register_as_nondefault() or { panic('vfs register failed $err') }
+ vfs_descr.register_as_nondefault() or { panic('vfs register failed ${err}') }
sqlite.get_vfs(vfs_name)?
@@ -28,7 +28,7 @@ fn test_vfs_register() {
assert now_default_vfs.zName == org_default_vfs.zName
- vfs_descr.unregister() or { panic('vfs unregister failed $err') }
+ vfs_descr.unregister() or { panic('vfs unregister failed ${err}') }
if _ := sqlite.get_vfs(vfs_name) {
panic('vfs supposedly unregistered yet somehow still foundable')
@@ -119,7 +119,7 @@ fn example_vfs_fullpathname(vfs &sqlite.Sqlite3_vfs, input &char, size_of_output
}
result := unsafe { cstring_to_vstring(output) }
- vfs_state.log << 'fullpathname from=$from to=$result}'
+ vfs_state.log << 'fullpathname from=${from} to=${result}}'
return sqlite.sqlite_ok
}
@@ -174,7 +174,7 @@ fn example_vfs_open(vfs &sqlite.Sqlite3_vfs, file_name_or_null_for_tempfile &cha
outp.name = file_name.clone()
outp.vfs_state = vfs_state
}
- vfs_state.log << 'open temp?=$is_temp name=$file_name'
+ vfs_state.log << 'open temp?=${is_temp} name=${file_name}'
return sqlite.sqlite_ok
}
@@ -213,7 +213,7 @@ fn example_vfsfile_read(file &sqlite.Sqlite3_file, output voidptr, amount int, o
mut vfsfile := to_vfsopenedfile(file)
- vfsfile.vfs_state.log << 'read file=$vfsfile.name'
+ vfsfile.vfs_state.log << 'read file=${vfsfile.name}'
unsafe {
C.memset(output, 0, amount)
@@ -263,7 +263,7 @@ fn example_vfsfile_close(file &sqlite.Sqlite3_file) int {
mut vfsfile := to_vfsopenedfile(file)
- vfsfile.vfs_state.log << 'close file=$vfsfile.name'
+ vfsfile.vfs_state.log << 'close file=${vfsfile.name}'
return sqlite.sqlite_ok
}
diff --git a/vlib/sqlite/vfs_lowlevel.v b/vlib/sqlite/vfs_lowlevel.v
index 24f614284c..0aa5847dc2 100644
--- a/vlib/sqlite/vfs_lowlevel.v
+++ b/vlib/sqlite/vfs_lowlevel.v
@@ -118,14 +118,14 @@ pub fn get_default_vfs() ?&Sqlite3_vfs {
pub fn (mut v Sqlite3_vfs) register_as_nondefault() ? {
res := C.sqlite3_vfs_register(v, 0)
- return if sqlite_ok == res { none } else { error('sqlite3_vfs_register returned $res') }
+ return if sqlite_ok == res { none } else { error('sqlite3_vfs_register returned ${res}') }
}
// unregister Requests sqlite to stop using VFS as passed in receiver argument
pub fn (mut v Sqlite3_vfs) unregister() ? {
res := C.sqlite3_vfs_unregister(v)
- return if sqlite_ok == res { none } else { error('sqlite3_vfs_unregister returned $res') }
+ return if sqlite_ok == res { none } else { error('sqlite3_vfs_unregister returned ${res}') }
}
// https://www.sqlite.org/c3ref/open.html
diff --git a/vlib/stbi/stbi.c.v b/vlib/stbi/stbi.c.v
index 09766ffb1f..2c662d7375 100644
--- a/vlib/stbi/stbi.c.v
+++ b/vlib/stbi/stbi.c.v
@@ -12,14 +12,14 @@ fn trace_allocation(message string) {
[export: 'stbi__callback_malloc']
fn cb_malloc(s usize) voidptr {
res := unsafe { malloc(isize(s)) }
- trace_allocation('> stbi__callback_malloc: $s => ${ptr_str(res)}')
+ trace_allocation('> stbi__callback_malloc: ${s} => ${ptr_str(res)}')
return res
}
[export: 'stbi__callback_realloc']
fn cb_realloc(p voidptr, s usize) voidptr {
res := unsafe { v_realloc(p, isize(s)) }
- trace_allocation('> stbi__callback_realloc: ${ptr_str(p)} , $s => ${ptr_str(res)}')
+ trace_allocation('> stbi__callback_realloc: ${ptr_str(p)} , ${s} => ${ptr_str(res)}')
return res
}
@@ -126,7 +126,7 @@ pub fn load(path string) !Image {
res.nr_channels = 4
}
if isnil(res.data) {
- return error('stbi_image failed to load from "$path"')
+ return error('stbi_image failed to load from "${path}"')
}
return res
}
@@ -162,21 +162,21 @@ fn C.stbi_write_jpg(filename &char, w int, h int, comp int, buffer &u8, quality
// row_stride_in_bytes is usually equal to: w * comp
pub fn stbi_write_png(path string, w int, h int, comp int, buf &u8, row_stride_in_bytes int) ! {
if 0 == C.stbi_write_png(&char(path.str), w, h, comp, buf, row_stride_in_bytes) {
- return error('stbi_image failed to write png file to "$path"')
+ return error('stbi_image failed to write png file to "${path}"')
}
}
// stbi_write_png write on path a BMP file
pub fn stbi_write_bmp(path string, w int, h int, comp int, buf &u8) ! {
if 0 == C.stbi_write_bmp(&char(path.str), w, h, comp, buf) {
- return error('stbi_image failed to write bmp file to "$path"')
+ return error('stbi_image failed to write bmp file to "${path}"')
}
}
// stbi_write_png write on path a TGA file
pub fn stbi_write_tga(path string, w int, h int, comp int, buf &u8) ! {
if 0 == C.stbi_write_tga(&char(path.str), w, h, comp, buf) {
- return error('stbi_image failed to write tga file to "$path"')
+ return error('stbi_image failed to write tga file to "${path}"')
}
}
@@ -185,7 +185,7 @@ pub fn stbi_write_tga(path string, w int, h int, comp int, buf &u8) ! {
// quality is between 1 and 100. Higher quality looks better but results in a bigger image.
pub fn stbi_write_jpg(path string, w int, h int, comp int, buf &u8, quality int) ! {
if 0 == C.stbi_write_jpg(&char(path.str), w, h, comp, buf, quality) {
- return error('stbi_image failed to write jpg file to "$path"')
+ return error('stbi_image failed to write jpg file to "${path}"')
}
}
diff --git a/vlib/stbi/stbi_test.v b/vlib/stbi/stbi_test.v
index 55933aea51..3bcedd0831 100644
--- a/vlib/stbi/stbi_test.v
+++ b/vlib/stbi/stbi_test.v
@@ -14,18 +14,18 @@ fn testsuite_end() {
fn test_stbi_read_write() {
vroot := @VEXEROOT
path := os.join_path(vroot, 'examples', 'assets', 'logo.png')
- println('Source path: $path')
+ println('Source path: ${path}')
d_s := stbi.load(path) or { panic(err) }
- println('Image source data:\n $d_s')
+ println('Image source data:\n ${d_s}')
out_path := os.join_path(tfolder, 'test.png')
- println('Out path: $out_path')
+ println('Out path: ${out_path}')
stbi.stbi_write_png(out_path, d_s.width, d_s.height, 4, d_s.data, d_s.width * 4) or {
panic(err)
}
d_d := stbi.load(out_path) or { panic(err) }
- println('Image dest data:\n $d_d')
+ println('Image dest data:\n ${d_d}')
assert d_s.width == d_d.width
assert d_s.height == d_d.height
diff --git a/vlib/strconv/atof_test.v b/vlib/strconv/atof_test.v
index 069ea79ee7..b05311e1c4 100644
--- a/vlib/strconv/atof_test.v
+++ b/vlib/strconv/atof_test.v
@@ -78,13 +78,13 @@ fn test_atof() {
fn test_atof_errors() {
if x := strconv.atof64('') {
- eprintln('> x: $x')
+ eprintln('> x: ${x}')
assert false // strconv.atof64 should have failed
} else {
assert err.str() == 'expected a number found an empty string'
}
if x := strconv.atof64('####') {
- eprintln('> x: $x')
+ eprintln('> x: ${x}')
assert false // strconv.atof64 should have failed
} else {
assert err.str() == 'not a number'
diff --git a/vlib/strconv/atoi.v b/vlib/strconv/atoi.v
index 345ec87235..3dfe47949b 100644
--- a/vlib/strconv/atoi.v
+++ b/vlib/strconv/atoi.v
@@ -24,10 +24,10 @@ pub fn common_parse_uint(s string, _base int, _bit_size int, error_on_non_digit
// TODO: error_on_non_digit and error_on_high_digit have no difference
if err != 0 && (error_on_non_digit || error_on_high_digit) {
match err {
- -1 { return error('common_parse_uint: wrong base $_base for $s') }
- -2 { return error('common_parse_uint: wrong bit size $_bit_size for $s') }
- -3 { return error('common_parse_uint: integer overflow $s') }
- else { return error('common_parse_uint: syntax error $s') }
+ -1 { return error('common_parse_uint: wrong base ${_base} for ${s}') }
+ -2 { return error('common_parse_uint: wrong bit size ${_bit_size} for ${s}') }
+ -3 { return error('common_parse_uint: integer overflow ${s}') }
+ else { return error('common_parse_uint: syntax error ${s}') }
}
}
return result
@@ -196,7 +196,7 @@ pub fn atoi(s string) !int {
start_idx++
if s.len - start_idx < 1 {
// return 0, &NumError{fnAtoi, s0, ErrSyntax}
- return error('strconv.atoi: parsing "$s": invalid syntax')
+ return error('strconv.atoi: parsing "${s}": invalid syntax')
}
}
mut n := 0
@@ -204,7 +204,7 @@ pub fn atoi(s string) !int {
ch := s[i] - `0`
if ch > 9 {
// return 0, &NumError{fnAtoi, s0, ErrSyntax}
- return error('strconv.atoi: parsing "$s": invalid syntax')
+ return error('strconv.atoi: parsing "${s}": invalid syntax')
}
n = n * 10 + int(ch)
}
diff --git a/vlib/strconv/format.md b/vlib/strconv/format.md
index e8701f2669..2ef6116a1d 100644
--- a/vlib/strconv/format.md
+++ b/vlib/strconv/format.md
@@ -209,7 +209,7 @@ mut x := 0
sc8 := '[%20g][%20G]|'
for x < 12 {
temp_s := strconv.v_sprintf(sc8, ft, ft)
- println('$temp_s\n')
+ println('${temp_s}\n')
ft = ft * 10.0
x++
}
diff --git a/vlib/strconv/number_to_base.c.v b/vlib/strconv/number_to_base.c.v
index 69409f7e21..46171ef444 100644
--- a/vlib/strconv/number_to_base.c.v
+++ b/vlib/strconv/number_to_base.c.v
@@ -8,7 +8,7 @@ const base_digits = '0123456789abcdefghijklmnopqrstuvwxyz'
pub fn format_int(n i64, radix int) string {
unsafe {
if radix < 2 || radix > 36 {
- panic('invalid radix: $radix . It should be => 2 and <= 36')
+ panic('invalid radix: ${radix} . It should be => 2 and <= 36')
}
if n == 0 {
return '0'
@@ -45,7 +45,7 @@ pub fn format_int(n i64, radix int) string {
pub fn format_uint(n u64, radix int) string {
unsafe {
if radix < 2 || radix > 36 {
- panic('invalid radix: $radix . It should be => 2 and <= 36')
+ panic('invalid radix: ${radix} . It should be => 2 and <= 36')
}
if n == 0 {
return '0'
diff --git a/vlib/strconv/vprintf.c.v b/vlib/strconv/vprintf.c.v
index 134dbf9e2e..382d44db98 100644
--- a/vlib/strconv/vprintf.c.v
+++ b/vlib/strconv/vprintf.c.v
@@ -538,7 +538,7 @@ pub fn v_sprintf(str string, pt ...voidptr) string {
}
if p_index != pt.len {
- panic('$p_index % conversion specifiers, but given $pt.len args')
+ panic('${p_index} % conversion specifiers, but given ${pt.len} args')
}
return res.str()
@@ -547,7 +547,7 @@ pub fn v_sprintf(str string, pt ...voidptr) string {
[inline]
fn v_sprintf_panic(idx int, len int) {
if idx >= len {
- panic('${idx + 1} % conversion specifiers, but given only $len args')
+ panic('${idx + 1} % conversion specifiers, but given only ${len} args')
}
}
diff --git a/vlib/strings/builder_test.js.v b/vlib/strings/builder_test.js.v
index dc25577e44..48bcc31109 100644
--- a/vlib/strings/builder_test.js.v
+++ b/vlib/strings/builder_test.js.v
@@ -23,14 +23,14 @@ fn test_sb() {
sb = strings.new_builder(10)
x := 10
y := MyInt(20)
- sb.writeln('x = $x y = $y')
+ sb.writeln('x = ${x} y = ${y}')
res := sb.str()
assert res[res.len - 1] == `\n`
- println('"$res"')
+ println('"${res}"')
assert res.trim_space() == 'x = 10 y = 20'
//
sb = strings.new_builder(10)
- sb.write_string('x = $x y = $y')
+ sb.write_string('x = ${x} y = ${y}')
assert sb.str() == 'x = 10 y = 20'
//$if !windows {
sb = strings.new_builder(10)
diff --git a/vlib/strings/builder_test.v b/vlib/strings/builder_test.v
index 14614f3f8c..77e6e8a6a5 100644
--- a/vlib/strings/builder_test.v
+++ b/vlib/strings/builder_test.v
@@ -23,14 +23,14 @@ fn test_sb() {
sb = strings.new_builder(10)
x := 10
y := MyInt(20)
- sb.writeln('x = $x y = $y')
+ sb.writeln('x = ${x} y = ${y}')
res := sb.str()
assert res[res.len - 1] == `\n`
- println('"$res"')
+ println('"${res}"')
assert res.trim_space() == 'x = 10 y = 20'
//
sb = strings.new_builder(10)
- sb.write_string('x = $x y = $y')
+ sb.write_string('x = ${x} y = ${y}')
assert sb.str() == 'x = 10 y = 20'
//$if !windows {
sb = strings.new_builder(10)
diff --git a/vlib/strings/strings_test.v b/vlib/strings/strings_test.v
index 579d99b28e..e1dfe63a85 100644
--- a/vlib/strings/strings_test.v
+++ b/vlib/strings/strings_test.v
@@ -79,19 +79,19 @@ fn test_find_between_pair_family() {
for i, tstr in test_rune_and_byte {
e1 := strings.find_between_pair_rune(tstr, `[`, `]`)
e2 := expected_rune_and_byte_outputs[i]
- assert '$e1' == '$e2'
+ assert '${e1}' == '${e2}'
}
for i, tstr in test_rune_and_byte {
e1 := strings.find_between_pair_u8(tstr, `[`, `]`)
e2 := expected_rune_and_byte_outputs[i]
- assert '$e1' == '$e2'
+ assert '${e1}' == '${e2}'
}
for i, tstr in test_strings {
e1 := strings.find_between_pair_string(tstr, '/*', '*/')
e2 := expected_string_outputs[i]
- assert '$e1' == '$e2'
+ assert '${e1}' == '${e2}'
}
}
diff --git a/vlib/sync/bench/channel_bench_v.v b/vlib/sync/bench/channel_bench_v.v
index 96ea2d6447..6e918f60f4 100644
--- a/vlib/sync/bench/channel_bench_v.v
+++ b/vlib/sync/bench/channel_bench_v.v
@@ -58,9 +58,9 @@ fn main() {
}
elapsed := stopwatch.elapsed()
rate := f64(nobj) / elapsed * time.microsecond
- println('$nobj objects in ${f64(elapsed) / time.second} s (${rate:.2f} objs/µs)')
+ println('${nobj} objects in ${f64(elapsed) / time.second} s (${rate:.2f} objs/µs)')
// use sum formula by Gauß to calculate the expected result
expected_sum := i64(nobj) * (nobj - 1) / 2
- println('got: $sum, expected: $expected_sum')
+ println('got: ${sum}, expected: ${expected_sum}')
assert sum == expected_sum
}
diff --git a/vlib/sync/bench/many_writers_and_receivers_on_1_channel.v b/vlib/sync/bench/many_writers_and_receivers_on_1_channel.v
index 529c0c5002..02a10d645d 100644
--- a/vlib/sync/bench/many_writers_and_receivers_on_1_channel.v
+++ b/vlib/sync/bench/many_writers_and_receivers_on_1_channel.v
@@ -35,7 +35,7 @@ mut:
}
fn do_rec(ch chan int, id int, mut ctx Context) {
- eprintln('start of do_rec id: $id')
+ eprintln('start of do_rec id: ${id}')
mut timer_sw_x := time.new_stopwatch()
mut tmp := int(0)
mut i := int(0)
@@ -68,7 +68,7 @@ fn do_rec(ch chan int, id int, mut ctx Context) {
}
fn do_send(ch chan int, id int, mut ctx Context) {
- eprintln('start of do_send id: $id')
+ eprintln('start of do_send id: ${id}')
mut timer_sw_x := time.new_stopwatch()
n_iters := ctx.n_iters
base := n_iters * id // sender events can not overlap
@@ -100,12 +100,12 @@ fn main() {
n_readers := cmdline.option(args, '-readers', '1').int()
n_writers := cmdline.option(args, '-writers', '4').int()
chan_cap := cmdline.option(args, '-chan_cap', '100').int()
- eprintln('> n_iters, $n_iters, n_writers, $n_writers, n_readers, $n_readers, chan_cap, $chan_cap')
+ eprintln('> n_iters, ${n_iters}, n_writers, ${n_writers}, n_readers, ${n_readers}, chan_cap, ${chan_cap}')
//
ch := chan int{cap: chan_cap}
max_number_of_pushes := n_writers * (n_iters + 2)
max_number_of_pops := max_number_of_pushes * n_readers
- eprintln('> max_number_of_pushes, $max_number_of_pushes, max_number_of_pops (per receiver), $max_number_of_pops')
+ eprintln('> max_number_of_pushes, ${max_number_of_pushes}, max_number_of_pops (per receiver), ${max_number_of_pops}')
mut ctx := &Context{
n_iters: n_iters
n_readers: n_readers
diff --git a/vlib/sync/channel_polling_test.v b/vlib/sync/channel_polling_test.v
index a7bb24717f..1f56d2ffac 100644
--- a/vlib/sync/channel_polling_test.v
+++ b/vlib/sync/channel_polling_test.v
@@ -47,10 +47,10 @@ fn test_channel_polling() {
mut sum := i64(0)
for _ in 0 .. nrec {
sum += <-resch
- println('> running sum: $sum')
+ println('> running sum: ${sum}')
}
// use sum formula by Gauß to calculate the expected result
expected_sum := i64(nobj) * (nobj - 1) / 2
- println('expected sum: $expected_sum | sum: $sum')
+ println('expected sum: ${expected_sum} | sum: ${sum}')
assert sum == expected_sum
}
diff --git a/vlib/sync/channel_push_or_1_test.v b/vlib/sync/channel_push_or_1_test.v
index 1ad57d6128..4f50115579 100644
--- a/vlib/sync/channel_push_or_1_test.v
+++ b/vlib/sync/channel_push_or_1_test.v
@@ -43,7 +43,7 @@ fn g(ch chan int, res chan int) {
j++
}
- println('done $j')
+ println('done ${j}')
res <- j
}
diff --git a/vlib/sync/channel_select_test.v b/vlib/sync/channel_select_test.v
index 65a030fa7c..c533c468dd 100644
--- a/vlib/sync/channel_select_test.v
+++ b/vlib/sync/channel_select_test.v
@@ -81,7 +81,7 @@ fn test_select() {
sum += rb
}
else {
- println('got $idx (timeout)')
+ println('got ${idx} (timeout)')
}
}
}
diff --git a/vlib/sync/channels.c.v b/vlib/sync/channels.c.v
index 32e6971bd1..269df09f4b 100644
--- a/vlib/sync/channels.c.v
+++ b/vlib/sync/channels.c.v
@@ -116,7 +116,7 @@ fn new_channel_st_noscan(n u32, st u32) &Channel {
}
pub fn (ch &Channel) auto_str(typename string) string {
- return 'chan $typename{cap: $ch.cap, closed: $ch.closed}'
+ return 'chan ${typename}{cap: ${ch.cap}, closed: ${ch.closed}}'
}
pub fn (mut ch Channel) close() {
diff --git a/vlib/sync/pool/README.md b/vlib/sync/pool/README.md
index 4853a97cf2..c1c64fa198 100644
--- a/vlib/sync/pool/README.md
+++ b/vlib/sync/pool/README.md
@@ -18,7 +18,7 @@ pub struct SResult {
fn sprocess(mut pp pool.PoolProcessor, idx int, wid int) &SResult {
item := pp.get_item(idx)
- println('idx: $idx, wid: $wid, item: ' + item)
+ println('idx: ${idx}, wid: ${wid}, item: ' + item)
return &SResult{item.reverse()}
}
@@ -27,7 +27,7 @@ fn main() {
pp.work_on_items(['1abc', '2abc', '3abc', '4abc', '5abc', '6abc', '7abc'])
// optionally, you can iterate over the results too:
for x in pp.get_results() {
- println('result: $x.s')
+ println('result: ${x.s}')
}
}
```
diff --git a/vlib/sync/pool/pool_test.v b/vlib/sync/pool/pool_test.v
index 08a5b27dd3..338236cb5e 100644
--- a/vlib/sync/pool/pool_test.v
+++ b/vlib/sync/pool/pool_test.v
@@ -11,14 +11,14 @@ pub struct IResult {
fn worker_s(mut p pool.PoolProcessor, idx int, worker_id int) &SResult {
item := p.get_item(idx)
- println('worker_s worker_id: $worker_id | idx: $idx | item: $item')
+ println('worker_s worker_id: ${worker_id} | idx: ${idx} | item: ${item}')
time.sleep(3 * time.millisecond)
- return &SResult{'$item $item'}
+ return &SResult{'${item} ${item}'}
}
fn worker_i(mut p pool.PoolProcessor, idx int, worker_id int) &IResult {
item := p.get_item(idx)
- println('worker_i worker_id: $worker_id | idx: $idx | item: $item')
+ println('worker_i worker_id: ${worker_id} | idx: ${idx} | item: ${item}')
time.sleep(5 * time.millisecond)
return &IResult{item * 1000}
}
diff --git a/vlib/sync/select_close_test.v b/vlib/sync/select_close_test.v
index 083340162e..505b8c2569 100644
--- a/vlib/sync/select_close_test.v
+++ b/vlib/sync/select_close_test.v
@@ -76,7 +76,7 @@ fn test_select() {
assert j == 1100
}
else {
- println('got $idx (timeout)')
+ println('got ${idx} (timeout)')
assert false
}
}
diff --git a/vlib/sync/thread_test.v b/vlib/sync/thread_test.v
index 913cf2f561..8ec702144a 100644
--- a/vlib/sync/thread_test.v
+++ b/vlib/sync/thread_test.v
@@ -2,18 +2,18 @@ import sync
fn simple_thread() u64 {
tid := sync.thread_id()
- eprintln('simple_thread thread_id: $tid.hex()')
+ eprintln('simple_thread thread_id: ${tid.hex()}')
return tid
}
fn test_sync_thread_id() {
mtid := sync.thread_id()
- eprintln('main thread_id: $sync.thread_id().hex()')
+ eprintln('main thread_id: ${sync.thread_id().hex()}')
x := spawn simple_thread()
y := spawn simple_thread()
xtid := x.wait()
ytid := y.wait()
- eprintln('main thread_id: $sync.thread_id().hex()')
+ eprintln('main thread_id: ${sync.thread_id().hex()}')
dump(xtid.hex())
dump(ytid.hex())
assert mtid != xtid
diff --git a/vlib/szip/szip.v b/vlib/szip/szip.v
index a057082b4b..2d0a32c5ba 100644
--- a/vlib/szip/szip.v
+++ b/vlib/szip/szip.v
@@ -126,7 +126,7 @@ pub fn (mut zentry Zip) open_entry(name string) ! {
pub fn (mut z Zip) open_entry_by_index(index int) ! {
res := C.zip_entry_openbyindex(z, index)
if res == -1 {
- return error('szip: cannot open archive entry at index $index')
+ return error('szip: cannot open archive entry at index ${index}')
}
}
diff --git a/vlib/term/colors.v b/vlib/term/colors.v
index 576ba0b00a..33110bfda4 100644
--- a/vlib/term/colors.v
+++ b/vlib/term/colors.v
@@ -4,11 +4,11 @@
module term
pub fn format(msg string, open string, close string) string {
- return '\x1b[${open}m$msg\x1b[${close}m'
+ return '\x1b[${open}m${msg}\x1b[${close}m'
}
pub fn format_rgb(r int, g int, b int, msg string, open string, close string) string {
- return '\x1b[$open;2;$r;$g;${b}m$msg\x1b[${close}m'
+ return '\x1b[${open};2;${r};${g};${b}m${msg}\x1b[${close}m'
}
pub fn rgb(r int, g int, b int, msg string) string {
@@ -194,5 +194,5 @@ pub fn bright_bg_white(msg string) string {
// highlight_command highlights the command with an on-brand background
// to make CLI commands immediately recognizable.
pub fn highlight_command(command string) string {
- return bright_white(bg_cyan(' $command '))
+ return bright_white(bg_cyan(' ${command} '))
}
diff --git a/vlib/term/control.v b/vlib/term/control.v
index 35760e947d..2662ddcc1d 100644
--- a/vlib/term/control.v
+++ b/vlib/term/control.v
@@ -15,7 +15,7 @@ module term
// x is the x coordinate
// y is the y coordinate
pub fn set_cursor_position(c Coord) {
- print('\x1b[$c.y;$c.x' + 'H')
+ print('\x1b[${c.y};${c.x}' + 'H')
flush_stdout()
}
@@ -25,7 +25,7 @@ pub fn set_cursor_position(c Coord) {
// direction: C is forward / East
// direction: D is backward / West
pub fn move(n int, direction string) {
- print('\x1b[$n$direction')
+ print('\x1b[${n}${direction}')
flush_stdout()
}
diff --git a/vlib/term/term.v b/vlib/term/term.v
index 47ff7cfc3e..6b1296829f 100644
--- a/vlib/term/term.v
+++ b/vlib/term/term.v
@@ -40,7 +40,7 @@ pub fn failed(s string) string {
// If colors are not allowed, returns a given string.
pub fn ok_message(s string) string {
if can_show_color_on_stdout() {
- return green(' $s ')
+ return green(' ${s} ')
}
return s
}
@@ -48,14 +48,14 @@ pub fn ok_message(s string) string {
// fail_message returns a colored string with red color.
// If colors are not allowed, returns a given string.
pub fn fail_message(s string) string {
- return failed(' $s ')
+ return failed(' ${s} ')
}
// warn_message returns a colored string with yellow color.
// If colors are not allowed, returns a given string.
pub fn warn_message(s string) string {
if can_show_color_on_stdout() {
- return bright_yellow(' $s ')
+ return bright_yellow(' ${s} ')
}
return s
}
@@ -148,7 +148,7 @@ pub fn header_left(text string, divider string) string {
hstart := relement.repeat(4)[0..4]
remaining_cols := imax(0, (cols - (hstart.len + 1 + plain_text.len + 1)))
hend := relement.repeat((remaining_cols + 1) / relement.len)[0..remaining_cols]
- return '$hstart $text $hend'
+ return '${hstart} ${text} ${hend}'
}
// header returns a horizontal divider line with a centered text in the middle.
diff --git a/vlib/term/term_test.v b/vlib/term/term_test.v
index bf003fcee8..db782787e0 100644
--- a/vlib/term/term_test.v
+++ b/vlib/term/term_test.v
@@ -76,9 +76,9 @@ fn test_get_cursor_position() {
cursor_position_3 := term.get_cursor_position()!
//
term.set_cursor_position(original_position)
- eprintln('original_position: $original_position')
- eprintln('cursor_position_2: $cursor_position_2')
- eprintln('cursor_position_3: $cursor_position_3')
+ eprintln('original_position: ${original_position}')
+ eprintln('cursor_position_2: ${cursor_position_2}')
+ eprintln('cursor_position_3: ${cursor_position_3}')
// 0,0 is returned on dumb terminals
if cursor_position_2.x == 0 && cursor_position_2.y == 0 {
return
diff --git a/vlib/term/ui/input_windows.c.v b/vlib/term/ui/input_windows.c.v
index b6c2a5dbdf..da4196ad1e 100644
--- a/vlib/term/ui/input_windows.c.v
+++ b/vlib/term/ui/input_windows.c.v
@@ -75,7 +75,7 @@ pub fn init(cfg Config) &Context {
}
if ctx.cfg.window_title != '' {
- print('\x1b]0;$ctx.cfg.window_title\x07')
+ print('\x1b]0;${ctx.cfg.window_title}\x07')
flush_stdout()
}
@@ -293,7 +293,7 @@ fn (mut ctx Context) parse_events() {
}
w := sb.srWindow.Right - sb.srWindow.Left + 1
h := sb.srWindow.Bottom - sb.srWindow.Top + 1
- utf8 := '($ctx.window_width, $ctx.window_height) -> ($w, $h)'
+ utf8 := '(${ctx.window_width}, ${ctx.window_height}) -> (${w}, ${h})'
if w != ctx.window_width || h != ctx.window_height {
ctx.window_width, ctx.window_height = w, h
mut event := &Event{
diff --git a/vlib/term/ui/termios_nix.c.v b/vlib/term/ui/termios_nix.c.v
index 4e9768f40f..7421d62cfe 100644
--- a/vlib/term/ui/termios_nix.c.v
+++ b/vlib/term/ui/termios_nix.c.v
@@ -78,7 +78,7 @@ fn (mut ctx Context) termios_setup() ! {
}
if ctx.cfg.window_title != '' {
- print('\x1b]0;$ctx.cfg.window_title\x07')
+ print('\x1b]0;${ctx.cfg.window_title}\x07')
flush_stdout()
}
@@ -90,7 +90,7 @@ fn (mut ctx Context) termios_setup() ! {
C.tcsetattr(C.STDIN_FILENO, C.TCSAFLUSH, &termios)
// feature-test the SU spec
sx, sy := get_cursor_position()
- print('$bsu$esu')
+ print('${bsu}${esu}')
flush_stdout()
ex, ey := get_cursor_position()
if sx == ex && sy == ey {
diff --git a/vlib/term/ui/ui.v b/vlib/term/ui/ui.v
index bfe8aa8080..691de1b8cf 100644
--- a/vlib/term/ui/ui.v
+++ b/vlib/term/ui/ui.v
@@ -13,7 +13,7 @@ pub:
}
pub fn (c Color) hex() string {
- return '#$c.r.hex()$c.g.hex()$c.b.hex()'
+ return '#${c.r.hex()}${c.g.hex()}${c.b.hex()}'
}
// Synchronized Updates spec, designed to avoid tearing during renders
@@ -54,7 +54,7 @@ pub fn (mut ctx Context) bold() {
// set_cursor_position positions the cusor at the given coordinates `x`,`y`.
[inline]
pub fn (mut ctx Context) set_cursor_position(x int, y int) {
- ctx.write('\x1b[$y;${x}H')
+ ctx.write('\x1b[${y};${x}H')
}
// show_cursor will make the cursor appear if it is not already visible
@@ -115,7 +115,7 @@ pub fn (mut ctx Context) clear() {
// set_window_title sets the string `s` as the window title.
[inline]
pub fn (mut ctx Context) set_window_title(s string) {
- print('\x1b]0;$s\x07')
+ print('\x1b]0;${s}\x07')
flush_stdout()
}
diff --git a/vlib/time/README.md b/vlib/time/README.md
index 22f1d01619..39f4375d4a 100644
--- a/vlib/time/README.md
+++ b/vlib/time/README.md
@@ -46,7 +46,7 @@ You can also parse strings to produce time.Time values,
import time
s := '2018-01-27 12:48:34'
-t := time.parse(s) or { panic('failing format: $s | err: $err') }
+t := time.parse(s) or { panic('failing format: ${s} | err: ${err}') }
println(t)
println(t.unix)
```
@@ -72,6 +72,6 @@ fn do_something() {
fn main() {
sw := time.new_stopwatch()
do_something()
- println('Note: do_something() took: $sw.elapsed().milliseconds() ms')
+ println('Note: do_something() took: ${sw.elapsed().milliseconds()} ms')
}
-```
\ No newline at end of file
+```
diff --git a/vlib/time/chrono_test.v b/vlib/time/chrono_test.v
index a6fc0f3f35..89dd4bd2a7 100644
--- a/vlib/time/chrono_test.v
+++ b/vlib/time/chrono_test.v
@@ -3,7 +3,7 @@ module time
fn test_days_from_unix_epoch() {
s := '2000-05-10 22:11:03'
time_test := parse(s) or {
- eprintln('> failing format: $s | err: $err')
+ eprintln('> failing format: ${s} | err: ${err}')
assert false
return
}
diff --git a/vlib/time/format.v b/vlib/time/format.v
index aca3a7a50d..4950c5624d 100644
--- a/vlib/time/format.v
+++ b/vlib/time/format.v
@@ -271,10 +271,10 @@ pub fn (t Time) custom_format(s string) string {
'Z' {
mut hours := offset() / seconds_per_hour
if hours >= 0 {
- sb.write_string('+$hours')
+ sb.write_string('+${hours}')
} else {
hours = -hours
- sb.write_string('-$hours')
+ sb.write_string('-${hours}')
}
}
'ZZ' {
@@ -367,13 +367,13 @@ pub fn (t Time) get_fmt_time_str(fmt_time FormatTime) string {
t.hour
}
return match fmt_time {
- .hhmm12 { '$hour_:${t.minute:02d} $tp' }
+ .hhmm12 { '${hour_}:${t.minute:02d} ${tp}' }
.hhmm24 { '${t.hour:02d}:${t.minute:02d}' }
- .hhmmss12 { '$hour_:${t.minute:02d}:${t.second:02d} $tp' }
+ .hhmmss12 { '${hour_}:${t.minute:02d}:${t.second:02d} ${tp}' }
.hhmmss24 { '${t.hour:02d}:${t.minute:02d}:${t.second:02d}' }
.hhmmss24_milli { '${t.hour:02d}:${t.minute:02d}:${t.second:02d}.${(t.microsecond / 1000):03d}' }
.hhmmss24_micro { '${t.hour:02d}:${t.minute:02d}:${t.second:02d}.${t.microsecond:06d}' }
- else { 'unknown enumeration $fmt_time' }
+ else { 'unknown enumeration ${fmt_time}' }
}
}
@@ -386,17 +386,17 @@ pub fn (t Time) get_fmt_date_str(fmt_dlmtr FormatDelimiter, fmt_date FormatDate)
month := t.smonth()
year := '${(t.year % 100):02d}'
mut res := match fmt_date {
- .ddmmyy { '${t.day:02d}|${t.month:02d}|$year' }
+ .ddmmyy { '${t.day:02d}|${t.month:02d}|${year}' }
.ddmmyyyy { '${t.day:02d}|${t.month:02d}|${t.year:04d}' }
- .mmddyy { '${t.month:02d}|${t.day:02d}|$year' }
+ .mmddyy { '${t.month:02d}|${t.day:02d}|${year}' }
.mmddyyyy { '${t.month:02d}|${t.day:02d}|${t.year:04d}' }
- .mmmd { '$month|$t.day' }
- .mmmdd { '$month|${t.day:02d}' }
- .mmmddyy { '$month|${t.day:02d}|$year' }
- .mmmddyyyy { '$month|${t.day:02d}|${t.year:04d}' }
+ .mmmd { '${month}|${t.day}' }
+ .mmmdd { '${month}|${t.day:02d}' }
+ .mmmddyy { '${month}|${t.day:02d}|${year}' }
+ .mmmddyyyy { '${month}|${t.day:02d}|${t.year:04d}' }
.yyyymmdd { '${t.year:04d}|${t.month:02d}|${t.day:02d}' }
- .yymmdd { '$year|${t.month:02d}|${t.day:02d}' }
- else { 'unknown enumeration $fmt_date' }
+ .yymmdd { '${year}|${t.month:02d}|${t.day:02d}' }
+ else { 'unknown enumeration ${fmt_date}' }
}
del := match fmt_dlmtr {
.dot { '.' }
@@ -424,7 +424,7 @@ pub fn (t Time) get_fmt_str(fmt_dlmtr FormatDelimiter, fmt_time FormatTime, fmt_
if fmt_time != .no_time {
dstr := t.get_fmt_date_str(fmt_dlmtr, fmt_date)
tstr := t.get_fmt_time_str(fmt_time)
- return '$dstr $tstr'
+ return '${dstr} ${tstr}'
} else {
return t.get_fmt_date_str(fmt_dlmtr, fmt_date)
}
@@ -435,7 +435,7 @@ pub fn (t Time) get_fmt_str(fmt_dlmtr FormatDelimiter, fmt_time FormatTime, fmt_
pub fn (t Time) utc_string() string {
day_str := t.weekday_str()
month_str := t.smonth()
- utc_string := '$day_str, $t.day $month_str $t.year ${t.hour:02d}:${t.minute:02d}:${t.second:02d} UTC'
+ utc_string := '${day_str}, ${t.day} ${month_str} ${t.year} ${t.hour:02d}:${t.minute:02d}:${t.second:02d} UTC'
return utc_string
}
diff --git a/vlib/time/parse.v b/vlib/time/parse.v
index d1e6840f81..466258f472 100644
--- a/vlib/time/parse.v
+++ b/vlib/time/parse.v
@@ -11,7 +11,7 @@ pub struct TimeParseError {
// msg implements the `IError.msg()` method for `TimeParseError`.
pub fn (err TimeParseError) msg() string {
- return 'Invalid time format code: $err.code'
+ return 'Invalid time format code: ${err.code}'
}
fn error_invalid_time(code int) IError {
diff --git a/vlib/time/parse_test.v b/vlib/time/parse_test.v
index f25963d1e2..24fd647d65 100644
--- a/vlib/time/parse_test.v
+++ b/vlib/time/parse_test.v
@@ -3,7 +3,7 @@ import time
fn test_parse() {
s := '2018-01-27 12:48:34'
t := time.parse(s) or {
- eprintln('> failing format: $s | err: $err')
+ eprintln('> failing format: ${s} | err: ${err}')
assert false
return
}
@@ -24,7 +24,7 @@ fn test_parse_invalid() {
fn test_parse_rfc2822() {
s1 := 'Thu, 12 Dec 2019 06:07:45 GMT'
t1 := time.parse_rfc2822(s1) or {
- eprintln('> failing format: $s1 | err: $err')
+ eprintln('> failing format: ${s1} | err: ${err}')
assert false
return
}
@@ -33,7 +33,7 @@ fn test_parse_rfc2822() {
assert t1.unix == 1576130865
s2 := 'Thu 12 Dec 2019 06:07:45 +0800'
t2 := time.parse_rfc2822(s2) or {
- eprintln('> failing format: $s2 | err: $err')
+ eprintln('> failing format: ${s2} | err: ${err}')
assert false
return
}
@@ -70,7 +70,7 @@ fn test_parse_iso8601() {
]
for i, format in formats {
t := time.parse_iso8601(format) or {
- eprintln('>>> failing format: $format | err: $err')
+ eprintln('>>> failing format: ${format} | err: ${err}')
assert false
continue
}
@@ -94,7 +94,7 @@ fn test_parse_iso8601() {
fn test_parse_iso8601_local() {
format := '2020-06-05T15:38:06.015959'
t := time.parse_iso8601(format) or {
- eprintln('> failing format: $format | err: $err')
+ eprintln('> failing format: ${format} | err: ${err}')
assert false
return
}
@@ -132,7 +132,7 @@ fn test_parse_iso8601_invalid() {
fn test_parse_iso8601_date_only() {
format := '2020-06-05'
t := time.parse_iso8601(format) or {
- eprintln('> failing format: $format | err: $err')
+ eprintln('> failing format: ${format} | err: ${err}')
assert false
return
}
@@ -147,7 +147,7 @@ fn test_parse_iso8601_date_only() {
fn check_invalid_date(s string) {
if date := time.parse(s) {
- eprintln('invalid date: "$s" => "$date"')
+ eprintln('invalid date: "${s}" => "${date}"')
assert false
}
assert true
@@ -176,7 +176,7 @@ fn test_parse_rfc3339() {
for pair in pairs {
input, expected := pair[0], pair[1]
res := time.parse_rfc3339(input) or {
- eprintln('>>> failing input: $input | err: $err')
+ eprintln('>>> failing input: ${input} | err: ${err}')
assert false
return
}
diff --git a/vlib/time/time.v b/vlib/time/time.v
index 744dfc3296..2157c362ff 100644
--- a/vlib/time/time.v
+++ b/vlib/time/time.v
@@ -162,35 +162,35 @@ pub fn (t Time) relative() string {
if secs < time.seconds_per_hour {
m := secs / time.seconds_per_minute
if m == 1 {
- return '${prefix}1 minute$suffix'
+ return '${prefix}1 minute${suffix}'
}
- return '$prefix$m minutes$suffix'
+ return '${prefix}${m} minutes${suffix}'
}
if secs < time.seconds_per_hour * 24 {
h := secs / time.seconds_per_hour
if h == 1 {
- return '${prefix}1 hour$suffix'
+ return '${prefix}1 hour${suffix}'
}
- return '$prefix$h hours$suffix'
+ return '${prefix}${h} hours${suffix}'
}
if secs < time.seconds_per_hour * 24 * 7 {
d := secs / time.seconds_per_hour / 24
if d == 1 {
- return '${prefix}1 day$suffix'
+ return '${prefix}1 day${suffix}'
}
- return '$prefix$d days$suffix'
+ return '${prefix}${d} days${suffix}'
}
if secs < time.seconds_per_hour * 24 * time.days_in_year {
if prefix == 'in ' {
- return 'on $t.md()'
+ return 'on ${t.md()}'
}
- return 'last $t.md()'
+ return 'last ${t.md()}'
}
y := secs / time.seconds_per_hour / 24 / time.days_in_year
if y == 1 {
- return '${prefix}1 year$suffix'
+ return '${prefix}1 year${suffix}'
}
- return '$prefix$y years$suffix'
+ return '${prefix}${y} years${suffix}'
}
// relative_short returns a string saying how long ago a time occured as follows:
@@ -224,29 +224,29 @@ pub fn (t Time) relative_short() string {
if secs < time.seconds_per_hour {
m := secs / time.seconds_per_minute
if m == 1 {
- return '${prefix}1m$suffix'
+ return '${prefix}1m${suffix}'
}
- return '$prefix${m}m$suffix'
+ return '${prefix}${m}m${suffix}'
}
if secs < time.seconds_per_hour * 24 {
h := secs / time.seconds_per_hour
if h == 1 {
- return '${prefix}1h$suffix'
+ return '${prefix}1h${suffix}'
}
- return '$prefix${h}h$suffix'
+ return '${prefix}${h}h${suffix}'
}
if secs < time.seconds_per_hour * 24 * time.days_in_year {
d := secs / time.seconds_per_hour / 24
if d == 1 {
- return '${prefix}1d$suffix'
+ return '${prefix}1d${suffix}'
}
- return '$prefix${d}d$suffix'
+ return '${prefix}${d}d${suffix}'
}
y := secs / time.seconds_per_hour / 24 / time.days_in_year
if y == 1 {
- return '${prefix}1y$suffix'
+ return '${prefix}1y${suffix}'
}
- return '$prefix${y}y$suffix'
+ return '${prefix}${y}y${suffix}'
}
// day_of_week returns the current day of a given year, month, and day,
@@ -287,7 +287,7 @@ pub fn is_leap_year(year int) bool {
// days_in_month returns a number of days in a given month.
pub fn days_in_month(month int, year int) ?int {
if month > 12 || month < 1 {
- return error('Invalid month: $month')
+ return error('Invalid month: ${month}')
}
extra := if month == 2 && is_leap_year(year) { 1 } else { 0 }
res := time.month_days[month - 1] + extra
@@ -378,10 +378,10 @@ pub fn (d Duration) str() string {
ns := t
if hr > 0 {
- return '$hr:${min:02}:${sec:02}'
+ return '${hr}:${min:02}:${sec:02}'
}
if min > 0 {
- return '$min:${sec:02}.${ms:03}'
+ return '${min}:${sec:02}.${ms:03}'
}
if sec > 0 {
return '${sec}.${ms:03}s'
diff --git a/vlib/time/time_test.v b/vlib/time/time_test.v
index 16ad54d179..2514e7e9ce 100644
--- a/vlib/time/time_test.v
+++ b/vlib/time/time_test.v
@@ -210,18 +210,18 @@ fn test_unix_time() {
t1 := time.utc()
time.sleep(50 * time.millisecond)
t2 := time.utc()
- eprintln('t1: $t1')
- eprintln('t2: $t2')
+ eprintln('t1: ${t1}')
+ eprintln('t2: ${t2}')
ut1 := t1.unix_time()
ut2 := t2.unix_time()
- eprintln('ut1: $ut1')
- eprintln('ut2: $ut2')
+ eprintln('ut1: ${ut1}')
+ eprintln('ut2: ${ut2}')
assert ut2 - ut1 < 2
//
utm1 := t1.unix_time_milli()
utm2 := t2.unix_time_milli()
- eprintln('utm1: $utm1')
- eprintln('utm2: $utm2')
+ eprintln('utm1: ${utm1}')
+ eprintln('utm2: ${utm2}')
assert (utm1 - ut1 * 1000) < 1000
assert (utm2 - ut2 * 1000) < 1000
//
diff --git a/vlib/toml/README.md b/vlib/toml/README.md
index bad47177d1..dc2e166418 100644
--- a/vlib/toml/README.md
+++ b/vlib/toml/README.md
@@ -57,9 +57,9 @@ hosts = [
fn main() {
doc := toml.parse_text(toml_text) or { panic(err) }
title := doc.value('title').string()
- println('title: "$title"')
+ println('title: "${title}"')
ip := doc.value('servers.alpha.ip').string()
- println('Server IP: "$ip"')
+ println('Server IP: "${ip}"')
}
```
diff --git a/vlib/toml/any.v b/vlib/toml/any.v
index f9e55e57d6..b3ab466c98 100644
--- a/vlib/toml/any.v
+++ b/vlib/toml/any.v
@@ -139,7 +139,7 @@ pub fn (a Any) as_map() map[string]Any {
} else if a is []Any {
mut mp := map[string]Any{}
for i, fi in a {
- mp['$i'] = fi
+ mp['${i}'] = fi
}
return mp
}
@@ -223,9 +223,9 @@ pub fn (m map[string]Any) to_toml() string {
for k, v in m {
mut key := k
if key.contains(' ') {
- key = '"$key"'
+ key = '"${key}"'
}
- toml_text += '$key = ' + v.to_toml() + '\n'
+ toml_text += '${key} = ' + v.to_toml() + '\n'
}
toml_text = toml_text.trim_right('\n')
return toml_text
@@ -238,9 +238,9 @@ pub fn (m map[string]Any) to_inline_toml() string {
for k, v in m {
mut key := k
if key.contains(' ') {
- key = '"$key"'
+ key = '"${key}"'
}
- toml_text += ' $key = ' + v.to_toml() + ','
+ toml_text += ' ${key} = ' + v.to_toml() + ','
}
return toml_text + ' }'
}
diff --git a/vlib/toml/ast/ast.v b/vlib/toml/ast/ast.v
index e191e3eed6..30920ba561 100644
--- a/vlib/toml/ast/ast.v
+++ b/vlib/toml/ast/ast.v
@@ -18,8 +18,8 @@ pub mut:
pub fn (r Root) str() string {
mut s := typeof(r).name + '{\n'
- s += ' input: $r.input\n'
- s += ' table: $r.table\n'
+ s += ' input: ${r.input}\n'
+ s += ' table: ${r.table}\n'
s += '}'
return s
}
diff --git a/vlib/toml/ast/types.v b/vlib/toml/ast/types.v
index f87386ec6f..d6c21e0b4d 100644
--- a/vlib/toml/ast/types.v
+++ b/vlib/toml/ast/types.v
@@ -31,7 +31,7 @@ pub type Value = Bool
pub fn (v Value) str() string {
match v {
Quoted, Date, DateTime, Time {
- return '"$v.text"'
+ return '"${v.text}"'
}
Bool, Null, Number {
return v.text
@@ -39,7 +39,7 @@ pub fn (v Value) str() string {
map[string]Value {
mut str := '{'
for key, val in v {
- str += ' "$key": $val,'
+ str += ' "${key}": ${val},'
}
str = str.trim_right(',')
str += ' }'
@@ -48,7 +48,7 @@ pub fn (v Value) str() string {
[]Value {
mut str := '['
for val in v {
- str += ' $val,'
+ str += ' ${val},'
}
str = str.trim_right(',')
str += ' ]'
@@ -76,8 +76,8 @@ pub:
// str returns the `string` representation of the `Comment` type.
pub fn (c Comment) str() string {
mut s := typeof(c).name + '{\n'
- s += ' text: \'$c.text\'\n'
- s += ' pos: $c.pos\n'
+ s += ' text: \'${c.text}\'\n'
+ s += ' pos: ${c.pos}\n'
s += '}'
return s
}
@@ -108,10 +108,10 @@ pub:
// str returns the `string` representation of the `Quoted` type.
pub fn (q Quoted) str() string {
mut str := typeof(q).name + '{\n'
- str += ' text: \'$q.text\'\n'
- str += ' pos: $q.pos\n'
- str += ' is_multiline: $q.is_multiline\n'
- str += ' quote: \'$q.quote\'\n'
+ str += ' text: \'${q.text}\'\n'
+ str += ' pos: ${q.pos}\n'
+ str += ' is_multiline: ${q.is_multiline}\n'
+ str += ' quote: \'${q.quote}\'\n'
str += '}'
return str
}
@@ -128,8 +128,8 @@ pub:
// str returns the `string` representation of the `Bare` type.
pub fn (b Bare) str() string {
mut str := typeof(b).name + '{\n'
- str += ' text: \'$b.text\'\n'
- str += ' pos: $b.pos\n'
+ str += ' text: \'${b.text}\'\n'
+ str += ' pos: ${b.pos}\n'
str += '}'
return str
}
@@ -146,8 +146,8 @@ pub:
// str returns the `string` representation of the `Bool` type.
pub fn (b Bool) str() string {
mut str := typeof(b).name + '{\n'
- str += ' text: \'$b.text\'\n'
- str += ' pos: $b.pos\n'
+ str += ' text: \'${b.text}\'\n'
+ str += ' pos: ${b.pos}\n'
str += '}'
return str
}
@@ -165,8 +165,8 @@ pub mut:
// str returns the `string` representation of the `Number` type.
pub fn (n Number) str() string {
mut str := typeof(n).name + '{\n'
- str += ' text: \'$n.text\'\n'
- str += ' pos: $n.pos\n'
+ str += ' text: \'${n.text}\'\n'
+ str += ' pos: ${n.pos}\n'
str += '}'
return str
}
@@ -203,8 +203,8 @@ pub:
// str returns the `string` representation of the `Date` type.
pub fn (d Date) str() string {
mut str := typeof(d).name + '{\n'
- str += ' text: \'$d.text\'\n'
- str += ' pos: $d.pos\n'
+ str += ' text: \'${d.text}\'\n'
+ str += ' pos: ${d.pos}\n'
str += '}'
return str
}
@@ -221,9 +221,9 @@ pub:
// str returns the `string` representation of the `Time` type.
pub fn (t Time) str() string {
mut str := typeof(t).name + '{\n'
- str += ' text: \'$t.text\'\n'
- str += ' offset: \'$t.offset\'\n'
- str += ' pos: $t.pos\n'
+ str += ' text: \'${t.text}\'\n'
+ str += ' offset: \'${t.offset}\'\n'
+ str += ' pos: ${t.pos}\n'
str += '}'
return str
}
@@ -242,10 +242,10 @@ pub:
// str returns the `string` representation of the `DateTime` type.
pub fn (dt DateTime) str() string {
mut str := typeof(dt).name + '{\n'
- str += ' text: \'$dt.text\'\n'
- str += ' date: \'$dt.date\'\n'
- str += ' time: \'$dt.time\'\n'
- str += ' pos: $dt.pos\n'
+ str += ' text: \'${dt.text}\'\n'
+ str += ' date: \'${dt.date}\'\n'
+ str += ' time: \'${dt.time}\'\n'
+ str += ' pos: ${dt.pos}\n'
str += '}'
return str
}
@@ -259,7 +259,7 @@ pub:
// str returns the `string` representation of the `EOF` type.
pub fn (e EOF) str() string {
mut str := typeof(e).name + '{\n'
- str += ' pos: $e.pos\n'
+ str += ' pos: ${e.pos}\n'
str += '}'
return str
}
diff --git a/vlib/toml/checker/checker.v b/vlib/toml/checker/checker.v
index d5397e730c..ff4d4a92af 100644
--- a/vlib/toml/checker/checker.v
+++ b/vlib/toml/checker/checker.v
@@ -91,11 +91,11 @@ fn (c Checker) check_number(num ast.Number) ! {
if lit.contains('_') {
if lit.starts_with('_') || lit.ends_with('_') {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" can not start or end with `_` in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" can not start or end with `_` in ...${c.excerpt(num.pos)}...')
}
if lit.contains('__') {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" can not have more than one underscore (`_`) in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" can not have more than one underscore (`_`) in ...${c.excerpt(num.pos)}...')
}
}
@@ -114,34 +114,34 @@ fn (c Checker) check_number(num ast.Number) ! {
if hex_bin_oct {
ascii = u8(lit[0]).ascii_str()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" (hex, octal and binary) can not start with `$ascii` in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" (hex, octal and binary) can not start with `${ascii}` in ...${c.excerpt(num.pos)}...')
}
if lit.len > 1 && lit_sans_sign.starts_with('0') && !lit_sans_sign.starts_with('0.') {
ascii = u8(lit_sans_sign[0]).ascii_str()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" can not start with `$ascii` in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" can not start with `${ascii}` in ...${c.excerpt(num.pos)}...')
}
} else {
if !hex_bin_oct {
if !is_float && lit[0] == `0` {
if lit[1] in [`B`, `O`, `X`] {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" only lowercase notation in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" only lowercase notation in ...${c.excerpt(num.pos)}...')
}
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" can not start with a zero in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" can not start with a zero in ...${c.excerpt(num.pos)}...')
}
if is_float && lit[0] == `0` && float_decimal_index > 1 {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" can not start with a zero in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" can not start with a zero in ...${c.excerpt(num.pos)}...')
}
}
}
if has_repeating(lit, [`_`, `.`, `b`, `o`, `x`]) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" can not have $scanner.digit_extras as repeating characters in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" can not have ${scanner.digit_extras} as repeating characters in ...${c.excerpt(num.pos)}...')
}
if hex_bin_oct {
@@ -154,23 +154,23 @@ fn (c Checker) check_number(num ast.Number) ! {
if lit_sans_sign_and_type_prefix.starts_with('_')
|| lit_sans_sign_and_type_prefix.ends_with('_') {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" can not start or end with `_` in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" can not start or end with `_` in ...${c.excerpt(num.pos)}...')
}
if is_bin {
if !c.is_valid_binary_literal(lit_sans_sign_and_type_prefix) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" is not a valid binary number in ...${c.excerpt(num.pos)}...')
+ ' "${lit}" is not a valid binary number in ...${c.excerpt(num.pos)}...')
}
} else if is_oct {
if !c.is_valid_octal_literal(lit_sans_sign_and_type_prefix) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" is not a valid octal number in ...${c.excerpt(num.pos)}...')
+ ' "${lit}" is not a valid octal number in ...${c.excerpt(num.pos)}...')
}
} else {
if !c.is_valid_hex_literal(lit_sans_sign_and_type_prefix) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" is not a valid hexadecimal number in ...${c.excerpt(num.pos)}...')
+ ' "${lit}" is not a valid hexadecimal number in ...${c.excerpt(num.pos)}...')
}
}
}
@@ -179,50 +179,50 @@ fn (c Checker) check_number(num ast.Number) ! {
if lit_lower_case.all_after('e').starts_with('_')
|| lit_lower_case.all_before('e').ends_with('_') {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' the exponent in "$lit" can not start nor end with an underscore in ...${c.excerpt(num.pos)}...')
+ ' the exponent in "${lit}" can not start nor end with an underscore in ...${c.excerpt(num.pos)}...')
}
if lit_lower_case.all_after('e').contains('.') {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" (with exponent) can not have a decimal point in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" (with exponent) can not have a decimal point in ...${c.excerpt(num.pos)}...')
}
if !is_hex && lit_lower_case.count('e') > 1 {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" (with exponent) can only have one exponent in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" (with exponent) can only have one exponent in ...${c.excerpt(num.pos)}...')
}
}
if is_float {
if lit.count('.') > 1 {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" (float) can only have one decimal point in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" (float) can only have one decimal point in ...${c.excerpt(num.pos)}...')
}
last := lit[lit.len - 1]
if last in scanner.digit_extras {
ascii = u8(last).ascii_str()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" (float) can not end with `$ascii` in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" (float) can not end with `${ascii}` in ...${c.excerpt(num.pos)}...')
}
if lit.contains('_.') || lit.contains('._') {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" (float) can not have underscores before or after the decimal point in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" (float) can not have underscores before or after the decimal point in ...${c.excerpt(num.pos)}...')
}
if lit_lower_case.contains('e.') || lit.contains('.e') {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" (float) can not have decimal points on either side of the exponent notation in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" (float) can not have decimal points on either side of the exponent notation in ...${c.excerpt(num.pos)}...')
}
// Check if it contains other chars than the allowed
for r in lit {
if r !in [`0`, `1`, `2`, `3`, `4`, `5`, `6`, `7`, `8`, `9`, `.`, `e`, `E`, `-`, `+`,
`_`] {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" (float) can not contain `${u8(r).ascii_str()}` in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" (float) can not contain `${u8(r).ascii_str()}` in ...${c.excerpt(num.pos)}...')
}
}
} else {
if lit.len > 1 && lit.starts_with('0') && lit[1] !in [`b`, `o`, `x`] {
ascii = u8(lit[0]).ascii_str()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' numbers like "$lit" can not start with `$ascii` in ...${c.excerpt(num.pos)}...')
+ ' numbers like "${lit}" can not start with `${ascii}` in ...${c.excerpt(num.pos)}...')
}
}
}
@@ -273,7 +273,7 @@ fn (c Checker) check_boolean(b ast.Bool) ! {
return
}
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' boolean values like "$lit" can only be `true` or `false` literals, not `$lit` in ...${c.excerpt(b.pos)}...')
+ ' boolean values like "${lit}" can only be `true` or `false` literals, not `${lit}` in ...${c.excerpt(b.pos)}...')
}
// check_date_time returns an error if `dt` is not a valid TOML date-time string (RFC 3339).
@@ -296,7 +296,7 @@ fn (c Checker) check_date_time(dt ast.DateTime) ! {
// Validate the split into date and time parts.
if split.len != 2 {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" contains too many date/time separators in ...${c.excerpt(dt.pos)}...')
+ ' "${lit}" contains too many date/time separators in ...${c.excerpt(dt.pos)}...')
}
// Re-use date and time validation code for detailed testing of each part
c.check_date(ast.Date{
@@ -320,11 +320,11 @@ fn (c Checker) check_date_time(dt ast.DateTime) ! {
// Use V's builtin functionality to validate the string
time.parse_rfc3339(lit) or {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" is not a valid RFC 3339 Date-Time format string "$err". In ...${c.excerpt(dt.pos)}...')
+ ' "${lit}" is not a valid RFC 3339 Date-Time format string "${err}". In ...${c.excerpt(dt.pos)}...')
}
} else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" is not a valid RFC 3339 Date-Time format string in ...${c.excerpt(dt.pos)}...')
+ ' "${lit}" is not a valid RFC 3339 Date-Time format string in ...${c.excerpt(dt.pos)}...')
}
}
@@ -334,27 +334,27 @@ fn (c Checker) check_date(date ast.Date) ! {
parts := lit.split('-')
if parts.len != 3 {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" is not a valid RFC 3339 Date format string in ...${c.excerpt(date.pos)}...')
+ ' "${lit}" is not a valid RFC 3339 Date format string in ...${c.excerpt(date.pos)}...')
}
yyyy := parts[0]
if yyyy.len != 4 {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" does not have a valid RFC 3339 year indication in ...${c.excerpt(date.pos)}...')
+ ' "${lit}" does not have a valid RFC 3339 year indication in ...${c.excerpt(date.pos)}...')
}
mm := parts[1]
if mm.len != 2 {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" does not have a valid RFC 3339 month indication in ...${c.excerpt(date.pos)}...')
+ ' "${lit}" does not have a valid RFC 3339 month indication in ...${c.excerpt(date.pos)}...')
}
dd := parts[2]
if dd.len != 2 {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" does not have a valid RFC 3339 day indication in ...${c.excerpt(date.pos)}...')
+ ' "${lit}" does not have a valid RFC 3339 day indication in ...${c.excerpt(date.pos)}...')
}
// Use V's builtin functionality to validate the string
time.parse_rfc3339(lit) or {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" is not a valid RFC 3339 Date format string "$err". In ...${c.excerpt(date.pos)}...')
+ ' "${lit}" is not a valid RFC 3339 Date format string "${err}". In ...${c.excerpt(date.pos)}...')
}
}
@@ -374,15 +374,15 @@ fn (c Checker) check_time(t ast.Time) ! {
starts_with_zero := hhmmss.starts_with('0')
if !starts_with_zero {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" must be zero prefixed in ...${c.excerpt(t.pos)}...')
+ ' "${lit}" must be zero prefixed in ...${c.excerpt(t.pos)}...')
}
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" is not a valid RFC 3339 Time format string in ...${c.excerpt(t.pos)}...')
+ ' "${lit}" is not a valid RFC 3339 Time format string in ...${c.excerpt(t.pos)}...')
}
// Use V's builtin functionality to validate the time string
time.parse_rfc3339(parts[0]) or {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$lit" is not a valid RFC 3339 Time format string "$err". In ...${c.excerpt(t.pos)}...')
+ ' "${lit}" is not a valid RFC 3339 Time format string "${err}". In ...${c.excerpt(t.pos)}...')
}
}
@@ -393,7 +393,7 @@ pub fn (c Checker) check_quoted(q ast.Quoted) ! {
triple_quote := quote + quote + quote
if q.is_multiline && lit.ends_with(triple_quote) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' string values like "$lit" has unbalanced quote literals `q.quote` in ...${c.excerpt(q.pos)}...')
+ ' string values like "${lit}" has unbalanced quote literals `q.quote` in ...${c.excerpt(q.pos)}...')
}
c.check_quoted_escapes(q)!
c.check_utf8_validity(q)!
@@ -441,7 +441,7 @@ fn (c Checker) check_quoted_escapes(q ast.Quoted) ! {
if !contains_newlines {
st := s.state()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' can not escape whitespaces in multi-line strings (`\\ `) at `$escape` ($st.line_nr,$st.col) in ...${c.excerpt(q.pos)}...')
+ ' can not escape whitespaces in multi-line strings (`\\ `) at `${escape}` (${st.line_nr},${st.col}) in ...${c.excerpt(q.pos)}...')
}
// Rest of line must only be space chars from this point on
for {
@@ -452,7 +452,7 @@ fn (c Checker) check_quoted_escapes(q ast.Quoted) ! {
if !(ch_ == ` ` || ch_ == `\t`) {
st := s.state()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' invalid character `${u8(ch_).ascii_str()}` after `$escape` at ($st.line_nr,$st.col) in ...${c.excerpt(q.pos)}...')
+ ' invalid character `${u8(ch_).ascii_str()}` after `${escape}` at (${st.line_nr},${st.col}) in ...${c.excerpt(q.pos)}...')
}
}
}
@@ -464,7 +464,7 @@ fn (c Checker) check_quoted_escapes(q ast.Quoted) ! {
if next_ch !in checker.allowed_basic_escape_chars {
st := s.state()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' unknown basic string escape character `$next_ch.ascii_str()` in `$escape` ($st.line_nr,$st.col) in ...${c.excerpt(q.pos)}...')
+ ' unknown basic string escape character `${next_ch.ascii_str()}` in `${escape}` (${st.line_nr},${st.col}) in ...${c.excerpt(q.pos)}...')
}
}
// Check Unicode escapes
@@ -477,14 +477,14 @@ fn (c Checker) check_quoted_escapes(q ast.Quoted) ! {
c.check_unicode_escape(s.text[pos..pos + 11]) or {
st := s.state()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' escaped Unicode is invalid. $err.msg().capitalize() ($st.line_nr,$st.col) in ...${c.excerpt(q.pos)}...')
+ ' escaped Unicode is invalid. ${err.msg().capitalize()} (${st.line_nr},${st.col}) in ...${c.excerpt(q.pos)}...')
}
} else {
pos := s.state().pos
c.check_unicode_escape(s.text[pos..]) or {
st := s.state()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' escaped Unicode is invalid. $err.msg().capitalize() ($st.line_nr,$st.col) in ...${c.excerpt(q.pos)}...')
+ ' escaped Unicode is invalid. ${err.msg().capitalize()} (${st.line_nr},${st.col}) in ...${c.excerpt(q.pos)}...')
}
}
}
@@ -497,7 +497,7 @@ fn (c Checker) check_utf8_validity(q ast.Quoted) ! {
lit := q.text
if !utf8.validate_str(lit) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' the string value "$lit" is not valid UTF-8 in ...${c.excerpt(q.pos)}...')
+ ' the string value "${lit}" is not valid UTF-8 in ...${c.excerpt(q.pos)}...')
}
}
@@ -507,16 +507,16 @@ fn (c Checker) check_utf8_validity(q ast.Quoted) ! {
fn validate_utf8_codepoint_string(str string) ! {
int_val := strconv.parse_int(str, 16, 64) or { i64(-1) }
if int_val > checker.utf8_max || int_val < 0 {
- return error('Unicode code point `$str` is outside the valid Unicode scalar value ranges.')
+ return error('Unicode code point `${str}` is outside the valid Unicode scalar value ranges.')
}
// Check if the Unicode value is actually in the valid Unicode scalar value ranges.
// TODO should probably be transferred / implemented in `utf8.validate(...)` also?
if !((int_val >= 0x0000 && int_val <= 0xD7FF) || (int_val >= 0xE000 && int_val <= 0x10FFFF)) {
- return error('Unicode code point `$str` is not a valid Unicode scalar value.')
+ return error('Unicode code point `${str}` is not a valid Unicode scalar value.')
}
bytes := str.bytes()
if !utf8.validate(bytes.data, bytes.len) {
- return error('Unicode code point `$str` is not a valid UTF-8 code point.')
+ return error('Unicode code point `${str}` is not a valid UTF-8 code point.')
}
}
@@ -526,13 +526,13 @@ fn validate_utf8_codepoint_string(str string) ! {
fn (c Checker) check_unicode_escape(esc_unicode string) ! {
if esc_unicode.len < 5 || !esc_unicode.to_lower().starts_with('u') {
// Makes sure the input to this function is actually valid.
- return error('`$esc_unicode` is not a valid escaped Unicode sequence.')
+ return error('`${esc_unicode}` is not a valid escaped Unicode sequence.')
}
is_long_esc_type := esc_unicode.starts_with('U')
mut sequence := esc_unicode[1..]
hex_digits_len := if is_long_esc_type { 8 } else { 4 }
if sequence.len < hex_digits_len {
- return error('Unicode escape sequence `$esc_unicode` should be at least $hex_digits_len in length.')
+ return error('Unicode escape sequence `${esc_unicode}` should be at least ${hex_digits_len} in length.')
}
sequence = sequence[..hex_digits_len]
// TODO not enforced in BurnSushi testsuite??
@@ -563,19 +563,19 @@ pub fn (c Checker) check_comment(comment ast.Comment) ! {
if ch_byte == 0x0D {
st := s.state()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' carrige return character `$ch_byte.hex()` is not allowed in comments ($st.line_nr,$st.col).')
+ ' carriage return character `${ch_byte.hex()}` is not allowed in comments (${st.line_nr},${st.col}).')
}
// Check for control characters (allow TAB)
if util.is_illegal_ascii_control_character(ch_byte) {
st := s.state()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' control character `$ch_byte.hex()` is not allowed ($st.line_nr,$st.col) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(st.pos, 10)}...')
+ ' control character `${ch_byte.hex()}` is not allowed (${st.line_nr},${st.col}) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(st.pos, 10)}...')
}
}
// Check for bad UTF-8 encoding
if !utf8.validate_str(lit) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' comment "$lit" is not valid UTF-8 in ...${c.excerpt(comment.pos)}...')
+ ' comment "${lit}" is not valid UTF-8 in ...${c.excerpt(comment.pos)}...')
}
}
diff --git a/vlib/toml/decoder/decoder.v b/vlib/toml/decoder/decoder.v
index 8d375199e7..465a0f84d3 100644
--- a/vlib/toml/decoder/decoder.v
+++ b/vlib/toml/decoder/decoder.v
@@ -228,7 +228,7 @@ fn decode_unicode_escape(esc_unicode string) !(string, int, int) {
}
i64_val := strconv.parse_int(unicode_point, 16, 0)!
rn := rune(i64_val)
- return '$rn', int(i64_val), sequence_len
+ return '${rn}', int(i64_val), sequence_len
}
// decode_date_time decodes the `dt ast.DateTime`.
diff --git a/vlib/toml/input/input.v b/vlib/toml/input/input.v
index d15f0581a8..d413540b1c 100644
--- a/vlib/toml/input/input.v
+++ b/vlib/toml/input/input.v
@@ -54,7 +54,7 @@ pub fn (c Config) read_input() !string {
if text == '' && os.is_file(c.file_path) {
text = os.read_file(c.file_path) or {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' Could not read "$c.file_path": "$err.msg()"')
+ ' Could not read "${c.file_path}": "${err.msg()}"')
}
}
return text
diff --git a/vlib/toml/parser/parser.v b/vlib/toml/parser/parser.v
index ca0451e0f1..f9a6aea4c1 100644
--- a/vlib/toml/parser/parser.v
+++ b/vlib/toml/parser/parser.v
@@ -161,7 +161,7 @@ fn (mut p Parser) peek(n int) !token.Token {
} else {
mut token := token.Token{}
mut count := n - p.tokens.len
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'buffering $count tokens...')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'buffering ${count} tokens...')
for token.kind != .eof && count != 0 {
token = p.scanner.scan()!
p.tokens << token
@@ -179,7 +179,7 @@ fn (mut p Parser) check(check_token token.Kind) ! {
p.next()!
} else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' expected token "$check_token" but found "$p.tok.kind" in this (excerpt): "...${p.excerpt()}..."')
+ ' expected token "${check_token}" but found "${p.tok.kind}" in this (excerpt): "...${p.excerpt()}..."')
}
}
@@ -191,7 +191,7 @@ fn (mut p Parser) peek_for_correct_line_ending_or_fail() ! {
if peek_tok.kind !in [.cr, .nl, .hash, .eof] {
p.next()! // Forward to the peek_tok
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' unexpected EOL "$p.tok.kind" "$p.tok.lit" expected one of [.cr, .nl, .hash, .eof] at this (excerpt): "...${p.excerpt()}..."')
+ ' unexpected EOL "${p.tok.kind}" "${p.tok.lit}" expected one of [.cr, .nl, .hash, .eof] at this (excerpt): "...${p.excerpt()}..."')
}
}
@@ -202,7 +202,7 @@ fn (mut p Parser) check_one_of(tokens []token.Kind) ! {
p.next()!
} else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' expected one of $tokens but found "$p.tok.kind" in this (excerpt): "...${p.excerpt()}..."')
+ ' expected one of ${tokens} but found "${p.tok.kind}" in this (excerpt): "...${p.excerpt()}..."')
}
}
@@ -211,7 +211,7 @@ fn (mut p Parser) check_one_of(tokens []token.Kind) ! {
// a stream of formatting tokens.
fn (mut p Parser) ignore_while(tokens []token.Kind) {
if p.tok.kind in tokens {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'ignoring "$p.tok.kind" ...')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'ignoring "${p.tok.kind}" ...')
p.next() or { return }
p.ignore_while(tokens)
}
@@ -224,7 +224,7 @@ fn (mut p Parser) ignore_while(tokens []token.Kind) {
// sometimes necessary since not all parser calls forward using the `next()` call.
fn (mut p Parser) ignore_while_peek(tokens []token.Kind) {
for p.peek_tok.kind in tokens {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'eating "$p.tok.kind" ...')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'eating "${p.tok.kind}" ...')
p.next() or { return }
}
}
@@ -256,7 +256,7 @@ fn (mut p Parser) expect(expected_token token.Kind) ! {
return
} else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' expected token "$expected_token" but found "$p.tok.kind" in this text "...${p.excerpt()}..."')
+ ' expected token "${expected_token}" but found "${p.tok.kind}" in this text "...${p.excerpt()}..."')
}
}
@@ -281,7 +281,7 @@ fn todo_msvc_astring2dkey(s []string) DottedKey {
fn (p Parser) check_explicitly_declared(key DottedKey) ! {
if p.explicit_declared.len > 0 && p.explicit_declared.has(key) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' key `$key.str()` is already explicitly declared. Unexpected redeclaration at "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' key `${key.str()}` is already explicitly declared. Unexpected redeclaration at "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
}
@@ -290,7 +290,7 @@ fn (p Parser) check_explicitly_declared(key DottedKey) ! {
fn (p Parser) check_explicitly_declared_array_of_tables(key DottedKey) ! {
if p.explicit_declared_array_of_tables.len > 0 && p.explicit_declared_array_of_tables.has(key) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' key `$key.str()` is already an explicitly declared array of tables. Unexpected redeclaration at "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' key `${key.str()}` is already an explicitly declared array of tables. Unexpected redeclaration at "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
}
@@ -298,7 +298,7 @@ fn (p Parser) check_explicitly_declared_array_of_tables(key DottedKey) ! {
fn (p Parser) check_implicitly_declared(key DottedKey) ! {
if p.implicit_declared.len > 0 && p.implicit_declared.has(key) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' key `$key.str()` is already implicitly declared. Unexpected redeclaration at "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' key `${key.str()}` is already implicitly declared. Unexpected redeclaration at "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
}
@@ -308,7 +308,7 @@ fn (p Parser) check_implicitly_declared(key DottedKey) ! {
// reference maps by multiple keys "dotted" (separated by "." periods) in TOML documents.
// See also `find_in_table`.
pub fn (mut p Parser) find_table() !&map[string]ast.Value {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "$p.root_map_key" in map ${ptr_str(p.root_map)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "${p.root_map_key}" in map ${ptr_str(p.root_map)}')
mut t := unsafe { &p.root_map }
if p.root_map_key.len == 0 {
return t
@@ -319,7 +319,7 @@ pub fn (mut p Parser) find_table() !&map[string]ast.Value {
// allocate_table allocates all tables in "dotted" `key` (`a.b.c`) in the *root* table.
pub fn (mut p Parser) allocate_table(key DottedKey) ! {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'allocating "$key" in map ${ptr_str(p.root_map)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'allocating "${key}" in map ${ptr_str(p.root_map)}')
mut t := unsafe { &p.root_map }
if key.len == 0 {
return
@@ -347,7 +347,7 @@ pub fn (mut p Parser) find_sub_table(key DottedKey) !&map[string]ast.Value {
if p.root_map_key.len == 0 {
ky = key
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "$ky" in map ${ptr_str(p.root_map)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "${ky}" in map ${ptr_str(p.root_map)}')
mut t := unsafe { &p.root_map }
if ky.len == 0 {
return t
@@ -364,20 +364,20 @@ pub fn (mut p Parser) find_in_table(mut table map[string]ast.Value, key DottedKe
// NOTE This code is the result of much trial and error.
// I'm still not quite sure *exactly* why it works. All I can leave here is a hope
// that this kind of minefield someday will be easier in V :)
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "$key" in map ${ptr_str(table)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "${key}" in map ${ptr_str(table)}')
mut t := unsafe { &table }
unsafe {
for k in key {
if val := t[k] {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'found key "$k" in $t.keys()')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'found key "${k}" in ${t.keys()}')
if val is map[string]ast.Value {
t = &(val as map[string]ast.Value)
} else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$k" in "$key" is not a map but `$val.type_name()`')
+ ' "${k}" in "${key}" is not a map but `${val.type_name()}`')
}
} else {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'no key "$k" in "$key" found, allocating new map at key "$k" in map ${ptr_str(t)}"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'no key "${k}" in "${key}" found, allocating new map at key "${k}" in map ${ptr_str(t)}"')
t[k] = map[string]ast.Value{}
t = &(t[k] as map[string]ast.Value)
util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'allocated new map ${ptr_str(t)}"')
@@ -397,35 +397,35 @@ pub fn (mut p Parser) find_array_of_tables() ![]ast.Value {
if key.len > 1 {
key = DottedKey([key[0]])
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "$key" in map ${ptr_str(t)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "${key}" in map ${ptr_str(t)}')
unsafe {
if val := t[key.str()] {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'found key "$key" in $t.keys()')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'found key "${key}" in ${t.keys()}')
if val is []ast.Value {
arr := (val as []ast.Value)
return arr
}
}
}
- return error(@MOD + '.' + @STRUCT + '.' + @FN + 'no key `$key` found in map ${ptr_str(t)}"')
+ return error(@MOD + '.' + @STRUCT + '.' + @FN + 'no key `${key}` found in map ${ptr_str(t)}"')
}
// allocate_in_table allocates all tables in "dotted" `key` (`a.b.c`) in `table`.
pub fn (mut p Parser) allocate_in_table(mut table map[string]ast.Value, key DottedKey) ! {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'allocating "$key" in map ${ptr_str(table)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'allocating "${key}" in map ${ptr_str(table)}')
mut t := unsafe { &table }
unsafe {
for k in key {
if val := t[k] {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'found key "$k" in $t.keys()')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'found key "${k}" in ${t.keys()}')
if val is map[string]ast.Value {
t = &(val as map[string]ast.Value)
} else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$k" in "$key" is not a map ($val.type_name())')
+ ' "${k}" in "${key}" is not a map (${val.type_name()})')
}
} else {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'no key "$k" in "$key" found, allocating new map at key "$k" in map ${ptr_str(t)}"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'no key "${k}" in "${key}" found, allocating new map at key "${k}" in map ${ptr_str(t)}"')
t[k] = map[string]ast.Value{}
t = &(t[k] as map[string]ast.Value)
util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'allocated new map ${ptr_str(t)}"')
@@ -451,7 +451,7 @@ pub fn (mut p Parser) dotted_key() !DottedKey {
p.ignore_while_peek(parser.space_formatting)
}
p.next()!
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed dotted key `$dotted_key` now at "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed dotted key `${dotted_key}` now at "${p.tok.kind}" "${p.tok.lit}"')
return dotted_key
}
@@ -467,15 +467,15 @@ pub fn (mut p Parser) root_table() ! {
p.skip_next = false
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "${p.tok.kind}" "${p.tok.lit}"')
match p.tok.kind {
.hash {
c := p.comment()
p.ast_root.comments << c
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comment "$c.text"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comment "${c.text}"')
}
.whitespace, .tab, .nl, .cr {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping formatting "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping formatting "${p.tok.kind}" "${p.tok.lit}"')
continue
}
.bare, .quoted, .number, .minus, .underscore {
@@ -495,7 +495,7 @@ pub fn (mut p Parser) root_table() ! {
if p.build_abs_dotted_key(sub_table) == explicit_key {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' key `$sub_table` has already been explicitly declared. Unexpected redeclaration at "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' key `${sub_table}` has already been explicitly declared. Unexpected redeclaration at "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
if explicit_key.len == 1 || explicit_key == p.root_map_key {
continue
@@ -505,7 +505,7 @@ pub fn (mut p Parser) root_table() ! {
// https://github.com/BurntSushi/toml-test/blob/576db85/tests/invalid/table/injection-2.toml
if p.build_abs_dotted_key(sub_table).starts_with(explicit_key) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' key `$dotted_key` has already been explicitly declared. Unexpected redeclaration at "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' key `${dotted_key}` has already been explicitly declared. Unexpected redeclaration at "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
}
@@ -520,7 +520,7 @@ pub fn (mut p Parser) root_table() ! {
t := p.find_sub_table(sub_table)!
unsafe {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting "$key" = $val in table ${ptr_str(t)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting "${key}" = ${val} in table ${ptr_str(t)}')
t[key.str()] = val
}
} else {
@@ -529,11 +529,11 @@ pub fn (mut p Parser) root_table() ! {
t := p.find_table()!
unsafe {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting "$key.str()" = $val in table ${ptr_str(t)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting "${key.str()}" = ${val} in table ${ptr_str(t)}')
key_str := key.str()
if _ := t[key_str] {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' key "$key" is already initialized with a value. At "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' key "${key}" is already initialized with a value. At "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
t[key_str] = val
}
@@ -549,7 +549,7 @@ pub fn (mut p Parser) root_table() ! {
peek_tok, _ = p.peek_over(1, parser.space_formatting)!
if peek_tok.kind == .lsbr {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' unexpected "$p.tok.kind" "$p.tok.lit" at this (excerpt): "...${p.excerpt()}..."')
+ ' unexpected "${p.tok.kind}" "${p.tok.lit}" at this (excerpt): "...${p.excerpt()}..."')
}
}
@@ -565,7 +565,7 @@ pub fn (mut p Parser) root_table() ! {
p.array_of_tables(mut &p.root_map)!
}
p.skip_next = true // skip calling p.next() in coming iteration
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'leaving double bracket at "$p.tok.kind" "$p.tok.lit". NEXT is "$p.peek_tok.kind "$p.peek_tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'leaving double bracket at "${p.tok.kind}" "${p.tok.lit}". NEXT is "${p.peek_tok.kind}" "${p.peek_tok.lit}"')
} else if peek_tok.kind == .period {
// Parse `[d.e.f]`
dotted_key := p.dotted_key()!
@@ -602,13 +602,13 @@ pub fn (mut p Parser) root_table() ! {
sub_table, key := p.sub_table_key(new_key)
t := p.find_in_table(mut mut_val, sub_table)!
util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN,
- 'setting "$key" = $val in table ${ptr_str(t)}')
+ 'setting "${key}" = ${val} in table ${ptr_str(t)}')
t[new_key.last().str()] = m
}
}
} else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' "$p.last_aot_index" in array is not a map but `${typeof(val).name}`')
+ ' "${p.last_aot_index}" in array is not a map but `${typeof(val).name}`')
}
}
continue
@@ -622,7 +622,7 @@ pub fn (mut p Parser) root_table() ! {
p.ignore_while(parser.space_formatting)
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting root map key to `$dotted_key` at "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting root map key to `${dotted_key}` at "${p.tok.kind}" "${p.tok.lit}"')
p.root_map_key = dotted_key
p.allocate_table(p.root_map_key)!
p.expect(.rsbr)!
@@ -641,13 +641,13 @@ pub fn (mut p Parser) root_table() ! {
// [tbl]
if p.last_aot == dotted_key {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' key `$dotted_key` has already been explicitly declared. Unexpected redeclaration at "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' key `${dotted_key}` has already been explicitly declared. Unexpected redeclaration at "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
// Allow [ key ]
p.ignore_while(parser.space_formatting)
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting root map key to `$dotted_key` at "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting root map key to `${dotted_key}` at "${p.tok.kind}" "${p.tok.lit}"')
p.root_map_key = dotted_key
p.allocate_table(p.root_map_key)!
p.next()!
@@ -660,7 +660,7 @@ pub fn (mut p Parser) root_table() ! {
}
else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' could not parse "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' could not parse "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
}
}
@@ -686,15 +686,15 @@ pub fn (mut p Parser) table_contents(mut tbl map[string]ast.Value) ! {
p.skip_next = false
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "${p.tok.kind}" "${p.tok.lit}"')
match p.tok.kind {
.hash {
c := p.comment()
p.ast_root.comments << c
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comment "$c.text"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comment "${c.text}"')
}
.whitespace, .tab, .nl, .cr {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping formatting "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping formatting "${p.tok.kind}" "${p.tok.lit}"')
continue
}
.bare, .quoted, .number, .minus, .underscore {
@@ -708,7 +708,7 @@ pub fn (mut p Parser) table_contents(mut tbl map[string]ast.Value) ! {
t := p.find_in_table(mut tbl, sub_table)!
unsafe {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting "$key" = $val in table ${ptr_str(t)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting "${key}" = ${val} in table ${ptr_str(t)}')
t[key.str()] = val
}
} else {
@@ -716,11 +716,11 @@ pub fn (mut p Parser) table_contents(mut tbl map[string]ast.Value) ! {
key, val := p.key_value()!
unsafe {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting "$key.str()" = $val in table ${ptr_str(tbl)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting "${key.str()}" = ${val} in table ${ptr_str(tbl)}')
key_str := key.str()
if _ := tbl[key_str] {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' key "$key" is already initialized with a value. At "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' key "${key}" is already initialized with a value. At "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
tbl[key_str] = val
}
@@ -732,7 +732,7 @@ pub fn (mut p Parser) table_contents(mut tbl map[string]ast.Value) ! {
}
else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' could not parse "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' could not parse "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
}
}
@@ -746,7 +746,7 @@ pub fn (mut p Parser) inline_table(mut tbl map[string]ast.Value) ! {
mut previous_token_was_value := false
for p.tok.kind != .eof {
p.next()!
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "$p.tok.kind"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "${p.tok.kind}"')
if previous_token_was_value {
p.ignore_while(parser.space_formatting)
@@ -763,7 +763,7 @@ pub fn (mut p Parser) inline_table(mut tbl map[string]ast.Value) ! {
util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping "$p.tok.kind" "$p.tok.lit"')
continue
}*/
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping formatting "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping formatting "${p.tok.kind}" "${p.tok.lit}"')
continue
}
.comma {
@@ -771,9 +771,9 @@ pub fn (mut p Parser) inline_table(mut tbl map[string]ast.Value) ! {
if p.peek_tok.kind in [.comma, .rcbr] {
p.next()! // Forward to the peek_tok
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' unexpected "$p.tok.kind" "$p.tok.lit" at this (excerpt): "...${p.excerpt()}..."')
+ ' unexpected "${p.tok.kind}" "${p.tok.lit}" at this (excerpt): "...${p.excerpt()}..."')
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comma table value separator "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comma table value separator "${p.tok.lit}"')
continue
}
.rcbr {
@@ -791,7 +791,7 @@ pub fn (mut p Parser) inline_table(mut tbl map[string]ast.Value) ! {
mut t := p.find_in_table(mut tbl, sub_table)!
unsafe {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'inserting @6 "$key" = $val into ${ptr_str(t)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'inserting @6 "${key}" = ${val} into ${ptr_str(t)}')
t[key.str()] = val
}
} else {
@@ -800,28 +800,28 @@ pub fn (mut p Parser) inline_table(mut tbl map[string]ast.Value) ! {
key_str := key.str()
if _ := tbl[key_str] {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' key "$key_str" is already initialized with a value. At "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' key "${key_str}" is already initialized with a value. At "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'inserting @5 "$key_str" = $val into ${ptr_str(tbl)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'inserting @5 "${key_str}" = ${val} into ${ptr_str(tbl)}')
tbl[key_str] = val
}
previous_token_was_value = true
}
else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' unexpected "$p.tok.kind" "$p.tok.lit" at this (excerpt): "...${p.excerpt()}..."')
+ ' unexpected "${p.tok.kind}" "${p.tok.lit}" at this (excerpt): "...${p.excerpt()}..."')
}
}
}
// Make sure the inline-table actually use the return at .rcbr match branch.
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' unexpected end of inline-table "$p.tok.kind" "$p.tok.lit" at this (excerpt): "...${p.excerpt()}..."')
+ ' unexpected end of inline-table "${p.tok.kind}" "${p.tok.lit}" at this (excerpt): "...${p.excerpt()}..."')
}
// array_of_tables parses next tokens into an array of `ast.Value`s.
[autofree_bug; manualfree]
pub fn (mut p Parser) array_of_tables(mut table map[string]ast.Value) ! {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing array of tables "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing array of tables "${p.tok.kind}" "${p.tok.lit}"')
// NOTE this is starting to get ugly. TOML isn't simple at this point
p.check(.lsbr)! // '[' bracket
@@ -862,7 +862,7 @@ pub fn (mut p Parser) array_of_tables(mut table map[string]ast.Value) ! {
table[dotted_key_str] = arr
} else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' table[$dotted_key_str] is not an array. (excerpt): "...${p.excerpt()}..."')
+ ' table[${dotted_key_str}] is not an array. (excerpt): "...${p.excerpt()}..."')
}
} else {
table[dotted_key_str] = p.array_of_tables_contents()!
@@ -878,21 +878,21 @@ pub fn (mut p Parser) array_of_tables(mut table map[string]ast.Value) ! {
// array_of_tables_contents parses next tokens into an array of `ast.Value`s.
pub fn (mut p Parser) array_of_tables_contents() ![]ast.Value {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing contents from "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing contents from "${p.tok.kind}" "${p.tok.lit}"')
mut tbl := map[string]ast.Value{}
p.table_contents(mut tbl)!
mut arr := []ast.Value{}
arr << tbl
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed array of tables ${ast.Value(arr)}. leaving at "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed array of tables ${ast.Value(arr)}. leaving at "${p.tok.kind}" "${p.tok.lit}"')
return arr
}
// double_array_of_tables parses next tokens into an array of tables of arrays of `ast.Value`s...
[autofree_bug; manualfree]
pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ! {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing nested array of tables "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing nested array of tables "${p.tok.kind}" "${p.tok.lit}"')
dotted_key := p.dotted_key()!
p.ignore_while(parser.space_formatting)
@@ -922,16 +922,16 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ! {
unsafe {
// NOTE this is starting to get EVEN uglier. TOML is not *at all* simple at this point...
if first != p.last_aot {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, '$first != $p.last_aot')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, '${first} != ${p.last_aot}')
// Implicit allocation
if p.last_aot.len == 0 {
p.last_aot = first
mut nm := &p.root_map
if first.str() in table.keys() {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'adding to existing table entry at `$first`.')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'adding to existing table entry at `${first}`.')
nm = &(table[first.str()] as map[string]ast.Value)
} else {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'implicit allocation of map for `$first` in dotted key `$dotted_key`.')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'implicit allocation of map for `${first}` in dotted key `${dotted_key}`.')
nm = &map[string]ast.Value{}
// We register this implicit allocation as *explicit* to be able to catch
// special cases like:
@@ -947,7 +947,7 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ! {
return
} else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' nested array of tables key "$first" does not match "$p.last_aot". (excerpt): "...${p.excerpt()}..."')
+ ' nested array of tables key "${first}" does not match "${p.last_aot}". (excerpt): "...${p.excerpt()}..."')
}
}
@@ -966,7 +966,7 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ! {
t[last.str()] = arr
} else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' t[$last.str()] is not an array. (excerpt): "...${p.excerpt()}..."')
+ ' t[${last.str()}] is not an array. (excerpt): "...${p.excerpt()}..."')
}
} else {
t[last.str()] = p.double_array_of_tables_contents(dotted_key)!
@@ -980,7 +980,7 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ! {
// double_array_of_tables_contents parses next tokens into an array of `ast.Value`s.
pub fn (mut p Parser) double_array_of_tables_contents(target_key DottedKey) ![]ast.Value {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing contents from "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing contents from "${p.tok.kind}" "${p.tok.lit}"')
mut tbl := map[string]ast.Value{}
mut implicit_allocation_key := DottedKey([]string{})
@@ -989,7 +989,7 @@ pub fn (mut p Parser) double_array_of_tables_contents(target_key DottedKey) ![]a
for p.tok.kind != .eof {
p.next()!
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "$p.tok.kind"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "${p.tok.kind}"')
p.ignore_while(parser.all_formatting)
// Peek forward as far as we can skipping over space formatting tokens.
@@ -1019,7 +1019,7 @@ pub fn (mut p Parser) double_array_of_tables_contents(target_key DottedKey) ![]a
mut t := p.find_in_table(mut tbl, sub_table)!
unsafe {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'inserting @6 "$key" = $val into ${ptr_str(t)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'inserting @6 "${key}" = ${val} into ${ptr_str(t)}')
t[key.str()] = val
}
} else {
@@ -1030,7 +1030,7 @@ pub fn (mut p Parser) double_array_of_tables_contents(target_key DottedKey) ![]a
t = p.find_in_table(mut tbl, implicit_allocation_key)!
}
unsafe {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'inserting @7 "$key" = $val into ${ptr_str(t)}')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'inserting @7 "${key}" = ${val} into ${ptr_str(t)}')
t[key.str()] = val
}
}
@@ -1054,14 +1054,14 @@ pub fn (mut p Parser) double_array_of_tables_contents(target_key DottedKey) ![]a
implicit_allocation_key = dotted_key[2..]
}
p.ignore_while(parser.space_formatting)
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'keys are: dotted `$dotted_key`, target `$target_key`, implicit `$implicit_allocation_key` at "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'keys are: dotted `${dotted_key}`, target `${target_key}`, implicit `${implicit_allocation_key}` at "${p.tok.kind}" "${p.tok.lit}"')
p.expect(.rsbr)!
p.peek_for_correct_line_ending_or_fail()!
p.explicit_declared << dotted_key
continue
} else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' could not parse "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' could not parse "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
}
else {
@@ -1071,7 +1071,7 @@ pub fn (mut p Parser) double_array_of_tables_contents(target_key DottedKey) ![]a
}
mut arr := []ast.Value{}
arr << tbl
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed array of tables ${ast.Value(arr)}. leaving at "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed array of tables ${ast.Value(arr)}. leaving at "${p.tok.kind}" "${p.tok.lit}"')
return arr
}
@@ -1083,7 +1083,7 @@ pub fn (mut p Parser) array() ![]ast.Value {
mut previous_token_was_value := false
for p.tok.kind != .eof {
p.next()!
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "${p.tok.kind}" "${p.tok.lit}"')
if previous_token_was_value {
p.ignore_while(parser.all_formatting)
@@ -1107,19 +1107,19 @@ pub fn (mut p Parser) array() ![]ast.Value {
if p.peek_tok.kind in [.comma, .bare] {
p.next()! // Forward to the peek_tok
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' unexpected "$p.tok.kind" "$p.tok.lit" at this (excerpt): "...${p.excerpt()}..."')
+ ' unexpected "${p.tok.kind}" "${p.tok.lit}" at this (excerpt): "...${p.excerpt()}..."')
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comma table value separator "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comma table value separator "${p.tok.lit}"')
continue
}
.eof {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' could not parse array. Reached EOF "$p.tok.kind" "$p.tok.lit" ("$p.tok.lit") in this (excerpt): "...${p.excerpt()}..."')
+ ' could not parse array. Reached EOF "${p.tok.kind}" "${p.tok.lit}" ("${p.tok.lit}") in this (excerpt): "...${p.excerpt()}..."')
}
.hash {
c := p.comment()
p.ast_root.comments << c
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comment "$c.text"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comment "${c.text}"')
}
.lcbr {
p.ignore_while(parser.space_formatting)
@@ -1138,7 +1138,7 @@ pub fn (mut p Parser) array() ![]ast.Value {
previous_token_was_value = true
}
.lsbr {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing array in array "$p.tok.kind" "$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing array in array "${p.tok.kind}" "${p.tok.lit}"')
arr << ast.Value(p.array()!)
previous_token_was_value = true
}
@@ -1147,21 +1147,21 @@ pub fn (mut p Parser) array() ![]ast.Value {
}
else {
error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' could not parse "$p.tok.kind" "$p.tok.lit" ("$p.tok.lit") in this (excerpt): "...${p.excerpt()}..."')
+ ' could not parse "${p.tok.kind}" "${p.tok.lit}" ("${p.tok.lit}") in this (excerpt): "...${p.excerpt()}..."')
}
}
}
p.expect(.rsbr)! // ']' bracket
$if debug {
flat := arr.str().replace('\n', r'\n')
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed array: $flat . Currently @ token "$p.tok.kind"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed array: ${flat} . Currently @ token "${p.tok.kind}"')
}
return arr
}
// comment returns an `ast.Comment` type.
pub fn (mut p Parser) comment() ast.Comment {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed hash comment "#$p.tok.lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed hash comment "#${p.tok.lit}"')
return ast.Comment{
text: p.tok.lit
pos: p.tok.pos()
@@ -1171,7 +1171,7 @@ pub fn (mut p Parser) comment() ast.Comment {
// key parse and returns an `ast.Key` type.
// Keys are the token(s) appearing before an assignment operator (=).
pub fn (mut p Parser) key() !ast.Key {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing key from "$p.tok.lit" ...')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing key from "${p.tok.lit}" ...')
mut key := ast.Key(ast.Null{})
if p.tok.kind == .number {
@@ -1214,7 +1214,7 @@ pub fn (mut p Parser) key() !ast.Key {
if key is ast.Null {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' key expected .bare, .underscore, .number, .quoted or .boolean but got "$p.tok.kind"')
+ ' key expected .bare, .underscore, .number, .quoted or .boolean but got "${p.tok.kind}"')
}
// A few small exceptions that can't easily be done via `checker` or `decoder` *after* the
@@ -1252,7 +1252,7 @@ pub fn (mut p Parser) key_value() !(ast.Key, ast.Value) {
p.check(.assign)! // Assignment operator
p.ignore_while(parser.space_formatting)
value := p.value()!
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed key value pair. `$key = $value`')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed key value pair. `${key} = ${value}`')
p.explicit_declared << p.build_abs_dotted_key(DottedKey([
key.str(),
@@ -1271,7 +1271,7 @@ pub fn (mut p Parser) dotted_key_value() !(DottedKey, ast.Value) {
p.check(.assign)!
p.ignore_while(parser.space_formatting)
value := p.value()!
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed dotted key value pair `$dotted_key = $value`...')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed dotted key value pair `${dotted_key} = ${value}`...')
p.explicit_declared << p.build_abs_dotted_key(dotted_key)
@@ -1281,7 +1281,7 @@ pub fn (mut p Parser) dotted_key_value() !(DottedKey, ast.Value) {
// value parse and returns an `ast.Value` type.
// values are the token(s) appearing after an assignment operator (=).
pub fn (mut p Parser) value() !ast.Value {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing value from token "$p.tok.kind" "$p.tok.lit"...')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing value from token "${p.tok.kind}" "${p.tok.lit}"...')
mut value := ast.Value(ast.Null{})
if p.tok.kind == .number {
@@ -1310,10 +1310,10 @@ pub fn (mut p Parser) value() !ast.Value {
}
if value is ast.Null {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' value expected .boolean, .quoted, .lsbr, .lcbr or .number got "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+ ' value expected .boolean, .quoted, .lsbr, .lcbr or .number got "${p.tok.kind}" "${p.tok.lit}" in this (excerpt): "...${p.excerpt()}..."')
}
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed "$p.tok.kind" as value $value')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed "${p.tok.kind}" as value ${value}')
return value
}
@@ -1350,7 +1350,7 @@ pub fn (mut p Parser) bare() !ast.Bare {
continue
}
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' bare key expected .bare, .minus, or .underscore but got "$p.tok.kind"')
+ ' bare key expected .bare, .minus, or .underscore but got "${p.tok.kind}"')
}
return ast.Bare{
text: lits
@@ -1387,7 +1387,7 @@ pub fn (mut p Parser) quoted() ast.Quoted {
pub fn (mut p Parser) boolean() !ast.Bool {
if p.tok.lit !in ['true', 'false'] {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' expected literal to be either `true` or `false` got "$p.tok.kind"')
+ ' expected literal to be either `true` or `false` got "${p.tok.kind}"')
}
return ast.Bool{
text: p.tok.lit
@@ -1428,7 +1428,7 @@ pub fn (mut p Parser) date_time() !ast.DateTimeType {
time = p.time()!
lit += time.text
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed date-time: "$lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed date-time: "${lit}"')
return ast.DateTime{
text: lit
pos: pos
@@ -1463,7 +1463,7 @@ pub fn (mut p Parser) date() !ast.Date {
lit += p.tok.lit
p.expect(.number)!
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed date: "$lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed date: "${lit}"')
return ast.Date{
text: lit
pos: pos
@@ -1524,7 +1524,7 @@ pub fn (mut p Parser) time() !ast.Time {
p.expect(.bare)!
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed time: "$lit"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed time: "${lit}"')
return ast.Time{
text: lit
pos: pos
diff --git a/vlib/toml/scanner/scanner.v b/vlib/toml/scanner/scanner.v
index 56d3d61786..ce64c180af 100644
--- a/vlib/toml/scanner/scanner.v
+++ b/vlib/toml/scanner/scanner.v
@@ -108,12 +108,12 @@ pub fn (mut s Scanner) scan() !token.Token {
}
ascii := byte_c.ascii_str()
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'current char "$ascii"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'current char "${ascii}"')
if byte_c == u8(0x0) {
s.reset()
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' NULL control character `$c.hex()` is not allowed at ($s.line_nr,$s.col) "$ascii" near ...${s.excerpt(s.pos, 5)}...')
+ ' NULL control character `${c.hex()}` is not allowed at (${s.line_nr},${s.col}) "${ascii}" near ...${s.excerpt(s.pos, 5)}...')
}
is_sign := c == `+` || c == `-`
@@ -128,7 +128,7 @@ pub fn (mut s Scanner) scan() !token.Token {
&& peek_2 == `f`
if !s.is_left_of_assign && (is_nan || is_inf || is_signed_nan || is_signed_inf) {
num := s.extract_nan_or_inf_number()!
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified a special number "$num" ($num.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified a special number "${num}" (${num.len})')
return s.new_token(.number, num, num.len)
}
@@ -136,17 +136,17 @@ pub fn (mut s Scanner) scan() !token.Token {
is_digit := byte_c.is_digit()
if is_digit || is_signed_number {
num := s.extract_number()!
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified a number "$num" ($num.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified a number "${num}" (${num.len})')
return s.new_token(.number, num, num.len)
}
if util.is_key_char(byte_c) {
key := s.extract_key()
if !s.is_left_of_assign && (key == 'true' || key == 'false') {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified a boolean "$key" ($key.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified a boolean "${key}" (${key.len})')
return s.new_token(.boolean, key, key.len)
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified a bare key "$key" ($key.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified a bare key "${key}" (${key.len})')
return s.new_token(.bare, key, key.len)
}
@@ -154,13 +154,13 @@ pub fn (mut s Scanner) scan() !token.Token {
` `, `\t`, `\n`, `\r` {
if c == `\n` {
s.inc_line_number()
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'incremented line nr to $s.line_nr')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'incremented line nr to ${s.line_nr}')
}
// Date-Time in RFC 3339 is allowed to have a space between the date and time in supplement to the 'T'
// so we allow space characters to slip through to the parser if the space is between two digits...
// util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, '"'+u8(s.peek(-1)).ascii_str()+'" < "$ascii" > "'+u8(s.at()).ascii_str()+'"')
if c == ` ` && u8(s.peek(-1)).is_digit() && u8(s.at()).is_digit() {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified, what could be, a space between a RFC 3339 date and time ("$ascii") ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified, what could be, a space between a RFC 3339 date and time ("${ascii}") (${ascii.len})')
return s.new_token(token.Kind.whitespace, ascii, ascii.len)
}
if s.config.tokenize_formatting {
@@ -172,75 +172,75 @@ pub fn (mut s Scanner) scan() !token.Token {
} else if c == `\n` {
kind = token.Kind.nl
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified formatting character ("$ascii") ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified formatting character ("${ascii}") (${ascii.len})')
return s.new_token(kind, ascii, ascii.len)
} else {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping " ", "\\t" or "\\n" ("$ascii") ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping " ", "\\t" or "\\n" ("${ascii}") (${ascii.len})')
}
continue
}
`-` {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified minus "$ascii" ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified minus "${ascii}" (${ascii.len})')
return s.new_token(.minus, ascii, ascii.len)
}
`_` {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified underscore "$ascii" ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified underscore "${ascii}" (${ascii.len})')
return s.new_token(.underscore, ascii, ascii.len)
}
`+` {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified plus "$ascii" ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified plus "${ascii}" (${ascii.len})')
return s.new_token(.plus, ascii, ascii.len)
}
`=` {
s.is_left_of_assign = false
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified assignment "$ascii" ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified assignment "${ascii}" (${ascii.len})')
return s.new_token(.assign, ascii, ascii.len)
}
`"`, `'` { // ... some string "/'
ident_string := s.extract_string()!
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified quoted string `$ident_string`')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified quoted string `${ident_string}`')
return s.new_token(.quoted, ident_string, ident_string.len)
}
`#` {
hash := s.ignore_line()!
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified comment hash "$hash" ($hash.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified comment hash "${hash}" (${hash.len})')
return s.new_token(.hash, hash, hash.len + 1)
}
`{` {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified left curly bracket "$ascii" ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified left curly bracket "${ascii}" (${ascii.len})')
return s.new_token(.lcbr, ascii, ascii.len)
}
`}` {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified right curly bracket "$ascii" ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified right curly bracket "${ascii}" (${ascii.len})')
return s.new_token(.rcbr, ascii, ascii.len)
}
`[` {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified left square bracket "$ascii" ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified left square bracket "${ascii}" (${ascii.len})')
return s.new_token(.lsbr, ascii, ascii.len)
}
`]` {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified right square bracket "$ascii" ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified right square bracket "${ascii}" (${ascii.len})')
return s.new_token(.rsbr, ascii, ascii.len)
}
`:` {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified colon "$ascii" ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified colon "${ascii}" (${ascii.len})')
return s.new_token(.colon, ascii, ascii.len)
}
`,` {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified comma "$ascii" ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified comma "${ascii}" (${ascii.len})')
return s.new_token(.comma, ascii, ascii.len)
}
`.` {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified period "$ascii" ($ascii.len)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified period "${ascii}" (${ascii.len})')
return s.new_token(.period, ascii, ascii.len)
}
else {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' could not scan character `$ascii` / $c at $s.pos ($s.line_nr,$s.col) near ...${s.excerpt(s.pos, 5)}...')
+ ' could not scan character `${ascii}` / ${c} at ${s.pos} (${s.line_nr},${s.col}) near ...${s.excerpt(s.pos, 5)}...')
}
}
}
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'unknown character code at $s.pos ($s.line_nr,$s.col) near ...${s.excerpt(s.pos,
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'unknown character code at ${s.pos} (${s.line_nr},${s.col}) near ...${s.excerpt(s.pos,
5)}...')
return s.new_token(.unknown, '', 0)
}
@@ -358,7 +358,7 @@ fn (mut s Scanner) ignore_line() !string {
util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, ' ignoring until EOL...')
start := s.pos
for c := s.at(); c != scanner.end_of_text && c != `\n`; c = s.at() {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping "${u8(c).ascii_str()} / $c"')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping "${u8(c).ascii_str()} / ${c}"')
if s.at_crlf() {
util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'letting `\\r\\n` slip through')
break
@@ -420,11 +420,11 @@ fn (mut s Scanner) extract_string() !string {
if s.pos >= s.text.len {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' unfinished single-line string literal `$quote.ascii_str()` started at $start ($s.line_nr,$s.col) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(s.pos, 5)}...')
+ ' unfinished single-line string literal `${quote.ascii_str()}` started at ${start} (${s.line_nr},${s.col}) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(s.pos, 5)}...')
}
c := u8(s.at())
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'c: `$c.ascii_str()` / $c (quote type: $quote/$quote.ascii_str())')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'c: `${c.ascii_str()}` / ${c} (quote type: ${quote}/${quote.ascii_str()})')
// Check for escaped chars
if c == u8(92) {
@@ -439,7 +439,7 @@ fn (mut s Scanner) extract_string() !string {
// Check for control characters (allow TAB)
if util.is_illegal_ascii_control_character(c) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' control character `$c.hex()` is not allowed at $start ($s.line_nr,$s.col) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(s.pos, 5)}...')
+ ' control character `${c.hex()}` is not allowed at ${start} (${s.line_nr},${s.col}) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(s.pos, 5)}...')
}
if c == quote {
@@ -453,7 +453,7 @@ fn (mut s Scanner) extract_string() !string {
// Don't eat multiple lines in single-line mode
if lit.contains('\n') {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' unfinished single-line string literal `$quote.ascii_str()` started at $start ($s.line_nr,$s.col) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(s.pos, 5)}...')
+ ' unfinished single-line string literal `${quote.ascii_str()}` started at ${start} (${s.line_nr},${s.col}) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(s.pos, 5)}...')
}
}
return lit
@@ -470,8 +470,8 @@ fn (mut s Scanner) extract_multiline_string() !string {
start := s.pos
mut lit := quote.ascii_str() + quote.ascii_str() + quote.ascii_str()
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'multi-line `$quote.ascii_str()${s.text[s.pos +
- 1].ascii_str()}${s.text[s.pos + 2].ascii_str()}` string started at pos $start ($s.line_nr,$s.col) (quote type: $quote.ascii_str() / $quote)')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'multi-line `${quote.ascii_str()}${s.text[
+ s.pos + 1].ascii_str()}${s.text[s.pos + 2].ascii_str()}` string started at pos ${start} (${s.line_nr},${s.col}) (quote type: ${quote.ascii_str()} / ${quote})')
s.pos += 2
s.col += 2
@@ -482,16 +482,16 @@ fn (mut s Scanner) extract_multiline_string() !string {
if s.pos >= s.text.len {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' unfinished multi-line string literal ($quote.ascii_str()$quote.ascii_str()$quote.ascii_str()) started at $start ($s.line_nr,$s.col) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(s.pos, 5)}...')
+ ' unfinished multi-line string literal (${quote.ascii_str()}${quote.ascii_str()}${quote.ascii_str()}) started at ${start} (${s.line_nr},${s.col}) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(s.pos, 5)}...')
}
c := u8(s.at())
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'c: `$c.ascii_str()` / $c (quote type: $quote/$quote.ascii_str())')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'c: `${c.ascii_str()}` / ${c} (quote type: ${quote}/${quote.ascii_str()})')
if c == `\n` {
s.inc_line_number()
lit += c.ascii_str()
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'c: `\\n` / $c')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'c: `\\n` / ${c}')
continue
}
// Check for escaped chars
@@ -507,7 +507,7 @@ fn (mut s Scanner) extract_multiline_string() !string {
// Check for control characters (allow TAB)
if util.is_illegal_ascii_control_character(c) {
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' control character `$c.hex()` is not allowed at $start ($s.line_nr,$s.col) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(s.pos, 5)}...')
+ ' control character `${c.hex()}` is not allowed at ${start} (${s.line_nr},${s.col}) "${u8(s.at()).ascii_str()}" near ...${s.excerpt(s.pos, 5)}...')
}
if c == quote {
@@ -516,7 +516,7 @@ fn (mut s Scanner) extract_multiline_string() !string {
s.pos += 3
s.col += 3
lit += quote.ascii_str() + quote.ascii_str() + quote.ascii_str()
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'returning at $c.ascii_str() `$lit`')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'returning at ${c.ascii_str()} `${lit}`')
return lit
} else if s.peek(3) != quote {
// lit += c.ascii_str()
@@ -524,7 +524,7 @@ fn (mut s Scanner) extract_multiline_string() !string {
s.pos += 3
s.col += 3
lit += quote.ascii_str() + quote.ascii_str() + quote.ascii_str()
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'returning at $c.ascii_str() `$lit`')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'returning at ${c.ascii_str()} `${lit}`')
return lit
}
}
@@ -544,28 +544,28 @@ fn (mut s Scanner) handle_escapes(quote u8, is_multiline bool) (string, int) {
if s.peek(1) == `u` && u8(s.peek(2)).is_hex_digit() && u8(s.peek(3)).is_hex_digit()
&& u8(s.peek(4)).is_hex_digit() && u8(s.peek(5)).is_hex_digit() {
lit += s.text[s.pos + 1..s.pos + 6] //.ascii_str()
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'gulp escaped unicode `$lit`')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'gulp escaped unicode `${lit}`')
return lit, 5
} else if s.peek(1) == quote {
if (!is_multiline && s.peek(2) == `\n`)
|| (is_multiline && s.peek(2) == quote && s.peek(3) == quote && s.peek(4) == `\n`) {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'ignore special case escaped `$lit` at end of string')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'ignore special case escaped `${lit}` at end of string')
return '', 0
}
lit += quote.ascii_str()
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'gulp escaped `$lit`')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'gulp escaped `${lit}`')
return lit, 1
}
}
if is_literal_string {
if s.peek(1) == quote {
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'ignore escape `$lit${u8(s.peek(1)).ascii_str()}` in literal string')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'ignore escape `${lit}${u8(s.peek(1)).ascii_str()}` in literal string')
return '', 0
}
}
lit += u8(s.peek(1)).ascii_str()
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'gulp escaped `$lit`')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'gulp escaped `${lit}`')
return lit, 1
}
@@ -604,7 +604,7 @@ fn (mut s Scanner) extract_number() !string {
s.col++
}
key := s.text[start..s.pos]
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified number "$key" in range [$start .. $s.pos]')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified number "${key}" in range [${start} .. ${s.pos}]')
return key
}
@@ -634,7 +634,7 @@ fn (mut s Scanner) extract_nan_or_inf_number() !string {
s.col++
}
key := s.text[start..s.pos]
- util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified special number "$key" in range [$start .. $s.pos]')
+ util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified special number "${key}" in range [${start} .. ${s.pos}]')
return key
}
@@ -679,12 +679,12 @@ fn (mut s Scanner) check_utf16_or_32_bom() ! {
s.header_len = 4
s.skip_n(s.header_len)
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' UTF-32 is not a valid TOML encoding at $s.pos ($s.line_nr,$s.col) near ...${s.excerpt(s.pos, 5)}...')
+ ' UTF-32 is not a valid TOML encoding at ${s.pos} (${s.line_nr},${s.col}) near ...${s.excerpt(s.pos, 5)}...')
}
if (s.at() == 0xFE && s.peek(1) == 0xFF) || (s.at() == 0xFF && s.peek(1) == 0xFE) {
s.header_len = 2
s.skip_n(s.header_len)
return error(@MOD + '.' + @STRUCT + '.' + @FN +
- ' UTF-16 is not a valid TOML encoding at $s.pos ($s.line_nr,$s.col) near ...${s.excerpt(s.pos, 5)}...')
+ ' UTF-16 is not a valid TOML encoding at ${s.pos} (${s.line_nr},${s.col}) near ...${s.excerpt(s.pos, 5)}...')
}
}
diff --git a/vlib/toml/tests/alexcrichton.toml-rs-tests_test.v b/vlib/toml/tests/alexcrichton.toml-rs-tests_test.v
index c19501e755..b2fba76d55 100644
--- a/vlib/toml/tests/alexcrichton.toml-rs-tests_test.v
+++ b/vlib/toml/tests/alexcrichton.toml-rs-tests_test.v
@@ -57,7 +57,7 @@ normalize'
fn run(args []string) !string {
res := os.execute(args.join(' '))
if res.exit_code != 0 {
- return error('${args[0]} failed with return code ${res.exit_code}.\n$res.output')
+ return error('${args[0]} failed with return code ${res.exit_code}.\n${res.output}')
}
return res.output
}
@@ -69,7 +69,7 @@ fn test_alexcrichton_toml_rs() {
if os.is_dir(test_root) {
valid_test_files := os.walk_ext(os.join_path(test_root, 'test-suite', 'tests',
'valid'), '.toml')
- println('Testing $valid_test_files.len valid TOML files...')
+ println('Testing ${valid_test_files.len} valid TOML files...')
mut valid := 0
mut e := 0
for i, valid_test_file in valid_test_files {
@@ -81,23 +81,23 @@ fn test_alexcrichton_toml_rs() {
if relative in valid_exceptions {
e++
idx := valid_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" VALID EXCEPTION [$idx/$valid_exceptions.len]...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID EXCEPTION [${idx}/${valid_exceptions.len}]...')
continue
}
if !hide_oks {
- println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
+ println('OK [${i + 1}/${valid_test_files.len}] "${valid_test_file}"...')
}
toml_doc := toml.parse_file(valid_test_file)!
valid++
}
- println('$valid/$valid_test_files.len TOML files were parsed correctly')
+ println('${valid}/${valid_test_files.len} TOML files were parsed correctly')
if valid_exceptions.len > 0 {
- println('TODO Skipped parsing of $e valid TOML files...')
+ println('TODO Skipped parsing of ${e} valid TOML files...')
}
// If the command-line tool `jq` is installed, value tests can be run as well.
if jq != '' {
- println('Testing value output of $valid_test_files.len valid TOML files using "$jq"...')
+ println('Testing value output of ${valid_test_files.len} valid TOML files using "${jq}"...')
if os.exists(compare_work_dir_root) {
os.rmdir_all(compare_work_dir_root)!
@@ -115,25 +115,25 @@ fn test_alexcrichton_toml_rs() {
relative = relative.replace('/', '\\')
}
if !os.exists(valid_test_file.all_before_last('.') + '.json') {
- println('N/A [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
+ println('N/A [${i + 1}/${valid_test_files.len}] "${valid_test_file}"...')
continue
}
// Skip the file if we know it can't be parsed or we know that the value retrieval needs work.
if relative in valid_exceptions {
e++
idx := valid_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" VALID EXCEPTION [$idx/$valid_exceptions.len]...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID EXCEPTION [${idx}/${valid_exceptions.len}]...')
continue
}
if relative in valid_value_exceptions {
e++
idx := valid_value_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" VALID VALUE EXCEPTION [$idx/$valid_value_exceptions.len]...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID VALUE EXCEPTION [${idx}/${valid_value_exceptions.len}]...')
continue
}
if !hide_oks {
- println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
+ println('OK [${i + 1}/${valid_test_files.len}] "${valid_test_file}"...')
}
toml_doc := toml.parse_file(valid_test_file)?
@@ -153,29 +153,29 @@ fn test_alexcrichton_toml_rs() {
os.write_file(alexcrichton_toml_json_path, alexcrichton_json)!
- v_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', v_toml_json_path]) or {
+ v_normalized_json := run([jq, '-S', '-f "${jq_normalize_path}"', v_toml_json_path]) or {
contents := os.read_file(v_toml_json_path)!
- panic(err.msg() + '\n$contents')
+ panic(err.msg() + '\n${contents}')
}
- alexcrichton_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"',
+ alexcrichton_normalized_json := run([jq, '-S', '-f "${jq_normalize_path}"',
alexcrichton_toml_json_path]) or {
contents := os.read_file(v_toml_json_path)!
- panic(err.msg() + '\n$contents')
+ panic(err.msg() + '\n${contents}')
}
assert alexcrichton_normalized_json == v_normalized_json
valid++
}
- println('$valid/$valid_test_files.len TOML files were parsed correctly and value checked')
+ println('${valid}/${valid_test_files.len} TOML files were parsed correctly and value checked')
if valid_value_exceptions.len > 0 {
- println('TODO Skipped value checks of $e valid TOML files...')
+ println('TODO Skipped value checks of ${e} valid TOML files...')
}
}
invalid_test_files := os.walk_ext(os.join_path(test_root, 'test-suite', 'tests',
'invalid'), '.toml')
- println('Testing $invalid_test_files.len invalid TOML files...')
+ println('Testing ${invalid_test_files.len} invalid TOML files...')
mut invalid := 0
e = 0
for i, invalid_test_file in invalid_test_files {
@@ -186,31 +186,31 @@ fn test_alexcrichton_toml_rs() {
if relative in invalid_exceptions {
e++
idx := invalid_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$invalid_test_files.len] "$invalid_test_file" INVALID EXCEPTION [$idx/$invalid_exceptions.len]...')
+ println('SKIP [${i + 1}/${invalid_test_files.len}] "${invalid_test_file}" INVALID EXCEPTION [${idx}/${invalid_exceptions.len}]...')
continue
}
if !hide_oks {
- println('OK [${i + 1}/$invalid_test_files.len] "$invalid_test_file"...')
+ println('OK [${i + 1}/${invalid_test_files.len}] "${invalid_test_file}"...')
}
if toml_doc := toml.parse_file(invalid_test_file) {
content_that_should_have_failed := os.read_file(invalid_test_file)!
- println(' This TOML should have failed:\n${'-'.repeat(40)}\n$content_that_should_have_failed\n${'-'.repeat(40)}')
+ println(' This TOML should have failed:\n${'-'.repeat(40)}\n${content_that_should_have_failed}\n${'-'.repeat(40)}')
assert false
} else {
if !hide_oks {
- println(' $err.msg()')
+ println(' ${err.msg()}')
}
assert true
}
invalid++
}
- println('$invalid/$invalid_test_files.len TOML files were parsed correctly')
+ println('${invalid}/${invalid_test_files.len} TOML files were parsed correctly')
if invalid_exceptions.len > 0 {
- println('TODO Skipped parsing of $invalid_exceptions.len invalid TOML files...')
+ println('TODO Skipped parsing of ${invalid_exceptions.len} invalid TOML files...')
}
} else {
- println('No test data directory found in "$test_root"')
+ println('No test data directory found in "${test_root}"')
assert true
}
}
@@ -236,7 +236,7 @@ fn to_alexcrichton(value ast.Value, array_type int) string {
match value {
ast.Quoted {
json_text := json2.Any(value.text).json_str()
- return '{ "type": "string", "value": $json_text }'
+ return '{ "type": "string", "value": ${json_text} }'
}
ast.DateTime {
// Normalization for json
@@ -253,51 +253,51 @@ fn to_alexcrichton(value ast.Value, array_type int) string {
// date-time values are represented in detail. For now we follow the BurntSushi format
// that expands to 6 digits which is also a valid RFC 3339 representation.
json_text = to_alexcrichton_time(json_text[1..json_text.len - 1])
- return '{ "type": "$typ", "value": "$json_text" }'
+ return '{ "type": "${typ}", "value": "${json_text}" }'
}
ast.Date {
json_text := json2.Any(value.text).json_str()
- return '{ "type": "date", "value": $json_text }'
+ return '{ "type": "date", "value": ${json_text} }'
}
ast.Time {
mut json_text := json2.Any(value.text).json_str()
json_text = to_alexcrichton_time(json_text[1..json_text.len - 1])
- return '{ "type": "time", "value": "$json_text" }'
+ return '{ "type": "time", "value": "${json_text}" }'
}
ast.Bool {
json_text := json2.Any(value.text.bool()).json_str()
- return '{ "type": "bool", "value": "$json_text" }'
+ return '{ "type": "bool", "value": "${json_text}" }'
}
ast.Null {
json_text := json2.Any(value.text).json_str()
- return '{ "type": "null", "value": $json_text }'
+ return '{ "type": "null", "value": ${json_text} }'
}
ast.Number {
text := value.text
if text.contains('inf') || text.contains('nan') {
- return '{ "type": "float", "value": $value.text }'
+ return '{ "type": "float", "value": ${value.text} }'
}
if !text.starts_with('0x') && (text.contains('.') || text.to_lower().contains('e')) {
mut val := ''
if text.to_lower().contains('e') && !text.contains('-') {
val = '${value.f64():.1f}'
} else {
- val = '$value.f64()'
+ val = '${value.f64()}'
}
- return '{ "type": "float", "value": "$val" }'
+ return '{ "type": "float", "value": "${val}" }'
}
v := value.i64()
// TODO workaround https://github.com/vlang/v/issues/9507
if v == i64(-9223372036854775807 - 1) {
return '{ "type": "integer", "value": "-9223372036854775808" }'
}
- return '{ "type": "integer", "value": "$v" }'
+ return '{ "type": "integer", "value": "${v}" }'
}
map[string]ast.Value {
mut str := '{ '
for key, val in value {
json_key := json2.Any(key).json_str()
- str += ' $json_key: ${to_alexcrichton(val, array_type)},'
+ str += ' ${json_key}: ${to_alexcrichton(val, array_type)},'
}
str = str.trim_right(',')
str += ' }'
diff --git a/vlib/toml/tests/burntsushi.toml-test_test.v b/vlib/toml/tests/burntsushi.toml-test_test.v
index 931563adb6..633dc78cea 100644
--- a/vlib/toml/tests/burntsushi.toml-test_test.v
+++ b/vlib/toml/tests/burntsushi.toml-test_test.v
@@ -47,7 +47,7 @@ normalize'
fn run(args []string) ?string {
res := os.execute(args.join(' '))
if res.exit_code != 0 {
- return error('${args[0]} failed with return code ${res.exit_code}.\n$res.output')
+ return error('${args[0]} failed with return code ${res.exit_code}.\n${res.output}')
}
return res.output
}
@@ -58,14 +58,14 @@ fn test_burnt_sushi_tomltest() {
test_root := os.join_path(os.dir(this_file), 'testdata', 'burntsushi', 'toml-test',
'tests')
if !os.is_dir(test_root) {
- println('No test data directory found in "$test_root"')
+ println('No test data directory found in "${test_root}"')
assert true
return
}
valid_folder := os.join_path('toml-test', 'tests', 'valid')
invalid_folder := os.join_path('toml-test', 'tests', 'invalid')
valid_test_files := os.walk_ext(os.join_path(test_root, 'valid'), '.toml')
- println('Testing $valid_test_files.len valid TOML files...')
+ println('Testing ${valid_test_files.len} valid TOML files...')
mut valid := 0
mut e := 0
for i, valid_test_file in valid_test_files {
@@ -76,23 +76,23 @@ fn test_burnt_sushi_tomltest() {
if relative in valid_exceptions {
e++
idx := valid_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" VALID EXCEPTION [$idx/$valid_exceptions.len]...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID EXCEPTION [${idx}/${valid_exceptions.len}]...')
continue
}
if !hide_oks {
- println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
+ println('OK [${i + 1}/${valid_test_files.len}] "${valid_test_file}"...')
}
toml_doc := toml.parse_file(valid_test_file)?
valid++
}
- println('$valid/$valid_test_files.len TOML files were parsed correctly')
+ println('${valid}/${valid_test_files.len} TOML files were parsed correctly')
if valid_exceptions.len > 0 {
- println('TODO Skipped parsing of $valid_exceptions.len valid TOML files...')
+ println('TODO Skipped parsing of ${valid_exceptions.len} valid TOML files...')
}
// If the command-line tool `jq` is installed, value tests can be run as well.
if jq != '' {
- println('Testing value output of $valid_test_files.len valid TOML files using "$jq"...')
+ println('Testing value output of ${valid_test_files.len} valid TOML files using "${jq}"...')
if os.exists(compare_work_dir_root) {
os.rmdir_all(compare_work_dir_root)!
@@ -113,18 +113,18 @@ fn test_burnt_sushi_tomltest() {
if relative in valid_exceptions {
e++
idx := valid_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" VALID EXCEPTION [$idx/$valid_exceptions.len]...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID EXCEPTION [${idx}/${valid_exceptions.len}]...')
continue
}
if relative in valid_value_exceptions {
e++
idx := valid_value_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" VALID VALUE EXCEPTION [$idx/$valid_value_exceptions.len]...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID VALUE EXCEPTION [${idx}/${valid_value_exceptions.len}]...')
continue
}
if !hide_oks {
- println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
+ println('OK [${i + 1}/${valid_test_files.len}] "${valid_test_file}"...')
}
toml_doc := toml.parse_file(valid_test_file)!
@@ -139,13 +139,13 @@ fn test_burnt_sushi_tomltest() {
os.write_file(bs_toml_json_path, bs_json)!
- v_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', v_toml_json_path]) or {
+ v_normalized_json := run([jq, '-S', '-f "${jq_normalize_path}"', v_toml_json_path]) or {
contents := os.read_file(v_toml_json_path)!
- panic(err.msg() + '\n$contents')
+ panic(err.msg() + '\n${contents}')
}
- bs_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', bs_toml_json_path]) or {
+ bs_normalized_json := run([jq, '-S', '-f "${jq_normalize_path}"', bs_toml_json_path]) or {
contents := os.read_file(v_toml_json_path)!
- panic(err.msg() + '\n$contents')
+ panic(err.msg() + '\n${contents}')
}
assert bs_normalized_json == v_normalized_json
@@ -153,13 +153,13 @@ fn test_burnt_sushi_tomltest() {
valid++
}
}
- println('$valid/$valid_test_files.len TOML files were parsed correctly and value checked')
+ println('${valid}/${valid_test_files.len} TOML files were parsed correctly and value checked')
if valid_value_exceptions.len > 0 {
- println('TODO Skipped value checks of $valid_value_exceptions.len valid TOML files...')
+ println('TODO Skipped value checks of ${valid_value_exceptions.len} valid TOML files...')
}
invalid_test_files := os.walk_ext(os.join_path(test_root, 'invalid'), '.toml')
- println('Testing $invalid_test_files.len invalid TOML files...')
+ println('Testing ${invalid_test_files.len} invalid TOML files...')
mut invalid := 0
e = 0
for i, invalid_test_file in invalid_test_files {
@@ -170,27 +170,27 @@ fn test_burnt_sushi_tomltest() {
if relative in invalid_exceptions {
e++
idx := invalid_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$invalid_test_files.len] "$invalid_test_file" INVALID EXCEPTION [$idx/$invalid_exceptions.len]...')
+ println('SKIP [${i + 1}/${invalid_test_files.len}] "${invalid_test_file}" INVALID EXCEPTION [${idx}/${invalid_exceptions.len}]...')
continue
}
if !hide_oks {
- println('OK [${i + 1}/$invalid_test_files.len] "$invalid_test_file"...')
+ println('OK [${i + 1}/${invalid_test_files.len}] "${invalid_test_file}"...')
}
if toml_doc := toml.parse_file(invalid_test_file) {
content_that_should_have_failed := os.read_file(invalid_test_file)!
- println(' This TOML should have failed:\n${'-'.repeat(40)}\n$content_that_should_have_failed\n${'-'.repeat(40)}')
+ println(' This TOML should have failed:\n${'-'.repeat(40)}\n${content_that_should_have_failed}\n${'-'.repeat(40)}')
assert false
} else {
if !hide_oks {
- println(' $err.msg()')
+ println(' ${err.msg()}')
}
assert true
}
invalid++
}
- println('$invalid/$invalid_test_files.len TOML files were parsed correctly')
+ println('${invalid}/${invalid_test_files.len} TOML files were parsed correctly')
if invalid_exceptions.len > 0 {
- println('TODO Skipped parsing of $invalid_exceptions.len invalid TOML files...')
+ println('TODO Skipped parsing of ${invalid_exceptions.len} invalid TOML files...')
}
}
@@ -199,7 +199,7 @@ fn to_burntsushi(value ast.Value) string {
match value {
ast.Quoted {
json_text := json2.Any(value.text).json_str()
- return '{ "type": "string", "value": $json_text }'
+ return '{ "type": "string", "value": ${json_text} }'
}
ast.DateTime {
// Normalization for json
@@ -214,31 +214,31 @@ fn to_burntsushi(value ast.Value) string {
} else {
'datetime-local'
}
- return '{ "type": "$typ", "value": $json_text }'
+ return '{ "type": "${typ}", "value": ${json_text} }'
}
ast.Date {
json_text := json2.Any(value.text).json_str()
- return '{ "type": "date-local", "value": $json_text }'
+ return '{ "type": "date-local", "value": ${json_text} }'
}
ast.Time {
json_text := json2.Any(value.text).json_str()
- return '{ "type": "time-local", "value": $json_text }'
+ return '{ "type": "time-local", "value": ${json_text} }'
}
ast.Bool {
json_text := json2.Any(value.text.bool()).json_str()
- return '{ "type": "bool", "value": "$json_text" }'
+ return '{ "type": "bool", "value": "${json_text}" }'
}
ast.Null {
json_text := json2.Any(value.text).json_str()
- return '{ "type": "null", "value": $json_text }'
+ return '{ "type": "null", "value": ${json_text} }'
}
ast.Number {
if value.text.contains('inf') || value.text.contains('nan') {
- return '{ "type": "float", "value": "$value.text" }'
+ return '{ "type": "float", "value": "${value.text}" }'
}
if !value.text.starts_with('0x')
&& (value.text.contains('.') || value.text.to_lower().contains('e')) {
- mut val := '$value.f64()'.replace('.e+', '.0e') // JSON notation
+ mut val := '${value.f64()}'.replace('.e+', '.0e') // JSON notation
if !val.contains('.') && val != '0' { // JSON notation
val += '.0'
}
@@ -248,20 +248,20 @@ fn to_burntsushi(value ast.Value) string {
if val == '0.0' {
val = '0'
}
- return '{ "type": "float", "value": "$val" }'
+ return '{ "type": "float", "value": "${val}" }'
}
v := value.i64()
// TODO workaround https://github.com/vlang/v/issues/9507
if v == i64(-9223372036854775807 - 1) {
return '{ "type": "integer", "value": "-9223372036854775808" }'
}
- return '{ "type": "integer", "value": "$v" }'
+ return '{ "type": "integer", "value": "${v}" }'
}
map[string]ast.Value {
mut str := '{ '
for key, val in value {
json_key := json2.Any(key).json_str()
- str += ' $json_key: ${to_burntsushi(val)},'
+ str += ' ${json_key}: ${to_burntsushi(val)},'
}
str = str.trim_right(',')
str += ' }'
diff --git a/vlib/toml/tests/encode_and_decode_test.v b/vlib/toml/tests/encode_and_decode_test.v
index 161f3ae766..6d44474b6c 100644
--- a/vlib/toml/tests/encode_and_decode_test.v
+++ b/vlib/toml/tests/encode_and_decode_test.v
@@ -37,7 +37,7 @@ fn (mut e Employee) from_toml(any toml.Any) {
fn test_encode_and_decode() {
x := Employee{'Peter', 28, 95000.5, true, .worker}
s := toml.encode(x)
- eprintln('Employee x: $s')
+ eprintln('Employee x: ${s}')
assert s == r'name = "Peter"
age = 28
salary = 95000.5
@@ -49,7 +49,7 @@ title = 2'
assert false
return
}
- eprintln('Employee y: $y')
+ eprintln('Employee y: ${y}')
assert y.name == 'Peter'
assert y.age == 28
assert y.salary == 95000.5
diff --git a/vlib/toml/tests/iarna.toml-spec-tests_test.v b/vlib/toml/tests/iarna.toml-spec-tests_test.v
index b06e92cf19..bed5e3ce27 100644
--- a/vlib/toml/tests/iarna.toml-spec-tests_test.v
+++ b/vlib/toml/tests/iarna.toml-spec-tests_test.v
@@ -63,7 +63,7 @@ normalize'
fn run(args []string) ?string {
res := os.execute(args.join(' '))
if res.exit_code != 0 {
- return error('${args[0]} failed with return code ${res.exit_code}.\n$res.output')
+ return error('${args[0]} failed with return code ${res.exit_code}.\n${res.output}')
}
return res.output
}
@@ -74,7 +74,7 @@ fn test_iarna_toml_spec_tests() {
test_root := os.join_path(os.dir(this_file), 'testdata', 'iarna', 'toml-test')
if os.is_dir(test_root) {
valid_test_files := os.walk_ext(os.join_path(test_root, 'values'), '.toml')
- println('Testing $valid_test_files.len valid TOML files...')
+ println('Testing ${valid_test_files.len} valid TOML files...')
mut valid := 0
mut e := 0
for i, valid_test_file in valid_test_files {
@@ -85,31 +85,31 @@ fn test_iarna_toml_spec_tests() {
if !do_large_files && valid_test_file.contains('qa-') {
e++
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" LARGE FILE...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" LARGE FILE...')
continue
}
if relative in valid_exceptions {
e++
idx := valid_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" VALID EXCEPTION [$idx/$valid_exceptions.len]...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID EXCEPTION [${idx}/${valid_exceptions.len}]...')
continue
}
if !hide_oks {
- println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
+ println('OK [${i + 1}/${valid_test_files.len}] "${valid_test_file}"...')
}
toml_doc := toml.parse_file(valid_test_file)?
valid++
}
- println('$valid/$valid_test_files.len TOML files were parsed correctly')
+ println('${valid}/${valid_test_files.len} TOML files were parsed correctly')
if valid_exceptions.len > 0 {
- println('TODO Skipped parsing of $e valid TOML files...')
+ println('TODO Skipped parsing of ${e} valid TOML files...')
}
// If the command-line tool `jq` is installed, value tests can be run as well.
if jq != '' {
- println('Testing value output of $valid_test_files.len valid TOML files using "$jq"...')
+ println('Testing value output of ${valid_test_files.len} valid TOML files using "${jq}"...')
if os.exists(compare_work_dir_root) {
os.rmdir_all(compare_work_dir_root)?
@@ -131,14 +131,14 @@ fn test_iarna_toml_spec_tests() {
if relative in valid_exceptions {
e++
idx := valid_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" VALID EXCEPTION [$idx/$valid_exceptions.len]...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID EXCEPTION [${idx}/${valid_exceptions.len}]...')
continue
}
if relative in valid_value_exceptions {
e++
idx := valid_value_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" VALID VALUE EXCEPTION [$idx/$valid_value_exceptions.len]...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID VALUE EXCEPTION [${idx}/${valid_value_exceptions.len}]...')
continue
}
@@ -151,23 +151,23 @@ fn test_iarna_toml_spec_tests() {
mut converted_json_path := ''
if !uses_json_format {
$if windows {
- println('N/A [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
+ println('N/A [${i + 1}/${valid_test_files.len}] "${valid_test_file}"...')
continue
}
if python == '' {
- println('N/A [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
+ println('N/A [${i + 1}/${valid_test_files.len}] "${valid_test_file}"...')
continue
}
if !do_yaml_conversion || relative in yaml_value_exceptions {
e++
idx := yaml_value_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" YAML VALUE EXCEPTION [$idx/$valid_value_exceptions.len]...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" YAML VALUE EXCEPTION [${idx}/${valid_value_exceptions.len}]...')
continue
}
if !do_large_files && valid_test_file.contains('qa-') {
e++
- println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" LARGE FILE...')
+ println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" LARGE FILE...')
continue
}
@@ -183,7 +183,7 @@ fn test_iarna_toml_spec_tests() {
// Uncomment this print to see/check them.
// eprintln(err.msg() + '\n$contents')
e++
- println('ERR [${i + 1}/$valid_test_files.len] "$valid_test_file" EXCEPTION [$e/$valid_value_exceptions.len]...')
+ println('ERR [${i + 1}/${valid_test_files.len}] "${valid_test_file}" EXCEPTION [${e}/${valid_value_exceptions.len}]...')
continue
}
converted_from_yaml = true
@@ -191,7 +191,7 @@ fn test_iarna_toml_spec_tests() {
}
if !hide_oks {
- println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
+ println('OK [${i + 1}/${valid_test_files.len}] "${valid_test_file}"...')
}
toml_doc := toml.parse_file(valid_test_file)?
@@ -206,28 +206,28 @@ fn test_iarna_toml_spec_tests() {
iarna_json := os.read_file(converted_json_path)?
os.write_file(iarna_toml_json_path, iarna_json)?
- v_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', v_toml_json_path]) or {
+ v_normalized_json := run([jq, '-S', '-f "${jq_normalize_path}"', v_toml_json_path]) or {
contents := os.read_file(v_toml_json_path)?
- panic(err.msg() + '\n$contents')
+ panic(err.msg() + '\n${contents}')
}
- cmd := [jq, '-S', '-f "$jq_normalize_path"', iarna_toml_json_path]
+ cmd := [jq, '-S', '-f "${jq_normalize_path}"', iarna_toml_json_path]
iarna_normalized_json := run(cmd) or {
contents := os.read_file(v_toml_json_path)?
- panic(err.msg() + '\n$contents\n\ncmd: ${cmd.join(' ')}')
+ panic(err.msg() + '\n${contents}\n\ncmd: ${cmd.join(' ')}')
}
assert iarna_normalized_json == v_normalized_json
valid++
}
- println('$valid/$valid_test_files.len TOML files were parsed correctly and value checked')
+ println('${valid}/${valid_test_files.len} TOML files were parsed correctly and value checked')
if valid_value_exceptions.len > 0 {
- println('TODO Skipped value checks of $e valid TOML files...')
+ println('TODO Skipped value checks of ${e} valid TOML files...')
}
}
invalid_test_files := os.walk_ext(os.join_path(test_root, 'errors'), '.toml')
- println('Testing $invalid_test_files.len invalid TOML files...')
+ println('Testing ${invalid_test_files.len} invalid TOML files...')
mut invalid := 0
e = 0
for i, invalid_test_file in invalid_test_files {
@@ -238,30 +238,30 @@ fn test_iarna_toml_spec_tests() {
if relative in invalid_exceptions {
e++
idx := invalid_exceptions.index(relative) + 1
- println('SKIP [${i + 1}/$invalid_test_files.len] "$invalid_test_file" INVALID EXCEPTION [$idx/$invalid_exceptions.len]...')
+ println('SKIP [${i + 1}/${invalid_test_files.len}] "${invalid_test_file}" INVALID EXCEPTION [${idx}/${invalid_exceptions.len}]...')
continue
}
if !hide_oks {
- println('OK [${i + 1}/$invalid_test_files.len] "$invalid_test_file"...')
+ println('OK [${i + 1}/${invalid_test_files.len}] "${invalid_test_file}"...')
}
if toml_doc := toml.parse_file(invalid_test_file) {
content_that_should_have_failed := os.read_file(invalid_test_file)?
- println(' This TOML should have failed:\n${'-'.repeat(40)}\n$content_that_should_have_failed\n${'-'.repeat(40)}')
+ println(' This TOML should have failed:\n${'-'.repeat(40)}\n${content_that_should_have_failed}\n${'-'.repeat(40)}')
assert false
} else {
if !hide_oks {
- println(' $err.msg()')
+ println(' ${err.msg()}')
}
assert true
}
invalid++
}
- println('$invalid/$invalid_test_files.len TOML files were parsed correctly')
+ println('${invalid}/${invalid_test_files.len} TOML files were parsed correctly')
if invalid_exceptions.len > 0 {
- println('TODO Skipped parsing of $invalid_exceptions.len invalid TOML files...')
+ println('TODO Skipped parsing of ${invalid_exceptions.len} invalid TOML files...')
}
} else {
- println('No test data directory found in "$test_root"')
+ println('No test data directory found in "${test_root}"')
assert true
}
}
@@ -290,7 +290,7 @@ fn to_iarna(value ast.Value, skip_value_map bool) string {
if skip_value_map {
return json_text
}
- return '{ "type": "string", "value": $json_text }'
+ return '{ "type": "string", "value": ${json_text} }'
}
ast.DateTime {
// Normalization for json
@@ -310,14 +310,14 @@ fn to_iarna(value ast.Value, skip_value_map bool) string {
if skip_value_map {
return json_text
}
- return '{ "type": "$typ", "value": "$json_text" }'
+ return '{ "type": "${typ}", "value": "${json_text}" }'
}
ast.Date {
json_text := json2.Any(value.text).json_str()
if skip_value_map {
return json_text
}
- return '{ "type": "date", "value": $json_text }'
+ return '{ "type": "date", "value": ${json_text} }'
}
ast.Time {
mut json_text := json2.Any(value.text).json_str()
@@ -326,47 +326,47 @@ fn to_iarna(value ast.Value, skip_value_map bool) string {
if skip_value_map {
return json_text
}
- return '{ "type": "time", "value": "$json_text" }'
+ return '{ "type": "time", "value": "${json_text}" }'
}
ast.Bool {
json_text := json2.Any(value.text.bool()).json_str()
if skip_value_map {
return json_text
}
- return '{ "type": "bool", "value": "$json_text" }'
+ return '{ "type": "bool", "value": "${json_text}" }'
}
ast.Null {
json_text := json2.Any(value.text).json_str()
if skip_value_map {
return json_text
}
- return '{ "type": "null", "value": $json_text }'
+ return '{ "type": "null", "value": ${json_text} }'
}
ast.Number {
if value.text.contains('inf') {
mut json_text := value.text.replace('inf', '1.7976931348623157e+308') // Inconsistency ???
if skip_value_map {
- return '$json_text'
+ return '${json_text}'
}
- return '{ "type": "float", "value": "$json_text" }'
+ return '{ "type": "float", "value": "${json_text}" }'
}
if value.text.contains('nan') {
mut json_text := 'null'
if skip_value_map {
- return '$json_text'
+ return '${json_text}'
}
- return '{ "type": "float", "value": "$json_text" }'
+ return '{ "type": "float", "value": "${json_text}" }'
}
if !value.text.starts_with('0x')
&& (value.text.contains('.') || value.text.to_lower().contains('e')) {
- mut val := '$value.f64()'.replace('.e+', '.0e') // json notation
+ mut val := '${value.f64()}'.replace('.e+', '.0e') // json notation
if !val.contains('.') && val != '0' { // json notation
val += '.0'
}
if skip_value_map {
- return '$val'
+ return '${val}'
}
- return '{ "type": "float", "value": "$val" }'
+ return '{ "type": "float", "value": "${val}" }'
}
v := value.i64()
// TODO workaround https://github.com/vlang/v/issues/9507
@@ -377,15 +377,15 @@ fn to_iarna(value ast.Value, skip_value_map bool) string {
return '{ "type": "integer", "value": "-9223372036854775808" }'
}
if skip_value_map {
- return '$v'
+ return '${v}'
}
- return '{ "type": "integer", "value": "$v" }'
+ return '{ "type": "integer", "value": "${v}" }'
}
map[string]ast.Value {
mut str := '{ '
for key, val in value {
json_key := json2.Any(key).json_str()
- str += ' $json_key: ${to_iarna(val, skip_value_map)},'
+ str += ' ${json_key}: ${to_iarna(val, skip_value_map)},'
}
str = str.trim_right(',')
str += ' }'
diff --git a/vlib/toml/tests/key_test.v b/vlib/toml/tests/key_test.v
index ab82eb7a5f..677a2d8513 100644
--- a/vlib/toml/tests/key_test.v
+++ b/vlib/toml/tests/key_test.v
@@ -3,7 +3,7 @@ import toml
import toml.to
fn path_by_extension(ext string) string {
- return os.join_path(os.dir(@VEXE), 'vlib/toml/tests/testdata/key_test.$ext')
+ return os.join_path(os.dir(@VEXE), 'vlib/toml/tests/testdata/key_test.${ext}')
}
fn test_keys() {
diff --git a/vlib/toml/tests/large_toml_file_test.v b/vlib/toml/tests/large_toml_file_test.v
index 3bb4c30440..854b11fdac 100644
--- a/vlib/toml/tests/large_toml_file_test.v
+++ b/vlib/toml/tests/large_toml_file_test.v
@@ -14,8 +14,8 @@ fn test_large_file() {
os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
'.toml'
if os.exists(toml_file) {
- println('Testing parsing of large (${os.file_size(toml_file)} bytes) "$toml_file"...')
+ println('Testing parsing of large (${os.file_size(toml_file)} bytes) "${toml_file}"...')
toml_doc := toml.parse_file(toml_file) or { panic(err) }
- println('OK [1/1] "$toml_file"...') // So it can be checked with `v -stats test ...`
+ println('OK [1/1] "${toml_file}"...') // So it can be checked with `v -stats test ...`
}
}
diff --git a/vlib/toml/tests/toml_bom_test.v b/vlib/toml/tests/toml_bom_test.v
index 9ccf1fc614..e47f8e88cb 100644
--- a/vlib/toml/tests/toml_bom_test.v
+++ b/vlib/toml/tests/toml_bom_test.v
@@ -37,13 +37,13 @@ fn test_toml_with_bom() {
// Re-cycle bad_toml_doc
mut bad_toml_doc := empty_toml_document
bad_toml_doc = toml.parse_text(toml_text_with_utf16_bom) or {
- println(' $err.msg()')
+ println(' ${err.msg()}')
assert true
empty_toml_document
}
bad_toml_doc = toml.parse_text(toml_text_with_utf32_bom) or {
- println(' $err.msg()')
+ println(' ${err.msg()}')
assert true
empty_toml_document
}
diff --git a/vlib/toml/to/to.v b/vlib/toml/to/to.v
index c06f6a3971..e144c56dc4 100644
--- a/vlib/toml/to/to.v
+++ b/vlib/toml/to/to.v
@@ -60,7 +60,7 @@ fn any_to_json(a toml.Any) string {
mut str := '{'
for key, val in a {
json_key := json2.Any(key)
- str += ' $json_key.json_str(): ${any_to_json(val)},'
+ str += ' ${json_key.json_str()}: ${any_to_json(val)},'
}
str = str.trim_right(',')
str += ' }'
diff --git a/vlib/toml/toml.v b/vlib/toml/toml.v
index 5db03511b7..0a8432410a 100644
--- a/vlib/toml/toml.v
+++ b/vlib/toml/toml.v
@@ -164,7 +164,7 @@ pub fn parse_dotted_key(key string) ![]string {
}
if in_string {
return error(@FN +
- ': could not parse key, missing closing string delimiter `$delim.ascii_str()`')
+ ': could not parse key, missing closing string delimiter `${delim.ascii_str()}`')
}
return out
}
diff --git a/vlib/v/ast/ast.v b/vlib/v/ast/ast.v
index fb074c9591..f290a571bc 100644
--- a/vlib/v/ast/ast.v
+++ b/vlib/v/ast/ast.v
@@ -2157,7 +2157,7 @@ pub fn all_registers(mut t Table, arch pref.Arch) map[string]ScopeObject {
hash_index := name.index('#') or {
panic('all_registers: no hashtag found')
}
- assembled_name := '${name[..hash_index]}$i${name[hash_index + 1..]}'
+ assembled_name := '${name[..hash_index]}${i}${name[hash_index + 1..]}'
res[assembled_name] = AsmRegister{
name: assembled_name
typ: t.bitsize_to_type(bit_size)
@@ -2216,7 +2216,7 @@ fn gen_all_registers(mut t Table, without_numbers []string, with_numbers map[str
for name, max_num in with_numbers {
for i in 0 .. max_num {
hash_index := name.index('#') or { panic('all_registers: no hashtag found') }
- assembled_name := '${name[..hash_index]}$i${name[hash_index + 1..]}'
+ assembled_name := '${name[..hash_index]}${i}${name[hash_index + 1..]}'
res[assembled_name] = AsmRegister{
name: assembled_name
typ: t.bitsize_to_type(bit_size)
diff --git a/vlib/v/ast/attr.v b/vlib/v/ast/attr.v
index 24f72afded..22071c5269 100644
--- a/vlib/v/ast/attr.v
+++ b/vlib/v/ast/attr.v
@@ -30,22 +30,22 @@ pub mut:
}
pub fn (a Attr) debug() string {
- return 'Attr{ name: "$a.name", has_arg: $a.has_arg, arg: "$a.arg", kind: $a.kind, ct_expr: $a.ct_expr, ct_opt: $a.ct_opt, ct_skip: $a.ct_skip}'
+ return 'Attr{ name: "${a.name}", has_arg: ${a.has_arg}, arg: "${a.arg}", kind: ${a.kind}, ct_expr: ${a.ct_expr}, ct_opt: ${a.ct_opt}, ct_skip: ${a.ct_skip}}'
}
// str returns the string representation without square brackets
pub fn (a Attr) str() string {
mut s := ''
mut arg := if a.has_arg {
- s += '$a.name: '
+ s += '${a.name}: '
a.arg
} else {
a.name
}
s += match a.kind {
.plain, .number, .bool { arg }
- .string { "'$arg'" }
- .comptime_define { 'if $arg' }
+ .string { "'${arg}'" }
+ .comptime_define { 'if ${arg}' }
}
return s
}
diff --git a/vlib/v/ast/cflags.v b/vlib/v/ast/cflags.v
index 559325bb26..a1f7ba6b27 100644
--- a/vlib/v/ast/cflags.v
+++ b/vlib/v/ast/cflags.v
@@ -71,7 +71,7 @@ pub fn (mut t Table) parse_cflag(cflg string, mod string, ctimedefines []string)
}
if (name in ['-I', '-l', '-L']) && value == '' {
hint := if name == '-l' { 'library name' } else { 'path' }
- return error('bad #flag `$flag_orig`: missing $hint after `$name`')
+ return error('bad #flag `${flag_orig}`: missing ${hint} after `${name}`')
}
cf := cflag.CFlag{
mod: mod
diff --git a/vlib/v/ast/embed_file.v b/vlib/v/ast/embed_file.v
index 6096659220..1e29745b27 100644
--- a/vlib/v/ast/embed_file.v
+++ b/vlib/v/ast/embed_file.v
@@ -6,5 +6,5 @@ module ast
import hash.fnv1a
pub fn (e EmbeddedFile) hash() u64 {
- return fnv1a.sum64_string('$e.apath, $e.compression_type, $e.is_compressed, $e.len')
+ return fnv1a.sum64_string('${e.apath}, ${e.compression_type}, ${e.is_compressed}, ${e.len}')
}
diff --git a/vlib/v/ast/scope.v b/vlib/v/ast/scope.v
index 3722a7275d..317b3ea4ca 100644
--- a/vlib/v/ast/scope.v
+++ b/vlib/v/ast/scope.v
@@ -190,16 +190,16 @@ pub fn (sc Scope) show(depth int, max_depth int) string {
for _ in 0 .. depth * 4 {
indent += ' '
}
- out += '$indent# $sc.start_pos - $sc.end_pos\n'
+ out += '${indent}# ${sc.start_pos} - ${sc.end_pos}\n'
for _, obj in sc.objects {
match obj {
- ConstField { out += '$indent * const: $obj.name - $obj.typ\n' }
- Var { out += '$indent * var: $obj.name - $obj.typ\n' }
+ ConstField { out += '${indent} * const: ${obj.name} - ${obj.typ}\n' }
+ Var { out += '${indent} * var: ${obj.name} - ${obj.typ}\n' }
else {}
}
}
for _, field in sc.struct_fields {
- out += '$indent * struct_field: $field.struct_type $field.name - $field.typ\n'
+ out += '${indent} * struct_field: ${field.struct_type} ${field.name} - ${field.typ}\n'
}
if max_depth == 0 || depth < max_depth - 1 {
for i, _ in sc.children {
diff --git a/vlib/v/ast/str.v b/vlib/v/ast/str.v
index 9dd9c9ee33..5cf4fd4f71 100644
--- a/vlib/v/ast/str.v
+++ b/vlib/v/ast/str.v
@@ -21,21 +21,21 @@ pub fn (node &FnDecl) modname() string {
// it is used in table.used_fns and v.markused.
pub fn (node &FnDecl) fkey() string {
if node.is_method {
- return '${int(node.receiver.typ)}.$node.name'
+ return '${int(node.receiver.typ)}.${node.name}'
}
return node.name
}
pub fn (node &Fn) fkey() string {
if node.is_method {
- return '${int(node.receiver_type)}.$node.name'
+ return '${int(node.receiver_type)}.${node.name}'
}
return node.name
}
pub fn (node &CallExpr) fkey() string {
if node.is_method {
- return '${int(node.receiver_type)}.$node.name'
+ return '${int(node.receiver_type)}.${node.name}'
}
return node.name
}
@@ -142,9 +142,9 @@ fn stringify_fn_after_name(node &FnDecl, mut f strings.Builder, t &Table, cur_mo
f.write_string(' struct {')
struct_ := arg_sym.info as Struct
for field in struct_.fields {
- f.write_string(' $field.name ${t.type_to_str(field.typ)}')
+ f.write_string(' ${field.name} ${t.type_to_str(field.typ)}')
if field.has_default_expr {
- f.write_string(' = $field.default_expr')
+ f.write_string(' = ${field.default_expr}')
}
}
if struct_.fields.len > 0 {
@@ -253,27 +253,6 @@ pub fn (lit &StringInterLiteral) get_fspec_braces(i int) (string, bool) {
}
break
}
- CallExpr {
- if sub_expr.args.len != 0 || sub_expr.concrete_types.len != 0
- || sub_expr.or_block.kind in [.propagate_option, .propagate_result]
- || sub_expr.or_block.stmts.len > 0 {
- needs_braces = true
- } else if sub_expr.left is CallExpr {
- sub_expr = sub_expr.left
- continue
- } else if sub_expr.left is CastExpr || sub_expr.left is IndexExpr {
- needs_braces = true
- }
- break
- }
- SelectorExpr {
- if sub_expr.field_name[0] == `@` {
- needs_braces = true
- break
- }
- sub_expr = sub_expr.expr
- continue
- }
else {
needs_braces = true
break
@@ -312,18 +291,18 @@ pub fn (x Expr) str() string {
return x.str()
}
DumpExpr {
- return 'dump($x.expr.str())'
+ return 'dump(${x.expr.str()})'
}
ArrayInit {
mut fields := []string{}
if x.has_len {
- fields << 'len: $x.len_expr.str()'
+ fields << 'len: ${x.len_expr.str()}'
}
if x.has_cap {
- fields << 'cap: $x.cap_expr.str()'
+ fields << 'cap: ${x.cap_expr.str()}'
}
if x.has_default {
- fields << 'init: $x.default_expr.str()'
+ fields << 'init: ${x.default_expr.str()}'
}
if fields.len > 0 {
return '[]T{${fields.join(', ')}}'
@@ -332,10 +311,10 @@ pub fn (x Expr) str() string {
}
}
AsCast {
- return '$x.expr.str() as ${global_table.type_to_str(x.typ)}'
+ return '${x.expr.str()} as ${global_table.type_to_str(x.typ)}'
}
AtExpr {
- return '$x.val'
+ return '${x.val}'
}
CTempVar {
return x.orig.str()
@@ -344,7 +323,7 @@ pub fn (x Expr) str() string {
return x.val.str()
}
CastExpr {
- return '${x.typname}($x.expr.str())'
+ return '${x.typname}(${x.expr.str()})'
}
CallExpr {
sargs := args2str(x.args)
@@ -356,45 +335,45 @@ pub fn (x Expr) str() string {
''
}
if x.is_method {
- return '${x.left.str()}.${x.name}($sargs)$propagate_suffix'
+ return '${x.left.str()}.${x.name}(${sargs})${propagate_suffix}'
}
if x.name.starts_with('${x.mod}.') {
- return util.strip_main_name('${x.name}($sargs)$propagate_suffix')
+ return util.strip_main_name('${x.name}(${sargs})${propagate_suffix}')
}
if x.mod == '' && x.name == '' {
- return x.left.str() + '($sargs)$propagate_suffix'
+ return x.left.str() + '(${sargs})${propagate_suffix}'
}
if x.name.contains('.') {
- return '${x.name}($sargs)$propagate_suffix'
+ return '${x.name}(${sargs})${propagate_suffix}'
}
- return '${x.mod}.${x.name}($sargs)$propagate_suffix'
+ return '${x.mod}.${x.name}(${sargs})${propagate_suffix}'
}
CharLiteral {
- return '`$x.val`'
+ return '`${x.val}`'
}
Comment {
if x.is_multi {
lines := x.text.split_into_lines()
- return '/* $lines.len lines comment */'
+ return '/* ${lines.len} lines comment */'
} else {
text := x.text.trim('\x01').trim_space()
- return '´// $text´'
+ return '´// ${text}´'
}
}
ComptimeSelector {
- return '${x.left}.$$x.field_expr'
+ return '${x.left}.$${x.field_expr}'
}
ConcatExpr {
return x.vals.map(it.str()).join(',')
}
EnumVal {
- return '.$x.val'
+ return '.${x.val}'
}
FloatLiteral, IntegerLiteral {
return x.val.clone()
}
GoExpr {
- return 'go $x.call_expr'
+ return 'go ${x.call_expr}'
}
Ident {
return x.name.clone()
@@ -419,16 +398,16 @@ pub fn (x Expr) str() string {
return parts.join('')
}
IndexExpr {
- return '$x.left.str()[$x.index.str()]'
+ return '${x.left.str()}[${x.index.str()}]'
}
InfixExpr {
- return '$x.left.str() $x.op.str() $x.right.str()'
+ return '${x.left.str()} ${x.op.str()} ${x.right.str()}'
}
MapInit {
mut pairs := []string{}
for ik, kv in x.keys {
mv := x.vals[ik].str()
- pairs << '$kv: $mv'
+ pairs << '${kv}: ${mv}'
}
return 'map{ ${pairs.join(' ')} }'
}
@@ -436,13 +415,13 @@ pub fn (x Expr) str() string {
return 'nil'
}
ParExpr {
- return '($x.expr)'
+ return '(${x.expr})'
}
PostfixExpr {
if x.op == .question {
- return '$x.expr ?'
+ return '${x.expr} ?'
}
- return '$x.expr$x.op'
+ return '${x.expr}${x.op}'
}
PrefixExpr {
return x.op.str() + x.right.str()
@@ -450,10 +429,10 @@ pub fn (x Expr) str() string {
RangeExpr {
mut s := '..'
if x.has_low {
- s = '$x.low ' + s
+ s = '${x.low} ' + s
}
if x.has_high {
- s = s + ' $x.high'
+ s = s + ' ${x.high}'
}
return s
}
@@ -461,16 +440,16 @@ pub fn (x Expr) str() string {
return 'ast.SelectExpr'
}
SelectorExpr {
- return '${x.expr.str()}.$x.field_name'
+ return '${x.expr.str()}.${x.field_name}'
}
SizeOf {
if x.is_type {
return 'sizeof(${global_table.type_to_str(x.typ)})'
}
- return 'sizeof($x.expr)'
+ return 'sizeof(${x.expr})'
}
OffsetOf {
- return '__offsetof(${global_table.type_to_str(x.struct_type)}, $x.field)'
+ return '__offsetof(${global_table.type_to_str(x.struct_type)}, ${x.field})'
}
StringInterLiteral {
mut res := strings.new_builder(50)
@@ -495,19 +474,19 @@ pub fn (x Expr) str() string {
return res.str()
}
StringLiteral {
- return "'$x.val'"
+ return "'${x.val}'"
}
TypeNode {
- return 'TypeNode($x.typ)'
+ return 'TypeNode(${x.typ})'
}
TypeOf {
- return 'typeof($x.expr.str())'
+ return 'typeof(${x.expr.str()})'
}
Likely {
- return '_likely_($x.expr.str())'
+ return '_likely_(${x.expr.str()})'
}
UnsafeExpr {
- return 'unsafe { $x.expr }'
+ return 'unsafe { ${x.expr} }'
}
None {
return 'none'
@@ -531,7 +510,7 @@ pub fn (x Expr) str() string {
}
StructInit {
sname := global_table.sym(x.typ).name
- return '$sname{....}'
+ return '${sname}{....}'
}
ArrayDecompose {
return 'ast.ArrayDecompose'
@@ -564,14 +543,14 @@ pub fn (x Expr) str() string {
return 'ast.SqlExpr'
}
}
- return '[unhandled expr type $x.type_name()]'
+ return '[unhandled expr type ${x.type_name()}]'
}
pub fn (a CallArg) str() string {
if a.is_mut {
- return 'mut $a.expr.str()'
+ return 'mut ${a.expr.str()}'
}
- return '$a.expr.str()'
+ return '${a.expr.str()}'
}
pub fn args2str(args []CallArg) string {
@@ -583,9 +562,9 @@ pub fn args2str(args []CallArg) string {
}
pub fn (node &BranchStmt) str() string {
- mut s := '$node.kind'
+ mut s := '${node.kind}'
if node.label.len > 0 {
- s += ' $node.label'
+ s += ' ${node.label}'
}
return s
}
@@ -593,7 +572,7 @@ pub fn (node &BranchStmt) str() string {
pub fn (node Stmt) str() string {
match node {
AssertStmt {
- return 'assert $node.expr'
+ return 'assert ${node.expr}'
}
AssignStmt {
mut out := ''
@@ -609,7 +588,7 @@ pub fn (node Stmt) str() string {
out += ','
}
}
- out += ' $node.op.str() '
+ out += ' ${node.op.str()} '
for i, val in node.right {
out += val.str()
if i < node.right.len - 1 {
@@ -629,31 +608,31 @@ pub fn (node Stmt) str() string {
return node.expr.str()
}
FnDecl {
- return 'fn ${node.name}( $node.params.len params ) { $node.stmts.len stmts }'
+ return 'fn ${node.name}( ${node.params.len} params ) { ${node.stmts.len} stmts }'
}
EnumDecl {
- return 'enum $node.name { $node.fields.len fields }'
+ return 'enum ${node.name} { ${node.fields.len} fields }'
}
ForStmt {
if node.is_inf {
return 'for {'
}
- return 'for $node.cond {'
+ return 'for ${node.cond} {'
}
Module {
- return 'module $node.name'
+ return 'module ${node.name}'
}
Import {
- mut out := 'import $node.mod'
+ mut out := 'import ${node.mod}'
if node.alias.len > 0 {
- out += ' as $node.alias'
+ out += ' as ${node.alias}'
}
return out
}
Return {
mut out := 'return'
for i, val in node.exprs {
- out += ' $val'
+ out += ' ${val}'
if i < node.exprs.len - 1 {
out += ','
}
@@ -661,17 +640,17 @@ pub fn (node Stmt) str() string {
return out
}
StructDecl {
- return 'struct $node.name { $node.fields.len fields }'
+ return 'struct ${node.name} { ${node.fields.len} fields }'
}
else {
- return '[unhandled stmt str type: $node.type_name() ]'
+ return '[unhandled stmt str type: ${node.type_name()} ]'
}
}
}
fn field_to_string(f ConstField) string {
x := f.name.trim_string_left(f.mod + '.')
- return '$x = $f.expr'
+ return '${x} = ${f.expr}'
}
pub fn (e ComptimeForKind) str() string {
diff --git a/vlib/v/ast/table.v b/vlib/v/ast/table.v
index 716c943525..26cf87bfe2 100644
--- a/vlib/v/ast/table.v
+++ b/vlib/v/ast/table.v
@@ -232,7 +232,7 @@ pub fn (t &Table) fn_type_signature(f &Fn) string {
opt := if f.return_type.has_flag(.optional) { 'option_' } else { '' }
res := if f.return_type.has_flag(.result) { 'result_' } else { '' }
- sig += '__$opt$res$sym.cname'
+ sig += '__${opt}${res}${sym.cname}'
}
return sig
}
@@ -246,7 +246,7 @@ pub fn (t &Table) fn_type_source_signature(f &Fn) string {
}
// Note: arg name is only added for fmt, else it would causes errors with generics
if t.is_fmt && arg.name.len > 0 {
- sig += '$arg.name '
+ sig += '${arg.name} '
}
arg_type_sym := t.sym(arg.typ)
sig += arg_type_sym.name
@@ -262,11 +262,11 @@ pub fn (t &Table) fn_type_source_signature(f &Fn) string {
} else if f.return_type != void_type {
return_type_sym := t.sym(f.return_type)
if f.return_type.has_flag(.optional) {
- sig += ' ?$return_type_sym.name'
+ sig += ' ?${return_type_sym.name}'
} else if f.return_type.has_flag(.result) {
- sig += ' !$return_type_sym.name'
+ sig += ' !${return_type_sym.name}'
} else {
- sig += ' $return_type_sym.name'
+ sig += ' ${return_type_sym.name}'
}
}
return sig
@@ -275,10 +275,10 @@ pub fn (t &Table) fn_type_source_signature(f &Fn) string {
pub fn (t &Table) is_same_method(f &Fn, func &Fn) string {
if f.return_type != func.return_type {
s := t.type_to_str(f.return_type)
- return 'expected return type `$s`'
+ return 'expected return type `${s}`'
}
if f.params.len != func.params.len {
- return 'expected $f.params.len parameter(s), not $func.params.len'
+ return 'expected ${f.params.len} parameter(s), not ${func.params.len}'
}
// interface name() other mut name() : error
@@ -298,9 +298,9 @@ pub fn (t &Table) is_same_method(f &Fn, func &Fn) string {
exps := t.type_to_str(f.params[i].typ)
gots := t.type_to_str(func.params[i].typ)
if has_unexpected_type {
- return 'expected `$exps`, not `$gots` for parameter $i'
+ return 'expected `${exps}`, not `${gots}` for parameter ${i}'
} else {
- return 'expected `$exps` which is immutable, not `mut $gots`'
+ return 'expected `${exps}` which is immutable, not `mut ${gots}`'
}
}
}
@@ -339,7 +339,7 @@ pub fn (mut t TypeSymbol) register_method(new_fn Fn) int {
pub fn (t &Table) register_aggregate_method(mut sym TypeSymbol, name string) !Fn {
if sym.kind != .aggregate {
- t.panic('Unexpected type symbol: $sym.kind')
+ t.panic('Unexpected type symbol: ${sym.kind}')
}
agg_info := sym.info as Aggregate
// an aggregate always has at least 2 types
@@ -352,10 +352,10 @@ pub fn (t &Table) register_aggregate_method(mut sym TypeSymbol, name string) !Fn
found_once = true
new_fn = type_method
} else if !new_fn.method_equals(type_method) {
- return error('method `${t.type_to_str(typ)}.$name` signature is different')
+ return error('method `${t.type_to_str(typ)}.${name}` signature is different')
}
} else {
- return error('unknown method: `${t.type_to_str(typ)}.$name`')
+ return error('unknown method: `${t.type_to_str(typ)}.${name}`')
}
}
// register the method in the aggregate, so lookup is faster next time
@@ -387,7 +387,7 @@ pub fn (t &Table) find_method(s &TypeSymbol, name string) !Fn {
}
ts = t.type_symbols[ts.parent_idx]
}
- return error('unknown method `$name`')
+ return error('unknown method `${name}`')
}
[params]
@@ -433,7 +433,7 @@ pub fn (t &Table) find_method_from_embeds(sym &TypeSymbol, method_name string) !
if found_methods.len == 1 {
return found_methods[0], embed_of_found_methods
} else if found_methods.len > 1 {
- return error('ambiguous method `$method_name`')
+ return error('ambiguous method `${method_name}`')
}
} else if sym.info is Interface {
mut found_methods := []Fn{}
@@ -453,7 +453,7 @@ pub fn (t &Table) find_method_from_embeds(sym &TypeSymbol, method_name string) !
if found_methods.len == 1 {
return found_methods[0], embed_of_found_methods
} else if found_methods.len > 1 {
- return error('ambiguous method `$method_name`')
+ return error('ambiguous method `${method_name}`')
}
} else if sym.info is Aggregate {
for typ in sym.info.types {
@@ -492,7 +492,7 @@ pub fn (t &Table) get_embed_methods(sym &TypeSymbol) []Fn {
fn (t &Table) register_aggregate_field(mut sym TypeSymbol, name string) !StructField {
if sym.kind != .aggregate {
- t.panic('Unexpected type symbol: $sym.kind')
+ t.panic('Unexpected type symbol: ${sym.kind}')
}
mut agg_info := sym.info as Aggregate
// an aggregate always has at least 2 types
@@ -505,7 +505,7 @@ fn (t &Table) register_aggregate_field(mut sym TypeSymbol, name string) !StructF
found_once = true
new_field = type_field
} else if new_field.typ != type_field.typ {
- return error('field `${t.type_to_str(typ)}.$name` type is different')
+ return error('field `${t.type_to_str(typ)}.${name}` type is different')
}
new_field = StructField{
...new_field
@@ -513,7 +513,7 @@ fn (t &Table) register_aggregate_field(mut sym TypeSymbol, name string) !StructF
is_pub: new_field.is_pub && type_field.is_pub
}
} else {
- return error('type `${t.type_to_str(typ)}` has no field or method `$name`')
+ return error('type `${t.type_to_str(typ)}` has no field or method `${name}`')
}
}
agg_info.fields << new_field
@@ -568,7 +568,7 @@ pub fn (t &Table) find_field(s &TypeSymbol, name string) !StructField {
}
// mut info := ts.info as SumType
// TODO a more detailed error so that it's easier to fix?
- return error('field `$name` does not exist or have the same type in all sumtype variants')
+ return error('field `${name}` does not exist or have the same type in all sumtype variants')
}
else {}
}
@@ -600,7 +600,7 @@ pub fn (t &Table) find_field_from_embeds(sym &TypeSymbol, field_name string) !(S
if found_fields.len == 1 {
return found_fields[0], embeds_of_found_fields
} else if found_fields.len > 1 {
- return error('ambiguous field `$field_name`')
+ return error('ambiguous field `${field_name}`')
}
} else if sym.info is Aggregate {
for typ in sym.info.types {
@@ -713,7 +713,7 @@ pub fn (t &Table) sym(typ Type) &TypeSymbol {
return t.type_symbols[idx]
}
// this should never happen
- t.panic('sym: invalid type (typ=$typ idx=$idx). Compiler bug. This should never happen. Please report the bug using `v bug file.v`.
+ t.panic('sym: invalid type (typ=${typ} idx=${idx}). Compiler bug. This should never happen. Please report the bug using `v bug file.v`.
')
return ast.invalid_type_symbol
}
@@ -730,7 +730,7 @@ pub fn (t &Table) final_sym(typ Type) &TypeSymbol {
return t.type_symbols[idx]
}
// this should never happen
- t.panic('final_sym: invalid type (typ=$typ idx=$idx). Compiler bug. This should never happen. Please report the bug using `v bug file.v`.')
+ t.panic('final_sym: invalid type (typ=${typ} idx=${idx}). Compiler bug. This should never happen. Please report the bug using `v bug file.v`.')
return ast.invalid_type_symbol
}
@@ -765,7 +765,7 @@ pub fn (t &Table) unaliased_type(typ Type) Type {
fn (mut t Table) rewrite_already_registered_symbol(typ TypeSymbol, existing_idx int) int {
existing_symbol := t.type_symbols[existing_idx]
$if trace_rewrite_already_registered_symbol ? {
- eprintln('>> rewrite_already_registered_symbol sym: $typ.name | existing_idx: $existing_idx | existing_symbol: $existing_symbol.name')
+ eprintln('>> rewrite_already_registered_symbol sym: ${typ.name} | existing_idx: ${existing_idx} | existing_symbol: ${existing_symbol.name}')
}
if existing_symbol.kind == .placeholder {
// override placeholder
@@ -805,7 +805,7 @@ pub fn (mut t Table) register_sym(sym TypeSymbol) int {
mut idx := -2
$if trace_register_sym ? {
defer {
- eprintln('>> register_sym: ${sym.name:-60} | idx: $idx')
+ eprintln('>> register_sym: ${sym.name:-60} | idx: ${idx}')
}
}
mut existing_idx := t.type_idxs[sym.name]
@@ -889,7 +889,7 @@ pub fn (t &Table) array_name(elem_type Type) string {
ptr := if elem_type.is_ptr() { '&'.repeat(elem_type.nr_muls()) } else { '' }
opt := if elem_type.has_flag(.optional) { '?' } else { '' }
res := if elem_type.has_flag(.result) { '!' } else { '' }
- return '[]$opt$res$ptr$elem_type_sym.name'
+ return '[]${opt}${res}${ptr}${elem_type_sym.name}'
}
[inline]
@@ -900,9 +900,9 @@ pub fn (t &Table) array_cname(elem_type Type) string {
res := if elem_type.has_flag(.result) { '_result_' } else { '' }
if elem_type_sym.cname.contains('<') {
type_name := elem_type_sym.cname.replace_each(['<', '_T_', ', ', '_', '>', ''])
- return 'Array_$opt$res$type_name$suffix'
+ return 'Array_${opt}${res}${type_name}${suffix}'
} else {
- return 'Array_$opt$res$elem_type_sym.cname$suffix'
+ return 'Array_${opt}${res}${elem_type_sym.cname}${suffix}'
}
}
@@ -919,20 +919,20 @@ pub fn (t &Table) array_fixed_name(elem_type Type, size int, size_expr Expr) str
} else {
size_expr.str()
}
- return '[$size_str]$opt$res$ptr$elem_type_sym.name'
+ return '[${size_str}]${opt}${res}${ptr}${elem_type_sym.name}'
}
[inline]
pub fn (t &Table) array_fixed_cname(elem_type Type, size int) string {
elem_type_sym := t.sym(elem_type)
- suffix := if elem_type.is_ptr() { '_ptr$elem_type.nr_muls()' } else { '' }
+ suffix := if elem_type.is_ptr() { '_ptr${elem_type.nr_muls()}' } else { '' }
opt := if elem_type.has_flag(.optional) { '_option_' } else { '' }
res := if elem_type.has_flag(.result) { '_result_' } else { '' }
if elem_type_sym.cname.contains('<') {
type_name := elem_type_sym.cname.replace_each(['<', '_T_', ', ', '_', '>', ''])
- return 'Array_fixed_$opt$res$type_name${suffix}_$size'
+ return 'Array_fixed_${opt}${res}${type_name}${suffix}_${size}'
} else {
- return 'Array_fixed_$opt$res$elem_type_sym.cname${suffix}_$size'
+ return 'Array_fixed_${opt}${res}${elem_type_sym.cname}${suffix}_${size}'
}
}
@@ -945,7 +945,7 @@ pub fn (t &Table) chan_name(elem_type Type, is_mut bool) string {
} else if elem_type.is_ptr() {
ptr = '&'
}
- return 'chan $ptr$elem_type_sym.name'
+ return 'chan ${ptr}${elem_type_sym.name}'
}
[inline]
@@ -957,7 +957,7 @@ pub fn (t &Table) chan_cname(elem_type Type, is_mut bool) string {
} else if elem_type.is_ptr() {
suffix = '_ptr'
}
- return 'chan_$elem_type_sym.cname' + suffix
+ return 'chan_${elem_type_sym.cname}' + suffix
}
[inline]
@@ -967,7 +967,7 @@ pub fn (t &Table) promise_name(return_type Type) string {
}
return_type_sym := t.sym(return_type)
- return 'Promise<$return_type_sym.name, JS.Any>'
+ return 'Promise<${return_type_sym.name}, JS.Any>'
}
[inline]
@@ -995,7 +995,7 @@ pub fn (t &Table) thread_name(return_type Type) string {
ptr := if return_type.is_ptr() { '&' } else { '' }
opt := if return_type.has_flag(.optional) { '?' } else { '' }
res := if return_type.has_flag(.result) { '!' } else { '' }
- return 'thread $opt$res$ptr$return_type_sym.name'
+ return 'thread ${opt}${res}${ptr}${return_type_sym.name}'
}
[inline]
@@ -1013,7 +1013,7 @@ pub fn (t &Table) thread_cname(return_type Type) string {
suffix := if return_type.is_ptr() { '_ptr' } else { '' }
opt := if return_type.has_flag(.optional) { '_option_' } else { '' }
res := if return_type.has_flag(.result) { '_result_' } else { '' }
- return '__v_thread_$opt$res$return_type_sym.cname$suffix'
+ return '__v_thread_${opt}${res}${return_type_sym.cname}${suffix}'
}
// map_source_name generates the original name for the v source.
@@ -1025,7 +1025,7 @@ pub fn (t &Table) map_name(key_type Type, value_type Type) string {
ptr := if value_type.is_ptr() { '&'.repeat(value_type.nr_muls()) } else { '' }
opt := if value_type.has_flag(.optional) { '?' } else { '' }
res := if value_type.has_flag(.result) { '!' } else { '' }
- return 'map[$key_type_sym.name]$opt$res$ptr$value_type_sym.name'
+ return 'map[${key_type_sym.name}]${opt}${res}${ptr}${value_type_sym.name}'
}
[inline]
@@ -1037,9 +1037,9 @@ pub fn (t &Table) map_cname(key_type Type, value_type Type) string {
res := if value_type.has_flag(.result) { '_result_' } else { '' }
if value_type_sym.cname.contains('<') {
type_name := value_type_sym.cname.replace_each(['<', '_T_', ', ', '_', '>', ''])
- return 'Map_${key_type_sym.cname}_$opt$res$type_name$suffix'
+ return 'Map_${key_type_sym.cname}_${opt}${res}${type_name}${suffix}'
} else {
- return 'Map_${key_type_sym.cname}_$opt$res$value_type_sym.cname$suffix'
+ return 'Map_${key_type_sym.cname}_${opt}${res}${value_type_sym.cname}${suffix}'
}
}
@@ -1189,8 +1189,8 @@ pub fn (mut t Table) find_or_register_multi_return(mr_typs []Type) int {
for i, mr_typ in mr_typs {
mr_type_sym := t.sym(mktyp(mr_typ))
ref, cref := if mr_typ.is_ptr() { '&', 'ref_' } else { '', '' }
- name += '$ref$mr_type_sym.name'
- cname += '_$cref$mr_type_sym.cname'
+ name += '${ref}${mr_type_sym.name}'
+ cname += '_${cref}${mr_type_sym.cname}'
if i < mr_typs.len - 1 {
name += ', '
}
@@ -1438,7 +1438,7 @@ pub fn (mut t Table) complete_interface_check() {
}
if t.does_type_implement_interface(tk, idecl.typ) {
$if trace_types_implementing_each_interface ? {
- eprintln('>>> tsym.mod: $tsym.mod | tsym.name: $tsym.name | tk: $tk | idecl.name: $idecl.name | idecl.typ: $idecl.typ')
+ eprintln('>>> tsym.mod: ${tsym.mod} | tsym.name: ${tsym.name} | tk: ${tk} | idecl.name: ${idecl.name} | idecl.typ: ${idecl.typ}')
}
t.iface_types[idecl.name] << tk
}
@@ -1707,7 +1707,7 @@ pub fn (mut t Table) resolve_generic_to_concrete(generic_type Type, generic_name
}
Struct, Interface, SumType {
if sym.info.is_generic {
- mut nrt := '$sym.name<'
+ mut nrt := '${sym.name}<'
for i in 0 .. sym.info.generic_types.len {
if ct := t.resolve_generic_to_concrete(sym.info.generic_types[i],
generic_names, concrete_types)
@@ -1839,7 +1839,7 @@ pub fn (mut t Table) unwrap_generic_type(typ Type, generic_names []string, concr
if !ts.info.is_generic {
return typ
}
- nrt = '$ts.name<'
+ nrt = '${ts.name}<'
c_nrt = '${ts.cname}_T_'
for i in 0 .. ts.info.generic_types.len {
if ct := t.resolve_generic_to_concrete(ts.info.generic_types[i], generic_names,
@@ -2066,7 +2066,7 @@ pub fn (mut t Table) generic_insts_to_concrete() {
Struct {
mut parent_info := parent.info as Struct
if !parent_info.is_generic {
- util.verror('generic error', 'struct `$parent.name` is not a generic struct, cannot instantiate to the concrete types')
+ util.verror('generic error', 'struct `${parent.name}` is not a generic struct, cannot instantiate to the concrete types')
continue
}
mut fields := parent_info.fields.clone()
@@ -2106,13 +2106,13 @@ pub fn (mut t Table) generic_insts_to_concrete() {
}
}
} else {
- util.verror('generic error', 'the number of generic types of struct `$parent.name` is inconsistent with the concrete types')
+ util.verror('generic error', 'the number of generic types of struct `${parent.name}` is inconsistent with the concrete types')
}
}
Interface {
mut parent_info := parent.info as Interface
if !parent_info.is_generic {
- util.verror('generic error', 'interface `$parent.name` is not a generic interface, cannot instantiate to the concrete types')
+ util.verror('generic error', 'interface `${parent.name}` is not a generic interface, cannot instantiate to the concrete types')
continue
}
if parent_info.generic_types.len == info.concrete_types.len {
@@ -2163,13 +2163,13 @@ pub fn (mut t Table) generic_insts_to_concrete() {
sym.kind = parent.kind
sym.methods = all_methods
} else {
- util.verror('generic error', 'the number of generic types of interface `$parent.name` is inconsistent with the concrete types')
+ util.verror('generic error', 'the number of generic types of interface `${parent.name}` is inconsistent with the concrete types')
}
}
SumType {
mut parent_info := parent.info as SumType
if !parent_info.is_generic {
- util.verror('generic error', 'sumtype `$parent.name` is not a generic sumtype, cannot instantiate to the concrete types')
+ util.verror('generic error', 'sumtype `${parent.name}` is not a generic sumtype, cannot instantiate to the concrete types')
continue
}
if parent_info.generic_types.len == info.concrete_types.len {
@@ -2209,7 +2209,7 @@ pub fn (mut t Table) generic_insts_to_concrete() {
sym.is_pub = true
sym.kind = parent.kind
} else {
- util.verror('generic error', 'the number of generic types of sumtype `$parent.name` is inconsistent with the concrete types')
+ util.verror('generic error', 'the number of generic types of sumtype `${parent.name}` is inconsistent with the concrete types')
}
}
else {}
diff --git a/vlib/v/ast/types.v b/vlib/v/ast/types.v
index 0cae9edaba..e91c6043a8 100644
--- a/vlib/v/ast/types.v
+++ b/vlib/v/ast/types.v
@@ -209,7 +209,7 @@ pub fn (t Type) ref() Type {
pub fn (t Type) deref() Type {
nr_muls := (int(t) >> 16) & 0xff
if nr_muls == 0 {
- panic('deref: type `$t` is not a pointer')
+ panic('deref: type `${t}` is not a pointer')
}
return int(t) & 0xff00ffff | int(u32(nr_muls - 1) << 16)
}
@@ -242,8 +242,8 @@ pub fn (t Type) has_flag(flag TypeFlag) bool {
pub fn (ts TypeSymbol) debug() []string {
mut res := []string{}
ts.dbg_common(mut res)
- res << 'info: $ts.info'
- res << 'methods ($ts.methods.len): ' + ts.methods.map(it.str()).join(', ')
+ res << 'info: ${ts.info}'
+ res << 'methods (${ts.methods.len}): ' + ts.methods.map(it.str()).join(', ')
return res
}
@@ -255,18 +255,18 @@ pub fn (ts TypeSymbol) dbg() []string {
}
fn (ts TypeSymbol) dbg_common(mut res []string) {
- res << 'idx: 0x$ts.idx.hex()'
- res << 'parent_idx: 0x$ts.parent_idx.hex()'
- res << 'mod: $ts.mod'
- res << 'name: $ts.name'
- res << 'cname: $ts.cname'
- res << 'kind: $ts.kind'
- res << 'is_pub: $ts.is_pub'
- res << 'language: $ts.language'
+ res << 'idx: 0x${ts.idx.hex()}'
+ res << 'parent_idx: 0x${ts.parent_idx.hex()}'
+ res << 'mod: ${ts.mod}'
+ res << 'name: ${ts.name}'
+ res << 'cname: ${ts.cname}'
+ res << 'kind: ${ts.kind}'
+ res << 'is_pub: ${ts.is_pub}'
+ res << 'language: ${ts.language}'
}
pub fn (t Type) str() string {
- return 'ast.Type(0x$t.hex() = ${u32(t)})'
+ return 'ast.Type(0x${t.hex()} = ${u32(t)})'
}
pub fn (t &Table) type_str(typ Type) string {
@@ -278,7 +278,7 @@ pub fn (t Type) debug() []string {
mut res := []string{}
res << 'idx: 0x${t.idx().hex():-8}'
res << 'type: 0x${t.hex():-8}'
- res << 'nr_muls: $t.nr_muls()'
+ res << 'nr_muls: ${t.nr_muls()}'
if t.has_flag(.optional) {
res << 'optional'
}
@@ -631,7 +631,7 @@ pub fn (t TypeSymbol) str() string {
[noreturn]
fn (t &TypeSymbol) no_info_panic(fname string) {
- panic('$fname: no info for type: $t.name')
+ panic('${fname}: no info for type: ${t.name}')
}
[inline]
@@ -1192,7 +1192,7 @@ pub fn (t &Table) type_to_str_using_aliases(typ Type, import_aliases map[string]
} else {
if sym.info is Array {
elem_str := t.type_to_str_using_aliases(sym.info.elem_type, import_aliases)
- res = '[]$elem_str'
+ res = '[]${elem_str}'
} else {
res = 'array'
}
@@ -1202,9 +1202,9 @@ pub fn (t &Table) type_to_str_using_aliases(typ Type, import_aliases map[string]
info := sym.info as ArrayFixed
elem_str := t.type_to_str_using_aliases(info.elem_type, import_aliases)
if info.size_expr is EmptyExpr {
- res = '[$info.size]$elem_str'
+ res = '[${info.size}]${elem_str}'
} else {
- res = '[$info.size_expr]$elem_str'
+ res = '[${info.size_expr}]${elem_str}'
}
}
.chan {
@@ -1218,7 +1218,7 @@ pub fn (t &Table) type_to_str_using_aliases(typ Type, import_aliases map[string]
elem_type = elem_type.set_nr_muls(elem_type.nr_muls() - 1)
}
elem_str := t.type_to_str_using_aliases(elem_type, import_aliases)
- res = 'chan $mut_str$elem_str'
+ res = 'chan ${mut_str}${elem_str}'
}
}
.function {
@@ -1246,7 +1246,7 @@ pub fn (t &Table) type_to_str_using_aliases(typ Type, import_aliases map[string]
info := sym.info as Map
key_str := t.type_to_str_using_aliases(info.key_type, import_aliases)
val_str := t.type_to_str_using_aliases(info.value_type, import_aliases)
- res = 'map[$key_str]$val_str'
+ res = 'map[${key_str}]${val_str}'
}
.multi_return {
res = '('
@@ -1337,10 +1337,10 @@ pub fn (t &Table) type_to_str_using_aliases(typ Type, import_aliases map[string]
res = strings.repeat(`&`, nr_muls) + res
}
if typ.has_flag(.optional) {
- res = '?$res'
+ res = '?${res}'
}
if typ.has_flag(.result) {
- res = '!$res'
+ res = '!${res}'
}
return res
}
@@ -1627,7 +1627,7 @@ pub fn (s Struct) get_field(name string) StructField {
if field := s.find_field(name) {
return field
}
- panic('unknown field `$name`')
+ panic('unknown field `${name}`')
}
pub fn (s &SumType) find_field(name string) ?StructField {
diff --git a/vlib/v/builder/builder.v b/vlib/v/builder/builder.v
index 7049e0b309..2c3afd67bd 100644
--- a/vlib/v/builder/builder.v
+++ b/vlib/v/builder/builder.v
@@ -190,14 +190,14 @@ pub fn (mut b Builder) parse_imports() {
import_path := b.find_module_path(mod, ast_file.path) or {
// v.parsers[i].error_with_token_index('cannot import module "$mod" (not found)', v.parsers[i].import_ast.get_import_tok_idx(mod))
// break
- b.parsed_files[i].errors << b.error_with_pos('cannot import module "$mod" (not found)',
+ b.parsed_files[i].errors << b.error_with_pos('cannot import module "${mod}" (not found)',
ast_file.path, imp.pos)
break
}
v_files := b.v_files_from_dir(import_path)
if v_files.len == 0 {
// v.parsers[i].error_with_token_index('cannot import module "$mod" (no .v files in "$import_path")', v.parsers[i].import_ast.get_import_tok_idx(mod))
- b.parsed_files[i].errors << b.error_with_pos('cannot import module "$mod" (no .v files in "$import_path")',
+ b.parsed_files[i].errors << b.error_with_pos('cannot import module "${mod}" (no .v files in "${import_path}")',
ast_file.path, imp.pos)
continue
}
@@ -212,7 +212,7 @@ pub fn (mut b Builder) parse_imports() {
sname := name.all_after_last('.')
smod := mod.all_after_last('.')
if sname != smod {
- msg := 'bad module definition: $ast_file.path imports module "$mod" but $file.path is defined as module `$name`'
+ msg := 'bad module definition: ${ast_file.path} imports module "${mod}" but ${file.path} is defined as module `${name}`'
b.parsed_files[i].errors << b.error_with_pos(msg, ast_file.path, imp.pos)
}
}
@@ -313,13 +313,13 @@ pub fn (b Builder) v_files_from_dir(dir string) []string {
println('looks like you are trying to build V with an old command')
println('use `v -o v cmd/v` instead of `v -o v compiler`')
}
- verror("$dir doesn't exist")
+ verror("${dir} doesn't exist")
} else if !os.is_dir(dir) {
- verror("$dir isn't a directory!")
+ verror("${dir} isn't a directory!")
}
mut files := os.ls(dir) or { panic(err) }
if b.pref.is_verbose {
- println('v_files_from_dir ("$dir")')
+ println('v_files_from_dir ("${dir}")')
}
res := b.pref.should_compile_filtered_files(dir, files)
if res.len == 0 {
@@ -327,7 +327,7 @@ pub fn (b Builder) v_files_from_dir(dir string) []string {
src_path := os.join_path(dir, 'src')
if os.is_dir(src_path) {
if b.pref.is_verbose {
- println('v_files_from_dir ("$src_path") (/src/)')
+ println('v_files_from_dir ("${src_path}") (/src/)')
}
files = os.ls(src_path) or { panic(err) }
return b.pref.should_compile_filtered_files(src_path, files)
@@ -382,11 +382,11 @@ pub fn (b &Builder) find_module_path(mod string, fpath string) ?string {
for search_path in module_lookup_paths {
try_path := os.join_path(search_path, mod_path)
if b.pref.is_verbose {
- println(' >> trying to find $mod in $try_path ..')
+ println(' >> trying to find ${mod} in ${try_path} ..')
}
if os.is_dir(try_path) {
if b.pref.is_verbose {
- println(' << found $try_path .')
+ println(' << found ${try_path} .')
}
return try_path
}
@@ -397,14 +397,14 @@ pub fn (b &Builder) find_module_path(mod string, fpath string) ?string {
p1 := path_parts[0..i].join(os.path_separator)
try_path := os.join_path(p1, mod_path)
if b.pref.is_verbose {
- println(' >> trying to find $mod in $try_path ..')
+ println(' >> trying to find ${mod} in ${try_path} ..')
}
if os.is_dir(try_path) {
return try_path
}
}
smodule_lookup_paths := module_lookup_paths.join(', ')
- return error('module "$mod" not found in:\n$smodule_lookup_paths')
+ return error('module "${mod}" not found in:\n${smodule_lookup_paths}')
}
pub fn (b &Builder) show_total_warns_and_errors_stats() {
@@ -427,9 +427,9 @@ pub fn (b &Builder) show_total_warns_and_errors_stats() {
nstring := util.bold(nr_notices.str())
if b.pref.check_only {
- println('summary: $estring V errors, $wstring V warnings, $nstring V notices')
+ println('summary: ${estring} V errors, ${wstring} V warnings, ${nstring} V notices')
} else {
- println('checker summary: $estring V errors, $wstring V warnings, $nstring V notices')
+ println('checker summary: ${estring} V errors, ${wstring} V warnings, ${nstring} V notices')
}
}
}
@@ -457,7 +457,7 @@ pub fn (mut b Builder) print_warnings_and_errors() {
if !b.pref.skip_warnings {
for err in file.notices {
kind := if b.pref.is_verbose {
- '$err.reporter notice #$b.nr_notices:'
+ '${err.reporter} notice #${b.nr_notices}:'
} else {
'notice:'
}
@@ -469,7 +469,7 @@ pub fn (mut b Builder) print_warnings_and_errors() {
for file in b.parsed_files {
for err in file.errors {
kind := if b.pref.is_verbose {
- '$err.reporter error #$b.nr_errors:'
+ '${err.reporter} error #${b.nr_errors}:'
} else {
'error:'
}
@@ -481,7 +481,7 @@ pub fn (mut b Builder) print_warnings_and_errors() {
if !b.pref.skip_warnings {
for err in file.warnings {
kind := if b.pref.is_verbose {
- '$err.reporter warning #$b.nr_warnings:'
+ '${err.reporter} warning #${b.nr_warnings}:'
} else {
'warning:'
}
@@ -498,15 +498,15 @@ pub fn (mut b Builder) print_warnings_and_errors() {
}
if b.pref.is_verbose && b.checker.nr_warnings > 1 {
- println('$b.checker.nr_warnings warnings')
+ println('${b.checker.nr_warnings} warnings')
}
if b.pref.is_verbose && b.checker.nr_notices > 1 {
- println('$b.checker.nr_notices notices')
+ println('${b.checker.nr_notices} notices')
}
if b.checker.nr_notices > 0 && !b.pref.skip_warnings {
for err in b.checker.notices {
kind := if b.pref.is_verbose {
- '$err.reporter notice #$b.checker.nr_notices:'
+ '${err.reporter} notice #${b.checker.nr_notices}:'
} else {
'notice:'
}
@@ -516,7 +516,7 @@ pub fn (mut b Builder) print_warnings_and_errors() {
if b.checker.nr_warnings > 0 && !b.pref.skip_warnings {
for err in b.checker.warnings {
kind := if b.pref.is_verbose {
- '$err.reporter warning #$b.checker.nr_warnings:'
+ '${err.reporter} warning #${b.checker.nr_warnings}:'
} else {
'warning:'
}
@@ -525,12 +525,12 @@ pub fn (mut b Builder) print_warnings_and_errors() {
}
//
if b.pref.is_verbose && b.checker.nr_errors > 1 {
- println('$b.checker.nr_errors errors')
+ println('${b.checker.nr_errors} errors')
}
if b.checker.nr_errors > 0 {
for err in b.checker.errors {
kind := if b.pref.is_verbose {
- '$err.reporter error #$b.checker.nr_errors:'
+ '${err.reporter} error #${b.checker.nr_errors}:'
} else {
'error:'
}
@@ -563,7 +563,7 @@ pub fn (mut b Builder) print_warnings_and_errors() {
}
if redefines.len > 0 {
util.show_compiler_message('builder error:',
- message: 'redefinition of function `$fn_name`'
+ message: 'redefinition of function `${fn_name}`'
)
for redefine in redefines {
util.show_compiler_message('conflicting declaration:',
diff --git a/vlib/v/builder/builder_test.v b/vlib/v/builder/builder_test.v
index ab392686dc..425106f8df 100644
--- a/vlib/v/builder/builder_test.v
+++ b/vlib/v/builder/builder_test.v
@@ -33,7 +33,7 @@ fn test_conditional_executable_removal() {
assert executable !in after_run_file_list
assert os.execute('${os.quoted_path(vexe)} .').exit_code == 0
- assert os.execute('./$executable').output.trim_space() == 'Hello World!'
+ assert os.execute('./${executable}').output.trim_space() == 'Hello World!'
after_compilation__ := os.ls(test_path)?
dump(after_compilation__)
assert executable in after_compilation__
diff --git a/vlib/v/builder/cbuilder/cbuilder.v b/vlib/v/builder/cbuilder/cbuilder.v
index 6bbb84bafa..b0a3fccb76 100644
--- a/vlib/v/builder/cbuilder/cbuilder.v
+++ b/vlib/v/builder/cbuilder/cbuilder.v
@@ -60,7 +60,7 @@ pub fn gen_c(mut b builder.Builder, v_files []string) string {
pub fn build_c(mut b builder.Builder, v_files []string, out_file string) {
b.out_name_c = out_file
b.pref.out_name_c = os.real_path(out_file)
- b.info('build_c($out_file)')
+ b.info('build_c(${out_file})')
output2 := gen_c(mut b, v_files)
os.write_file(out_file, output2) or { panic(err) }
if b.pref.is_stats {
diff --git a/vlib/v/builder/cbuilder/parallel_cc.v b/vlib/v/builder/cbuilder/parallel_cc.v
index f751f46631..133c2e3a32 100644
--- a/vlib/v/builder/cbuilder/parallel_cc.v
+++ b/vlib/v/builder/cbuilder/parallel_cc.v
@@ -9,7 +9,7 @@ import sync.pool
fn parallel_cc(mut b builder.Builder, header string, res string, out_str string, out_fn_start_pos []int) {
c_files := util.nr_jobs
- println('> c_files: $c_files | util.nr_jobs: $util.nr_jobs')
+ println('> c_files: ${c_files} | util.nr_jobs: ${util.nr_jobs}')
out_h := header.replace_once('static char * v_typeof_interface_IError', 'char * v_typeof_interface_IError')
os.write_file('out.h', out_h) or { panic(err) }
// Write generated stuff in `g.out` before and after the `out_fn_start_pos` locations,
@@ -33,7 +33,7 @@ fn parallel_cc(mut b builder.Builder, header string, res string, out_str string,
// out_fn_start_pos.sort()
for i, fn_pos in out_fn_start_pos {
if prev_fn_pos >= out_str.len || fn_pos >= out_str.len || prev_fn_pos > fn_pos {
- println('> EXITING i=$i out of $out_fn_start_pos.len prev_pos=$prev_fn_pos fn_pos=$fn_pos')
+ println('> EXITING i=${i} out of ${out_fn_start_pos.len} prev_pos=${prev_fn_pos} fn_pos=${fn_pos}')
break
}
if i == 0 {
@@ -60,9 +60,9 @@ fn parallel_cc(mut b builder.Builder, header string, res string, out_str string,
nthreads := c_files + 2
pp.set_max_jobs(nthreads)
pp.work_on_items(o_postfixes)
- eprintln('> C compilation on $nthreads threads, working on $o_postfixes.len files took: $sw.elapsed().milliseconds() ms')
+ eprintln('> C compilation on ${nthreads} threads, working on ${o_postfixes.len} files took: ${sw.elapsed().milliseconds()} ms')
link_cmd := '${os.quoted_path(cbuilder.cc_compiler)} -o ${os.quoted_path(b.pref.out_name)} out_0.o ${fnames.map(it.replace('.c',
- '.o')).join(' ')} out_x.o -lpthread $cbuilder.cc_ldflags'
+ '.o')).join(' ')} out_x.o -lpthread ${cbuilder.cc_ldflags}'
sw_link := time.new_stopwatch()
link_res := os.execute(link_cmd)
eprint_time('link_cmd', link_cmd, link_res, sw_link)
@@ -71,14 +71,14 @@ fn parallel_cc(mut b builder.Builder, header string, res string, out_str string,
fn build_parallel_o_cb(mut p pool.PoolProcessor, idx int, wid int) voidptr {
postfix := p.get_item(idx)
sw := time.new_stopwatch()
- cmd := '${os.quoted_path(cbuilder.cc_compiler)} $cbuilder.cc_cflags -c -w -o out_${postfix}.o out_${postfix}.c'
+ cmd := '${os.quoted_path(cbuilder.cc_compiler)} ${cbuilder.cc_cflags} -c -w -o out_${postfix}.o out_${postfix}.c'
res := os.execute(cmd)
eprint_time('c cmd', cmd, res, sw)
return unsafe { nil }
}
fn eprint_time(label string, cmd string, res os.Result, sw time.StopWatch) {
- eprintln('> $label: `$cmd` => $res.exit_code , $sw.elapsed().milliseconds() ms')
+ eprintln('> ${label}: `${cmd}` => ${res.exit_code} , ${sw.elapsed().milliseconds()} ms')
if res.exit_code != 0 {
eprintln(res.output)
}
diff --git a/vlib/v/builder/cc.v b/vlib/v/builder/cc.v
index 26f9166e07..d824f59e1a 100644
--- a/vlib/v/builder/cc.v
+++ b/vlib/v/builder/cc.v
@@ -53,7 +53,7 @@ fn (mut v Builder) post_process_c_compiler_output(res os.Result) {
for tmpfile in v.pref.cleanup_files {
if os.is_file(tmpfile) {
if v.pref.is_verbose {
- eprintln('>> remove tmp file: $tmpfile')
+ eprintln('>> remove tmp file: ${tmpfile}')
}
os.rm(tmpfile) or {}
}
@@ -89,9 +89,9 @@ fn (mut v Builder) post_process_c_compiler_output(res os.Result) {
fn (mut v Builder) show_cc(cmd string, response_file string, response_file_content string) {
if v.pref.is_verbose || v.pref.show_cc {
- println('> C compiler cmd: $cmd')
+ println('> C compiler cmd: ${cmd}')
if v.pref.show_cc && !v.pref.no_rsp {
- println('> C compiler response file "$response_file":')
+ println('> C compiler response file "${response_file}":')
println(response_file_content)
}
}
@@ -335,7 +335,7 @@ fn (mut v Builder) setup_ccompiler_options(ccompiler string) {
}
}
// The C file we are compiling
- ccoptions.source_args << '"$v.out_name_c"'
+ ccoptions.source_args << '"${v.out_name_c}"'
if v.pref.os == .macos {
ccoptions.source_args << '-x none'
}
@@ -385,8 +385,8 @@ fn (mut v Builder) setup_ccompiler_options(ccompiler string) {
ccoptions.env_cflags = os.getenv('CFLAGS')
ccoptions.env_ldflags = os.getenv('LDFLAGS')
$if trace_ccoptions ? {
- println('>>> setup_ccompiler_options ccompiler: $ccompiler')
- println('>>> setup_ccompiler_options ccoptions: $ccoptions')
+ println('>>> setup_ccompiler_options ccompiler: ${ccompiler}')
+ println('>>> setup_ccompiler_options ccoptions: ${ccoptions}')
}
v.ccoptions = ccoptions
// setup the cache too, so that different compilers/options do not interfere:
@@ -447,7 +447,7 @@ fn (mut v Builder) setup_output_name() {
v.pref.out_name += '.exe'
}
// Output executable name
- v.log('cc() isprod=$v.pref.is_prod outname=$v.pref.out_name')
+ v.log('cc() isprod=${v.pref.is_prod} outname=${v.pref.out_name}')
if v.pref.is_shared {
if !v.pref.out_name.ends_with(v.ccoptions.shared_postfix) {
v.pref.out_name += v.ccoptions.shared_postfix
@@ -457,18 +457,18 @@ fn (mut v Builder) setup_output_name() {
v.pref.out_name = v.pref.cache_manager.mod_postfix_with_key2cpath(v.pref.path,
'.o', v.pref.path) // v.out_name
if v.pref.is_verbose {
- println('Building $v.pref.path to $v.pref.out_name ...')
+ println('Building ${v.pref.path} to ${v.pref.out_name} ...')
}
- v.pref.cache_manager.mod_save(v.pref.path, '.description.txt', v.pref.path, '${v.pref.path:-30} @ $v.pref.cache_manager.vopts\n') or {
+ v.pref.cache_manager.mod_save(v.pref.path, '.description.txt', v.pref.path, '${v.pref.path:-30} @ ${v.pref.cache_manager.vopts}\n') or {
panic(err)
}
// println('v.ast.imports:')
// println(v.ast.imports)
}
if os.is_dir(v.pref.out_name) {
- verror("'$v.pref.out_name' is a directory")
+ verror("'${v.pref.out_name}' is a directory")
}
- v.ccoptions.o_args << '-o "$v.pref.out_name"'
+ v.ccoptions.o_args << '-o "${v.pref.out_name}"'
}
pub fn (mut v Builder) cc() {
@@ -476,7 +476,7 @@ pub fn (mut v Builder) cc() {
return
}
if v.pref.is_verbose {
- println('builder.cc() pref.out_name="$v.pref.out_name"')
+ println('builder.cc() pref.out_name="${v.pref.out_name}"')
}
if v.pref.only_check_syntax {
if v.pref.is_verbose {
@@ -502,7 +502,7 @@ pub fn (mut v Builder) cc() {
ends_with_js := v.pref.out_name.ends_with('.js')
if ends_with_c || ends_with_js {
v.pref.skip_running = true
- msg_mv := 'os.mv_by_cp $v.out_name_c => $v.pref.out_name'
+ msg_mv := 'os.mv_by_cp ${v.out_name_c} => ${v.pref.out_name}'
util.timing_start(msg_mv)
// v.out_name_c may be on a different partition than v.out_name
os.mv_by_cp(v.out_name_c, v.pref.out_name) or { panic(err) }
@@ -546,7 +546,7 @@ pub fn (mut v Builder) cc() {
for file in v.parsed_files {
if file.imports.any(it.mod.contains('sync')) {
$if trace_stdatomic_gen ? {
- eprintln('> creating $cpp_atomic_h_path ...')
+ eprintln('> creating ${cpp_atomic_h_path} ...')
}
cppgenv := '${@VEXEROOT}/thirdparty/stdatomic/nix/cpp/gen.v'
os.execute('${os.quoted_path(vexe)} run ${os.quoted_path(cppgenv)} ${os.quoted_path(ccompiler)}')
@@ -569,16 +569,16 @@ pub fn (mut v Builder) cc() {
all_args := v.all_args(v.ccoptions)
v.dump_c_options(all_args)
str_args := all_args.join(' ')
- mut cmd := '${os.quoted_path(ccompiler)} $str_args'
+ mut cmd := '${os.quoted_path(ccompiler)} ${str_args}'
mut response_file := ''
mut response_file_content := str_args
if !v.pref.no_rsp {
response_file = '${v.out_name_c}.rsp'
response_file_content = str_args.replace('\\', '\\\\')
- rspexpr := '@$response_file'
+ rspexpr := '@${response_file}'
cmd = '${os.quoted_path(ccompiler)} ${os.quoted_path(rspexpr)}'
os.write_file(response_file, response_file_content) or {
- verror('Unable to write to C response file "$response_file"')
+ verror('Unable to write to C response file "${response_file}"')
}
}
if !v.ccoptions.debug_mode {
@@ -606,16 +606,16 @@ pub fn (mut v Builder) cc() {
v.show_c_compiler_output(res)
}
os.chdir(original_pwd) or {}
- vcache.dlog('| Builder.' + @FN, '> v.pref.use_cache: $v.pref.use_cache | v.pref.retry_compilation: $v.pref.retry_compilation')
- vcache.dlog('| Builder.' + @FN, '> cmd res.exit_code: $res.exit_code | cmd: $cmd')
- vcache.dlog('| Builder.' + @FN, '> response_file_content:\n$response_file_content')
+ vcache.dlog('| Builder.' + @FN, '> v.pref.use_cache: ${v.pref.use_cache} | v.pref.retry_compilation: ${v.pref.retry_compilation}')
+ vcache.dlog('| Builder.' + @FN, '> cmd res.exit_code: ${res.exit_code} | cmd: ${cmd}')
+ vcache.dlog('| Builder.' + @FN, '> response_file_content:\n${response_file_content}')
if res.exit_code != 0 {
if ccompiler.contains('tcc.exe') {
// a TCC problem? Retry with the system cc:
if tried_compilation_commands.len > 1 {
- eprintln('Recompilation loop detected (ccompiler: $ccompiler):')
+ eprintln('Recompilation loop detected (ccompiler: ${ccompiler}):')
for recompile_command in tried_compilation_commands {
- eprintln(' $recompile_command')
+ eprintln(' ${recompile_command}')
}
exit(101)
}
@@ -623,14 +623,14 @@ pub fn (mut v Builder) cc() {
tcc_output = res
v.pref.default_c_compiler()
if v.pref.is_verbose {
- eprintln('Compilation with tcc failed. Retrying with $v.pref.ccompiler ...')
+ eprintln('Compilation with tcc failed. Retrying with ${v.pref.ccompiler} ...')
}
continue
}
}
if res.exit_code == 127 {
verror('C compiler error, while attempting to run: \n' +
- '-----------------------------------------------------------\n' + '$cmd\n' +
+ '-----------------------------------------------------------\n' + '${cmd}\n' +
'-----------------------------------------------------------\n' +
'Probably your C compiler is missing. \n' +
'Please reinstall it, or make it available in your PATH.\n\n' +
@@ -649,22 +649,22 @@ pub fn (mut v Builder) cc() {
}
// Print the C command
if v.pref.is_verbose {
- println('$ccompiler')
+ println('${ccompiler}')
println('=========\n')
}
break
}
if v.pref.compress {
- ret := os.system('strip $v.pref.out_name')
+ ret := os.system('strip ${v.pref.out_name}')
if ret != 0 {
println('strip failed')
return
}
// Note: upx --lzma can sometimes fail with NotCompressibleException
// See https://github.com/vlang/v/pull/3528
- mut ret2 := os.system('upx --lzma -qqq $v.pref.out_name')
+ mut ret2 := os.system('upx --lzma -qqq ${v.pref.out_name}')
if ret2 != 0 {
- ret2 = os.system('upx -qqq $v.pref.out_name')
+ ret2 = os.system('upx -qqq ${v.pref.out_name}')
}
if ret2 != 0 {
println('upx failed')
@@ -696,9 +696,9 @@ fn (mut b Builder) ensure_linuxroot_exists(sysroot string) {
}
if !os.is_dir(sysroot) {
println('Downloading files for Linux cross compilation (~22MB) ...')
- os.system('git clone $crossrepo_url $sysroot')
+ os.system('git clone ${crossrepo_url} ${sysroot}')
if !os.exists(sysroot_git_config_path) {
- verror('Failed to clone `$crossrepo_url` to `$sysroot`')
+ verror('Failed to clone `${crossrepo_url}` to `${sysroot}`')
}
os.chmod(os.join_path(sysroot, 'ld.lld'), 0o755) or { panic(err) }
}
@@ -723,10 +723,10 @@ fn (mut b Builder) cc_linux_cross() {
cc_args << '-c'
cc_args << '-target x86_64-linux-gnu'
cc_args << defines
- cc_args << '-I $sysroot/include '
+ cc_args << '-I ${sysroot}/include '
cc_args << others
- cc_args << '-o "$obj_file"'
- cc_args << '-c "$b.out_name_c"'
+ cc_args << '-o "${obj_file}"'
+ cc_args << '-c "${b.out_name_c}"'
cc_args << libs
b.dump_c_options(cc_args)
mut cc_name := 'cc'
@@ -745,15 +745,15 @@ fn (mut b Builder) cc_linux_cross() {
verror(cc_res.output)
return
}
- mut linker_args := ['-L$sysroot/usr/lib/x86_64-linux-gnu/', '-L$sysroot/lib/x86_64-linux-gnu',
- '--sysroot=$sysroot', '-v', '-o $out_name', '-m elf_x86_64',
- '-dynamic-linker /lib/x86_64-linux-gnu/ld-linux-x86-64.so.2',
- '$sysroot/crt1.o $sysroot/crti.o $obj_file', '-lc', '-lcrypto', '-lssl', '-lpthread',
- '$sysroot/crtn.o', '-lm']
+ mut linker_args := ['-L${sysroot}/usr/lib/x86_64-linux-gnu/',
+ '-L${sysroot}/lib/x86_64-linux-gnu', '--sysroot=${sysroot}', '-v', '-o ${out_name}',
+ '-m elf_x86_64', '-dynamic-linker /lib/x86_64-linux-gnu/ld-linux-x86-64.so.2',
+ '${sysroot}/crt1.o ${sysroot}/crti.o ${obj_file}', '-lc', '-lcrypto', '-lssl', '-lpthread',
+ '${sysroot}/crtn.o', '-lm']
linker_args << cflags.c_options_only_object_files()
// -ldl
b.dump_c_options(linker_args)
- mut ldlld := '$sysroot/ld.lld'
+ mut ldlld := '${sysroot}/ld.lld'
$if windows {
ldlld = 'ld.lld.exe'
}
@@ -782,8 +782,8 @@ fn (mut c Builder) cc_windows_cross() {
}
c.pref.out_name = os.quoted_path(c.pref.out_name)
mut args := []string{}
- args << '$c.pref.cflags'
- args << '-o $c.pref.out_name'
+ args << '${c.pref.cflags}'
+ args << '-o ${c.pref.out_name}'
args << '-w -L.'
//
cflags := c.get_os_cflags()
@@ -807,13 +807,13 @@ fn (mut c Builder) cc_windows_cross() {
}
mut libs := []string{}
if false && c.pref.build_mode == .default_mode {
- builtin_o := '"$pref.default_module_path/vlib/builtin.o"'
+ builtin_o := '"${pref.default_module_path}/vlib/builtin.o"'
libs << builtin_o
if !os.exists(builtin_o) {
- verror('$builtin_o not found')
+ verror('${builtin_o} not found')
}
for imp in c.table.imports {
- libs << '"$pref.default_module_path/vlib/${imp}.o"'
+ libs << '"${pref.default_module_path}/vlib/${imp}.o"'
}
}
// add the thirdparty .o files, produced by all the #flag directives:
@@ -882,7 +882,7 @@ fn (mut c Builder) cc_windows_cross() {
}
fn (mut b Builder) build_thirdparty_obj_files() {
- b.log('build_thirdparty_obj_files: v.ast.cflags: $b.table.cflags')
+ b.log('build_thirdparty_obj_files: v.ast.cflags: ${b.table.cflags}')
for flag in b.get_os_cflags() {
if flag.value.ends_with('.o') {
rest_of_module_flags := b.get_rest_of_module_cflags(flag)
@@ -901,10 +901,10 @@ fn (mut v Builder) build_thirdparty_obj_file(mod string, path string, moduleflag
obj_path := os.real_path(path)
cfile := '${obj_path[..obj_path.len - 2]}.c'
opath := v.pref.cache_manager.mod_postfix_with_key2cpath(mod, '.o', obj_path)
- mut rebuild_reason_message := '$obj_path not found, building it in $opath ...'
+ mut rebuild_reason_message := '${obj_path} not found, building it in ${opath} ...'
if os.exists(opath) {
if os.exists(cfile) && os.file_last_mod_unix(opath) < os.file_last_mod_unix(cfile) {
- rebuild_reason_message = '$opath is older than $cfile, rebuilding ...'
+ rebuild_reason_message = '${opath} is older than ${cfile}, rebuilding ...'
} else {
return
}
@@ -931,18 +931,18 @@ fn (mut v Builder) build_thirdparty_obj_file(mod string, path string, moduleflag
all_options << '-c ${os.quoted_path(cfile)}'
cc_options := v.thirdparty_object_args(v.ccoptions, all_options).join(' ')
- cmd := '${os.quoted_path(v.pref.ccompiler)} $cc_options'
+ cmd := '${os.quoted_path(v.pref.ccompiler)} ${cc_options}'
$if trace_thirdparty_obj_files ? {
- println('>>> build_thirdparty_obj_files cmd: $cmd')
+ println('>>> build_thirdparty_obj_files cmd: ${cmd}')
}
res := os.execute(cmd)
os.chdir(current_folder) or {}
if res.exit_code != 0 {
- eprintln('failed thirdparty object build cmd:\n$cmd')
+ eprintln('failed thirdparty object build cmd:\n${cmd}')
verror(res.output)
return
}
- v.pref.cache_manager.mod_save(mod, '.description.txt', obj_path, '${obj_path:-30} @ $cmd\n') or {
+ v.pref.cache_manager.mod_save(mod, '.description.txt', obj_path, '${obj_path:-30} @ ${cmd}\n') or {
panic(err)
}
if res.output != '' {
diff --git a/vlib/v/builder/cc_windows.v b/vlib/v/builder/cc_windows.v
index 2fe71b77e4..fa580afd61 100644
--- a/vlib/v/builder/cc_windows.v
+++ b/vlib/v/builder/cc_windows.v
@@ -13,7 +13,7 @@ pub fn (mut v Builder) find_win_cc() ! {
ccompiler_version_res := os.execute('${os.quoted_path(v.pref.ccompiler)} -v')
if ccompiler_version_res.exit_code != 0 {
if v.pref.is_verbose {
- println('$v.pref.ccompiler not found, looking for msvc...')
+ println('${v.pref.ccompiler} not found, looking for msvc...')
}
find_msvc(v.pref.m64) or {
if v.pref.is_verbose {
diff --git a/vlib/v/builder/compile.v b/vlib/v/builder/compile.v
index 0948e82dd8..d6195366e2 100644
--- a/vlib/v/builder/compile.v
+++ b/vlib/v/builder/compile.v
@@ -86,12 +86,12 @@ fn (mut b Builder) run_compiled_executable_and_exit() {
run_file := if b.pref.backend.is_js() {
node_basename := $if windows { 'node.exe' } $else { 'node' }
os.find_abs_path_of_executable(node_basename) or {
- panic('Could not find `$node_basename` in system path. Do you have Node.js installed?')
+ panic('Could not find `${node_basename}` in system path. Do you have Node.js installed?')
}
} else if b.pref.backend == .golang {
go_basename := $if windows { 'go.exe' } $else { 'go' }
os.find_abs_path_of_executable(go_basename) or {
- panic('Could not find `$go_basename` in system path. Do you have Go installed?')
+ panic('Could not find `${go_basename}` in system path. Do you have Go installed?')
}
} else {
compiled_file
@@ -106,7 +106,7 @@ fn (mut b Builder) run_compiled_executable_and_exit() {
mut run_process := os.new_process(run_file)
run_process.set_args(run_args)
if b.pref.is_verbose {
- println('running $run_process.filename with arguments $run_process.args')
+ println('running ${run_process.filename} with arguments ${run_process.args}')
}
// Ignore sigint and sigquit while running the compiled file,
// so ^C doesn't prevent v from deleting the compiled file.
@@ -132,7 +132,7 @@ fn eshcb(_ os.Signal) {
[noreturn]
fn serror(reason string, e IError) {
- eprintln('could not $reason handler')
+ eprintln('could not ${reason} handler')
panic(e)
}
@@ -141,11 +141,11 @@ fn (mut v Builder) cleanup_run_executable_after_exit(exefile string) {
return
}
if v.pref.reuse_tmpc {
- v.pref.vrun_elog('keeping executable: $exefile , because -keepc was passed')
+ v.pref.vrun_elog('keeping executable: ${exefile} , because -keepc was passed')
return
}
if !v.executable_exists {
- v.pref.vrun_elog('remove run executable: $exefile')
+ v.pref.vrun_elog('remove run executable: ${exefile}')
os.rm(exefile) or {}
}
}
@@ -173,7 +173,7 @@ pub fn (mut v Builder) set_module_lookup_paths() {
v.module_search_paths << v.compiled_dir
x := os.join_path(v.compiled_dir, 'modules')
if v.pref.is_verbose {
- println('x: "$x"')
+ println('x: "${x}"')
}
v.module_search_paths << os.join_path(v.compiled_dir, 'modules')
v.module_search_paths << v.pref.lookup_path
@@ -188,7 +188,7 @@ pub fn (v Builder) get_builtin_files() []string {
v.log('v.pref.no_builtin is true, get_builtin_files == []')
return []
}
- v.log('v.pref.lookup_path: $v.pref.lookup_path')
+ v.log('v.pref.lookup_path: ${v.pref.lookup_path}')
// Lookup for built-in folder in lookup path.
// Assumption: `builtin/` folder implies usable implementation of builtin
for location in v.pref.lookup_path {
@@ -226,7 +226,7 @@ pub fn (v &Builder) get_user_files() []string {
return []
}
mut dir := v.pref.path
- v.log('get_v_files($dir)')
+ v.log('get_v_files(${dir})')
// Need to store user files separately, because they have to be added after
// libs, but we dont know which libs need to be added yet
mut user_files := []string{}
@@ -263,7 +263,7 @@ pub fn (v &Builder) get_user_files() []string {
v_test_runner_prelude = os.join_path(preludes_path, 'test_runner_${v_test_runner_prelude}.v')
}
if !os.is_file(v_test_runner_prelude) || !os.is_readable(v_test_runner_prelude) {
- eprintln('test runner error: File $v_test_runner_prelude should be readable.')
+ eprintln('test runner error: File ${v_test_runner_prelude} should be readable.')
verror('supported test runners are: tap, json, simple, normal')
}
user_files << v_test_runner_prelude
@@ -280,7 +280,7 @@ pub fn (v &Builder) get_user_files() []string {
is_test := v.pref.is_test
mut is_internal_module_test := false
if is_test {
- tcontent := util.read_file(dir) or { verror('$dir does not exist') }
+ tcontent := util.read_file(dir) or { verror('${dir} does not exist') }
slines := tcontent.split_into_lines()
for sline in slines {
line := sline.trim_space()
@@ -299,7 +299,7 @@ pub fn (v &Builder) get_user_files() []string {
// v volt/slack_test.v: compile all .v files to get the environment
single_test_v_file := os.real_path(dir)
if v.pref.is_verbose {
- v.log('> Compiling an internal module _test.v file $single_test_v_file .')
+ v.log('> Compiling an internal module _test.v file ${single_test_v_file} .')
v.log('> That brings in all other ordinary .v files in the same module too .')
}
user_files << single_test_v_file
@@ -307,7 +307,7 @@ pub fn (v &Builder) get_user_files() []string {
}
does_exist := os.exists(dir)
if !does_exist {
- verror("$dir doesn't exist")
+ verror("${dir} doesn't exist")
}
is_real_file := does_exist && !os.is_dir(dir)
resolved_link := if is_real_file && os.is_link(dir) { os.real_path(dir) } else { dir }
@@ -317,18 +317,18 @@ pub fn (v &Builder) get_user_files() []string {
// Just compile one file and get parent dir
user_files << single_v_file
if v.pref.is_verbose {
- v.log('> just compile one file: "$single_v_file"')
+ v.log('> just compile one file: "${single_v_file}"')
}
} else if os.is_dir(dir) {
if v.pref.is_verbose {
- v.log('> add all .v files from directory "$dir" ...')
+ v.log('> add all .v files from directory "${dir}" ...')
}
// Add .v files from the directory being compiled
user_files << v.v_files_from_dir(dir)
} else {
println('usage: `v file.v` or `v directory`')
ext := os.file_ext(dir)
- println('unknown file extension `$ext`')
+ println('unknown file extension `${ext}`')
exit(1)
}
if user_files.len == 0 {
@@ -336,7 +336,7 @@ pub fn (v &Builder) get_user_files() []string {
exit(1)
}
if v.pref.is_verbose {
- v.log('user_files: $user_files')
+ v.log('user_files: ${user_files}')
}
return user_files
}
diff --git a/vlib/v/builder/interpreterbuilder/v_interpret_test.v b/vlib/v/builder/interpreterbuilder/v_interpret_test.v
index e647832cc2..f0950466c5 100644
--- a/vlib/v/builder/interpreterbuilder/v_interpret_test.v
+++ b/vlib/v/builder/interpreterbuilder/v_interpret_test.v
@@ -5,11 +5,11 @@ import term
const vexe = @VEXE
fn interpreter_wrap(a string) string {
- return 'fn main() {$a}'
+ return 'fn main() {${a}}'
}
fn interp_test(expression string, expected string) ! {
- tmpdir := os.join_path(os.vtmp_dir(), 'v', 'interpret_test_$rand.ulid()')
+ tmpdir := os.join_path(os.vtmp_dir(), 'v', 'interpret_test_${rand.ulid()}')
os.mkdir_all(tmpdir) or {}
defer {
os.rmdir_all(tmpdir) or {}
@@ -19,16 +19,16 @@ fn interp_test(expression string, expected string) ! {
outfile := os.join_path(tmpdir, 'output.txt')
os.write_file(tmpfile, interpreter_wrap(expression))!
if os.system('${os.quoted_path(vexe)} interpret ${os.quoted_path(tmpfile)} > ${os.quoted_path(outfile)}') != 0 {
- eprintln('>>> Failed to interpret V expression: |$expression|')
+ eprintln('>>> Failed to interpret V expression: |${expression}|')
return error('v interp')
}
res := os.read_file(outfile)!
output := res.trim_space()
if output != expected {
eprintln('>>> The output of the V expression, is not the same as the expected one')
- eprintln(' V expression: $expression')
- eprintln(' output: |$output|')
- eprintln(' expected: |$expected|')
+ eprintln(' V expression: ${expression}')
+ eprintln(' output: |${output}|')
+ eprintln(' expected: |${expected}|')
return error('test')
}
println('${term.colorize(term.green, 'OK')} ${term.colorize(term.bright_blue, expression.replace('\n',
diff --git a/vlib/v/builder/jsbuilder/jsbuilder.v b/vlib/v/builder/jsbuilder/jsbuilder.v
index bd53cf4e45..2b3941750a 100644
--- a/vlib/v/builder/jsbuilder/jsbuilder.v
+++ b/vlib/v/builder/jsbuilder/jsbuilder.v
@@ -29,7 +29,7 @@ pub fn compile_js(mut b builder.Builder) {
pub fn build_js(mut b builder.Builder, v_files []string, out_file string) {
b.out_name_js = out_file
- b.info('build_js($out_file)')
+ b.info('build_js(${out_file})')
output := gen_js(mut b, v_files)
os.write_file(out_file, output) or { panic(err) }
if b.pref.is_stats {
diff --git a/vlib/v/builder/msvc_windows.v b/vlib/v/builder/msvc_windows.v
index 175cb8cfc0..6c6bed6b61 100644
--- a/vlib/v/builder/msvc_windows.v
+++ b/vlib/v/builder/msvc_windows.v
@@ -117,11 +117,11 @@ fn new_windows_kit(kit_root string, target_arch string) !WindowsKit {
highest_path = f
}
}
- kit_lib_highest := kit_lib + '\\$highest_path'
+ kit_lib_highest := kit_lib + '\\${highest_path}'
kit_include_highest := kit_lib_highest.replace('Lib', 'Include')
return WindowsKit{
- um_lib_path: kit_lib_highest + '\\um\\$target_arch'
- ucrt_lib_path: kit_lib_highest + '\\ucrt\\$target_arch'
+ um_lib_path: kit_lib_highest + '\\um\\${target_arch}'
+ ucrt_lib_path: kit_lib_highest + '\\ucrt\\${target_arch}'
um_include_path: kit_include_highest + '\\um'
ucrt_include_path: kit_include_highest + '\\ucrt'
shared_include_path: kit_include_highest + '\\shared'
@@ -162,22 +162,22 @@ fn find_vs_by_reg(vswhere_dir string, host_arch string, target_arch string) !VsI
// VSWhere is guaranteed to be installed at this location now
// If its not there then end user needs to update their visual studio
// installation!
- res := os.execute('"$vswhere_dir\\Microsoft Visual Studio\\Installer\\vswhere.exe" -latest -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath')
+ res := os.execute('"${vswhere_dir}\\Microsoft Visual Studio\\Installer\\vswhere.exe" -latest -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath')
// println('res: "$res"')
if res.exit_code != 0 {
return error_with_code(res.output, res.exit_code)
}
res_output := res.output.trim_space()
- version := os.read_file('$res_output\\VC\\Auxiliary\\Build\\Microsoft.VCToolsVersion.default.txt') or {
+ version := os.read_file('${res_output}\\VC\\Auxiliary\\Build\\Microsoft.VCToolsVersion.default.txt') or {
// println('Unable to find msvc version')
return error('Unable to find vs installation')
}
// println('version: $version')
v := version.trim_space()
- lib_path := '$res_output\\VC\\Tools\\MSVC\\$v\\lib\\$target_arch'
- include_path := '$res_output\\VC\\Tools\\MSVC\\$v\\include'
- if os.exists('$lib_path\\vcruntime.lib') {
- p := '$res_output\\VC\\Tools\\MSVC\\$v\\bin\\Host$host_arch\\$target_arch'
+ lib_path := '${res_output}\\VC\\Tools\\MSVC\\${v}\\lib\\${target_arch}'
+ include_path := '${res_output}\\VC\\Tools\\MSVC\\${v}\\include'
+ if os.exists('${lib_path}\\vcruntime.lib') {
+ p := '${res_output}\\VC\\Tools\\MSVC\\${v}\\bin\\Host${host_arch}\\${target_arch}'
// println('$lib_path $include_path')
return VsInstallation{
exe_path: p
@@ -185,7 +185,7 @@ fn find_vs_by_reg(vswhere_dir string, host_arch string, target_arch string) !VsI
include_path: include_path
}
}
- println('Unable to find vs installation (attempted to use lib path "$lib_path")')
+ println('Unable to find vs installation (attempted to use lib path "${lib_path}")')
return error('Unable to find vs exe folder')
} $else {
return error('Host OS does not support finding a Visual Studio installation')
@@ -203,8 +203,8 @@ fn find_vs_by_env(host_arch string, target_arch string) !VsInstallation {
return error('empty VCToolsInstallDir')
}
- bin_dir := '${vc_tools_dir}bin\\Host$host_arch\\$target_arch'
- lib_path := '${vc_tools_dir}lib\\$target_arch'
+ bin_dir := '${vc_tools_dir}bin\\Host${host_arch}\\${target_arch}'
+ lib_path := '${vc_tools_dir}lib\\${target_arch}'
include_path := '${vc_tools_dir}include'
return VsInstallation{
@@ -261,7 +261,7 @@ pub fn (mut v Builder) cc_msvc() {
mut a := []string{}
//
env_cflags := os.getenv('CFLAGS')
- mut all_cflags := '$env_cflags $v.pref.cflags'
+ mut all_cflags := '${env_cflags} ${v.pref.cflags}'
if all_cflags != ' ' {
a << all_cflags
}
@@ -272,7 +272,7 @@ pub fn (mut v Builder) cc_msvc() {
// `/volatile:ms` enables atomic volatile (gcc _Atomic)
// `/Fo` sets the object file name - needed so we can clean up after ourselves properly
// `/F 16777216` changes the stack size to 16MB, see https://docs.microsoft.com/en-us/cpp/build/reference/f-set-stack-size?view=msvc-170
- a << ['-w', '/we4013', '/volatile:ms', '/Fo"$out_name_obj"', '/F 16777216']
+ a << ['-w', '/we4013', '/volatile:ms', '/Fo"${out_name_obj}"', '/F 16777216']
if v.pref.is_prod {
a << '/O2'
}
@@ -281,7 +281,7 @@ pub fn (mut v Builder) cc_msvc() {
a << '/D_DEBUG'
// /Zi generates a .pdb
// /Fd sets the pdb file name (so its not just vc140 all the time)
- a << ['/Zi', '/Fd"$out_name_pdb"']
+ a << ['/Zi', '/Fd"${out_name_pdb}"']
} else {
a << '/MD'
a << '/DNDEBUG'
@@ -341,7 +341,7 @@ pub fn (mut v Builder) cc_msvc() {
a << real_libs.join(' ')
a << '/link'
a << '/NOLOGO'
- a << '/OUT:"$v.pref.out_name"'
+ a << '/OUT:"${v.pref.out_name}"'
a << r.library_paths()
if !all_cflags.contains('/DEBUG') {
// only use /DEBUG, if the user *did not* provide its own:
@@ -361,14 +361,14 @@ pub fn (mut v Builder) cc_msvc() {
args := a.join(' ')
// write args to a file so that we dont smash createprocess
os.write_file(out_name_cmd_line, args) or {
- verror('Unable to write response file to "$out_name_cmd_line"')
+ verror('Unable to write response file to "${out_name_cmd_line}"')
}
- cmd := '"$r.full_cl_exe_path" "@$out_name_cmd_line"'
+ cmd := '"${r.full_cl_exe_path}" "@${out_name_cmd_line}"'
// It is hard to see it at first, but the quotes above ARE balanced :-| ...
// Also the double quotes at the start ARE needed.
v.show_cc(cmd, out_name_cmd_line, args)
if os.user_os() != 'windows' && !v.pref.out_name.ends_with('.c') {
- verror('Cannot build with msvc on $os.user_os()')
+ verror('Cannot build with msvc on ${os.user_os()}')
}
util.timing_start('C msvc')
res := os.execute(cmd)
@@ -401,7 +401,7 @@ fn (mut v Builder) build_thirdparty_obj_file_with_msvc(mod string, path string,
// println('$obj_path already built.')
return
}
- println('$obj_path not found, building it (with msvc)...')
+ println('${obj_path} not found, building it (with msvc)...')
cfile := '${path_without_o_postfix}.c'
flags := msvc_string_flags(moduleflags)
inc_dirs := flags.inc_paths.join(' ')
@@ -409,7 +409,7 @@ fn (mut v Builder) build_thirdparty_obj_file_with_msvc(mod string, path string,
//
mut oargs := []string{}
env_cflags := os.getenv('CFLAGS')
- mut all_cflags := '$env_cflags $v.pref.cflags'
+ mut all_cflags := '${env_cflags} ${v.pref.cflags}'
if all_cflags != ' ' {
oargs << all_cflags
}
@@ -427,22 +427,22 @@ fn (mut v Builder) build_thirdparty_obj_file_with_msvc(mod string, path string,
oargs << defines
oargs << msvc.include_paths()
oargs << inc_dirs
- oargs << '/c "$cfile"'
- oargs << '/Fo"$obj_path"'
+ oargs << '/c "${cfile}"'
+ oargs << '/Fo"${obj_path}"'
env_ldflags := os.getenv('LDFLAGS')
if env_ldflags != '' {
oargs << env_ldflags
}
v.dump_c_options(oargs)
str_oargs := oargs.join(' ')
- cmd := '"$msvc.full_cl_exe_path" $str_oargs'
+ cmd := '"${msvc.full_cl_exe_path}" ${str_oargs}'
// Note: the quotes above ARE balanced.
$if trace_thirdparty_obj_files ? {
- println('>>> build_thirdparty_obj_file_with_msvc cmd: $cmd')
+ println('>>> build_thirdparty_obj_file_with_msvc cmd: ${cmd}')
}
res := os.execute(cmd)
if res.exit_code != 0 {
- println('msvc: failed to build a thirdparty object; cmd: $cmd')
+ println('msvc: failed to build a thirdparty object; cmd: ${cmd}')
verror(res.output)
}
println(res.output)
@@ -471,7 +471,7 @@ pub fn msvc_string_flags(cflags []cflag.CFlag) MsvcStringFlags {
// by the compiler
if flag.name == '-l' {
if flag.value.ends_with('.dll') {
- verror('MSVC cannot link against a dll (`#flag -l $flag.value`)')
+ verror('MSVC cannot link against a dll (`#flag -l ${flag.value}`)')
}
// MSVC has no method of linking against a .dll
// TODO: we should look for .defs aswell
@@ -480,7 +480,7 @@ pub fn msvc_string_flags(cflags []cflag.CFlag) MsvcStringFlags {
} else if flag.name == '-I' {
inc_paths << flag.format()
} else if flag.name == '-D' {
- defines << '/D$flag.value'
+ defines << '/D${flag.value}'
} else if flag.name == '-L' {
lib_paths << flag.value
lib_paths << flag.value + os.path_separator + 'msvc'
@@ -514,16 +514,16 @@ pub fn msvc_string_flags(cflags []cflag.CFlag) MsvcStringFlags {
fn (r MsvcResult) include_paths() []string {
mut res := []string{cap: 4}
if r.ucrt_include_path != '' {
- res << '-I "$r.ucrt_include_path"'
+ res << '-I "${r.ucrt_include_path}"'
}
if r.vs_include_path != '' {
- res << '-I "$r.vs_include_path"'
+ res << '-I "${r.vs_include_path}"'
}
if r.um_include_path != '' {
- res << '-I "$r.um_include_path"'
+ res << '-I "${r.um_include_path}"'
}
if r.shared_include_path != '' {
- res << '-I "$r.shared_include_path"'
+ res << '-I "${r.shared_include_path}"'
}
return res
}
@@ -531,13 +531,13 @@ fn (r MsvcResult) include_paths() []string {
fn (r MsvcResult) library_paths() []string {
mut res := []string{cap: 3}
if r.ucrt_lib_path != '' {
- res << '/LIBPATH:"$r.ucrt_lib_path"'
+ res << '/LIBPATH:"${r.ucrt_lib_path}"'
}
if r.um_lib_path != '' {
- res << '/LIBPATH:"$r.um_lib_path"'
+ res << '/LIBPATH:"${r.um_lib_path}"'
}
if r.vs_lib_path != '' {
- res << '/LIBPATH:"$r.vs_lib_path"'
+ res << '/LIBPATH:"${r.vs_lib_path}"'
}
return res
}
diff --git a/vlib/v/builder/rebuilding.v b/vlib/v/builder/rebuilding.v
index fecd0571e9..c30fa4de3f 100644
--- a/vlib/v/builder/rebuilding.v
+++ b/vlib/v/builder/rebuilding.v
@@ -15,11 +15,11 @@ pub fn (mut b Builder) rebuild_modules() {
}
all_files := b.parsed_files.map(it.path)
$if trace_invalidations ? {
- eprintln('> rebuild_modules all_files: $all_files')
+ eprintln('> rebuild_modules all_files: ${all_files}')
}
invalidations := b.find_invalidated_modules_by_files(all_files)
$if trace_invalidations ? {
- eprintln('> rebuild_modules invalidations: $invalidations')
+ eprintln('> rebuild_modules invalidations: ${invalidations}')
}
if invalidations.len > 0 {
vexe := pref.vexe_path()
@@ -77,9 +77,9 @@ pub fn (mut b Builder) find_invalidated_modules_by_files(all_files []string) []s
for mm in b.mod_invalidates_mods[k] {
m[mm] = true
}
- eprintln('> module `$k` invalidates: $m.keys()')
+ eprintln('> module `${k}` invalidates: ${m.keys()}')
for fpath in v {
- eprintln(' $fpath')
+ eprintln(' ${fpath}')
}
}
}
@@ -106,12 +106,12 @@ pub fn (mut b Builder) find_invalidated_modules_by_files(all_files []string) []s
}
}
$if trace_invalidations ? {
- eprintln('invalidated_paths: $invalidated_paths')
+ eprintln('invalidated_paths: ${invalidated_paths}')
}
mut rebuild_everything := false
for cycle := 0; true; cycle++ {
$if trace_invalidations ? {
- eprintln('> cycle: $cycle | invalidated_paths: $invalidated_paths')
+ eprintln('> cycle: ${cycle} | invalidated_paths: ${invalidated_paths}')
}
mut new_invalidated_paths := map[string]int{}
for npath, _ in invalidated_paths {
@@ -136,7 +136,7 @@ pub fn (mut b Builder) find_invalidated_modules_by_files(all_files []string) []s
}
}
$if trace_invalidations ? {
- eprintln('> npath -> invalidated_mods | $npath -> $invalidated_mods')
+ eprintln('> npath -> invalidated_mods | ${npath} -> ${invalidated_mods}')
}
mpath := os.dir(npath)
invalidated_mod_paths[mpath]++
@@ -161,8 +161,8 @@ pub fn (mut b Builder) find_invalidated_modules_by_files(all_files []string) []s
}
}
$if trace_invalidations ? {
- eprintln('invalidated_mod_paths: $invalidated_mod_paths')
- eprintln('rebuild_everything: $rebuild_everything')
+ eprintln('invalidated_mod_paths: ${invalidated_mod_paths}')
+ eprintln('rebuild_everything: ${rebuild_everything}')
}
if invalidated_mod_paths.len > 0 {
impaths := invalidated_mod_paths.keys()
@@ -184,10 +184,10 @@ fn (mut b Builder) v_build_module(vexe string, imp_path string) {
vroot := os.dir(vexe)
os.chdir(vroot) or {}
boptions := b.pref.build_options.join(' ')
- rebuild_cmd := '${os.quoted_path(vexe)} $boptions build-module ${os.quoted_path(imp_path)}'
- vcache.dlog('| Builder.' + @FN, 'vexe: $vexe | imp_path: $imp_path | rebuild_cmd: $rebuild_cmd')
+ rebuild_cmd := '${os.quoted_path(vexe)} ${boptions} build-module ${os.quoted_path(imp_path)}'
+ vcache.dlog('| Builder.' + @FN, 'vexe: ${vexe} | imp_path: ${imp_path} | rebuild_cmd: ${rebuild_cmd}')
$if trace_v_build_module ? {
- eprintln('> Builder.v_build_module: $rebuild_cmd')
+ eprintln('> Builder.v_build_module: ${rebuild_cmd}')
}
os.system(rebuild_cmd)
}
@@ -195,11 +195,11 @@ fn (mut b Builder) v_build_module(vexe string, imp_path string) {
fn (mut b Builder) rebuild_cached_module(vexe string, imp_path string) string {
res := b.pref.cache_manager.mod_exists(imp_path, '.o', imp_path) or {
if b.pref.is_verbose {
- println('Cached $imp_path .o file not found... Building .o file for $imp_path')
+ println('Cached ${imp_path} .o file not found... Building .o file for ${imp_path}')
}
b.v_build_module(vexe, imp_path)
rebuilded_o := b.pref.cache_manager.mod_exists(imp_path, '.o', imp_path) or {
- panic('could not rebuild cache module for $imp_path, error: $err.msg()')
+ panic('could not rebuild cache module for ${imp_path}, error: ${err.msg()}')
}
return rebuilded_o
}
@@ -217,7 +217,7 @@ fn (mut b Builder) handle_usecache(vexe string) {
for ast_file in b.parsed_files {
if b.pref.is_test && ast_file.mod.name != 'main' {
imp_path := b.find_module_path(ast_file.mod.name, ast_file.path) or {
- verror('cannot import module "$ast_file.mod.name" (not found)')
+ verror('cannot import module "${ast_file.mod.name}" (not found)')
break
}
obj_path := b.rebuild_cached_module(vexe, imp_path)
@@ -245,7 +245,7 @@ fn (mut b Builder) handle_usecache(vexe string) {
continue
}
imp_path := b.find_module_path(imp, ast_file.path) or {
- verror('cannot import module "$imp" (not found)')
+ verror('cannot import module "${imp}" (not found)')
break
}
obj_path := b.rebuild_cached_module(vexe, imp_path)
@@ -343,17 +343,17 @@ pub fn (mut b Builder) rebuild(backend_cb FnBackend) {
mut sall_v_source_bytes := all_v_source_bytes.str()
sall_v_source_lines = util.bold('${sall_v_source_lines:10s}')
sall_v_source_bytes = util.bold('${sall_v_source_bytes:10s}')
- println(' V source code size: $sall_v_source_lines lines, $sall_v_source_bytes bytes')
+ println(' V source code size: ${sall_v_source_lines} lines, ${sall_v_source_bytes} bytes')
//
mut slines := b.stats_lines.str()
mut sbytes := b.stats_bytes.str()
slines = util.bold('${slines:10s}')
sbytes = util.bold('${sbytes:10s}')
- println('generated target code size: $slines lines, $sbytes bytes')
+ println('generated target code size: ${slines} lines, ${sbytes} bytes')
//
vlines_per_second := int(1_000_000.0 * f64(all_v_source_lines) / f64(compilation_time_micros))
svlines_per_second := util.bold(vlines_per_second.str())
- println('compilation took: $scompilation_time_ms ms, compilation speed: $svlines_per_second vlines/s')
+ println('compilation took: ${scompilation_time_ms} ms, compilation speed: ${svlines_per_second} vlines/s')
}
}
@@ -361,8 +361,8 @@ pub fn (mut b Builder) get_vtmp_filename(base_file_name string, postfix string)
vtmp := os.vtmp_dir()
mut uniq := ''
if !b.pref.reuse_tmpc {
- uniq = '.$rand.u64()'
+ uniq = '.${rand.u64()}'
}
- fname := os.file_name(os.real_path(base_file_name)) + '$uniq$postfix'
+ fname := os.file_name(os.real_path(base_file_name)) + '${uniq}${postfix}'
return os.real_path(os.join_path(vtmp, fname))
}
diff --git a/vlib/v/callgraph/callgraph.v b/vlib/v/callgraph/callgraph.v
index 48918bd121..45731324f5 100644
--- a/vlib/v/callgraph/callgraph.v
+++ b/vlib/v/callgraph/callgraph.v
@@ -11,7 +11,7 @@ pub fn show(mut table ast.Table, pref &pref.Preferences, ast_files []&ast.File)
mut mapper := &Mapper{
pref: pref
table: table
- dg: dotgraph.new('CallGraph', 'CallGraph for $pref.path', 'green')
+ dg: dotgraph.new('CallGraph', 'CallGraph for ${pref.path}', 'green')
}
// Node14 [shape="box",label="PrivateBase",URL="$classPrivateBase.html"];
// Node15 -> Node9 [dir=back,color="midnightblue",fontsize=10,style="solid"];
@@ -73,7 +73,7 @@ fn (mut m Mapper) fn_name(fname string, receiver_type ast.Type, is_method bool)
return fname
}
rec_sym := m.table.sym(receiver_type)
- return '${rec_sym.name}.$fname'
+ return '${rec_sym.name}.${fname}'
}
fn (mut m Mapper) dot_fn_name(fname string, recv_type ast.Type, is_method bool) string {
diff --git a/vlib/v/cflag/cflags.v b/vlib/v/cflag/cflags.v
index ef4f9746b4..9bae1fd31f 100644
--- a/vlib/v/cflag/cflags.v
+++ b/vlib/v/cflag/cflags.v
@@ -17,7 +17,7 @@ pub mut:
}
pub fn (c &CFlag) str() string {
- return 'CFlag{ name: "$c.name" value: "$c.value" mod: "$c.mod" os: "$c.os" cached: "$c.cached" }'
+ return 'CFlag{ name: "${c.name}" value: "${c.value}" mod: "${c.mod}" os: "${c.os}" cached: "${c.cached}" }'
}
const fexisting_literal = r'$first_existing'
@@ -41,7 +41,7 @@ pub fn (cf &CFlag) eval() string {
continue cflag_eval_outer_loop
}
}
- panic('>> error: none of the paths $svalues exist')
+ panic('>> error: none of the paths ${svalues} exist')
continue
}
}
@@ -59,13 +59,13 @@ pub fn (cf &CFlag) format() string {
value = cf.eval()
}
if cf.name in ['-l', '-Wa', '-Wl', '-Wp'] && value.len > 0 {
- return '$cf.name$value'.trim_space()
+ return '${cf.name}${value}'.trim_space()
}
// convert to absolute path
if cf.name == '-I' || cf.name == '-L' || value.ends_with('.o') {
value = '"' + os.real_path(value) + '"'
}
- return '$cf.name $value'.trim_space()
+ return '${cf.name} ${value}'.trim_space()
}
// TODO: implement msvc specific c_options_before_target and c_options_after_target ...
@@ -122,7 +122,7 @@ pub fn (cflags []CFlag) defines_others_libs() ([]string, []string, []string) {
continue
}
if copt.ends_with('.a') {
- libs << '"$copt"'
+ libs << '"${copt}"'
continue
}
if copt.starts_with('-D') {
diff --git a/vlib/v/checker/assign.v b/vlib/v/checker/assign.v
index c87561ed9c..e1571617a5 100644
--- a/vlib/v/checker/assign.v
+++ b/vlib/v/checker/assign.v
@@ -33,7 +33,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
right_type_sym := c.table.sym(right_type)
if right_type_sym.kind == .multi_return {
if node.right.len > 1 {
- c.error('cannot use multi-value $right_type_sym.name in single-value context',
+ c.error('cannot use multi-value ${right_type_sym.name} in single-value context',
right.pos())
}
node.right_types = right_type_sym.mr_info().types
@@ -63,10 +63,10 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
// If it's a void type, it's an unknown variable, already had an error earlier.
return
}
- c.error('assignment mismatch: $node.left.len variable(s) but `${right_first.name}()` returns $right_len value(s)',
+ c.error('assignment mismatch: ${node.left.len} variable(s) but `${right_first.name}()` returns ${right_len} value(s)',
node.pos)
} else {
- c.error('assignment mismatch: $node.left.len variable(s) $right_len value(s)',
+ c.error('assignment mismatch: ${node.left.len} variable(s) ${right_len} value(s)',
node.pos)
}
return
@@ -160,7 +160,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
if mut left is ast.Ident && mut right is ast.Ident {
if !c.inside_unsafe && left_type.is_ptr() && left.is_mut() && right_type.is_ptr()
&& !right.is_mut() {
- c.error('`$right.name` is immutable, cannot have a mutable reference to an immutable object',
+ c.error('`${right.name}` is immutable, cannot have a mutable reference to an immutable object',
right.pos)
}
}
@@ -180,11 +180,11 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
type_sym := c.table.sym(obj.typ.set_nr_muls(0))
if !type_sym.is_heap() && !c.pref.translated && !c.file.is_translated {
suggestion := if type_sym.kind == .struct_ {
- 'declaring `$type_sym.name` as `[heap]`'
+ 'declaring `${type_sym.name}` as `[heap]`'
} else {
- 'wrapping the `$type_sym.name` object in a `struct` declared as `[heap]`'
+ 'wrapping the `${type_sym.name}` object in a `struct` declared as `[heap]`'
}
- c.error('`$right.name` cannot be assigned outside `unsafe` blocks as it might refer to an object stored on stack. Consider ${suggestion}.',
+ c.error('`${right.name}` cannot be assigned outside `unsafe` blocks as it might refer to an object stored on stack. Consider ${suggestion}.',
right.pos)
}
}
@@ -197,8 +197,8 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
left_sym := c.table.sym(left_type)
if left_sym.kind != .function {
c.warn(
- 'cannot assign a reference to a value (this will be an error soon) left=${c.table.type_str(left_type)} $left_type.is_ptr() ' +
- 'right=${c.table.type_str(right_type)} $right_type.is_real_pointer() ptr=$right_type.is_ptr()',
+ 'cannot assign a reference to a value (this will be an error soon) left=${c.table.type_str(left_type)} ${left_type.is_ptr()} ' +
+ 'right=${c.table.type_str(right_type)} ${right_type.is_real_pointer()} ptr=${right_type.is_ptr()}',
node.pos)
}
}
@@ -212,12 +212,12 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
c.error('cannot modify blank `_` identifier', left.pos)
}
} else if left.info !is ast.IdentVar {
- c.error('cannot assign to $left.kind `$left.name`', left.pos)
+ c.error('cannot assign to ${left.kind} `${left.name}`', left.pos)
} else {
if is_decl {
c.check_valid_snake_case(left.name, 'variable name', left.pos)
if reserved_type_names_chk.matches(left.name) {
- c.error('invalid use of reserved type `$left.name` as a variable name',
+ c.error('invalid use of reserved type `${left.name}` as a variable name',
left.pos)
}
if right is ast.Nil && !c.inside_unsafe {
@@ -227,7 +227,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
// x := nil
// println(x)
// }`
- c.error('use of untyped nil in assignment (use `unsafe` | $c.inside_unsafe)',
+ c.error('use of untyped nil in assignment (use `unsafe` | ${c.inside_unsafe})',
right.pos())
}
}
@@ -280,10 +280,10 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
}
}
if is_decl {
- full_name := '${left.mod}.$left.name'
+ full_name := '${left.mod}.${left.name}'
if obj := c.file.global_scope.find(full_name) {
if obj is ast.ConstField {
- c.warn('duplicate of a const name `$full_name`', left.pos)
+ c.warn('duplicate of a const name `${full_name}`', left.pos)
}
}
}
@@ -321,11 +321,11 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
}
}
if is_decl {
- c.error('non-name `$left` on left side of `:=`', left.pos())
+ c.error('non-name `${left}` on left side of `:=`', left.pos())
}
if node.op == .assign && (left.is_literal() || left is ast.StructInit) {
- c.error('non-name literal value `$left` on left side of `=`', left.pos())
+ c.error('non-name literal value `${left}` on left side of `=`', left.pos())
}
}
}
@@ -351,7 +351,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
&& !left.is_blank_ident() && right is ast.Ident
if old_assign_error_condition {
// Do not allow `a = b`, only `a = b.clone()`
- c.error('use `array2 $node.op.str() array1.clone()` instead of `array2 $node.op.str() array1` (or use `unsafe`)',
+ c.error('use `array2 ${node.op.str()} array1.clone()` instead of `array2 ${node.op.str()} array1` (or use `unsafe`)',
node.pos)
}
// Do not allow `a = val.array_field`, only `a = val.array_field.clone()`
@@ -364,7 +364,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
// no point to show the notice, if the old error was already shown:
if !old_assign_error_condition {
mut_str := if node.op == .decl_assign { 'mut ' } else { '' }
- c.note('use `${mut_str}array2 $node.op.str() array1.clone()` instead of `${mut_str}array2 $node.op.str() array1` (or use `unsafe`)',
+ c.note('use `${mut_str}array2 ${node.op.str()} array1.clone()` instead of `${mut_str}array2 ${node.op.str()} array1` (or use `unsafe`)',
node.pos)
}
}
@@ -405,7 +405,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
}
right_is_ptr := right_type.is_ptr() || right_sym.is_pointer()
if !right_is_ptr && node.op == .assign && right_type_unwrapped.is_number() {
- c.error('cannot assign to `$left`: ' +
+ c.error('cannot assign to `${left}`: ' +
c.expected_msg(right_type_unwrapped, left_type_unwrapped), right.pos())
}
if !right_sym.is_number() && !left_type.has_flag(.shared_f)
@@ -416,7 +416,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
rtype = rtype.deref()
}
right_name := c.table.type_to_str(rtype)
- c.error('mismatched types `$left_name` and `$right_name`', node.pos)
+ c.error('mismatched types `${left_name}` and `${right_name}`', node.pos)
}
}
// Single side check
@@ -425,41 +425,41 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
.plus_assign, .minus_assign {
if left_type == ast.string_type {
if node.op != .plus_assign {
- c.error('operator `$node.op` not defined on left operand type `$left_sym.name`',
+ c.error('operator `${node.op}` not defined on left operand type `${left_sym.name}`',
left.pos())
}
if right_type != ast.string_type {
- c.error('invalid right operand: $left_sym.name $node.op $right_sym.name',
+ c.error('invalid right operand: ${left_sym.name} ${node.op} ${right_sym.name}',
right.pos())
}
} else if !left_sym.is_number()
&& left_sym.kind !in [.byteptr, .charptr, .struct_, .alias] {
- c.error('operator `$node.op` not defined on left operand type `$left_sym.name`',
+ c.error('operator `${node.op}` not defined on left operand type `${left_sym.name}`',
left.pos())
} else if !right_sym.is_number()
&& left_sym.kind !in [.byteptr, .charptr, .struct_, .alias] {
- c.error('invalid right operand: $left_sym.name $node.op $right_sym.name',
+ c.error('invalid right operand: ${left_sym.name} ${node.op} ${right_sym.name}',
right.pos())
}
}
.mult_assign, .div_assign {
if !left_sym.is_number() && !c.table.final_sym(left_type_unwrapped).is_int()
&& left_sym.kind !in [.struct_, .alias] {
- c.error('operator $node.op.str() not defined on left operand type `$left_sym.name`',
+ c.error('operator ${node.op.str()} not defined on left operand type `${left_sym.name}`',
left.pos())
} else if !right_sym.is_number() && !c.table.final_sym(left_type_unwrapped).is_int()
&& left_sym.kind !in [.struct_, .alias] {
- c.error('operator $node.op.str() not defined on right operand type `$right_sym.name`',
+ c.error('operator ${node.op.str()} not defined on right operand type `${right_sym.name}`',
right.pos())
}
}
.and_assign, .or_assign, .xor_assign, .mod_assign, .left_shift_assign,
.right_shift_assign {
if !left_sym.is_int() && !c.table.final_sym(left_type_unwrapped).is_int() {
- c.error('operator $node.op.str() not defined on left operand type `$left_sym.name`',
+ c.error('operator ${node.op.str()} not defined on left operand type `${left_sym.name}`',
left.pos())
} else if !right_sym.is_int() && !c.table.final_sym(right_type_unwrapped).is_int() {
- c.error('operator $node.op.str() not defined on right operand type `$right_sym.name`',
+ c.error('operator ${node.op.str()} not defined on right operand type `${right_sym.name}`',
right.pos())
}
}
@@ -527,7 +527,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
right_name := c.table.type_to_str(right_type_unwrapped)
parent_sym := c.table.final_sym(left_type_unwrapped)
if left_sym.kind == .alias && right_sym.kind != .alias {
- c.error('mismatched types `$left_name` and `$right_name`', node.pos)
+ c.error('mismatched types `${left_name}` and `${right_name}`', node.pos)
}
extracted_op := match node.op {
.plus_assign { '+' }
@@ -542,19 +542,19 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
}
if method := left_sym.find_method(extracted_op) {
if method.return_type != left_type_unwrapped {
- c.error('operator `$extracted_op` must return `$left_name` to be used as an assignment operator',
+ c.error('operator `${extracted_op}` must return `${left_name}` to be used as an assignment operator',
node.pos)
}
} else {
if parent_sym.is_primitive() {
- c.error('cannot use operator methods on type alias for `$parent_sym.name`',
+ c.error('cannot use operator methods on type alias for `${parent_sym.name}`',
node.pos)
}
if left_name == right_name {
- c.error('undefined operation `$left_name` $extracted_op `$right_name`',
+ c.error('undefined operation `${left_name}` ${extracted_op} `${right_name}`',
node.pos)
} else {
- c.error('mismatched types `$left_name` and `$right_name`', node.pos)
+ c.error('mismatched types `${left_name}` and `${right_name}`', node.pos)
}
}
}
@@ -571,7 +571,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
node.pos)
}
} else {
- c.error('cannot assign to `$left`: $err.msg()', right.pos())
+ c.error('cannot assign to `${left}`: ${err.msg()}', right.pos())
}
}
}
@@ -607,7 +607,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
right_type0 = v.typ
}
if !c.inside_unsafe && assigned_var.is_mut() && !right_node.right.is_mut() {
- c.error('`$right_node.right.name` is immutable, cannot have a mutable reference to it',
+ c.error('`${right_node.right.name}` is immutable, cannot have a mutable reference to it',
right_node.pos)
}
}
@@ -618,7 +618,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
if right_sym.kind == .chan {
chan_info := right_sym.chan_info()
if chan_info.elem_type.is_ptr() && !chan_info.is_mut {
- c.error('cannot have a mutable reference to object from `$right_sym.name`',
+ c.error('cannot have a mutable reference to object from `${right_sym.name}`',
right_node.pos)
}
}
diff --git a/vlib/v/checker/check_types.v b/vlib/v/checker/check_types.v
index cbfedd8b7a..98616bf553 100644
--- a/vlib/v/checker/check_types.v
+++ b/vlib/v/checker/check_types.v
@@ -272,20 +272,20 @@ pub fn (mut c Checker) check_expected_call_arg(got ast.Type, expected_ ast.Type,
|| (!got.is_ptr() && !expected.is_ptr()
&& got_typ_sym.name != expected_typ_sym.name) {
got_typ_str, expected_typ_str := c.get_string_names_of(got, expected)
- return error('cannot use `$got_typ_str` as `$expected_typ_str`')
+ return error('cannot use `${got_typ_str}` as `${expected_typ_str}`')
}
return
}
if got == ast.void_type {
- return error('`$arg.expr` (no value) used as value')
+ return error('`${arg.expr}` (no value) used as value')
}
got_typ_str, expected_typ_str := c.get_string_names_of(got, expected)
- return error('cannot use `$got_typ_str` as `$expected_typ_str`')
+ return error('cannot use `${got_typ_str}` as `${expected_typ_str}`')
}
if got != ast.void_type {
got_typ_str, expected_typ_str := c.get_string_names_of(got, expected)
- return error('cannot use `$got_typ_str` as `$expected_typ_str`')
+ return error('cannot use `${got_typ_str}` as `${expected_typ_str}`')
}
}
@@ -419,10 +419,10 @@ pub fn (mut c Checker) check_matching_function_symbols(got_type_sym &ast.TypeSym
exp_arg_pointedness := if exp_arg_is_ptr { 'a pointer' } else { 'NOT a pointer' }
got_arg_pointedness := if got_arg_is_ptr { 'a pointer' } else { 'NOT a pointer' }
if exp_fn.name.len == 0 {
- c.add_error_detail('expected argument ${i + 1} to be $exp_arg_pointedness, but the passed argument ${
- i + 1} is $got_arg_pointedness')
+ c.add_error_detail('expected argument ${i + 1} to be ${exp_arg_pointedness}, but the passed argument ${
+ i + 1} is ${got_arg_pointedness}')
} else {
- c.add_error_detail('`$exp_fn.name`\'s expected argument `$exp_arg.name` to be $exp_arg_pointedness, but the passed argument `$got_arg.name` is $got_arg_pointedness')
+ c.add_error_detail('`${exp_fn.name}`\'s expected argument `${exp_arg.name}` to be ${exp_arg_pointedness}, but the passed argument `${got_arg.name}` is ${got_arg_pointedness}')
}
return false
} else if exp_arg_is_ptr && got_arg_is_ptr {
@@ -450,13 +450,13 @@ fn (mut c Checker) check_shift(mut node ast.InfixExpr, left_type_ ast.Type, righ
// allow `bool << 2` in translated C code
return ast.int_type
}
- c.error('invalid operation: shift on type `$left_sym.name`', node.left.pos())
+ c.error('invalid operation: shift on type `${left_sym.name}`', node.left.pos())
return ast.void_type
}
if !right_type.is_int() && !c.pref.translated {
left_sym := c.table.sym(left_type)
right_sym := c.table.sym(right_type)
- c.error('cannot shift non-integer type `$right_sym.name` into type `$left_sym.name`',
+ c.error('cannot shift non-integer type `${right_sym.name}` into type `${left_sym.name}`',
node.right.pos())
return ast.void_type
}
@@ -499,7 +499,7 @@ fn (mut c Checker) check_shift(mut node ast.InfixExpr, left_type_ ast.Type, righ
left_type_final := ast.Type(left_sym_final.idx)
if node.op == .left_shift && left_type_final.is_signed() && !(c.inside_unsafe
&& c.is_generated) {
- c.note('shifting a value from a signed type `$left_sym_final.name` can change the sign',
+ c.note('shifting a value from a signed type `${left_sym_final.name}` can change the sign',
node.left.pos())
}
if node.ct_right_value_evaled {
@@ -524,7 +524,7 @@ fn (mut c Checker) check_shift(mut node ast.InfixExpr, left_type_ ast.Type, righ
else { 64 }
}
if ival > moffset && !c.pref.translated && !c.file.is_translated {
- c.error('shift count for type `$left_sym_final.name` too large (maximum: $moffset bits)',
+ c.error('shift count for type `${left_sym_final.name}` too large (maximum: ${moffset} bits)',
node.right.pos())
return left_type
}
@@ -543,7 +543,7 @@ fn (mut c Checker) check_shift(mut node ast.InfixExpr, left_type_ ast.Type, righ
}
}
else {
- c.error('unknown shift operator: $node.op', node.pos)
+ c.error('unknown shift operator: ${node.op}', node.pos)
return left_type
}
}
@@ -635,7 +635,7 @@ pub fn (mut c Checker) check_expected(got ast.Type, expected ast.Type) ! {
fn (c &Checker) expected_msg(got ast.Type, expected ast.Type) string {
exps := c.table.type_to_str(expected)
gots := c.table.type_to_str(got)
- return 'expected `$exps`, not `$gots`'
+ return 'expected `${exps}`, not `${gots}`'
}
pub fn (mut c Checker) symmetric_check(left ast.Type, right ast.Type) bool {
@@ -832,13 +832,13 @@ pub fn (mut c Checker) infer_fn_generic_types(func ast.Fn, mut node ast.CallExpr
}
}
if typ == ast.void_type {
- c.error('could not infer generic type `$gt_name` in call to `$func.name`',
+ c.error('could not infer generic type `${gt_name}` in call to `${func.name}`',
node.pos)
return
}
if c.pref.is_verbose {
s := c.table.type_to_str(typ)
- println('inferred `$func.name<$s>`')
+ println('inferred `${func.name}<${s}>`')
}
inferred_types << c.unwrap_generic(typ)
node.concrete_types << typ
diff --git a/vlib/v/checker/checker.v b/vlib/v/checker/checker.v
index 21a19c46c2..aec7e8e830 100644
--- a/vlib/v/checker/checker.v
+++ b/vlib/v/checker/checker.v
@@ -167,13 +167,13 @@ pub fn (mut c Checker) check(ast_file_ &ast.File) {
for sym in ast_import.syms {
full_name := ast_import.mod + '.' + sym.name
if full_name in c.const_names {
- c.error('cannot selectively import constant `$sym.name` from `$ast_import.mod`, import `$ast_import.mod` and use `$full_name` instead',
+ c.error('cannot selectively import constant `${sym.name}` from `${ast_import.mod}`, import `${ast_import.mod}` and use `${full_name}` instead',
sym.pos)
}
}
for j in 0 .. i {
if ast_import.mod == ast_file.imports[j].mod {
- c.error('`$ast_import.mod` was already imported on line ${
+ c.error('`${ast_import.mod}` was already imported on line ${
ast_file.imports[j].mod_pos.line_nr + 1}', ast_import.mod_pos)
}
}
@@ -221,7 +221,7 @@ pub fn (mut c Checker) check_scope_vars(sc &ast.Scope) {
match obj {
ast.Var {
if !obj.is_used && obj.name[0] != `_` {
- c.warn('unused variable: `$obj.name`', obj.pos)
+ c.warn('unused variable: `${obj.name}`', obj.pos)
}
if obj.is_mut && !obj.is_changed && !c.is_builtin_mod && obj.name != 'it' {
// if obj.is_mut && !obj.is_changed && !c.is_builtin { //TODO C error bad field not checked
@@ -262,7 +262,7 @@ pub fn (mut c Checker) check_files(ast_files []&ast.File) {
mut files_from_main_module := []&ast.File{}
for i in 0 .. ast_files.len {
mut file := ast_files[i]
- c.timers.start('checker_check $file.path')
+ c.timers.start('checker_check ${file.path}')
c.check(file)
if file.mod.name == 'main' {
files_from_main_module << file
@@ -271,7 +271,7 @@ pub fn (mut c Checker) check_files(ast_files []&ast.File) {
has_main_fn = true
}
}
- c.timers.show('checker_check $file.path')
+ c.timers.show('checker_check ${file.path}')
}
if has_main_mod_file && !has_main_fn && files_from_main_module.len > 0 {
if c.pref.is_script && !c.pref.is_test {
@@ -300,7 +300,7 @@ pub fn (mut c Checker) check_files(ast_files []&ast.File) {
mut post_process_generic_fns_iterations := 0
for {
$if trace_post_process_generic_fns_loop ? {
- eprintln('>>>>>>>>> recheck_generic_fns loop iteration: $post_process_generic_fns_iterations')
+ eprintln('>>>>>>>>> recheck_generic_fns loop iteration: ${post_process_generic_fns_iterations}')
}
for file in ast_files {
if file.generic_fns.len > 0 {
@@ -319,7 +319,7 @@ pub fn (mut c Checker) check_files(ast_files []&ast.File) {
post_process_generic_fns_iterations++
}
$if trace_post_process_generic_fns_loop ? {
- eprintln('>>>>>>>>> recheck_generic_fns loop done, iteration: $post_process_generic_fns_iterations')
+ eprintln('>>>>>>>>> recheck_generic_fns loop done, iteration: ${post_process_generic_fns_iterations}')
}
// restore the original c.file && c.mod after post processing
c.change_current_file(last_file)
@@ -397,10 +397,10 @@ fn (mut c Checker) check_valid_snake_case(name string, identifier string, pos to
return
}
if !c.pref.is_vweb && name.len > 0 && (name[0] == `_` || name.contains('._')) {
- c.error('$identifier `$name` cannot start with `_`', pos)
+ c.error('${identifier} `${name}` cannot start with `_`', pos)
}
if !c.pref.experimental && util.contains_capital(name) {
- c.error('$identifier `$name` cannot contain uppercase letters, use snake_case instead',
+ c.error('${identifier} `${name}` cannot contain uppercase letters, use snake_case instead',
pos)
}
}
@@ -413,7 +413,7 @@ fn stripped_name(name string) string {
fn (mut c Checker) check_valid_pascal_case(name string, identifier string, pos token.Pos) {
sname := stripped_name(name)
if sname.len > 0 && !sname[0].is_capital() && !c.pref.translated && !c.file.is_translated {
- c.error('$identifier `$name` must begin with capital letter', pos)
+ c.error('${identifier} `${name}` must begin with capital letter', pos)
}
}
@@ -433,16 +433,16 @@ pub fn (mut c Checker) alias_type_decl(node ast.AliasTypeDecl) {
c.ensure_type_exists(node.parent_type, node.type_pos) or { return }
mut typ_sym := c.table.sym(node.parent_type)
if typ_sym.kind in [.placeholder, .int_literal, .float_literal] {
- c.error('unknown type `$typ_sym.name`', node.type_pos)
+ c.error('unknown type `${typ_sym.name}`', node.type_pos)
} else if typ_sym.kind == .alias {
orig_sym := c.table.sym((typ_sym.info as ast.Alias).parent_type)
- c.error('type `$typ_sym.str()` is an alias, use the original alias type `$orig_sym.name` instead',
+ c.error('type `${typ_sym.str()}` is an alias, use the original alias type `${orig_sym.name}` instead',
node.type_pos)
} else if typ_sym.kind == .chan {
c.error('aliases of `chan` types are not allowed.', node.type_pos)
} else if typ_sym.kind == .function {
orig_sym := c.table.type_to_str(node.parent_type)
- c.error('type `$typ_sym.str()` is an alias, use the original alias type `$orig_sym` instead',
+ c.error('type `${typ_sym.str()}` is an alias, use the original alias type `${orig_sym}` instead',
node.type_pos)
} else if typ_sym.kind == .struct_ {
if mut typ_sym.info is ast.Struct {
@@ -450,7 +450,7 @@ pub fn (mut c Checker) alias_type_decl(node ast.AliasTypeDecl) {
for ct in typ_sym.info.concrete_types {
ct_sym := c.table.sym(ct)
if ct_sym.kind == .placeholder {
- c.error('unknown type `$ct_sym.name`', node.type_pos)
+ c.error('unknown type `${ct_sym.name}`', node.type_pos)
}
}
}
@@ -465,13 +465,13 @@ pub fn (mut c Checker) fn_type_decl(node ast.FnTypeDecl) {
c.ensure_type_exists(fn_info.return_type, fn_info.return_type_pos) or {}
ret_sym := c.table.sym(fn_info.return_type)
if ret_sym.kind == .placeholder {
- c.error('unknown type `$ret_sym.name`', fn_info.return_type_pos)
+ c.error('unknown type `${ret_sym.name}`', fn_info.return_type_pos)
}
for arg in fn_info.params {
c.ensure_type_exists(arg.typ, arg.type_pos) or { return }
arg_sym := c.table.sym(arg.typ)
if arg_sym.kind == .placeholder {
- c.error('unknown type `$arg_sym.name`', arg.type_pos)
+ c.error('unknown type `${arg_sym.name}`', arg.type_pos)
}
}
}
@@ -486,10 +486,10 @@ pub fn (mut c Checker) sum_type_decl(node ast.SumTypeDecl) {
c.ensure_type_exists(variant.typ, variant.pos) or {}
mut sym := c.table.sym(variant.typ)
if sym.name in names_used {
- c.error('sum type $node.name cannot hold the type `$sym.name` more than once',
+ c.error('sum type ${node.name} cannot hold the type `${sym.name}` more than once',
variant.pos)
} else if sym.kind in [.placeholder, .int_literal, .float_literal] {
- c.error('unknown type `$sym.name`', variant.pos)
+ c.error('unknown type `${sym.name}`', variant.pos)
} else if sym.kind == .interface_ && sym.language != .js {
c.error('sum type cannot hold an interface', variant.pos)
} else if sym.kind == .struct_ && sym.language == .js {
@@ -497,20 +497,20 @@ pub fn (mut c Checker) sum_type_decl(node ast.SumTypeDecl) {
} else if mut sym.info is ast.Struct {
if sym.info.is_generic {
if !variant.typ.has_flag(.generic) {
- c.error('generic struct `$sym.name` must specify generic type names, e.g. Foo',
+ c.error('generic struct `${sym.name}` must specify generic type names, e.g. Foo',
variant.pos)
}
if node.generic_types.len == 0 {
- c.error('generic sumtype `$node.name` must specify generic type names, e.g. Foo',
+ c.error('generic sumtype `${node.name}` must specify generic type names, e.g. Foo',
node.name_pos)
} else {
for typ in sym.info.generic_types {
if typ !in node.generic_types {
sumtype_type_names := node.generic_types.map(c.table.type_to_str(it)).join(', ')
- generic_sumtype_name := '$node.name<$sumtype_type_names>'
+ generic_sumtype_name := '${node.name}<${sumtype_type_names}>'
variant_type_names := sym.info.generic_types.map(c.table.type_to_str(it)).join(', ')
- generic_variant_name := '$sym.name<$variant_type_names>'
- c.error('generic type name `${c.table.sym(typ).name}` of generic struct `$generic_variant_name` is not mentioned in sumtype `$generic_sumtype_name`',
+ generic_variant_name := '${sym.name}<${variant_type_names}>'
+ c.error('generic type name `${c.table.sym(typ).name}` of generic struct `${generic_variant_name}` is not mentioned in sumtype `${generic_sumtype_name}`',
variant.pos)
}
}
@@ -518,12 +518,12 @@ pub fn (mut c Checker) sum_type_decl(node ast.SumTypeDecl) {
}
} else if sym.info is ast.FnType {
func := (sym.info as ast.FnType).func
- if c.table.sym(func.return_type).name.ends_with('.$node.name') {
- c.error('sum type `$node.name` cannot be defined recursively', variant.pos)
+ if c.table.sym(func.return_type).name.ends_with('.${node.name}') {
+ c.error('sum type `${node.name}` cannot be defined recursively', variant.pos)
}
for param in func.params {
- if c.table.sym(param.typ).name.ends_with('.$node.name') {
- c.error('sum type `$node.name` cannot be defined recursively', variant.pos)
+ if c.table.sym(param.typ).name.ends_with('.${node.name}') {
+ c.error('sum type `${node.name}` cannot be defined recursively', variant.pos)
}
}
}
@@ -538,7 +538,7 @@ pub fn (mut c Checker) sum_type_decl(node ast.SumTypeDecl) {
pub fn (mut c Checker) expand_iface_embeds(idecl &ast.InterfaceDecl, level int, iface_embeds []ast.InterfaceEmbedding) []ast.InterfaceEmbedding {
// eprintln('> expand_iface_embeds: idecl.name: $idecl.name | level: $level | iface_embeds.len: $iface_embeds.len')
if level > checker.iface_level_cutoff_limit {
- c.error('too many interface embedding levels: $level, for interface `$idecl.name`',
+ c.error('too many interface embedding levels: ${level}, for interface `${idecl.name}`',
idecl.pos)
return []
}
@@ -586,7 +586,7 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
if mut expr.obj is ast.Var {
if !expr.obj.is_mut && !c.pref.translated && !c.file.is_translated
&& !c.inside_unsafe {
- c.error('`$expr.name` is immutable, declare it with `mut` to make it mutable',
+ c.error('`${expr.name}` is immutable, declare it with `mut` to make it mutable',
expr.pos)
}
expr.obj.is_changed = true
@@ -594,10 +594,10 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
if expr.name !in c.locked_names {
if c.locked_names.len > 0 || c.rlocked_names.len > 0 {
if expr.name in c.rlocked_names {
- c.error('$expr.name has an `rlock` but needs a `lock`',
+ c.error('${expr.name} has an `rlock` but needs a `lock`',
expr.pos)
} else {
- c.error('$expr.name must be added to the `lock` list above',
+ c.error('${expr.name} must be added to the `lock` list above',
expr.pos)
}
}
@@ -609,7 +609,7 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
if !c.inside_unsafe && !c.pref.translated {
// TODO fix this in c2v, do not allow modification of all consts
// in translated code
- c.error('cannot modify constant `$expr.name`', expr.pos)
+ c.error('cannot modify constant `${expr.name}`', expr.pos)
}
}
}
@@ -633,7 +633,7 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
else {}
}
if elem_type.has_flag(.shared_f) {
- c.error('you have to create a handle and `lock` it to modify `shared` $kind element',
+ c.error('you have to create a handle and `lock` it to modify `shared` ${kind} element',
expr.left.pos().extend(expr.pos))
}
to_lock, pos = c.fail_if_immutable(expr.left)
@@ -663,18 +663,18 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
}
if !has_field {
type_str := c.table.type_to_str(expr.expr_type)
- c.error('unknown field `${type_str}.$expr.field_name`', expr.pos)
+ c.error('unknown field `${type_str}.${expr.field_name}`', expr.pos)
return '', expr.pos
}
if field_info.typ.has_flag(.shared_f) {
- expr_name := '${expr.expr}.$expr.field_name'
+ expr_name := '${expr.expr}.${expr.field_name}'
if expr_name !in c.locked_names {
if c.locked_names.len > 0 || c.rlocked_names.len > 0 {
if expr_name in c.rlocked_names {
- c.error('$expr_name has an `rlock` but needs a `lock`',
+ c.error('${expr_name} has an `rlock` but needs a `lock`',
expr.pos)
} else {
- c.error('$expr_name must be added to the `lock` list above',
+ c.error('${expr_name} must be added to the `lock` list above',
expr.pos)
}
return '', expr.pos
@@ -685,7 +685,7 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
} else {
if !field_info.is_mut && !c.pref.translated && !c.file.is_translated {
type_str := c.table.type_to_str(expr.expr_type)
- c.error('field `$expr.field_name` of struct `$type_str` is immutable',
+ c.error('field `${expr.field_name}` of struct `${type_str}` is immutable',
expr.pos)
}
to_lock, pos = c.fail_if_immutable(expr.expr)
@@ -699,12 +699,12 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
interface_info := typ_sym.info as ast.Interface
mut field_info := interface_info.find_field(expr.field_name) or {
type_str := c.table.type_to_str(expr.expr_type)
- c.error('unknown field `${type_str}.$expr.field_name`', expr.pos)
+ c.error('unknown field `${type_str}.${expr.field_name}`', expr.pos)
return '', expr.pos
}
if !field_info.is_mut {
type_str := c.table.type_to_str(expr.expr_type)
- c.error('field `$expr.field_name` of interface `$type_str` is immutable',
+ c.error('field `${expr.field_name}` of interface `${type_str}` is immutable',
expr.pos)
return '', expr.pos
}
@@ -714,12 +714,12 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
sumtype_info := typ_sym.info as ast.SumType
mut field_info := sumtype_info.find_field(expr.field_name) or {
type_str := c.table.type_to_str(expr.expr_type)
- c.error('unknown field `${type_str}.$expr.field_name`', expr.pos)
+ c.error('unknown field `${type_str}.${expr.field_name}`', expr.pos)
return '', expr.pos
}
if !field_info.is_mut {
type_str := c.table.type_to_str(expr.expr_type)
- c.error('field `$expr.field_name` of sumtype `$type_str` is immutable',
+ c.error('field `${expr.field_name}` of sumtype `${type_str}` is immutable',
expr.pos)
return '', expr.pos
}
@@ -729,7 +729,7 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
// should only happen in `builtin` and unsafe blocks
inside_builtin := c.file.mod.name == 'builtin'
if !inside_builtin && !c.inside_unsafe {
- c.error('`$typ_sym.kind` can not be modified', expr.pos)
+ c.error('`${typ_sym.kind}` can not be modified', expr.pos)
return '', expr.pos
}
}
@@ -737,7 +737,7 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
c.fail_if_immutable(expr.expr)
}
else {
- c.error('unexpected symbol `$typ_sym.kind`', expr.pos)
+ c.error('unexpected symbol `${typ_sym.kind}`', expr.pos)
return '', expr.pos
}
}
@@ -764,13 +764,13 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
}
else {
if !expr.is_pure_literal() {
- c.error('unexpected expression `$expr.type_name()`', expr.pos())
+ c.error('unexpected expression `${expr.type_name()}`', expr.pos())
return '', expr.pos()
}
}
}
if explicit_lock_needed {
- c.error('`$to_lock` is `shared` and needs explicit lock for `$expr.type_name()`',
+ c.error('`${to_lock}` is `shared` and needs explicit lock for `${expr.type_name()}`',
pos)
to_lock = ''
}
@@ -782,7 +782,7 @@ fn (mut c Checker) type_implements(typ ast.Type, interface_type ast.Type, pos to
return true
}
$if debug_interface_type_implements ? {
- eprintln('> type_implements typ: $typ.debug() (`${c.table.type_to_str(typ)}`) | inter_typ: $interface_type.debug() (`${c.table.type_to_str(interface_type)}`)')
+ eprintln('> type_implements typ: ${typ.debug()} (`${c.table.type_to_str(typ)}`) | inter_typ: ${interface_type.debug()} (`${c.table.type_to_str(interface_type)}`)')
}
utyp := c.unwrap_generic(typ)
typ_sym := c.table.sym(utyp)
@@ -836,7 +836,7 @@ fn (mut c Checker) type_implements(typ ast.Type, interface_type ast.Type, pos to
styp := c.table.type_to_str(utyp)
if typ_sym.kind == .interface_ && inter_sym.kind == .interface_ && !styp.starts_with('JS.')
&& !inter_sym.name.starts_with('JS.') {
- c.error('cannot implement interface `$inter_sym.name` with a different interface `$styp`',
+ c.error('cannot implement interface `${inter_sym.name}` with a different interface `${styp}`',
pos)
}
imethods := if inter_sym.kind == .interface_ {
@@ -860,7 +860,7 @@ fn (mut c Checker) type_implements(typ ast.Type, interface_type ast.Type, pos to
// <<
typ_sym.find_method_with_generic_parent(imethod.name) or {
- c.error("`$styp` doesn't implement method `$imethod.name` of interface `$inter_sym.name`",
+ c.error("`${styp}` doesn't implement method `${imethod.name}` of interface `${inter_sym.name}`",
pos)
continue
}
@@ -869,9 +869,9 @@ fn (mut c Checker) type_implements(typ ast.Type, interface_type ast.Type, pos to
if msg.len > 0 {
sig := c.table.fn_signature(imethod, skip_receiver: false)
typ_sig := c.table.fn_signature(method, skip_receiver: false)
- c.add_error_detail('$inter_sym.name has `$sig`')
- c.add_error_detail(' $typ_sym.name has `$typ_sig`')
- c.error('`$styp` incorrectly implements method `$imethod.name` of interface `$inter_sym.name`: $msg',
+ c.add_error_detail('${inter_sym.name} has `${sig}`')
+ c.add_error_detail(' ${typ_sym.name} has `${typ_sig}`')
+ c.error('`${styp}` incorrectly implements method `${imethod.name}` of interface `${inter_sym.name}`: ${msg}',
pos)
return false
}
@@ -884,11 +884,11 @@ fn (mut c Checker) type_implements(typ ast.Type, interface_type ast.Type, pos to
if ifield.typ != field.typ {
exp := c.table.type_to_str(ifield.typ)
got := c.table.type_to_str(field.typ)
- c.error('`$styp` incorrectly implements field `$ifield.name` of interface `$inter_sym.name`, expected `$exp`, got `$got`',
+ c.error('`${styp}` incorrectly implements field `${ifield.name}` of interface `${inter_sym.name}`, expected `${exp}`, got `${got}`',
pos)
return false
} else if ifield.is_mut && !(field.is_mut || field.is_global) {
- c.error('`$styp` incorrectly implements interface `$inter_sym.name`, field `$ifield.name` must be mutable',
+ c.error('`${styp}` incorrectly implements interface `${inter_sym.name}`, field `${ifield.name}` must be mutable',
pos)
return false
}
@@ -903,7 +903,7 @@ fn (mut c Checker) type_implements(typ ast.Type, interface_type ast.Type, pos to
// do nothing, necessary warnings are already printed
} else {
// <<
- c.error("`$styp` doesn't implement field `$ifield.name` of interface `$inter_sym.name`",
+ c.error("`${styp}` doesn't implement field `${ifield.name}` of interface `${inter_sym.name}`",
pos)
}
}
@@ -930,10 +930,10 @@ pub fn (mut c Checker) check_expr_opt_call(expr ast.Expr, ret_type ast.Type) ast
return_modifier := if expr.return_type.has_flag(.optional) { '?' } else { '!' }
if expr.or_block.kind == .absent {
if c.inside_defer {
- c.error('${expr.name}() returns $return_modifier_kind, so it should have an `or {}` block at the end',
+ c.error('${expr.name}() returns ${return_modifier_kind}, so it should have an `or {}` block at the end',
expr.pos)
} else {
- c.error('${expr.name}() returns $return_modifier_kind, so it should have either an `or {}` block, or `$return_modifier` at the end',
+ c.error('${expr.name}() returns ${return_modifier_kind}, so it should have either an `or {}` block, or `${return_modifier}` at the end',
expr.pos)
}
} else {
@@ -941,13 +941,13 @@ pub fn (mut c Checker) check_expr_opt_call(expr ast.Expr, ret_type ast.Type) ast
}
return ret_type.clear_flag(.optional).clear_flag(.result)
} else if expr.or_block.kind == .block {
- c.error('unexpected `or` block, the function `$expr.name` does neither return an optional nor a result',
+ c.error('unexpected `or` block, the function `${expr.name}` does neither return an optional nor a result',
expr.or_block.pos)
} else if expr.or_block.kind == .propagate_option {
- c.error('unexpected `?`, the function `$expr.name` does not return an optional',
+ c.error('unexpected `?`, the function `${expr.name}` does not return an optional',
expr.or_block.pos)
} else if expr.or_block.kind == .propagate_result {
- c.error('unexpected `!`, the function `$expr.name` does not return a result',
+ c.error('unexpected `!`, the function `${expr.name}` does not return a result',
expr.or_block.pos)
}
} else if expr is ast.IndexExpr {
@@ -963,7 +963,7 @@ pub fn (mut c Checker) check_or_expr(node ast.OrExpr, ret_type ast.Type, expr_re
if c.table.cur_fn != unsafe { nil } && !c.table.cur_fn.return_type.has_flag(.optional)
&& !c.table.cur_fn.is_main && !c.table.cur_fn.is_test && !c.inside_const {
c.add_instruction_for_optional_type()
- c.error('to propagate the call, `$c.table.cur_fn.name` must return an optional type',
+ c.error('to propagate the call, `${c.table.cur_fn.name}` must return an optional type',
node.pos)
}
if !expr_return_type.has_flag(.optional) {
@@ -981,7 +981,7 @@ pub fn (mut c Checker) check_or_expr(node ast.OrExpr, ret_type ast.Type, expr_re
if c.table.cur_fn != unsafe { nil } && !c.table.cur_fn.return_type.has_flag(.result)
&& !c.table.cur_fn.is_main && !c.table.cur_fn.is_test && !c.inside_const {
c.add_instruction_for_result_type()
- c.error('to propagate the call, `$c.table.cur_fn.name` must return a result type',
+ c.error('to propagate the call, `${c.table.cur_fn.name}` must return a result type',
node.pos)
}
if !expr_return_type.has_flag(.result) {
@@ -1030,7 +1030,7 @@ fn (mut c Checker) check_or_last_stmt(stmt ast.Stmt, ret_type ast.Type, expr_ret
return
}
expected_type_name := c.table.type_to_str(ret_type.clear_flag(.optional).clear_flag(.result))
- c.error('`or` block must provide a default value of type `$expected_type_name`, or return/continue/break or call a [noreturn] function like panic(err) or exit(1)',
+ c.error('`or` block must provide a default value of type `${expected_type_name}`, or return/continue/break or call a [noreturn] function like panic(err) or exit(1)',
stmt.expr.pos())
} else {
if ret_type.is_ptr() && last_stmt_typ.is_pointer()
@@ -1039,7 +1039,7 @@ fn (mut c Checker) check_or_last_stmt(stmt ast.Stmt, ret_type ast.Type, expr_ret
}
type_name := c.table.type_to_str(last_stmt_typ)
expected_type_name := c.table.type_to_str(ret_type.clear_flag(.optional).clear_flag(.result))
- c.error('wrong return type `$type_name` in the `or {}` block, expected `$expected_type_name`',
+ c.error('wrong return type `${type_name}` in the `or {}` block, expected `${expected_type_name}`',
stmt.expr.pos())
}
}
@@ -1053,7 +1053,7 @@ fn (mut c Checker) check_or_last_stmt(stmt ast.Stmt, ret_type ast.Type, expr_ret
ast.Return {}
else {
expected_type_name := c.table.type_to_str(ret_type.clear_flag(.optional).clear_flag(.result))
- c.error('last statement in the `or {}` block should be an expression of type `$expected_type_name` or exit parent scope',
+ c.error('last statement in the `or {}` block should be an expression of type `${expected_type_name}` or exit parent scope',
stmt.pos)
}
}
@@ -1086,7 +1086,7 @@ fn (mut c Checker) check_or_last_stmt(stmt ast.Stmt, ret_type ast.Type, expr_ret
// opt_returning_string() or { ... 123 }
type_name := c.table.type_to_str(stmt.typ)
expr_return_type_name := c.table.type_to_str(expr_return_type)
- c.error('the default expression type in the `or` block should be `$expr_return_type_name`, instead you gave a value of type `$type_name`',
+ c.error('the default expression type in the `or` block should be `${expr_return_type_name}`, instead you gave a value of type `${type_name}`',
stmt.expr.pos())
}
}
@@ -1099,7 +1099,7 @@ pub fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
using_new_err_struct_save := c.using_new_err_struct
// TODO remove; this avoids a breaking change in syntax
- if '$node.expr' == 'err' {
+ if '${node.expr}' == 'err' {
c.using_new_err_struct = true
}
@@ -1136,7 +1136,8 @@ pub fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
} else if node.field_name == 'idx' {
return ast.int_type
}
- c.error('invalid field `.$node.field_name` for type `$node.expr`', node.pos)
+ c.error('invalid field `.${node.field_name}` for type `${node.expr}`',
+ node.pos)
return ast.string_type
}
}
@@ -1157,7 +1158,7 @@ pub fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
if typ == ast.void_type_idx {
// This means that the field has an undefined type.
// This error was handled before.
- c.error('`$node.expr` does not return a value', node.pos)
+ c.error('`${node.expr}` does not return a value', node.pos)
node.expr_type = ast.void_type
return ast.void_type
}
@@ -1186,7 +1187,7 @@ pub fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
return ast.u32_type
}
}
- mut unknown_field_msg := 'type `$sym.name` has no field named `$field_name`'
+ mut unknown_field_msg := 'type `${sym.name}` has no field named `${field_name}`'
mut has_field := false
mut field := ast.StructField{}
if field_name.len > 0 && field_name[0].is_capital() && sym.info is ast.Struct
@@ -1253,10 +1254,10 @@ pub fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
if sym.idx == ast.error_type_idx && !c.is_just_builtin_mod
&& (field_name == 'msg' || field_name == 'code') {
method := c.table.find_method(sym, field_name) or {
- c.error('invalid `IError` interface implementation: $err', node.pos)
+ c.error('invalid `IError` interface implementation: ${err}', node.pos)
return ast.void_type
}
- c.note('the `.$field_name` field on `IError` is deprecated, and will be removed after 2022-06-01, use `.${field_name}()` instead.',
+ c.note('the `.${field_name}` field on `IError` is deprecated, and will be removed after 2022-06-01, use `.${field_name}()` instead.',
node.pos)
return method.return_type
}
@@ -1266,7 +1267,7 @@ pub fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
is_used_outside := sym.mod != c.mod
if is_used_outside && !field.is_pub && sym.language != .c {
unwrapped_sym := c.table.sym(c.unwrap_generic(typ))
- c.error('field `${unwrapped_sym.name}.$field_name` is not public', node.pos)
+ c.error('field `${unwrapped_sym.name}.${field_name}` is not public', node.pos)
}
field_sym := c.table.sym(field.typ)
if field.is_deprecated && is_used_outside {
@@ -1296,11 +1297,11 @@ pub fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
rec_sym := c.table.sym(receiver.set_nr_muls(0))
if !rec_sym.is_heap() {
suggestion := if rec_sym.kind == .struct_ {
- 'declaring `$rec_sym.name` as `[heap]`'
+ 'declaring `${rec_sym.name}` as `[heap]`'
} else {
- 'wrapping the `$rec_sym.name` object in a `struct` declared as `[heap]`'
+ 'wrapping the `${rec_sym.name}` object in a `struct` declared as `[heap]`'
}
- c.error('method `${c.table.type_to_str(receiver.idx())}.$method.name` cannot be used as a variable outside `unsafe` blocks as its receiver might refer to an object stored on stack. Consider ${suggestion}.',
+ c.error('method `${c.table.type_to_str(receiver.idx())}.${method.name}` cannot be used as a variable outside `unsafe` blocks as its receiver might refer to an object stored on stack. Consider ${suggestion}.',
node.expr.pos().extend(node.pos))
}
}
@@ -1320,7 +1321,7 @@ pub fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
return ast.int_type
}
- c.error('`$unwrapped_sym.name` has no property `$node.field_name`', node.pos)
+ c.error('`${unwrapped_sym.name}` has no property `${node.field_name}`', node.pos)
}
} else {
if sym.info is ast.Struct {
@@ -1356,7 +1357,7 @@ pub fn (mut c Checker) const_decl(mut node ast.ConstDecl) {
...field.pos
len: util.no_cur_mod(field.name, c.mod).len
}
- c.error('duplicate const `$field.name`', name_pos)
+ c.error('duplicate const `${field.name}`', name_pos)
}
c.const_names << field.name
}
@@ -1454,7 +1455,7 @@ pub fn (mut c Checker) enum_decl(mut node ast.EnumDecl) {
signed, enum_umin, enum_umax = false, 0, 0xFFFF_FFFF_FFFF_FFFF
}
else {
- c.error('`$senum_type` is not one of `i8`,`i16`,`int`,`i64`,`u8`,`u16`,`u32`,`u64`',
+ c.error('`${senum_type}` is not one of `i8`,`i16`,`int`,`i64`,`u8`,`u16`,`u32`,`u64`',
node.pos)
}
}
@@ -1465,11 +1466,11 @@ pub fn (mut c Checker) enum_decl(mut node ast.EnumDecl) {
for i, mut field in node.fields {
if !c.pref.experimental && util.contains_capital(field.name) {
// TODO C2V uses hundreds of enums with capitals, remove -experimental check once it's handled
- c.error('field name `$field.name` cannot contain uppercase letters, use snake_case instead',
+ c.error('field name `${field.name}` cannot contain uppercase letters, use snake_case instead',
field.pos)
}
if _ := seen_enum_field_names[field.name] {
- c.error('duplicate enum field name `$field.name`', field.pos)
+ c.error('duplicate enum field name `${field.name}`', field.pos)
}
seen_enum_field_names[field.name] = i
if field.has_expr {
@@ -1482,7 +1483,7 @@ pub fn (mut c Checker) enum_decl(mut node ast.EnumDecl) {
val := field.expr.val.i64()
ival = val
if val < enum_imin || val >= enum_imax {
- c.error('enum value `$field.expr.val` overflows the enum type `$senum_type`, values of which have to be in [$enum_imin, $enum_imax]',
+ c.error('enum value `${field.expr.val}` overflows the enum type `${senum_type}`, values of which have to be in [${enum_imin}, ${enum_imax}]',
field.expr.pos)
overflows = true
}
@@ -1490,7 +1491,7 @@ pub fn (mut c Checker) enum_decl(mut node ast.EnumDecl) {
val := field.expr.val.u64()
uval = val
if val >= enum_umax {
- c.error('enum value `$field.expr.val` overflows the enum type `$senum_type`, values of which have to be in [$enum_umin, $enum_umax]',
+ c.error('enum value `${field.expr.val}` overflows the enum type `${senum_type}`, values of which have to be in [${enum_umin}, ${enum_umax}]',
field.expr.pos)
overflows = true
}
@@ -1498,7 +1499,7 @@ pub fn (mut c Checker) enum_decl(mut node ast.EnumDecl) {
if !overflows && !c.pref.translated && !c.file.is_translated
&& !node.is_multi_allowed {
if (signed && ival in iseen) || (!signed && uval in useen) {
- c.error('enum value `$field.expr.val` already exists', field.expr.pos)
+ c.error('enum value `${field.expr.val}` already exists', field.expr.pos)
}
}
if signed {
@@ -1521,7 +1522,7 @@ pub fn (mut c Checker) enum_decl(mut node ast.EnumDecl) {
fe_type := c.cast_expr(mut field.expr)
if node.typ != fe_type {
sfe_type := c.table.type_to_str(fe_type)
- c.error('the type of the enum value `$sfe_type` != the enum type itself `$senum_type`',
+ c.error('the type of the enum value `${sfe_type}` != the enum type itself `${senum_type}`',
field.expr.pos)
}
if !fe_type.is_pure_int() {
@@ -1547,7 +1548,7 @@ pub fn (mut c Checker) enum_decl(mut node ast.EnumDecl) {
if iseen.len > 0 {
ilast := iseen.last()
if ilast == enum_imax {
- c.error('enum value overflows type `$senum_type`, which has a maximum value of $enum_imax',
+ c.error('enum value overflows type `${senum_type}`, which has a maximum value of ${enum_imax}',
field.pos)
} else if !c.pref.translated && !c.file.is_translated && !node.is_multi_allowed
&& ilast + 1 in iseen {
@@ -1561,7 +1562,7 @@ pub fn (mut c Checker) enum_decl(mut node ast.EnumDecl) {
if useen.len > 0 {
ulast := useen.last()
if ulast == enum_umax {
- c.error('enum value overflows type `$senum_type`, which has a maximum value of $enum_umax',
+ c.error('enum value overflows type `${senum_type}`, which has a maximum value of ${enum_umax}',
field.pos)
} else if !c.pref.translated && !c.file.is_translated && !node.is_multi_allowed
&& ulast + 1 in useen {
@@ -1593,7 +1594,7 @@ fn (mut c Checker) stmt(node_ ast.Stmt) {
mut node := unsafe { node_ }
$if trace_checker ? {
ntype := typeof(node).replace('v.ast.', '')
- eprintln('checking: ${c.file.path:-30} | pos: ${node.pos.line_str():-39} | node: $ntype | $node')
+ eprintln('checking: ${c.file.path:-30} | pos: ${node.pos.line_str():-39} | node: ${ntype} | ${node}')
}
c.expected_type = ast.void_type
match mut node {
@@ -1751,14 +1752,14 @@ fn (mut c Checker) assert_stmt(node ast.AssertStmt) {
assert_type := c.check_expr_opt_call(node.expr, c.expr(node.expr))
if assert_type != ast.bool_type_idx {
atype_name := c.table.sym(assert_type).name
- c.error('assert can be used only with `bool` expressions, but found `$atype_name` instead',
+ c.error('assert can be used only with `bool` expressions, but found `${atype_name}` instead',
node.pos)
}
if node.extra !is ast.EmptyExpr {
extra_type := c.expr(node.extra)
if extra_type != ast.string_type {
extra_type_name := c.table.sym(extra_type).name
- c.error('assert allows only a single string as its second argument, but found `$extra_type_name` instead',
+ c.error('assert allows only a single string as its second argument, but found `${extra_type_name}` instead',
node.extra_pos)
}
}
@@ -1779,14 +1780,14 @@ fn (mut c Checker) block(node ast.Block) {
fn (mut c Checker) branch_stmt(node ast.BranchStmt) {
if c.inside_defer {
- c.error('`$node.kind.str()` is not allowed in defer statements', node.pos)
+ c.error('`${node.kind.str()}` is not allowed in defer statements', node.pos)
}
if c.in_for_count == 0 {
- c.error('$node.kind.str() statement not within a loop', node.pos)
+ c.error('${node.kind.str()} statement not within a loop', node.pos)
}
if node.label.len > 0 {
if node.label != c.loop_label {
- c.error('invalid label name `$node.label`', node.pos)
+ c.error('invalid label name `${node.label}`', node.pos)
}
}
}
@@ -1795,14 +1796,14 @@ fn (mut c Checker) global_decl(mut node ast.GlobalDecl) {
for mut field in node.fields {
c.check_valid_snake_case(field.name, 'global name', field.pos)
if field.name in c.global_names {
- c.error('duplicate global `$field.name`', field.pos)
+ c.error('duplicate global `${field.name}`', field.pos)
}
- if '${c.mod}.$field.name' in c.const_names {
- c.error('duplicate global and const `$field.name`', field.pos)
+ if '${c.mod}.${field.name}' in c.const_names {
+ c.error('duplicate global and const `${field.name}`', field.pos)
}
sym := c.table.sym(field.typ)
if sym.kind == .placeholder {
- c.error('unknown type `$sym.name`', field.typ_pos)
+ c.error('unknown type `${sym.name}`', field.typ_pos)
}
if field.has_expr {
if field.expr is ast.AnonFn && field.name == 'main' {
@@ -1861,7 +1862,7 @@ fn (mut c Checker) asm_stmt(mut stmt ast.AsmStmt) {
if template.name !in ['skip', 'space', 'byte', 'word', 'short', 'int', 'long', 'quad',
'globl', 'global', 'section', 'text', 'data', 'bss', 'fill', 'org', 'previous',
'string', 'asciz', 'ascii'] { // all tcc-supported assembler directives
- c.error('unknown assembler directive: `$template.name`', template.pos)
+ c.error('unknown assembler directive: `${template.name}`', template.pos)
}
}
for mut arg in template.args {
@@ -1949,13 +1950,13 @@ fn (mut c Checker) hash_stmt(mut node ast.HashStmt) {
c.error(err.msg(), node.pos)
return
}
- node.val = '$node.kind $vroot'
+ node.val = '${node.kind} ${vroot}'
node.main = vroot
flag = vroot
}
if flag.contains('@VEXEROOT') {
vroot := flag.replace('@VEXEROOT', os.dir(pref.vexe_path()))
- node.val = '$node.kind $vroot'
+ node.val = '${node.kind} ${vroot}'
node.main = vroot
flag = vroot
}
@@ -1964,7 +1965,7 @@ fn (mut c Checker) hash_stmt(mut node ast.HashStmt) {
c.error(err.msg(), node.pos)
return
}
- node.val = '$node.kind $vroot'
+ node.val = '${node.kind} ${vroot}'
node.main = vroot
flag = vroot
}
@@ -1992,7 +1993,7 @@ fn (mut c Checker) hash_stmt(mut node ast.HashStmt) {
if fcontent := os.read_file(node.main) {
node.val = fcontent
} else {
- mut missing_message := 'The file $original_flag, needed for insertion by module `$node.mod`,'
+ mut missing_message := 'The file ${original_flag}, needed for insertion by module `${node.mod}`,'
if os.is_file(node.main) {
missing_message += ' is not readable.'
} else {
@@ -2009,7 +2010,7 @@ fn (mut c Checker) hash_stmt(mut node ast.HashStmt) {
args := if node.main.contains('--') {
node.main.split(' ')
} else {
- '--cflags --libs $node.main'.split(' ')
+ '--cflags --libs ${node.main}'.split(' ')
}
mut m := pkgconfig.main(args) or {
c.error(err.msg(), node.pos)
@@ -2056,7 +2057,7 @@ fn (mut c Checker) hash_stmt(mut node ast.HashStmt) {
for deprecated in ['@VMOD', '@VMODULE', '@VPATH', '@VLIB_PATH'] {
if flag.contains(deprecated) {
if !flag.contains('@VMODROOT') {
- c.error('$deprecated had been deprecated, use @VMODROOT instead.',
+ c.error('${deprecated} had been deprecated, use @VMODROOT instead.',
node.pos)
}
}
@@ -2073,7 +2074,7 @@ fn (mut c Checker) hash_stmt(mut node ast.HashStmt) {
node.pos)
}
} else {
- c.error('expected `#define`, `#flag`, `#include`, `#insert` or `#pkgconfig` not $node.val',
+ c.error('expected `#define`, `#flag`, `#include`, `#insert` or `#pkgconfig` not ${node.val}',
node.pos)
}
}
@@ -2083,29 +2084,30 @@ fn (mut c Checker) hash_stmt(mut node ast.HashStmt) {
fn (mut c Checker) import_stmt(node ast.Import) {
c.check_valid_snake_case(node.alias, 'module alias', node.pos)
for sym in node.syms {
- name := '${node.mod}.$sym.name'
+ name := '${node.mod}.${sym.name}'
if sym.name[0].is_capital() {
if type_sym := c.table.find_sym(name) {
if type_sym.kind != .placeholder {
if !type_sym.is_pub {
- c.error('module `$node.mod` type `$sym.name` is private', sym.pos)
+ c.error('module `${node.mod}` type `${sym.name}` is private',
+ sym.pos)
}
continue
}
}
- c.error('module `$node.mod` has no type `$sym.name`', sym.pos)
+ c.error('module `${node.mod}` has no type `${sym.name}`', sym.pos)
continue
}
if func := c.table.find_fn(name) {
if !func.is_pub {
- c.error('module `$node.mod` function `${sym.name}()` is private', sym.pos)
+ c.error('module `${node.mod}` function `${sym.name}()` is private', sym.pos)
}
continue
}
if _ := c.file.global_scope.find_const(name) {
continue
}
- c.error('module `$node.mod` has no constant or function `$sym.name`', sym.pos)
+ c.error('module `${node.mod}` has no constant or function `${sym.name}`', sym.pos)
}
if c.table.module_deprecated[node.mod] {
c.deprecate('module', node.mod, c.table.module_attrs[node.mod], node.pos)
@@ -2132,7 +2134,7 @@ fn (mut c Checker) stmts_ending_with_expression(stmts []ast.Stmt) {
}
if c.stmt_level > checker.stmt_level_cutoff_limit {
c.scope_returns = false
- c.error('checker: too many stmt levels: $c.stmt_level ', stmts[0].pos)
+ c.error('checker: too many stmt levels: ${c.stmt_level} ', stmts[0].pos)
return
}
mut unreachable := token.Pos{
@@ -2185,7 +2187,7 @@ pub fn (mut c Checker) expr(node_ ast.Expr) ast.Type {
mut node := unsafe { node_ }
if c.expr_level > checker.expr_level_cutoff_limit {
- c.error('checker: too many expr levels: $c.expr_level ', node.pos())
+ c.error('checker: too many expr levels: ${c.expr_level} ', node.pos())
return ast.void_type
}
match mut node {
@@ -2230,13 +2232,13 @@ pub fn (mut c Checker) expr(node_ ast.Expr) ast.Type {
c.ensure_type_exists(node.typ, node.pos) or {}
if !c.table.sumtype_has_variant(node.expr_type, node.typ, true) {
addr := '&'.repeat(node.typ.nr_muls())
- c.error('cannot cast `$expr_type_sym.name` to `$addr$type_sym.name`',
+ c.error('cannot cast `${expr_type_sym.name}` to `${addr}${type_sym.name}`',
node.pos)
}
} else if expr_type_sym.kind == .interface_ && type_sym.kind == .interface_ {
c.ensure_type_exists(node.typ, node.pos) or {}
} else if node.expr_type != node.typ {
- mut s := 'cannot cast non-sum type `$expr_type_sym.name` using `as`'
+ mut s := 'cannot cast non-sum type `${expr_type_sym.name}` using `as`'
if type_sym.kind == .sum_type {
s += ' - use e.g. `${type_sym.name}(some_expr)` instead.'
}
@@ -2262,13 +2264,13 @@ pub fn (mut c Checker) expr(node_ ast.Expr) ast.Type {
mut ret_type := c.call_expr(mut node)
if !ret_type.has_flag(.optional) && !ret_type.has_flag(.result) {
if node.or_block.kind == .block {
- c.error('unexpected `or` block, the function `$node.name` does neither return an optional nor a result',
+ c.error('unexpected `or` block, the function `${node.name}` does neither return an optional nor a result',
node.or_block.pos)
} else if node.or_block.kind == .propagate_option {
- c.error('unexpected `?`, the function `$node.name` does neither return an optional nor a result',
+ c.error('unexpected `?`, the function `${node.name}` does neither return an optional nor a result',
node.or_block.pos)
} else if node.or_block.kind == .propagate_result {
- c.error('unexpected `!`, the function `$node.name` does neither return an optional nor a result',
+ c.error('unexpected `!`, the function `${node.name}` does neither return an optional nor a result',
node.or_block.pos)
}
}
@@ -2459,7 +2461,7 @@ pub fn (mut c Checker) expr(node_ ast.Expr) ast.Type {
ast.TypeNode {
if !c.inside_x_is_type && node.typ.has_flag(.generic) && unsafe { c.table.cur_fn != 0 }
&& c.table.cur_fn.generic_names.len == 0 {
- c.error('unexpected generic variable in non-generic function `$c.table.cur_fn.name`',
+ c.error('unexpected generic variable in non-generic function `${c.table.cur_fn.name}`',
node.pos)
}
return node.typ
@@ -2476,7 +2478,7 @@ pub fn (mut c Checker) expr(node_ ast.Expr) ast.Type {
if !c.check_types(ltype, ast.bool_type) {
ltype_sym := c.table.sym(ltype)
lname := if node.is_likely { '_likely_' } else { '_unlikely_' }
- c.error('`${lname}()` expects a boolean expression, instead it got `$ltype_sym.name`',
+ c.error('`${lname}()` expects a boolean expression, instead it got `${ltype_sym.name}`',
node.pos)
}
return ast.bool_type
@@ -2553,14 +2555,15 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
&& !to_type.has_flag(.result) {
ft := c.table.type_to_str(from_type)
tt := c.table.type_to_str(to_type)
- c.error('cannot cast `$ft` to `$tt`', node.pos)
+ c.error('cannot cast `${ft}` to `${tt}`', node.pos)
}
} else if mut to_sym.info is ast.Alias && !(final_to_sym.kind == .struct_ && to_type.is_ptr()) {
if !c.check_types(from_type, to_sym.info.parent_type) && !(final_to_sym.is_int()
&& final_from_sym.kind in [.enum_, .bool, .i8, .u8, .char]) {
ft := c.table.type_to_str(from_type)
tt := c.table.type_to_str(to_type)
- c.error('cannot cast `$ft` to `$tt` (alias to `$final_to_sym.name`)', node.pos)
+ c.error('cannot cast `${ft}` to `${tt}` (alias to `${final_to_sym.name}`)',
+ node.pos)
}
} else if to_sym.kind == .struct_ && !to_type.is_ptr()
&& !(to_sym.info as ast.Struct).is_typedef {
@@ -2571,12 +2574,12 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
from_type_info := from_sym.info as ast.Struct
to_type_info := to_sym.info as ast.Struct
if !c.check_struct_signature(from_type_info, to_type_info) {
- c.error('cannot convert struct `$from_sym.name` to struct `$to_sym.name`',
+ c.error('cannot convert struct `${from_sym.name}` to struct `${to_sym.name}`',
node.pos)
}
} else {
ft := c.table.type_to_str(from_type)
- c.error('cannot cast `$ft` to struct', node.pos)
+ c.error('cannot cast `${ft}` to struct', node.pos)
}
} else if to_sym.kind == .struct_ && to_type.is_ptr() {
if from_sym.kind == .alias {
@@ -2586,7 +2589,7 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
&& !from_type.is_ptr() {
ft := c.table.type_to_str(from_type)
tt := c.table.type_to_str(to_type)
- c.error('cannot cast `$ft` to `$tt`', node.pos)
+ c.error('cannot cast `${ft}` to `${tt}`', node.pos)
}
} else if to_sym.kind == .interface_ {
if c.type_implements(from_type, to_type, node.pos) {
@@ -2609,18 +2612,18 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
} else if from_type == ast.none_type && !to_type.has_flag(.optional)
&& !to_type.has_flag(.result) {
type_name := c.table.type_to_str(to_type)
- c.error('cannot cast `none` to `$type_name`', node.pos)
+ c.error('cannot cast `none` to `${type_name}`', node.pos)
} else if from_sym.kind == .struct_ && !from_type.is_ptr() {
if (to_type.is_ptr() || to_sym.kind !in [.sum_type, .interface_]) && !c.is_builtin_mod {
from_type_name := c.table.type_to_str(from_type)
type_name := c.table.type_to_str(to_type)
- c.error('cannot cast struct `$from_type_name` to `$type_name`', node.pos)
+ c.error('cannot cast struct `${from_type_name}` to `${type_name}`', node.pos)
}
} else if to_sym.kind == .u8 && !final_from_sym.is_number() && !final_from_sym.is_pointer()
&& !from_type.is_ptr() && final_from_sym.kind !in [.char, .enum_, .bool] {
ft := c.table.type_to_str(from_type)
tt := c.table.type_to_str(to_type)
- c.error('cannot cast type `$ft` to `$tt`', node.pos)
+ c.error('cannot cast type `${ft}` to `${tt}`', node.pos)
} else if from_type.has_flag(.optional) || from_type.has_flag(.result)
|| from_type.has_flag(.variadic) {
// variadic case can happen when arrays are converted into variadic
@@ -2631,38 +2634,38 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
} else {
'a variadic'
}
- c.error('cannot type cast $msg', node.pos)
+ c.error('cannot type cast ${msg}', node.pos)
} else if !c.inside_unsafe && to_type.is_ptr() && from_type.is_ptr()
&& to_type.deref() != ast.char_type && from_type.deref() != ast.char_type {
ft := c.table.type_to_str(from_type)
tt := c.table.type_to_str(to_type)
- c.warn('casting `$ft` to `$tt` is only allowed in `unsafe` code', node.pos)
+ c.warn('casting `${ft}` to `${tt}` is only allowed in `unsafe` code', node.pos)
} else if from_sym.kind == .array_fixed && !from_type.is_ptr() {
c.warn('cannot cast a fixed array (use e.g. `&arr[0]` instead)', node.pos)
} else if final_from_sym.kind == .string && final_to_sym.is_number()
&& final_to_sym.kind != .rune {
snexpr := node.expr.str()
tt := c.table.type_to_str(to_type)
- c.error('cannot cast string to `$tt`, use `${snexpr}.${final_to_sym.name}()` instead.',
+ c.error('cannot cast string to `${tt}`, use `${snexpr}.${final_to_sym.name}()` instead.',
node.pos)
} else if final_from_sym.kind == .string && to_type.is_ptr() && to_sym.kind != .string {
snexpr := node.expr.str()
tt := c.table.type_to_str(to_type)
- c.error('cannot cast string to `$tt`, use `${snexpr}.str` instead.', node.pos)
+ c.error('cannot cast string to `${tt}`, use `${snexpr}.str` instead.', node.pos)
} else if final_from_sym.kind == .string && to_sym.kind == .char {
snexpr := node.expr.str()
tt := c.table.type_to_str(to_type)
- c.error('cannot cast string to `$tt`, use `$snexpr[index]` instead.', node.pos)
+ c.error('cannot cast string to `${tt}`, use `${snexpr}[index]` instead.', node.pos)
} else if final_from_sym.kind == .array && !from_type.is_ptr() && to_type != ast.string_type {
ft := c.table.type_to_str(from_type)
tt := c.table.type_to_str(to_type)
- c.error('cannot cast array `$ft` to `$tt`', node.pos)
+ c.error('cannot cast array `${ft}` to `${tt}`', node.pos)
}
if to_sym.kind == .rune && from_sym.is_string() {
snexpr := node.expr.str()
ft := c.table.type_to_str(from_type)
- c.error('cannot cast `$ft` to rune, use `${snexpr}.runes()` instead.', node.pos)
+ c.error('cannot cast `${ft}` to rune, use `${snexpr}.runes()` instead.', node.pos)
}
if to_sym.kind == .enum_ && !(c.inside_unsafe || c.file.is_translated) && from_sym.is_int() {
@@ -2679,19 +2682,19 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
if from_type in [ast.u8_type, ast.bool_type] {
snexpr := node.expr.str()
ft := c.table.type_to_str(from_type)
- c.error('cannot cast type `$ft` to string, use `${snexpr}.str()` instead.',
+ c.error('cannot cast type `${ft}` to string, use `${snexpr}.str()` instead.',
node.pos)
} else if from_type.is_real_pointer() {
snexpr := node.expr.str()
ft := c.table.type_to_str(from_type)
- c.error('cannot cast pointer type `$ft` to string, use `&u8($snexpr).vstring()` or `cstring_to_vstring($snexpr)` instead.',
+ c.error('cannot cast pointer type `${ft}` to string, use `&u8(${snexpr}).vstring()` or `cstring_to_vstring(${snexpr})` instead.',
node.pos)
} else if from_type.is_number() {
snexpr := node.expr.str()
c.error('cannot cast number to string, use `${snexpr}.str()` instead.', node.pos)
} else if from_sym.kind == .alias && final_from_sym.name != 'string' {
ft := c.table.type_to_str(from_type)
- c.error('cannot cast type `$ft` to string, use `x.str()` instead.', node.pos)
+ c.error('cannot cast type `${ft}` to string, use `x.str()` instead.', node.pos)
} else if final_from_sym.kind == .array {
snexpr := node.expr.str()
if final_from_sym.name == '[]u8' {
@@ -2699,7 +2702,7 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
node.pos)
} else {
first_elem_idx := '[0]'
- c.error('cannot cast array to string, use `$snexpr${first_elem_idx}.str()` instead.',
+ c.error('cannot cast array to string, use `${snexpr}${first_elem_idx}.str()` instead.',
node.pos)
}
} else if final_from_sym.kind == .enum_ {
@@ -2710,17 +2713,17 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
} else if final_from_sym.kind == .sum_type {
snexpr := node.expr.str()
ft := c.table.type_to_str(from_type)
- c.error('cannot cast sumtype `$ft` to string, use `${snexpr}.str()` instead.',
+ c.error('cannot cast sumtype `${ft}` to string, use `${snexpr}.str()` instead.',
node.pos)
} else if final_from_sym.kind == .function {
fnexpr := node.expr.str()
- c.error('cannot cast function `$fnexpr` to string', node.pos)
+ c.error('cannot cast function `${fnexpr}` to string', node.pos)
} else if to_type != ast.string_type && from_type == ast.string_type
&& (!(to_sym.kind == .alias && final_to_sym.name == 'string')) {
- mut error_msg := 'cannot cast a string to a type `$final_to_sym.name`, that is not an alias of string'
+ mut error_msg := 'cannot cast a string to a type `${final_to_sym.name}`, that is not an alias of string'
if mut node.expr is ast.StringLiteral {
if node.expr.val.len == 1 {
- error_msg += ", for denoting characters use `$node.expr.val` instead of '$node.expr.val'"
+ error_msg += ", for denoting characters use `${node.expr.val}` instead of '${node.expr.val}'"
}
}
c.error(error_msg, node.pos)
@@ -2732,7 +2735,7 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
ft := c.table.type_to_str(from_type)
tt := c.table.type_to_str(to_type)
kind_name := if from_sym.kind == .sum_type { 'sum type' } else { 'interface' }
- c.error('cannot cast `$ft` $kind_name value to `$tt`, use `$node.expr as $tt` instead',
+ c.error('cannot cast `${ft}` ${kind_name} value to `${tt}`, use `${node.expr} as ${tt}` instead',
node.pos)
}
@@ -2773,7 +2776,7 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
}
if !in_range {
- c.warn('$node_val does not represent a value of enum $enum_typ_name',
+ c.warn('${node_val} does not represent a value of enum ${enum_typ_name}',
node.pos)
}
}
@@ -2860,7 +2863,7 @@ fn (mut c Checker) at_expr(mut node ast.AtExpr) ast.Type {
node.val = os.dir(vmod_file_location.vmod_file)
}
.unknown {
- c.error('unknown @ identifier: ${node.name}. Available identifiers: $token.valid_at_tokens',
+ c.error('unknown @ identifier: ${node.name}. Available identifiers: ${token.valid_at_tokens}',
node.pos)
}
}
@@ -2872,7 +2875,7 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
if c.const_deps.len > 0 {
mut name := node.name
if !name.contains('.') && node.mod != 'builtin' {
- name = '${node.mod}.$node.name'
+ name = '${node.mod}.${node.name}'
}
// detect cycles, while allowing for references to the same constant,
// used inside its initialisation like: `struct Abc { x &Abc } ... const a = [ Abc{0}, Abc{unsafe{&a[0]}} ]!`
@@ -2891,7 +2894,7 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
return c.expected_type
}
}
- c.error('cycle in constant `$c.const_decl`', node.pos)
+ c.error('cycle in constant `${c.const_decl}`', node.pos)
return ast.void_type
}
c.const_deps << name
@@ -2938,7 +2941,7 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
node.pos.pos
}
if node_pos < obj.pos.pos {
- c.error('undefined variable `$node.name` (used before declaration)',
+ c.error('undefined variable `${node.name}` (used before declaration)',
node.pos)
}
is_sum_type_cast := obj.smartcasts.len != 0
@@ -2948,7 +2951,7 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
if typ == 0 {
if mut obj.expr is ast.Ident {
if obj.expr.kind == .unresolved {
- c.error('unresolved variable: `$node.name`', node.pos)
+ c.error('unresolved variable: `${node.name}`', node.pos)
return ast.void_type
}
}
@@ -2969,7 +2972,7 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
typ = obj.expr.expr_type.clear_flag(.optional).clear_flag(.result)
}
} else if obj.expr is ast.EmptyExpr {
- c.error('invalid variable `$node.name`', node.pos)
+ c.error('invalid variable `${node.name}`', node.pos)
typ = ast.void_type
} else {
typ = c.expr(obj.expr)
@@ -3001,7 +3004,7 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
}
// prepend mod to look for fn call or const
else if !name.contains('.') && node.mod != 'builtin' {
- name = '${node.mod}.$node.name'
+ name = '${node.mod}.${node.name}'
}
if mut obj := c.file.global_scope.find(name) {
match mut obj {
@@ -3015,7 +3018,7 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
}
ast.ConstField {
if !(obj.is_pub || obj.mod == c.mod || c.pref.is_test) {
- c.error('constant `$obj.name` is private', node.pos)
+ c.error('constant `${obj.name}` is private', node.pos)
}
mut typ := obj.typ
if typ == 0 {
@@ -3078,12 +3081,12 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
node.mod = saved_mod
}
if node.tok_kind == .assign {
- c.error('undefined ident: `$node.name` (use `:=` to declare a variable)', node.pos)
+ c.error('undefined ident: `${node.name}` (use `:=` to declare a variable)', node.pos)
} else if node.name == 'errcode' {
c.error('undefined ident: `errcode`; did you mean `err.code`?', node.pos)
} else {
if c.inside_ct_attr {
- c.note('`[if $node.name]` is deprecated. Use `[if $node.name?]` instead',
+ c.note('`[if ${node.name}]` is deprecated. Use `[if ${node.name}?]` instead',
node.pos)
} else {
cname_mod := node.name.all_before('.')
@@ -3096,10 +3099,10 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
}
}
}
- c.error(util.new_suggestion(node.name, const_names_in_mod).say('undefined ident: `$node.name`'),
+ c.error(util.new_suggestion(node.name, const_names_in_mod).say('undefined ident: `${node.name}`'),
node.pos)
} else {
- c.error('undefined ident: `$node.name`', node.pos)
+ c.error('undefined ident: `${node.name}`', node.pos)
}
}
}
@@ -3223,7 +3226,7 @@ pub fn (mut c Checker) select_expr(mut node ast.SelectExpr) ast.Type {
if branch.is_timeout {
if !branch.stmt.typ.is_int() {
tsym := c.table.sym(branch.stmt.typ)
- c.error('invalid type `$tsym.name` for timeout - expected integer number of nanoseconds aka `time.Duration`',
+ c.error('invalid type `${tsym.name}` for timeout - expected integer number of nanoseconds aka `time.Duration`',
branch.stmt.pos)
}
} else {
@@ -3252,7 +3255,7 @@ pub fn (mut c Checker) select_expr(mut node ast.SelectExpr) ast.Type {
} else {
'error propagation'
}
- c.error('$err_prefix not allowed in `select` key', expr.or_block.pos)
+ c.error('${err_prefix} not allowed in `select` key', expr.or_block.pos)
}
}
else {
@@ -3281,13 +3284,13 @@ pub fn (mut c Checker) lock_expr(mut node ast.LockExpr) ast.Type {
id_name := node.lockeds[i].str()
if !e_typ.has_flag(.shared_f) {
obj_type := if node.lockeds[i] is ast.Ident { 'variable' } else { 'struct element' }
- c.error('`$id_name` must be declared as `shared` $obj_type to be locked',
+ c.error('`${id_name}` must be declared as `shared` ${obj_type} to be locked',
node.lockeds[i].pos())
}
if id_name in c.locked_names {
- c.error('`$id_name` is already locked', node.lockeds[i].pos())
+ c.error('`${id_name}` is already locked', node.lockeds[i].pos())
} else if id_name in c.rlocked_names {
- c.error('`$id_name` is already read-locked', node.lockeds[i].pos())
+ c.error('`${id_name}` is already read-locked', node.lockeds[i].pos())
}
if node.is_rlock[i] {
c.rlocked_names << id_name
@@ -3325,8 +3328,8 @@ fn (mut c Checker) find_definition(ident ast.Ident) !ast.Expr {
match ident.kind {
.unresolved, .blank_ident { return error('none') }
.variable, .constant { return c.find_obj_definition(ident.obj) }
- .global { return error('$ident.name is a global variable') }
- .function { return error('$ident.name is a function') }
+ .global { return error('${ident.name} is a global variable') }
+ .function { return error('${ident.name} is a function') }
}
}
@@ -3339,19 +3342,19 @@ fn (mut c Checker) find_obj_definition(obj ast.ScopeObject) !ast.Expr {
mut expr := ast.empty_expr
if obj is ast.Var {
if obj.is_mut {
- return error('`$name` is mut and may have changed since its definition')
+ return error('`${name}` is mut and may have changed since its definition')
}
expr = obj.expr
} else if obj is ast.ConstField {
expr = obj.expr
} else {
- return error('`$name` is a global variable and is unknown at compile time')
+ return error('`${name}` is a global variable and is unknown at compile time')
}
if mut expr is ast.Ident {
return c.find_definition(expr)
}
if !expr.is_pure_literal() {
- return error('definition of `$name` is unknown at compile time')
+ return error('definition of `${name}` is unknown at compile time')
}
return expr
}
@@ -3383,7 +3386,8 @@ pub fn (mut c Checker) postfix_expr(mut node ast.PostfixExpr) ast.Type {
}
if !(typ_sym.is_number() || ((c.inside_unsafe || c.pref.translated) && is_non_void_pointer)) {
typ_str := c.table.type_to_str(typ)
- c.error('invalid operation: $node.op.str() (non-numeric type `$typ_str`)', node.pos)
+ c.error('invalid operation: ${node.op.str()} (non-numeric type `${typ_str}`)',
+ node.pos)
} else {
node.auto_locked, _ = c.fail_if_immutable(node.expr)
}
@@ -3405,15 +3409,15 @@ pub fn (mut c Checker) mark_as_referenced(mut node ast.Expr, as_interface bool)
if obj.is_stack_obj && !type_sym.is_heap() && !c.pref.translated
&& !c.file.is_translated {
suggestion := if type_sym.kind == .struct_ {
- 'declaring `$type_sym.name` as `[heap]`'
+ 'declaring `${type_sym.name}` as `[heap]`'
} else {
- 'wrapping the `$type_sym.name` object in a `struct` declared as `[heap]`'
+ 'wrapping the `${type_sym.name}` object in a `struct` declared as `[heap]`'
}
mischief := if as_interface { 'used as interface object' } else { 'referenced' }
- c.error('`$node.name` cannot be $mischief outside `unsafe` blocks as it might be stored on stack. Consider ${suggestion}.',
+ c.error('`${node.name}` cannot be ${mischief} outside `unsafe` blocks as it might be stored on stack. Consider ${suggestion}.',
node.pos)
} else if type_sym.kind == .array_fixed {
- c.error('cannot reference fixed array `$node.name` outside `unsafe` blocks as it is supposed to be stored on stack',
+ c.error('cannot reference fixed array `${node.name}` outside `unsafe` blocks as it is supposed to be stored on stack',
node.pos)
} else {
match type_sym.kind {
@@ -3492,7 +3496,7 @@ pub fn (mut c Checker) prefix_expr(mut node ast.PrefixExpr) ast.Type {
}
if expr in [ast.BoolLiteral, ast.CallExpr, ast.CharLiteral, ast.FloatLiteral, ast.IntegerLiteral,
ast.InfixExpr, ast.StringLiteral, ast.StringInterLiteral] {
- c.error('cannot take the address of $expr', node.pos)
+ c.error('cannot take the address of ${expr}', node.pos)
}
if mut node.right is ast.Ident {
if node.right.kind == .constant && !c.inside_unsafe && c.pref.experimental {
@@ -3547,7 +3551,7 @@ pub fn (mut c Checker) prefix_expr(mut node ast.PrefixExpr) ast.Type {
}
if !right_type.is_pointer() && !c.pref.translated && !c.file.is_translated {
s := c.table.type_to_str(right_type)
- c.error('invalid indirect of `$s`, the type `$right_sym.name` is not a pointer',
+ c.error('invalid indirect of `${s}`, the type `${right_sym.name}` is not a pointer',
node.pos)
}
if right_type.is_voidptr() {
@@ -3578,7 +3582,7 @@ pub fn (mut c Checker) prefix_expr(mut node ast.PrefixExpr) ast.Type {
}
fn (mut c Checker) type_error_for_operator(op_label string, types_label string, found_type_label string, pos token.Pos) {
- c.error('operator `$op_label` can only be used with $types_label types, but the value after `$op_label` is of type `$found_type_label` instead',
+ c.error('operator `${op_label}` can only be used with ${types_label} types, but the value after `${op_label}` is of type `${found_type_label}` instead',
pos)
}
@@ -3590,30 +3594,30 @@ fn (mut c Checker) check_index(typ_sym &ast.TypeSymbol, index ast.Expr, index_ty
&& (index_type_sym.info as ast.Alias).parent_type.is_int())
|| (c.pref.translated && index_type.is_any_kind_of_pointer())) {
type_str := if typ_sym.kind == .string {
- 'non-integer string index `$index_type_sym.name`'
+ 'non-integer string index `${index_type_sym.name}`'
} else {
- 'non-integer index `$index_type_sym.name` (array type `$typ_sym.name`)'
+ 'non-integer index `${index_type_sym.name}` (array type `${typ_sym.name}`)'
}
- c.error('$type_str', pos)
+ c.error('${type_str}', pos)
}
if index is ast.IntegerLiteral && !is_gated {
if index.val[0] == `-` {
- c.error('negative index `$index.val`', index.pos)
+ c.error('negative index `${index.val}`', index.pos)
} else if typ_sym.kind == .array_fixed {
i := index.val.int()
info := typ_sym.info as ast.ArrayFixed
if (!range_index && i >= info.size) || (range_index && i > info.size) {
- c.error('index out of range (index: $i, len: $info.size)', index.pos)
+ c.error('index out of range (index: ${i}, len: ${info.size})', index.pos)
}
}
}
if index_type.has_flag(.optional) || index_type.has_flag(.result) {
type_str := if typ_sym.kind == .string {
- '(type `$typ_sym.name`)'
+ '(type `${typ_sym.name}`)'
} else {
- '(array type `$typ_sym.name`)'
+ '(array type `${typ_sym.name}`)'
}
- c.error('cannot use optional or result as index $type_str', pos)
+ c.error('cannot use optional or result as index ${type_str}', pos)
}
}
}
@@ -3621,7 +3625,7 @@ fn (mut c Checker) check_index(typ_sym &ast.TypeSymbol, index ast.Expr, index_ty
pub fn (mut c Checker) index_expr(mut node ast.IndexExpr) ast.Type {
mut typ := c.expr(node.left)
if typ == 0 {
- c.error('unknown type for expression `$node.left`', node.pos)
+ c.error('unknown type for expression `${node.left}`', node.pos)
return typ
}
mut typ_sym := c.table.final_sym(typ)
@@ -3652,12 +3656,12 @@ pub fn (mut c Checker) index_expr(mut node ast.IndexExpr) ast.Type {
}
if typ_sym.kind !in [.array, .array_fixed, .string, .map] && !typ.is_ptr()
&& typ !in [ast.byteptr_type, ast.charptr_type] && !typ.has_flag(.variadic) {
- c.error('type `$typ_sym.name` does not support indexing', node.pos)
+ c.error('type `${typ_sym.name}` does not support indexing', node.pos)
}
if typ.has_flag(.optional) {
- c.error('type `?$typ_sym.name` is optional, it does not support indexing', node.left.pos())
+ c.error('type `?${typ_sym.name}` is optional, it does not support indexing', node.left.pos())
} else if typ.has_flag(.result) {
- c.error('type `!$typ_sym.name` is result, it does not support indexing', node.left.pos())
+ c.error('type `!${typ_sym.name}` is result, it does not support indexing', node.left.pos())
}
if typ_sym.kind == .string && !typ.is_ptr() && node.is_setter {
c.error('cannot assign to s[i] since V strings are immutable\n' +
@@ -3675,7 +3679,7 @@ pub fn (mut c Checker) index_expr(mut node ast.IndexExpr) ast.Type {
}
if !is_ok && node.index is ast.RangeExpr {
s := c.table.type_to_str(typ)
- c.error('type `$s` does not support slicing', node.pos)
+ c.error('type `${s}` does not support slicing', node.pos)
} else if !c.inside_unsafe && !is_ok && !c.pref.translated && !c.file.is_translated {
c.warn('pointer indexing is only allowed in `unsafe` blocks', node.pos)
}
@@ -3705,7 +3709,7 @@ pub fn (mut c Checker) index_expr(mut node ast.IndexExpr) ast.Type {
index_type := c.expr(node.index)
if !c.check_types(index_type, info.key_type) {
err := c.expected_msg(index_type, info.key_type)
- c.error('invalid key: $err', node.pos)
+ c.error('invalid key: ${err}', node.pos)
}
value_sym := c.table.sym(info.value_type)
if !node.is_setter && value_sym.kind == .sum_type && node.or_expr.kind == .absent
@@ -3756,13 +3760,13 @@ pub fn (mut c Checker) enum_val(mut node ast.EnumVal) ast.Type {
if node.enum_name.starts_with('${c.mod}.') {
typ_idx = c.table.find_type_idx(node.enum_name['${c.mod}.'.len..])
if typ_idx == 0 {
- c.error('unknown enum `$node.enum_name` (type_idx=0)', node.pos)
+ c.error('unknown enum `${node.enum_name}` (type_idx=0)', node.pos)
return ast.void_type
}
}
if typ_idx == 0 {
// the actual type is still unknown, produce an error, instead of panic:
- c.error('unknown enum `$node.enum_name` (type_idx=0)', node.pos)
+ c.error('unknown enum `${node.enum_name}` (type_idx=0)', node.pos)
return ast.void_type
}
}
@@ -3785,7 +3789,7 @@ pub fn (mut c Checker) enum_val(mut node ast.EnumVal) ast.Type {
fsym := c.table.final_sym(typ)
if fsym.kind != .enum_ && !c.pref.translated && !c.file.is_translated {
// TODO in C int fields can be compared to enums, need to handle that in C2V
- c.error('expected type is not an enum (`$typ_sym.name`)', node.pos)
+ c.error('expected type is not an enum (`${typ_sym.name}`)', node.pos)
return ast.void_type
}
if fsym.info !is ast.Enum {
@@ -3793,12 +3797,12 @@ pub fn (mut c Checker) enum_val(mut node ast.EnumVal) ast.Type {
return ast.void_type
}
if !(typ_sym.is_pub || typ_sym.mod == c.mod) {
- c.error('enum `$typ_sym.name` is private', node.pos)
+ c.error('enum `${typ_sym.name}` is private', node.pos)
}
info := typ_sym.enum_info()
if node.val !in info.vals {
suggestion := util.new_suggestion(node.val, info.vals)
- c.error(suggestion.say('enum `$typ_sym.name` does not have a value `$node.val`'),
+ c.error(suggestion.say('enum `${typ_sym.name}` does not have a value `${node.val}`'),
node.pos)
}
node.typ = typ
@@ -3812,7 +3816,7 @@ pub fn (mut c Checker) chan_init(mut node ast.ChanInit) ast.Type {
if node.elem_type != 0 {
elem_sym := c.table.sym(node.elem_type)
if elem_sym.kind == .placeholder {
- c.error('unknown type `$elem_sym.name`', node.elem_type_pos)
+ c.error('unknown type `${elem_sym.name}`', node.elem_type_pos)
}
}
if node.has_cap {
@@ -3832,7 +3836,7 @@ pub fn (mut c Checker) offset_of(node ast.OffsetOf) ast.Type {
return ast.u32_type
}
if !c.table.struct_has_field(sym, node.field) {
- c.error('struct `$sym.name` has no field called `$node.field`', node.pos)
+ c.error('struct `${sym.name}` has no field called `${node.field}`', node.pos)
}
return ast.u32_type
}
@@ -3844,7 +3848,7 @@ pub fn (mut c Checker) check_dup_keys(node &ast.MapInit, i int) {
key_j := node.keys[j]
if key_j is ast.StringLiteral {
if key_i.val == key_j.val {
- c.error('duplicate key "$key_i.val" in map literal', key_i.pos)
+ c.error('duplicate key "${key_i.val}" in map literal', key_i.pos)
}
}
}
@@ -3853,7 +3857,7 @@ pub fn (mut c Checker) check_dup_keys(node &ast.MapInit, i int) {
key_j := node.keys[j]
if key_j is ast.IntegerLiteral {
if key_i.val == key_j.val {
- c.error('duplicate key "$key_i.val" in map literal', key_i.pos)
+ c.error('duplicate key "${key_i.val}" in map literal', key_i.pos)
}
}
}
@@ -3870,12 +3874,12 @@ pub fn (mut c Checker) add_error_detail_with_pos(msg string, pos token.Pos) {
}
pub fn (mut c Checker) add_instruction_for_optional_type() {
- c.add_error_detail_with_pos('prepend ? before the declaration of the return type of `$c.table.cur_fn.name`',
+ c.add_error_detail_with_pos('prepend ? before the declaration of the return type of `${c.table.cur_fn.name}`',
c.table.cur_fn.return_type_pos)
}
pub fn (mut c Checker) add_instruction_for_result_type() {
- c.add_error_detail_with_pos('prepend ! before the declaration of the return type of `$c.table.cur_fn.name`',
+ c.add_error_detail_with_pos('prepend ! before the declaration of the return type of `${c.table.cur_fn.name}`',
c.table.cur_fn.return_type_pos)
}
@@ -3886,7 +3890,7 @@ pub fn (mut c Checker) warn(s string, pos token.Pos) {
pub fn (mut c Checker) error(message string, pos token.Pos) {
$if checker_exit_on_first_error ? {
- eprintln('\n\n>> checker error: $message, pos: $pos')
+ eprintln('\n\n>> checker error: ${message}, pos: ${pos}')
print_backtrace()
exit(1)
}
@@ -4034,7 +4038,7 @@ fn (mut c Checker) fetch_field_name(field ast.StructField) string {
fn (mut c Checker) trace(fbase string, message string) {
if c.file.path_base == fbase {
- println('> c.trace | ${fbase:-10s} | $message')
+ println('> c.trace | ${fbase:-10s} | ${message}')
}
}
@@ -4045,14 +4049,14 @@ fn (mut c Checker) ensure_type_exists(typ ast.Type, pos token.Pos) ? {
}
sym := c.table.sym(typ)
if !c.is_builtin_mod && sym.kind == .struct_ && !sym.is_pub && sym.mod != c.mod {
- c.error('struct `$sym.name` was declared as private to module `$sym.mod`, so it can not be used inside module `$c.mod`',
+ c.error('struct `${sym.name}` was declared as private to module `${sym.mod}`, so it can not be used inside module `${c.mod}`',
pos)
return
}
match sym.kind {
.placeholder {
if sym.language == .v && !sym.name.starts_with('C.') {
- c.error(util.new_suggestion(sym.name, c.table.known_type_names()).say('unknown type `$sym.name`'),
+ c.error(util.new_suggestion(sym.name, c.table.known_type_names()).say('unknown type `${sym.name}`'),
pos)
return
}
@@ -4062,9 +4066,9 @@ fn (mut c Checker) ensure_type_exists(typ ast.Type, pos token.Pos) ? {
// suggestions due to f32 comparision issue.
if !c.is_builtin_mod {
msg := if sym.kind == .int_literal {
- 'unknown type `$sym.name`.\nDid you mean `int`?'
+ 'unknown type `${sym.name}`.\nDid you mean `int`?'
} else {
- 'unknown type `$sym.name`.\nDid you mean `f64`?'
+ 'unknown type `${sym.name}`.\nDid you mean `f64`?'
}
c.error(msg, pos)
return
@@ -4105,7 +4109,7 @@ pub fn (mut c Checker) fail_if_unreadable(expr ast.Expr, typ ast.Type, what stri
if typ.has_flag(.shared_f) {
if expr.name !in c.rlocked_names && expr.name !in c.locked_names {
action := if what == 'argument' { 'passed' } else { 'used' }
- c.error('`$expr.name` is `shared` and must be `rlock`ed or `lock`ed to be $action as non-mut $what',
+ c.error('`${expr.name}` is `shared` and must be `rlock`ed or `lock`ed to be ${action} as non-mut ${what}',
expr.pos)
}
}
@@ -4114,10 +4118,10 @@ pub fn (mut c Checker) fail_if_unreadable(expr ast.Expr, typ ast.Type, what stri
ast.SelectorExpr {
pos = expr.pos
if typ.has_flag(.shared_f) {
- expr_name := '${expr.expr}.$expr.field_name'
+ expr_name := '${expr.expr}.${expr.field_name}'
if expr_name !in c.rlocked_names && expr_name !in c.locked_names {
action := if what == 'argument' { 'passed' } else { 'used' }
- c.error('`$expr_name` is `shared` and must be `rlock`ed or `lock`ed to be $action as non-mut $what',
+ c.error('`${expr_name}` is `shared` and must be `rlock`ed or `lock`ed to be ${action} as non-mut ${what}',
expr.pos)
}
return
@@ -4150,7 +4154,7 @@ pub fn (mut c Checker) fail_if_unreadable(expr ast.Expr, typ ast.Type, what stri
}
}
if typ.has_flag(.shared_f) {
- c.error('you have to create a handle and `rlock` it to use a `shared` element as non-mut $what',
+ c.error('you have to create a handle and `rlock` it to use a `shared` element as non-mut ${what}',
pos)
}
}
@@ -4171,7 +4175,7 @@ pub fn (mut c Checker) goto_stmt(node ast.GotoStmt) {
c.warn('`goto` requires `unsafe` (consider using labelled break/continue)', node.pos)
}
if c.table.cur_fn != unsafe { nil } && node.name !in c.table.cur_fn.label_names {
- c.error('unknown label `$node.name`', node.pos)
+ c.error('unknown label `${node.name}`', node.pos)
}
c.goto_labels[node.name].is_used = true // Register a label use
// TODO: check label doesn't bypass variable declarations
@@ -4181,7 +4185,7 @@ fn (mut c Checker) check_unused_labels() {
for name, label in c.goto_labels {
if !label.is_used {
// TODO show label's location
- c.warn('label `$name` defined and not used', label.pos)
+ c.warn('label `${name}` defined and not used', label.pos)
c.goto_labels[name].is_used = true // so that this warning is not shown again
}
}
@@ -4202,19 +4206,19 @@ fn (mut c Checker) deprecate(kind string, name string, attrs []ast.Attr, pos tok
}
}
}
- start_message := '$kind `$name`'
+ start_message := '${kind} `${name}`'
error_time := after_time.add_days(180)
if error_time < now {
- c.error(semicolonize('$start_message has been deprecated since $after_time.ymmdd()',
+ c.error(semicolonize('${start_message} has been deprecated since ${after_time.ymmdd()}',
deprecation_message), pos)
} else if after_time < now {
- c.warn(semicolonize('$start_message has been deprecated since $after_time.ymmdd(), it will be an error after $error_time.ymmdd()',
+ c.warn(semicolonize('${start_message} has been deprecated since ${after_time.ymmdd()}, it will be an error after ${error_time.ymmdd()}',
deprecation_message), pos)
} else if after_time == now {
- c.warn(semicolonize('$start_message has been deprecated', deprecation_message),
+ c.warn(semicolonize('${start_message} has been deprecated', deprecation_message),
pos)
} else {
- c.note(semicolonize('$start_message will be deprecated after $after_time.ymmdd(), and will become an error after $error_time.ymmdd()',
+ c.note(semicolonize('${start_message} will be deprecated after ${after_time.ymmdd()}, and will become an error after ${error_time.ymmdd()}',
deprecation_message), pos)
}
}
@@ -4223,5 +4227,5 @@ fn semicolonize(main string, details string) string {
if details == '' {
return main
}
- return '$main; $details'
+ return '${main}; ${details}'
}
diff --git a/vlib/v/checker/comptime.v b/vlib/v/checker/comptime.v
index 9d4019ce1e..f881f48761 100644
--- a/vlib/v/checker/comptime.v
+++ b/vlib/v/checker/comptime.v
@@ -22,7 +22,7 @@ fn (mut c Checker) comptime_call(mut node ast.ComptimeCall) ast.Type {
return ast.void_type
}
if node.is_env {
- env_value := util.resolve_env_value("\$env('$node.args_var')", false) or {
+ env_value := util.resolve_env_value("\$env('${node.args_var}')", false) or {
c.error(err.msg(), node.env_pos)
return ast.string_type
}
@@ -55,12 +55,12 @@ fn (mut c Checker) comptime_call(mut node ast.ComptimeCall) ast.Type {
// ... look relative to the source file:
escaped_path = os.real_path(os.join_path_single(os.dir(c.file.path), escaped_path))
if !os.exists(escaped_path) {
- c.error('"$escaped_path" does not exist so it cannot be embedded',
+ c.error('"${escaped_path}" does not exist so it cannot be embedded',
node.pos)
return ast.string_type
}
if !os.is_file(escaped_path) {
- c.error('"$escaped_path" is not a file so it cannot be embedded',
+ c.error('"${escaped_path}" is not a file so it cannot be embedded',
node.pos)
return ast.string_type
}
@@ -72,8 +72,8 @@ fn (mut c Checker) comptime_call(mut node ast.ComptimeCall) ast.Type {
}
// c.file.embedded_files << node.embed_file
if node.embed_file.compression_type !in constants.valid_comptime_compression_types {
- supported := constants.valid_comptime_compression_types.map('.$it').join(', ')
- c.error('not supported compression type: .${node.embed_file.compression_type}. supported: $supported',
+ supported := constants.valid_comptime_compression_types.map('.${it}').join(', ')
+ c.error('not supported compression type: .${node.embed_file.compression_type}. supported: ${supported}',
node.pos)
}
return c.table.find_type_idx('v.embed_file.EmbedFileData')
@@ -119,12 +119,12 @@ fn (mut c Checker) comptime_call(mut node ast.ComptimeCall) ast.Type {
}
// s.$my_str()
v := node.scope.find_var(node.method_name) or {
- c.error('unknown identifier `$node.method_name`', node.method_pos)
+ c.error('unknown identifier `${node.method_name}`', node.method_pos)
return ast.void_type
}
if v.typ != ast.string_type {
s := c.expected_msg(v.typ, ast.string_type)
- c.error('invalid string method call: $s', node.method_pos)
+ c.error('invalid string method call: ${s}', node.method_pos)
return ast.void_type
}
// note: we should use a compile-time evaluation function rather than handle here
@@ -137,7 +137,7 @@ fn (mut c Checker) comptime_call(mut node ast.ComptimeCall) ast.Type {
}
left_sym := c.table.sym(c.unwrap_generic(node.left_type))
f := left_sym.find_method(method_name) or {
- c.error('could not find method `$method_name`', node.method_pos)
+ c.error('could not find method `${method_name}`', node.method_pos)
return ast.void_type
}
node.result_type = f.return_type
@@ -149,7 +149,8 @@ fn (mut c Checker) comptime_selector(mut node ast.ComptimeSelector) ast.Type {
expr_type := c.unwrap_generic(c.expr(node.field_expr))
expr_sym := c.table.sym(expr_type)
if expr_type != ast.string_type {
- c.error('expected `string` instead of `$expr_sym.name` (e.g. `field.name`)', node.field_expr.pos())
+ c.error('expected `string` instead of `${expr_sym.name}` (e.g. `field.name`)',
+ node.field_expr.pos())
}
if mut node.field_expr is ast.SelectorExpr {
left_pos := node.field_expr.expr.pos()
@@ -161,7 +162,7 @@ fn (mut c Checker) comptime_selector(mut node ast.ComptimeSelector) ast.Type {
if expr_name in c.comptime_fields_type {
return c.comptime_fields_type[expr_name]
}
- c.error('unknown `\$for` variable `$expr_name`', left_pos)
+ c.error('unknown `\$for` variable `${expr_name}`', left_pos)
} else {
c.error('expected selector expression e.g. `$(field.name)`', node.field_expr.pos())
}
@@ -172,7 +173,7 @@ fn (mut c Checker) comptime_for(node ast.ComptimeFor) {
typ := c.unwrap_generic(node.typ)
sym := c.table.sym(typ)
if sym.kind == .placeholder || typ.has_flag(.generic) {
- c.error('unknown type `$sym.name`', node.typ_pos)
+ c.error('unknown type `${sym.name}`', node.typ_pos)
}
if node.kind == .fields {
if sym.kind == .struct_ {
@@ -386,7 +387,7 @@ fn (mut c Checker) verify_vweb_params_for_method(node ast.Fn) (bool, int, int) {
param_sym := c.table.final_sym(param.typ)
if !(param_sym.is_string() || param_sym.is_number() || param_sym.is_float()
|| param_sym.kind == .bool) {
- c.error('invalid type `$param_sym.name` for parameter `$param.name` in vweb app method `$node.name`',
+ c.error('invalid type `${param_sym.name}` for parameter `${param.name}` in vweb app method `${node.name}`',
param.pos)
}
}
@@ -420,7 +421,7 @@ fn (mut c Checker) verify_all_vweb_routes() {
if f.return_type == typ_vweb_result && f.receiver.typ == m.params[0].typ
&& f.name == m.name && !f.attrs.contains('post') {
c.change_current_file(f.source_file) // setup of file path for the warning
- c.warn('mismatched parameters count between vweb method `${sym_app.name}.$m.name` ($nargs) and route attribute $m.attrs ($nroute_attributes)',
+ c.warn('mismatched parameters count between vweb method `${sym_app.name}.${m.name}` (${nargs}) and route attribute ${m.attrs} (${nroute_attributes})',
f.pos)
}
}
@@ -447,7 +448,7 @@ fn (mut c Checker) evaluate_once_comptime_if_attribute(mut node ast.Attr) bool {
return node.ct_skip
} else {
if node.ct_expr.name !in constants.valid_comptime_not_user_defined {
- c.note('`[if $node.ct_expr.name]` is deprecated. Use `[if $node.ct_expr.name ?]` instead',
+ c.note('`[if ${node.ct_expr.name}]` is deprecated. Use `[if ${node.ct_expr.name} ?]` instead',
node.pos)
node.ct_skip = node.ct_expr.name !in c.pref.compile_defines
node.ct_evaled = true
@@ -566,7 +567,7 @@ fn (mut c Checker) comptime_if_branch(cond ast.Expr, pos token.Pos) ComptimeBran
if !c.check_types(right_type, left_type) {
left_name := c.table.type_to_str(left_type)
right_name := c.table.type_to_str(right_type)
- c.error('mismatched types `$left_name` and `$right_name`',
+ c.error('mismatched types `${left_name}` and `${right_name}`',
cond.pos)
}
// :)
@@ -682,7 +683,7 @@ fn (mut c Checker) comptime_if_branch(cond ast.Expr, pos token.Pos) ComptimeBran
if cond.obj !is ast.Var && cond.obj !is ast.ConstField
&& cond.obj !is ast.GlobalField {
if !c.inside_ct_attr {
- c.error('unknown var: `$cname`', pos)
+ c.error('unknown var: `${cname}`', pos)
}
return .unknown
}
@@ -692,7 +693,7 @@ fn (mut c Checker) comptime_if_branch(cond ast.Expr, pos token.Pos) ComptimeBran
}
if !c.check_types(typ, ast.bool_type) {
type_name := c.table.type_to_str(typ)
- c.error('non-bool type `$type_name` used as \$if condition', cond.pos)
+ c.error('non-bool type `${type_name}` used as \$if condition', cond.pos)
}
// :)
// until `v.eval` is stable, I can't think of a better way to do this
diff --git a/vlib/v/checker/containers.v b/vlib/v/checker/containers.v
index 8eb88b722f..f1dce5e3d5 100644
--- a/vlib/v/checker/containers.v
+++ b/vlib/v/checker/containers.v
@@ -196,7 +196,7 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
}
}
c.check_expected(typ, elem_type) or {
- c.error('invalid array element: $err.msg()', expr.pos())
+ c.error('invalid array element: ${err.msg()}', expr.pos())
}
}
}
@@ -233,7 +233,7 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
fixed_size = comptime_value.i64() or { fixed_size }
}
} else {
- c.error('non-constant array bound `$init_expr.name`', init_expr.pos)
+ c.error('non-constant array bound `${init_expr.name}`', init_expr.pos)
}
}
ast.InfixExpr {
@@ -246,7 +246,7 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
}
}
if fixed_size <= 0 {
- c.error('fixed size cannot be zero or negative (fixed_size: $fixed_size)',
+ c.error('fixed size cannot be zero or negative (fixed_size: ${fixed_size})',
init_expr.pos())
}
idx := c.table.find_or_register_array_fixed(node.elem_type, int(fixed_size), init_expr)
@@ -265,7 +265,7 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
fn (mut c Checker) check_array_init_para_type(para string, expr ast.Expr, pos token.Pos) {
sym := c.table.sym(c.unwrap_generic(c.expr(expr)))
if sym.kind !in [.int, .int_literal] {
- c.error('array $para needs to be an int', pos)
+ c.error('array ${para} needs to be an int', pos)
}
}
@@ -307,10 +307,10 @@ pub fn (mut c Checker) map_init(mut node ast.MapInit) ast.Type {
if val_info.generic_types.len > 0 && val_info.concrete_types.len == 0
&& !info.value_type.has_flag(.generic) {
if c.table.cur_concrete_types.len == 0 {
- c.error('generic struct `$val_sym.name` must specify type parameter, e.g. Foo',
+ c.error('generic struct `${val_sym.name}` must specify type parameter, e.g. Foo',
node.pos)
} else {
- c.error('generic struct `$val_sym.name` must specify type parameter, e.g. Foo',
+ c.error('generic struct `${val_sym.name}` must specify type parameter, e.g. Foo',
node.pos)
}
}
@@ -369,7 +369,7 @@ pub fn (mut c Checker) map_init(mut node ast.MapInit) ast.Type {
if !c.check_types(key_type, key0_type) || (i == 0 && key_type.is_number()
&& key0_type.is_number() && key0_type != ast.mktyp(key_type)) {
msg := c.expected_msg(key_type, key0_type)
- c.error('invalid map key: $msg', key.pos())
+ c.error('invalid map key: ${msg}', key.pos())
same_key_type = false
}
if expecting_interface_map {
@@ -388,13 +388,13 @@ pub fn (mut c Checker) map_init(mut node ast.MapInit) ast.Type {
continue
} else {
msg := c.expected_msg(val_type, node.value_type)
- c.error('invalid map value: $msg', val.pos())
+ c.error('invalid map value: ${msg}', val.pos())
}
}
if !c.check_types(val_type, val0_type) || (i == 0 && val_type.is_number()
&& val0_type.is_number() && val0_type != ast.mktyp(val_type)) {
msg := c.expected_msg(val_type, val0_type)
- c.error('invalid map value: $msg', val.pos())
+ c.error('invalid map value: ${msg}', val.pos())
}
}
if same_key_type {
diff --git a/vlib/v/checker/fn.v b/vlib/v/checker/fn.v
index 1a69b677b8..7fff7e261f 100644
--- a/vlib/v/checker/fn.v
+++ b/vlib/v/checker/fn.v
@@ -8,7 +8,7 @@ import v.token
fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
$if trace_post_process_generic_fns_types ? {
if node.generic_names.len > 0 {
- eprintln('>>> post processing node.name: ${node.name:-30} | $node.generic_names <=> $c.table.cur_concrete_types')
+ eprintln('>>> post processing node.name: ${node.name:-30} | ${node.generic_names} <=> ${c.table.cur_concrete_types}')
}
}
// notice vweb route methods (non-generic method)
@@ -89,7 +89,7 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
if node.language == .v && !c.is_builtin_mod && !node.is_anon {
c.check_valid_snake_case(node.name, 'function name', node.pos)
if !node.is_method && node.mod == 'main' && node.short_name in c.table.builtin_pub_fns {
- c.error('cannot redefine builtin public function `$node.short_name`', node.pos)
+ c.error('cannot redefine builtin public function `${node.short_name}`', node.pos)
}
}
if node.name == 'main.main' {
@@ -98,7 +98,7 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
if node.return_type != ast.void_type {
if ct_attr_idx := node.attrs.find_comptime_define() {
sexpr := node.attrs[ct_attr_idx].ct_expr.str()
- c.error('only functions that do NOT return values can have `[if $sexpr]` tags',
+ c.error('only functions that do NOT return values can have `[if ${sexpr}]` tags',
node.pos)
}
if node.generic_names.len > 0 {
@@ -136,7 +136,7 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
for name in generic_names {
if name !in node.generic_names {
fn_generic_names := node.generic_names.join(', ')
- c.error('generic type name `$name` is not mentioned in fn `$node.name<$fn_generic_names>`',
+ c.error('generic type name `${name}` is not mentioned in fn `${node.name}<${fn_generic_names}>`',
node.return_type_pos)
}
}
@@ -162,14 +162,14 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
}
if sym.name.len == 1 {
// One letter types are reserved for generics.
- c.error('unknown type `$sym.name`', node.receiver_pos)
+ c.error('unknown type `${sym.name}`', node.receiver_pos)
return
}
// make sure interface does not implement its own interface methods
if mut sym.info is ast.Interface && sym.has_method(node.name) {
// if the method is in info.methods then it is an interface method
if sym.info.has_method(node.name) {
- c.error('interface `$sym.name` cannot implement its own interface method `$node.name`',
+ c.error('interface `${sym.name}` cannot implement its own interface method `${node.name}`',
node.pos)
}
}
@@ -177,7 +177,7 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
if field := c.table.find_field(sym, node.name) {
field_sym := c.table.sym(field.typ)
if field_sym.kind == .function {
- c.error('type `$sym.name` has both field and method named `$node.name`',
+ c.error('type `${sym.name}` has both field and method named `${node.name}`',
node.pos)
}
}
@@ -187,7 +187,7 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
}
if !node.receiver.typ.is_ptr() {
tname := sym.name.after_char(`.`)
- c.error('`.free()` methods should be defined on either a `(mut x &$tname)`, or a `(x &$tname)` receiver',
+ c.error('`.free()` methods should be defined on either a `(mut x &${tname})`, or a `(x &${tname})` receiver',
node.receiver_pos)
}
if node.params.len != 1 {
@@ -199,7 +199,7 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
if node.method_idx < sym.methods.len {
sym.methods[node.method_idx].source_fn = voidptr(node)
} else {
- c.error('method index: $node.method_idx >= sym.methods.len: $sym.methods.len',
+ c.error('method index: ${node.method_idx} >= sym.methods.len: ${sym.methods.len}',
node.pos)
}
}
@@ -208,7 +208,7 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
for mut param in node.params {
c.ensure_type_exists(param.typ, param.type_pos) or { return }
if reserved_type_names_chk.matches(param.name) {
- c.error('invalid use of reserved type `$param.name` as a parameter name',
+ c.error('invalid use of reserved type `${param.name}` as a parameter name',
param.pos)
}
if param.typ.has_flag(.optional) || param.typ.has_flag(.result) {
@@ -247,7 +247,7 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
for name in generic_names {
if name !in node.generic_names {
fn_generic_names := node.generic_names.join(', ')
- c.error('generic type name `$name` is not mentioned in fn `$node.name<$fn_generic_names>`',
+ c.error('generic type name `${name}` is not mentioned in fn `${node.name}<${fn_generic_names}>`',
param.type_pos)
}
}
@@ -295,12 +295,12 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
c.error('argument cannot be `mut` for operator overloading', node.pos)
} else if !c.check_same_type_ignoring_pointers(node.receiver.typ,
node.params[1].typ) {
- c.error('expected `$receiver_sym.name` not `$param_sym.name` - both operands must be the same type for operator overloading',
+ c.error('expected `${receiver_sym.name}` not `${param_sym.name}` - both operands must be the same type for operator overloading',
node.params[1].type_pos)
} else if node.name in ['<', '=='] && node.return_type != ast.bool_type {
c.error('operator comparison methods should return `bool`', node.pos)
} else if parent_sym.is_primitive() {
- c.error('cannot define operator methods on type alias for `$parent_sym.name`',
+ c.error('cannot define operator methods on type alias for `${parent_sym.name}`',
node.pos)
}
}
@@ -365,7 +365,7 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
if c.inside_anon_fn {
c.error('missing return at the end of an anonymous function', node.pos)
} else if !node.attrs.contains('_naked') {
- c.error('missing return at end of function `$node.name`', node.pos)
+ c.error('missing return at end of function `${node.name}`', node.pos)
}
}
node.source_file = c.file
@@ -424,10 +424,10 @@ fn (mut c Checker) anon_fn(mut node ast.AnonFn) ast.Type {
mut has_generic := false
for mut var in node.inherited_vars {
parent_var := node.decl.scope.parent.find_var(var.name) or {
- panic('unexpected checker error: cannot find parent of inherited variable `$var.name`')
+ panic('unexpected checker error: cannot find parent of inherited variable `${var.name}`')
}
if var.is_mut && !parent_var.is_mut {
- c.error('original `$parent_var.name` is immutable, declare it with `mut` to make it mutable',
+ c.error('original `${parent_var.name}` is immutable, declare it with `mut` to make it mutable',
var.pos)
}
var.typ = parent_var.typ
@@ -449,7 +449,7 @@ pub fn (mut c Checker) call_expr(mut node ast.CallExpr) ast.Type {
// Check whether the inner function definition is before the call
if var := node.scope.find_var(node.name) {
if var.expr is ast.AnonFn && var.pos.pos > node.pos.pos {
- c.error('unknown function: $node.name', node.pos)
+ c.error('unknown function: ${node.name}', node.pos)
}
}
// TODO merge logic from method_call and fn_call
@@ -500,13 +500,13 @@ pub fn (mut c Checker) call_expr(mut node ast.CallExpr) ast.Type {
if node.or_block.kind == .propagate_result && !c.table.cur_fn.return_type.has_flag(.result)
&& !c.table.cur_fn.return_type.has_flag(.optional) {
c.add_instruction_for_result_type()
- c.error('to propagate the result call, `$c.table.cur_fn.name` must return a result',
+ c.error('to propagate the result call, `${c.table.cur_fn.name}` must return a result',
node.or_block.pos)
}
if node.or_block.kind == .propagate_option
&& !c.table.cur_fn.return_type.has_flag(.optional) {
c.add_instruction_for_optional_type()
- c.error('to propagate the optional call, `$c.table.cur_fn.name` must return an optional',
+ c.error('to propagate the optional call, `${c.table.cur_fn.name}` must return an optional',
node.or_block.pos)
}
}
@@ -521,9 +521,9 @@ pub fn (mut c Checker) builtin_args(mut node ast.CallExpr, fn_name string, func
arg := node.args[0]
c.check_expr_opt_call(arg.expr, arg.typ)
if arg.typ.is_void() {
- c.error('`$fn_name` can not print void expressions', node.pos)
+ c.error('`${fn_name}` can not print void expressions', node.pos)
} else if arg.typ == ast.char_type && arg.typ.nr_muls() == 0 {
- c.error('`$fn_name` cannot print type `char` directly, print its address or cast it to an integer instead',
+ c.error('`${fn_name}` cannot print type `char` directly, print its address or cast it to an integer instead',
node.pos)
}
c.fail_if_unreadable(arg.expr, arg.typ, 'argument to print')
@@ -594,7 +594,8 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
tsym := c.table.sym(typ)
if !tsym.name.starts_with('Promise<') {
- c.error('JS.await: first argument must be a promise, got `$tsym.name`', node.pos)
+ c.error('JS.await: first argument must be a promise, got `${tsym.name}`',
+ node.pos)
return ast.void_type
}
if c.table.cur_fn != unsafe { nil } {
@@ -633,15 +634,15 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
kind = c.table.sym(sym.info.parent_type).kind
}
if kind !in [.struct_, .sum_type, .map, .array] {
- c.error('json.decode: expected sum type, struct, map or array, found $kind',
+ c.error('json.decode: expected sum type, struct, map or array, found ${kind}',
expr.pos)
}
} else {
- c.error('json.decode: unknown type `$sym.name`', node.pos)
+ c.error('json.decode: unknown type `${sym.name}`', node.pos)
}
} else {
typ := expr.type_name()
- c.error('json.decode: first argument needs to be a type, got `$typ`', node.pos)
+ c.error('json.decode: first argument needs to be a type, got `${typ}`', node.pos)
return ast.void_type
}
c.expected_type = ast.string_type
@@ -683,7 +684,7 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
}
// try prefix with current module as it would have never gotten prefixed
if !found && !fn_name.contains('.') && node.mod != 'builtin' {
- name_prefixed := '${node.mod}.$fn_name'
+ name_prefixed := '${node.mod}.${fn_name}'
if f := c.table.find_fn(name_prefixed) {
node.name = name_prefixed
found = true
@@ -758,7 +759,7 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
}
if !found && c.pref.is_vsh {
// TOOD: test this hack more extensively
- os_name := 'os.$fn_name'
+ os_name := 'os.${fn_name}'
if f := c.table.find_fn(os_name) {
if f.generic_names.len == node.concrete_types.len {
node_alias_name := node.fkey()
@@ -849,11 +850,11 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
}
}
suggestion := util.new_suggestion(fn_name, mod_func_names)
- c.error(suggestion.say('unknown function: $fn_name '), node.pos)
+ c.error(suggestion.say('unknown function: ${fn_name} '), node.pos)
return ast.void_type
}
}
- c.error('unknown function: $fn_name', node.pos)
+ c.error('unknown function: ${fn_name}', node.pos)
return ast.void_type
}
@@ -861,13 +862,13 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
node.is_ctor_new = func.is_ctor_new
if !found_in_args {
if node.scope.known_var(fn_name) {
- c.error('ambiguous call to: `$fn_name`, may refer to fn `$fn_name` or variable `$fn_name`',
+ c.error('ambiguous call to: `${fn_name}`, may refer to fn `${fn_name}` or variable `${fn_name}`',
node.pos)
}
}
if !func.is_pub && func.language == .v && func.name.len > 0 && func.mod.len > 0
&& func.mod != c.mod && !c.pref.is_test {
- c.error('function `$func.name` is private', node.pos)
+ c.error('function `${func.name}` is private', node.pos)
}
if c.table.cur_fn != unsafe { nil } && !c.table.cur_fn.is_deprecated && func.is_deprecated {
c.deprecate('function', func.name, func.attrs, node.pos)
@@ -875,7 +876,7 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
if func.is_unsafe && !c.inside_unsafe
&& (func.language != .c || (func.name[2] in [`m`, `s`] && func.mod == 'builtin')) {
// builtin C.m*, C.s* only - temp
- c.warn('function `$func.name` must be called from an `unsafe` block', node.pos)
+ c.warn('function `${func.name}` must be called from an `unsafe` block', node.pos)
}
node.is_keep_alive = func.is_keep_alive
if func.language == .v && func.no_body && !c.pref.translated && !c.file.is_translated
@@ -885,7 +886,7 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
if node.concrete_types.len > 0 && func.generic_names.len > 0
&& node.concrete_types.len != func.generic_names.len {
plural := if func.generic_names.len == 1 { '' } else { 's' }
- c.error('expected $func.generic_names.len generic parameter$plural, got $node.concrete_types.len',
+ c.error('expected ${func.generic_names.len} generic parameter${plural}, got ${node.concrete_types.len}',
node.concrete_list_pos)
}
for concrete_type in node.concrete_types {
@@ -914,12 +915,12 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
arg := node.args[0]
node.args[0].typ = c.expr(arg.expr)
if node.args[0].typ == ast.error_type {
- c.warn('`error($arg)` can be shortened to just `$arg`', node.pos)
+ c.warn('`error(${arg})` can be shortened to just `${arg}`', node.pos)
}
}
c.set_node_expected_arg_types(mut node, func)
if !c.pref.backend.is_js() && node.args.len > 0 && func.params.len == 0 {
- c.error('too many arguments in call to `$func.name` (non-js backend: $c.pref.backend)',
+ c.error('too many arguments in call to `${func.name}` (non-js backend: ${c.pref.backend})',
node.pos)
}
for i, mut call_arg in node.args {
@@ -934,7 +935,7 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
}
if func.is_variadic && call_arg.expr is ast.ArrayDecompose {
if i > func.params.len - 1 {
- c.error('too many arguments in call to `$func.name`', node.pos)
+ c.error('too many arguments in call to `${func.name}`', node.pos)
}
}
if func.is_variadic && i >= func.params.len - 1 {
@@ -952,15 +953,15 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
&& !param.typ.has_flag(.generic) && expected_type != typ {
styp := c.table.type_to_str(typ)
elem_styp := c.table.type_to_str(expected_type)
- c.error('to pass `$call_arg.expr` ($styp) to `$func.name` (which accepts type `...$elem_styp`), use `...$call_arg.expr`',
+ c.error('to pass `${call_arg.expr}` (${styp}) to `${func.name}` (which accepts type `...${elem_styp}`), use `...${call_arg.expr}`',
node.pos)
} else if call_arg.expr is ast.ArrayDecompose
&& c.table.sym(expected_type).kind == .sum_type
&& expected_type.idx() != typ.idx() {
expected_type_str := c.table.type_to_str(expected_type)
got_type_str := c.table.type_to_str(typ)
- c.error('cannot use `...$got_type_str` as `...$expected_type_str` in argument ${
- i + 1} to `$fn_name`', call_arg.pos)
+ c.error('cannot use `...${got_type_str}` as `...${expected_type_str}` in argument ${
+ i + 1} to `${fn_name}`', call_arg.pos)
}
}
} else {
@@ -1003,22 +1004,22 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
}
if !param.is_mut {
tok := call_arg.share.str()
- c.error('`$node.name` parameter `$param.name` is not `$tok`, `$tok` is not needed`',
+ c.error('`${node.name}` parameter `${param.name}` is not `${tok}`, `${tok}` is not needed`',
call_arg.expr.pos())
} else {
if param.typ.share() != call_arg.share {
- c.error('wrong shared type `$call_arg.share.str()`, expected: `$param.typ.share().str()`',
+ c.error('wrong shared type `${call_arg.share.str()}`, expected: `${param.typ.share().str()}`',
call_arg.expr.pos())
}
if to_lock != '' && !param.typ.has_flag(.shared_f) {
- c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`',
+ c.error('${to_lock} is `shared` and must be `lock`ed to be passed as `mut`',
pos)
}
}
} else {
if param.is_mut {
tok := call_arg.share.str()
- c.error('function `$node.name` parameter `$param.name` is `$tok`, so use `$tok $call_arg.expr` instead',
+ c.error('function `${node.name}` parameter `${param.name}` is `${tok}`, so use `${tok} ${call_arg.expr}` instead',
call_arg.expr.pos())
} else {
c.fail_if_unreadable(call_arg.expr, arg_typ, 'argument')
@@ -1057,7 +1058,7 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
if arg_typ !in [ast.voidptr_type, ast.nil_type]
&& !c.check_multiple_ptr_match(arg_typ, param.typ, param, call_arg) {
got_typ_str, expected_typ_str := c.get_string_names_of(arg_typ, param.typ)
- c.error('cannot use `$got_typ_str` as `$expected_typ_str` in argument ${i + 1} to `$fn_name`',
+ c.error('cannot use `${got_typ_str}` as `${expected_typ_str}` in argument ${i + 1} to `${fn_name}`',
call_arg.pos)
}
continue
@@ -1161,19 +1162,19 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
continue
}
}
- c.error('$err.msg() in argument ${i + 1} to `$fn_name`', call_arg.pos)
+ c.error('${err.msg()} in argument ${i + 1} to `${fn_name}`', call_arg.pos)
}
if final_param_sym.kind == .struct_ && arg_typ !in [ast.voidptr_type, ast.nil_type]
&& !c.check_multiple_ptr_match(arg_typ, param.typ, param, call_arg) {
got_typ_str, expected_typ_str := c.get_string_names_of(arg_typ, param.typ)
- c.error('cannot use `$got_typ_str` as `$expected_typ_str` in argument ${i + 1} to `$fn_name`',
+ c.error('cannot use `${got_typ_str}` as `${expected_typ_str}` in argument ${i + 1} to `${fn_name}`',
call_arg.pos)
}
// Warn about automatic (de)referencing, which will be removed soon.
if func.language != .c && !c.inside_unsafe && arg_typ.nr_muls() != param.typ.nr_muls()
&& !(call_arg.is_mut && param.is_mut) && !(!call_arg.is_mut && !param.is_mut)
&& param.typ !in [ast.byteptr_type, ast.charptr_type, ast.voidptr_type, ast.nil_type] {
- c.warn('automatic referencing/dereferencing is deprecated and will be removed soon (got: $arg_typ.nr_muls() references, expected: $param.typ.nr_muls() references)',
+ c.warn('automatic referencing/dereferencing is deprecated and will be removed soon (got: ${arg_typ.nr_muls()} references, expected: ${param.typ.nr_muls()} references)',
call_arg.pos)
}
}
@@ -1211,7 +1212,7 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
if c.comptime_fields_type.len > 0 {
continue
}
- c.error('$err.msg() in argument ${i + 1} to `$fn_name`', call_arg.pos)
+ c.error('${err.msg()} in argument ${i + 1} to `${fn_name}`', call_arg.pos)
}
}
}
@@ -1281,9 +1282,9 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
method_name := node.name
mut unknown_method_msg := if field := c.table.find_field(left_sym, method_name) {
- 'unknown method `$field.name` did you mean to access the field with the same name instead?'
+ 'unknown method `${field.name}` did you mean to access the field with the same name instead?'
} else {
- 'unknown method or field: `${left_sym.name}.$method_name`'
+ 'unknown method or field: `${left_sym.name}.${method_name}`'
}
if left_type.has_flag(.optional) {
c.error('optional type cannot be called directly', node.left.pos())
@@ -1344,7 +1345,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
arg_type := c.expr(arg_expr)
arg_sym := c.table.sym(arg_type)
if !c.check_types(arg_type, info.elem_type) && !c.check_types(left_type, arg_type) {
- c.error('cannot $method_name `$arg_sym.name` to `$left_sym.name`', arg_expr.pos())
+ c.error('cannot ${method_name} `${arg_sym.name}` to `${left_sym.name}`', arg_expr.pos())
}
} else if final_left_sym.info is ast.Array && method_name in ['first', 'last', 'pop'] {
return c.array_builtin_method_call(mut node, left_type, final_left_sym)
@@ -1463,7 +1464,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
// If a private method is called outside of the module
// its receiver type is defined in, show an error.
// println('warn $method_name lef.mod=$left_type_sym.mod c.mod=$c.mod')
- c.error('method `${left_sym.name}.$method_name` is private', node.pos)
+ c.error('method `${left_sym.name}.${method_name}` is private', node.pos)
}
rec_share := method.params[0].typ.share()
if rec_share == .shared_t && (c.locked_names.len > 0 || c.rlocked_names.len > 0) {
@@ -1477,7 +1478,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
}
// node.is_mut = true
if to_lock != '' && rec_share != .shared_t {
- c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`',
+ c.error('${to_lock} is `shared` and must be `lock`ed to be passed as `mut`',
pos)
}
} else {
@@ -1490,7 +1491,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if node.concrete_types.len > 0 && method.generic_names.len > 0
&& node.concrete_types.len != method.generic_names.len {
plural := if method.generic_names.len == 1 { '' } else { 's' }
- c.error('expected $method.generic_names.len generic parameter$plural, got $node.concrete_types.len',
+ c.error('expected ${method.generic_names.len} generic parameter${plural}, got ${node.concrete_types.len}',
node.concrete_list_pos)
}
for concrete_type in node.concrete_types {
@@ -1545,7 +1546,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
}
if no_type_promotion {
if got_arg_typ != exp_arg_typ {
- c.error('cannot use `${c.table.sym(got_arg_typ).name}` as argument for `$method.name` (`$exp_arg_sym.name` expected)',
+ c.error('cannot use `${c.table.sym(got_arg_typ).name}` as argument for `${method.name}` (`${exp_arg_sym.name}` expected)',
arg.pos)
}
}
@@ -1567,7 +1568,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if method.is_variadic && arg.expr is ast.ArrayDecompose {
if i > method.params.len - 2 {
- c.error('too many arguments in call to `$method.name`', node.pos)
+ c.error('too many arguments in call to `${method.name}`', node.pos)
}
}
if method.is_variadic && i >= method.params.len - 2 {
@@ -1585,15 +1586,15 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
&& !param.typ.has_flag(.generic) && expected_type != typ {
styp := c.table.type_to_str(typ)
elem_styp := c.table.type_to_str(expected_type)
- c.error('to pass `$arg.expr` ($styp) to `$method.name` (which accepts type `...$elem_styp`), use `...$arg.expr`',
+ c.error('to pass `${arg.expr}` (${styp}) to `${method.name}` (which accepts type `...${elem_styp}`), use `...${arg.expr}`',
node.pos)
} else if arg.expr is ast.ArrayDecompose
&& c.table.sym(expected_type).kind == .sum_type
&& expected_type.idx() != typ.idx() {
expected_type_str := c.table.type_to_str(expected_type)
got_type_str := c.table.type_to_str(typ)
- c.error('cannot use `...$got_type_str` as `...$expected_type_str` in argument ${
- i + 1} to `$method_name`', arg.pos)
+ c.error('cannot use `...${got_type_str}` as `...${expected_type_str}` in argument ${
+ i + 1} to `${method_name}`', arg.pos)
}
}
} else {
@@ -1610,22 +1611,22 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
to_lock, pos := c.fail_if_immutable(arg.expr)
if !param_is_mut {
tok := arg.share.str()
- c.error('`$node.name` parameter `$param.name` is not `$tok`, `$tok` is not needed`',
+ c.error('`${node.name}` parameter `${param.name}` is not `${tok}`, `${tok}` is not needed`',
arg.expr.pos())
} else {
if param_share != arg.share {
- c.error('wrong shared type `$arg.share.str()`, expected: `$param_share.str()`',
+ c.error('wrong shared type `${arg.share.str()}`, expected: `${param_share.str()}`',
arg.expr.pos())
}
if to_lock != '' && param_share != .shared_t {
- c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`',
+ c.error('${to_lock} is `shared` and must be `lock`ed to be passed as `mut`',
pos)
}
}
} else {
if param_is_mut {
tok := arg.share.str()
- c.error('method `$node.name` parameter `$param.name` is `$tok`, so use `$tok $arg.expr` instead',
+ c.error('method `${node.name}` parameter `${param.name}` is `${tok}`, so use `${tok} ${arg.expr}` instead',
arg.expr.pos())
} else {
c.fail_if_unreadable(arg.expr, got_arg_typ, 'argument')
@@ -1668,13 +1669,13 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if func_info.func.params[0].typ.nr_muls() != elem_typ.nr_muls() + 1 {
arg_typ_str := c.table.type_to_str(func_info.func.params[0].typ)
expected_typ_str := c.table.type_to_str(elem_typ.ref())
- c.error('sort_with_compare callback function parameter `${func_info.func.params[0].name}` with type `$arg_typ_str` should be `$expected_typ_str`',
+ c.error('sort_with_compare callback function parameter `${func_info.func.params[0].name}` with type `${arg_typ_str}` should be `${expected_typ_str}`',
func_info.func.params[0].type_pos)
}
if func_info.func.params[1].typ.nr_muls() != elem_typ.nr_muls() + 1 {
arg_typ_str := c.table.type_to_str(func_info.func.params[1].typ)
expected_typ_str := c.table.type_to_str(elem_typ.ref())
- c.error('sort_with_compare callback function parameter `${func_info.func.params[1].name}` with type `$arg_typ_str` should be `$expected_typ_str`',
+ c.error('sort_with_compare callback function parameter `${func_info.func.params[1].name}` with type `${arg_typ_str}` should be `${expected_typ_str}`',
func_info.func.params[1].type_pos)
}
}
@@ -1695,8 +1696,8 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
&& !c.check_multiple_ptr_match(got_arg_typ, param.typ, param, arg) {
got_typ_str, expected_typ_str := c.get_string_names_of(got_arg_typ,
param.typ)
- c.error('cannot use `$got_typ_str` as `$expected_typ_str` in argument ${i + 1} to `$method_name`',
- arg.pos)
+ c.error('cannot use `${got_typ_str}` as `${expected_typ_str}` in argument ${i +
+ 1} to `${method_name}`', arg.pos)
}
continue
}
@@ -1723,23 +1724,23 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
continue
}
}
- c.error('$err.msg() in argument ${i + 1} to `${left_sym.name}.$method_name`',
+ c.error('${err.msg()} in argument ${i + 1} to `${left_sym.name}.${method_name}`',
arg.pos)
}
param_typ_sym := c.table.sym(exp_arg_typ)
if param_typ_sym.kind == .struct_ && got_arg_typ !in [ast.voidptr_type, ast.nil_type]
&& !c.check_multiple_ptr_match(got_arg_typ, param.typ, param, arg) {
got_typ_str, expected_typ_str := c.get_string_names_of(got_arg_typ, param.typ)
- c.error('cannot use `$got_typ_str` as `$expected_typ_str` in argument ${i + 1} to `$method_name`',
+ c.error('cannot use `${got_typ_str}` as `${expected_typ_str}` in argument ${i + 1} to `${method_name}`',
arg.pos)
}
}
if method.is_unsafe && !c.inside_unsafe {
- c.warn('method `${left_sym.name}.$method_name` must be called from an `unsafe` block',
+ c.warn('method `${left_sym.name}.${method_name}` must be called from an `unsafe` block',
node.pos)
}
if c.table.cur_fn != unsafe { nil } && !c.table.cur_fn.is_deprecated && method.is_deprecated {
- c.deprecate('method', '${left_sym.name}.$method.name', method.attrs, node.pos)
+ c.deprecate('method', '${left_sym.name}.${method.name}', method.attrs, node.pos)
}
c.set_node_expected_arg_types(mut node, method)
if is_method_from_embed {
@@ -1799,7 +1800,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if method_name == 'str' {
if left_sym.kind == .interface_ {
iname := left_sym.name
- c.error('interface `$iname` does not have a .str() method. Use typeof() instead',
+ c.error('interface `${iname}` does not have a .str() method. Use typeof() instead',
node.pos)
}
node.receiver_type = left_type
@@ -1848,22 +1849,22 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
to_lock, pos := c.fail_if_immutable(arg.expr)
if !param.is_mut {
tok := arg.share.str()
- c.error('`$node.name` parameter ${i + 1} is not `$tok`, `$tok` is not needed`',
+ c.error('`${node.name}` parameter ${i + 1} is not `${tok}`, `${tok}` is not needed`',
arg.expr.pos())
} else {
if param_share != arg.share {
- c.error('wrong shared type `$arg.share.str()`, expected: `$param_share.str()`',
+ c.error('wrong shared type `${arg.share.str()}`, expected: `${param_share.str()}`',
arg.expr.pos())
}
if to_lock != '' && param_share != .shared_t {
- c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`',
+ c.error('${to_lock} is `shared` and must be `lock`ed to be passed as `mut`',
pos)
}
}
} else {
if param.is_mut {
tok := arg.share.str()
- c.error('method `$node.name` parameter ${i + 1} is `$tok`, so use `$tok $arg.expr` instead',
+ c.error('method `${node.name}` parameter ${i + 1} is `${tok}`, so use `${tok} ${arg.expr}` instead',
arg.expr.pos())
} else {
c.fail_if_unreadable(arg.expr, targ, 'argument')
@@ -1875,7 +1876,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
c.check_expected_call_arg(targ, c.unwrap_generic(exp_arg_typ), node.language,
arg) or {
if targ != ast.void_type {
- c.error('$err.msg() in argument ${i + 1} to `${left_sym.name}.$method_name`',
+ c.error('${err.msg()} in argument ${i + 1} to `${left_sym.name}.${method_name}`',
arg.pos)
}
}
@@ -1940,7 +1941,7 @@ fn (mut c Checker) post_process_generic_fns() {
fkey := node.fkey()
gtypes := c.table.fn_generic_types[fkey]
$if trace_post_process_generic_fns ? {
- eprintln('> post_process_generic_fns $node.mod | $node.name | fkey: $fkey | gtypes: $gtypes')
+ eprintln('> post_process_generic_fns ${node.mod} | ${node.name} | fkey: ${fkey} | gtypes: ${gtypes}')
}
for concrete_types in gtypes {
c.table.cur_concrete_types = concrete_types
@@ -1956,7 +1957,7 @@ fn (mut c Checker) post_process_generic_fns() {
c.table.cur_concrete_types = []
$if trace_post_process_generic_fns ? {
if node.generic_names.len > 0 {
- eprintln(' > fn_decl node.name: $node.name | generic_names: $node.generic_names | ninstances: $node.ninstances')
+ eprintln(' > fn_decl node.name: ${node.name} | generic_names: ${node.generic_names} | ninstances: ${node.ninstances}')
}
}
}
@@ -1992,18 +1993,18 @@ pub fn (mut c Checker) check_expected_arg_count(mut node ast.CallExpr, f &ast.Fn
}
}
}
- c.error('expected $min_required_params arguments, but got $nr_args', node.pos)
+ c.error('expected ${min_required_params} arguments, but got ${nr_args}', node.pos)
return error('')
} else if !f.is_variadic && nr_args > nr_params {
unexpected_args_pos := node.args[min_required_params].pos.extend(node.args.last().pos)
- c.error('expected $min_required_params arguments, but got $nr_args', unexpected_args_pos)
+ c.error('expected ${min_required_params} arguments, but got ${nr_args}', unexpected_args_pos)
return error('')
}
}
fn (mut c Checker) check_map_and_filter(is_map bool, elem_typ ast.Type, node ast.CallExpr) {
if node.args.len != 1 {
- c.error('expected 1 argument, but got $node.args.len', node.pos)
+ c.error('expected 1 argument, but got ${node.args.len}', node.pos)
// Finish early so that it doesn't fail later
return
}
@@ -2019,17 +2020,18 @@ fn (mut c Checker) check_map_and_filter(is_map bool, elem_typ ast.Type, node ast
c.error('function needs exactly 1 argument', arg_expr.decl.pos)
} else if is_map && (arg_expr.decl.return_type == ast.void_type
|| arg_expr.decl.params[0].typ != elem_typ) {
- c.error('type mismatch, should use `fn(a $elem_sym.name) T {...}`', arg_expr.decl.pos)
+ c.error('type mismatch, should use `fn(a ${elem_sym.name}) T {...}`',
+ arg_expr.decl.pos)
} else if !is_map && (arg_expr.decl.return_type != ast.bool_type
|| arg_expr.decl.params[0].typ != elem_typ) {
- c.error('type mismatch, should use `fn(a $elem_sym.name) bool {...}`',
+ c.error('type mismatch, should use `fn(a ${elem_sym.name}) bool {...}`',
arg_expr.decl.pos)
}
}
ast.Ident {
if arg_expr.kind == .function {
func := c.table.find_fn(arg_expr.name) or {
- c.error('$arg_expr.name does not exist', arg_expr.pos)
+ c.error('${arg_expr.name} does not exist', arg_expr.pos)
return
}
if func.return_type.has_flag(.optional) {
@@ -2040,11 +2042,11 @@ fn (mut c Checker) check_map_and_filter(is_map bool, elem_typ ast.Type, node ast
c.error('function needs exactly 1 argument', node.pos)
} else if is_map
&& (func.return_type == ast.void_type || func.params[0].typ != elem_typ) {
- c.error('type mismatch, should use `fn(a $elem_sym.name) T {...}`',
+ c.error('type mismatch, should use `fn(a ${elem_sym.name}) T {...}`',
arg_expr.pos)
} else if !is_map
&& (func.return_type != ast.bool_type || func.params[0].typ != elem_typ) {
- c.error('type mismatch, should use `fn(a $elem_sym.name) bool {...}`',
+ c.error('type mismatch, should use `fn(a ${elem_sym.name}) bool {...}`',
arg_expr.pos)
}
} else if arg_expr.kind == .variable {
@@ -2060,11 +2062,11 @@ fn (mut c Checker) check_map_and_filter(is_map bool, elem_typ ast.Type, node ast
c.error('function needs exactly 1 argument', expr.decl.pos)
} else if is_map && (expr.decl.return_type == ast.void_type
|| expr.decl.params[0].typ != elem_typ) {
- c.error('type mismatch, should use `fn(a $elem_sym.name) T {...}`',
+ c.error('type mismatch, should use `fn(a ${elem_sym.name}) T {...}`',
expr.decl.pos)
} else if !is_map && (expr.decl.return_type != ast.bool_type
|| expr.decl.params[0].typ != elem_typ) {
- c.error('type mismatch, should use `fn(a $elem_sym.name) bool {...}`',
+ c.error('type mismatch, should use `fn(a ${elem_sym.name}) bool {...}`',
expr.decl.pos)
}
return
@@ -2080,14 +2082,15 @@ fn (mut c Checker) check_map_and_filter(is_map bool, elem_typ ast.Type, node ast
}
ast.CallExpr {
if is_map && arg_expr.return_type in [ast.void_type, 0] {
- c.error('type mismatch, `$arg_expr.name` does not return anything', arg_expr.pos)
+ c.error('type mismatch, `${arg_expr.name}` does not return anything',
+ arg_expr.pos)
} else if !is_map && arg_expr.return_type != ast.bool_type {
if arg_expr.or_block.kind != .absent && (arg_expr.return_type.has_flag(.optional)
|| arg_expr.return_type.has_flag(.result))
&& arg_expr.return_type.clear_flag(.optional).clear_flag(.result) == ast.bool_type {
return
}
- c.error('type mismatch, `$arg_expr.name` must return a bool', arg_expr.pos)
+ c.error('type mismatch, `${arg_expr.name}` must return a bool', arg_expr.pos)
}
}
ast.StringLiteral, ast.StringInterLiteral {
@@ -2144,12 +2147,12 @@ fn (mut c Checker) map_builtin_method_call(mut node ast.CallExpr, left_type ast.
'delete' {
c.fail_if_immutable(node.left)
if node.args.len != 1 {
- c.error('expected 1 argument, but got $node.args.len', node.pos)
+ c.error('expected 1 argument, but got ${node.args.len}', node.pos)
}
info := left_sym.info as ast.Map
arg_type := c.expr(node.args[0].expr)
c.check_expected_call_arg(arg_type, info.key_type, node.language, node.args[0]) or {
- c.error('$err.msg() in argument 1 to `Map.delete`', node.args[0].pos)
+ c.error('${err.msg()} in argument 1 to `Map.delete`', node.args[0].pos)
}
}
else {}
@@ -2172,7 +2175,7 @@ fn (mut c Checker) array_builtin_method_call(mut node ast.CallExpr, left_type as
scope_register_it(mut node.scope, node.pos, elem_typ)
} else if method_name == 'sort' {
if node.left is ast.CallExpr {
- c.error('the `sort()` method can be called only on mutable receivers, but `$node.left` is a call expression',
+ c.error('the `sort()` method can be called only on mutable receivers, but `${node.left}` is a call expression',
node.pos)
}
c.fail_if_immutable(node.left)
@@ -2180,7 +2183,7 @@ fn (mut c Checker) array_builtin_method_call(mut node ast.CallExpr, left_type as
scope_register_a_b(mut node.scope, node.pos, elem_typ)
if node.args.len > 1 {
- c.error('expected 0 or 1 argument, but got $node.args.len', node.pos)
+ c.error('expected 0 or 1 argument, but got ${node.args.len}', node.pos)
} else if node.args.len == 1 {
if node.args[0].expr is ast.InfixExpr {
if node.args[0].expr.op !in [.gt, .lt] {
@@ -2229,7 +2232,7 @@ fn (mut c Checker) array_builtin_method_call(mut node ast.CallExpr, left_type as
}
node.return_type = c.table.find_or_register_array(thread_ret_type)
} else {
- c.error('`$left_sym.name` has no method `wait()` (only thread handles and arrays of them have)',
+ c.error('`${left_sym.name}` has no method `wait()` (only thread handles and arrays of them have)',
node.left.pos())
}
}
@@ -2283,21 +2286,21 @@ fn (mut c Checker) array_builtin_method_call(mut node ast.CallExpr, left_type as
} else if method_name == 'contains' {
// c.warn('use `value in arr` instead of `arr.contains(value)`', node.pos)
if node.args.len != 1 {
- c.error('`.contains()` expected 1 argument, but got $node.args.len', node.pos)
+ c.error('`.contains()` expected 1 argument, but got ${node.args.len}', node.pos)
} else if !left_sym.has_method('contains') {
arg_typ := c.expr(node.args[0].expr)
c.check_expected_call_arg(arg_typ, elem_typ, node.language, node.args[0]) or {
- c.error('$err.msg() in argument 1 to `.contains()`', node.args[0].pos)
+ c.error('${err.msg()} in argument 1 to `.contains()`', node.args[0].pos)
}
}
node.return_type = ast.bool_type
} else if method_name == 'index' {
if node.args.len != 1 {
- c.error('`.index()` expected 1 argument, but got $node.args.len', node.pos)
+ c.error('`.index()` expected 1 argument, but got ${node.args.len}', node.pos)
} else if !left_sym.has_method('index') {
arg_typ := c.expr(node.args[0].expr)
c.check_expected_call_arg(arg_typ, elem_typ, node.language, node.args[0]) or {
- c.error('$err.msg() in argument 1 to `.index()`', node.args[0].pos)
+ c.error('${err.msg()} in argument 1 to `.index()`', node.args[0].pos)
}
}
node.return_type = ast.int_type
diff --git a/vlib/v/checker/for.v b/vlib/v/checker/for.v
index 9dfb4a6eeb..68f6ce6ac1 100644
--- a/vlib/v/checker/for.v
+++ b/vlib/v/checker/for.v
@@ -39,13 +39,13 @@ fn (mut c Checker) for_in_stmt(mut node ast.ForInStmt) {
if node.key_var.len > 0 && node.key_var != '_' {
c.check_valid_snake_case(node.key_var, 'variable name', node.pos)
if reserved_type_names_chk.matches(node.key_var) {
- c.error('invalid use of reserved type `$node.key_var` as key name', node.pos)
+ c.error('invalid use of reserved type `${node.key_var}` as key name', node.pos)
}
}
if node.val_var.len > 0 && node.val_var != '_' {
c.check_valid_snake_case(node.val_var, 'variable name', node.pos)
if reserved_type_names_chk.matches(node.val_var) {
- c.error('invalid use of reserved type `$node.val_var` as value name', node.pos)
+ c.error('invalid use of reserved type `${node.val_var}` as value name', node.pos)
}
}
if node.is_range {
@@ -163,7 +163,7 @@ fn (mut c Checker) for_in_stmt(mut node ast.ForInStmt) {
ast.Ident {
if mut node.cond.obj is ast.Var {
if !node.cond.obj.is_mut {
- c.error('`$node.cond.obj.name` is immutable, it cannot be changed',
+ c.error('`${node.cond.obj.name}` is immutable, it cannot be changed',
node.cond.pos)
}
}
@@ -178,7 +178,7 @@ fn (mut c Checker) for_in_stmt(mut node ast.ForInStmt) {
root_ident := node.cond.root_ident() or { node.cond.expr as ast.Ident }
if root_ident.kind != .unresolved {
if !(root_ident.obj as ast.Var).is_mut {
- c.error('field `$node.cond.field_name` is immutable, it cannot be changed',
+ c.error('field `${node.cond.field_name}` is immutable, it cannot be changed',
node.cond.pos)
}
}
diff --git a/vlib/v/checker/if.v b/vlib/v/checker/if.v
index 1561a8b0a0..d9997cbe6c 100644
--- a/vlib/v/checker/if.v
+++ b/vlib/v/checker/if.v
@@ -36,7 +36,7 @@ pub fn (mut c Checker) if_expr(mut node ast.IfExpr) ast.Type {
for i in 0 .. node.branches.len {
mut branch := node.branches[i]
if branch.cond is ast.ParExpr && !c.pref.translated && !c.file.is_translated {
- c.error('unnecessary `()` in `$if_kind` condition, use `$if_kind expr {` instead of `$if_kind (expr) {`.',
+ c.error('unnecessary `()` in `${if_kind}` condition, use `${if_kind} expr {` instead of `${if_kind} (expr) {`.',
branch.pos)
}
if !node.has_else || i < node.branches.len - 1 {
@@ -59,7 +59,7 @@ pub fn (mut c Checker) if_expr(mut node ast.IfExpr) ast.Type {
if sym.kind == .multi_return {
mr_info := sym.info as ast.MultiReturn
if branch.cond.vars.len != mr_info.types.len {
- c.error('if guard expects $mr_info.types.len variables, but got $branch.cond.vars.len',
+ c.error('if guard expects ${mr_info.types.len} variables, but got ${branch.cond.vars.len}',
branch.pos)
continue
} else {
@@ -92,7 +92,7 @@ pub fn (mut c Checker) if_expr(mut node ast.IfExpr) ast.Type {
got_type := c.unwrap_generic((right as ast.TypeNode).typ)
sym := c.table.sym(got_type)
if sym.kind == .placeholder || got_type.has_flag(.generic) {
- c.error('unknown type `$sym.name`', branch.cond.right.pos())
+ c.error('unknown type `${sym.name}`', branch.cond.right.pos())
}
if left is ast.SelectorExpr {
@@ -266,11 +266,11 @@ pub fn (mut c Checker) if_expr(mut node ast.IfExpr) ast.Type {
node.pos)
}
} else if !node.is_comptime {
- c.error('`$if_kind` expression requires an expression as the last statement of every branch',
+ c.error('`${if_kind}` expression requires an expression as the last statement of every branch',
branch.pos)
}
} else if !node.is_comptime {
- c.error('`$if_kind` expression requires an expression as the last statement of every branch',
+ c.error('`${if_kind}` expression requires an expression as the last statement of every branch',
branch.pos)
}
}
@@ -301,7 +301,7 @@ pub fn (mut c Checker) if_expr(mut node ast.IfExpr) ast.Type {
node.typ = ast.mktyp(node.typ)
if expr_required && !node.has_else {
d := if node.is_comptime { '$' } else { '' }
- c.error('`$if_kind` expression needs `${d}else` clause', node.pos)
+ c.error('`${if_kind}` expression needs `${d}else` clause', node.pos)
}
return node.typ
}
@@ -321,7 +321,7 @@ fn (mut c Checker) smartcast_if_conds(node ast.Expr, mut scope ast.Scope) {
ast.none_type_idx
}
else {
- c.error('invalid type `$right_expr`', right_expr.pos())
+ c.error('invalid type `${right_expr}`', right_expr.pos())
ast.Type(0)
}
}
@@ -339,7 +339,7 @@ fn (mut c Checker) smartcast_if_conds(node ast.Expr, mut scope ast.Scope) {
} else if !c.check_types(right_type, expr_type) && left_sym.kind != .sum_type {
expect_str := c.table.type_to_str(right_type)
expr_str := c.table.type_to_str(expr_type)
- c.error('cannot use type `$expect_str` as type `$expr_str`', node.pos)
+ c.error('cannot use type `${expect_str}` as type `${expr_str}`', node.pos)
}
if node.left in [ast.Ident, ast.SelectorExpr] && node.right is ast.TypeNode {
is_variable := if node.left is ast.Ident {
@@ -358,7 +358,7 @@ fn (mut c Checker) smartcast_if_conds(node ast.Expr, mut scope ast.Scope) {
&& (left_sym.kind == .interface_ && right_sym.kind != .interface_) {
v := scope.find_var(node.left.name) or { &ast.Var{} }
if v.is_mut && !node.left.is_mut {
- c.error('smart casting a mutable interface value requires `if mut $node.left.name is ...`',
+ c.error('smart casting a mutable interface value requires `if mut ${node.left.name} is ...`',
node.left.pos)
}
}
diff --git a/vlib/v/checker/infix.v b/vlib/v/checker/infix.v
index a79ee9ba4b..eac47a6148 100644
--- a/vlib/v/checker/infix.v
+++ b/vlib/v/checker/infix.v
@@ -80,7 +80,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
c.warn('pointer arithmetic is only allowed in `unsafe` blocks', left_right_pos)
}
if (left_type == ast.voidptr_type || left_type == ast.nil_type) && !c.pref.translated {
- c.error('`$node.op` cannot be used with `voidptr`', left_pos)
+ c.error('`${node.op}` cannot be used with `voidptr`', left_pos)
}
}
}
@@ -117,7 +117,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
|| (right_sym.kind == .alias && left_sym.kind in [.struct_, .array, .sum_type])
if is_mismatch {
c.add_error_detail('left type: `${c.table.type_to_str(left_type)}` vs right type: `${c.table.type_to_str(right_type)}`')
- c.error('possible type mismatch of compared values of `$node.op` operation',
+ c.error('possible type mismatch of compared values of `${node.op}` operation',
left_right_pos)
} else if left_type in ast.integer_type_idxs && right_type in ast.integer_type_idxs {
is_left_type_signed := left_type in ast.signed_integer_type_idxs
@@ -125,12 +125,12 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
if !is_left_type_signed && mut node.right is ast.IntegerLiteral {
if node.right.val.int() < 0 && left_type in ast.int_promoted_type_idxs {
lt := c.table.sym(left_type).name
- c.error('`$lt` cannot be compared with negative value', node.right.pos)
+ c.error('`${lt}` cannot be compared with negative value', node.right.pos)
}
} else if !is_right_type_signed && mut node.left is ast.IntegerLiteral {
if node.left.val.int() < 0 && right_type in ast.int_promoted_type_idxs {
rt := c.table.sym(right_type).name
- c.error('negative value cannot be compared with `$rt`', node.left.pos)
+ c.error('negative value cannot be compared with `${rt}`', node.left.pos)
}
} else if is_left_type_signed != is_right_type_signed
&& left_type != ast.int_literal_type_idx
@@ -143,7 +143,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
|| (is_right_type_signed && rs < ls)) {
lt := c.table.sym(left_type).name
rt := c.table.sym(right_type).name
- c.error('`$lt` cannot be compared with `$rt`', node.pos)
+ c.error('`${lt}` cannot be compared with `${rt}`', node.pos)
}
}
}
@@ -154,7 +154,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
if left_sym.kind !in [.sum_type, .interface_] {
elem_type := right_final_sym.array_info().elem_type
c.check_expected(left_type, elem_type) or {
- c.error('left operand to `$node.op` does not match the array element type: $err.msg()',
+ c.error('left operand to `${node.op}` does not match the array element type: ${err.msg()}',
left_right_pos)
}
}
@@ -162,7 +162,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
.map {
map_info := right_final_sym.map_info()
c.check_expected(left_type, map_info.key_type) or {
- c.error('left operand to `$node.op` does not match the map key type: $err.msg()',
+ c.error('left operand to `${node.op}` does not match the map key type: ${err.msg()}',
left_right_pos)
}
node.left_type = map_info.key_type
@@ -171,13 +171,13 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
if left_sym.kind !in [.sum_type, .interface_] {
elem_type := right_final_sym.array_fixed_info().elem_type
c.check_expected(left_type, elem_type) or {
- c.error('left operand to `$node.op` does not match the fixed array element type: $err.msg()',
+ c.error('left operand to `${node.op}` does not match the fixed array element type: ${err.msg()}',
left_right_pos)
}
}
}
else {
- c.error('`$node.op.str()` can only be used with arrays and maps',
+ c.error('`${node.op.str()}` can only be used with arrays and maps',
node.pos)
}
}
@@ -222,10 +222,11 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
left_name := c.table.type_to_str(unwrapped_left_type)
right_name := c.table.type_to_str(unwrapped_right_type)
if left_name == right_name {
- c.error('undefined operation `$left_name` $node.op.str() `$right_name`',
+ c.error('undefined operation `${left_name}` ${node.op.str()} `${right_name}`',
left_right_pos)
} else {
- c.error('mismatched types `$left_name` and `$right_name`', left_right_pos)
+ c.error('mismatched types `${left_name}` and `${right_name}`',
+ left_right_pos)
}
}
} else if !c.pref.translated && right_sym.kind == .alias && right_sym.info is ast.Alias
@@ -246,10 +247,11 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
left_name := c.table.type_to_str(unwrapped_left_type)
right_name := c.table.type_to_str(unwrapped_right_type)
if left_name == right_name {
- c.error('undefined operation `$left_name` $node.op.str() `$right_name`',
+ c.error('undefined operation `${left_name}` ${node.op.str()} `${right_name}`',
left_right_pos)
} else {
- c.error('mismatched types `$left_name` and `$right_name`', left_right_pos)
+ c.error('mismatched types `${left_name}` and `${right_name}`',
+ left_right_pos)
}
}
}
@@ -265,10 +267,11 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
left_name := c.table.type_to_str(unwrapped_left_type)
right_name := c.table.type_to_str(unwrapped_right_type)
if left_name == right_name {
- c.error('undefined operation `$left_name` $node.op.str() `$right_name`',
+ c.error('undefined operation `${left_name}` ${node.op.str()} `${right_name}`',
left_right_pos)
} else {
- c.error('mismatched types `$left_name` and `$right_name`', left_right_pos)
+ c.error('mismatched types `${left_name}` and `${right_name}`',
+ left_right_pos)
}
}
} else if !c.pref.translated && right_sym.kind in [.array, .array_fixed, .map, .struct_] {
@@ -282,10 +285,11 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
left_name := c.table.type_to_str(unwrapped_left_type)
right_name := c.table.type_to_str(unwrapped_right_type)
if left_name == right_name {
- c.error('undefined operation `$left_name` $node.op.str() `$right_name`',
+ c.error('undefined operation `${left_name}` ${node.op.str()} `${right_name}`',
left_right_pos)
} else {
- c.error('mismatched types `$left_name` and `$right_name`', left_right_pos)
+ c.error('mismatched types `${left_name}` and `${right_name}`',
+ left_right_pos)
}
}
} else if node.left.is_auto_deref_var() || node.right.is_auto_deref_var() {
@@ -302,7 +306,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
left_name := c.table.type_to_str(ast.mktyp(deref_left_type))
right_name := c.table.type_to_str(ast.mktyp(deref_right_type))
if left_name != right_name {
- c.error('mismatched types `$left_name` and `$right_name`', left_right_pos)
+ c.error('mismatched types `${left_name}` and `${right_name}`', left_right_pos)
}
} else {
unaliased_left_type := c.table.unalias_num_type(unwrapped_left_type)
@@ -318,10 +322,10 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
if promoted_type.idx() == ast.void_type_idx {
left_name := c.table.type_to_str(unwrapped_left_type)
right_name := c.table.type_to_str(unwrapped_right_type)
- c.error('mismatched types `$left_name` and `$right_name`', left_right_pos)
+ c.error('mismatched types `${left_name}` and `${right_name}`', left_right_pos)
} else if promoted_type.has_flag(.optional) || promoted_type.has_flag(.result) {
s := c.table.type_to_str(promoted_type)
- c.error('`$node.op` cannot be used with `$s`', node.pos)
+ c.error('`${node.op}` cannot be used with `${s}`', node.pos)
} else if promoted_type.is_float() {
if node.op in [.mod, .xor, .amp, .pipe] {
side := if unwrapped_left_type == promoted_type { 'left' } else { 'right' }
@@ -339,7 +343,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
c.error('float modulo not allowed, use math.fmod() instead',
pos)
} else {
- c.error('$side type of `$node.op.str()` cannot be non-integer type `$name`',
+ c.error('${side} type of `${node.op.str()}` cannot be non-integer type `${name}`',
pos)
}
}
@@ -366,17 +370,18 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
if !(node.op == .lt && c.pref.translated) {
// Allow `&Foo < &Foo` in translated code.
// TODO maybe in unsafe as well?
- c.error('undefined operation `$left_name` $node.op.str() `$right_name`',
+ c.error('undefined operation `${left_name}` ${node.op.str()} `${right_name}`',
left_right_pos)
}
} else {
- c.error('mismatched types `$left_name` and `$right_name`', left_right_pos)
+ c.error('mismatched types `${left_name}` and `${right_name}`',
+ left_right_pos)
}
}
}
if left_sym.kind == .struct_ && right_sym.kind == .struct_ {
if !left_sym.has_method('<') && node.op in [.ge, .le] {
- c.error('cannot use `$node.op` as `<` operator method is not defined',
+ c.error('cannot use `${node.op}` as `<` operator method is not defined',
left_right_pos)
} else if !left_sym.has_method('<') && node.op == .gt {
c.error('cannot use `>` as `<=` operator method is not defined', left_right_pos)
@@ -389,7 +394,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
need_overload := gen_sym.kind in [.struct_, .interface_]
if need_overload && !gen_sym.has_method_with_generic_parent('<')
&& node.op in [.ge, .le] {
- c.error('cannot use `$node.op` as `<` operator method is not defined',
+ c.error('cannot use `${node.op}` as `<` operator method is not defined',
left_right_pos)
} else if need_overload && !gen_sym.has_method_with_generic_parent('<')
&& node.op == .gt {
@@ -451,13 +456,13 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
} else if left_value_sym.kind == .sum_type {
if right_sym.kind != .array {
if !c.table.is_sumtype_or_in_variant(left_value_type, ast.mktyp(right_type)) {
- c.error('cannot append `$right_sym.name` to `$left_sym.name`',
+ c.error('cannot append `${right_sym.name}` to `${left_sym.name}`',
right_pos)
}
} else {
right_value_type := c.table.value_type(right_type)
if !c.table.is_sumtype_or_in_variant(left_value_type, ast.mktyp(right_value_type)) {
- c.error('cannot append `$right_sym.name` to `$left_sym.name`',
+ c.error('cannot append `${right_sym.name}` to `${left_sym.name}`',
right_pos)
}
}
@@ -474,7 +479,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
} else if c.check_types(unwrapped_right_type, c.unwrap_generic(left_type)) {
return ast.void_type
}
- c.error('cannot append `$right_sym.name` to `$left_sym.name`', right_pos)
+ c.error('cannot append `${right_sym.name}` to `${left_sym.name}`', right_pos)
return ast.void_type
} else {
return c.check_shift(mut node, left_type, right_type)
@@ -538,7 +543,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
ast.none_type_idx
}
else {
- c.error('invalid type `$right_expr`', right_expr.pos())
+ c.error('invalid type `${right_expr}`', right_expr.pos())
ast.Type(0)
}
}
@@ -546,21 +551,23 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
typ_sym := c.table.sym(typ)
op := node.op.str()
if typ_sym.kind == .placeholder {
- c.error('$op: type `$typ_sym.name` does not exist', right_expr.pos())
+ c.error('${op}: type `${typ_sym.name}` does not exist', right_expr.pos())
}
if left_sym.kind == .aggregate {
parent_left_type := (left_sym.info as ast.Aggregate).sum_type
left_sym = c.table.sym(parent_left_type)
}
if left_sym.kind !in [.interface_, .sum_type] {
- c.error('`$op` can only be used with interfaces and sum types', node.pos)
+ c.error('`${op}` can only be used with interfaces and sum types',
+ node.pos)
} else if mut left_sym.info is ast.SumType {
if typ !in left_sym.info.variants {
- c.error('`$left_sym.name` has no variant `$right_sym.name`', right_pos)
+ c.error('`${left_sym.name}` has no variant `${right_sym.name}`',
+ right_pos)
}
} else if left_sym.info is ast.Interface {
if typ_sym.kind != .interface_ && !c.type_implements(typ, left_type, right_pos) {
- c.error("`$typ_sym.name` doesn't implement interface `$left_sym.name`",
+ c.error("`${typ_sym.name}` doesn't implement interface `${left_sym.name}`",
right_pos)
}
}
@@ -572,29 +579,29 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
chan_info := left_sym.chan_info()
elem_type := chan_info.elem_type
if !c.check_types(right_type, elem_type) {
- c.error('cannot push `$right_sym.name` on `$left_sym.name`', right_pos)
+ c.error('cannot push `${right_sym.name}` on `${left_sym.name}`', right_pos)
}
if chan_info.is_mut {
// TODO: The error message of the following could be more specific...
c.fail_if_immutable(node.right)
}
if elem_type.is_ptr() && !right_type.is_ptr() {
- c.error('cannot push non-reference `$right_sym.name` on `$left_sym.name`',
+ c.error('cannot push non-reference `${right_sym.name}` on `${left_sym.name}`',
right_pos)
}
c.stmts_ending_with_expression(node.or_block.stmts)
} else {
- c.error('cannot push on non-channel `$left_sym.name`', left_pos)
+ c.error('cannot push on non-channel `${left_sym.name}`', left_pos)
}
return ast.void_type
}
.and, .logical_or {
if !c.pref.translated && !c.file.is_translated {
if node.left_type != ast.bool_type_idx {
- c.error('left operand for `$node.op` is not a boolean', node.left.pos())
+ c.error('left operand for `${node.op}` is not a boolean', node.left.pos())
}
if node.right_type != ast.bool_type_idx {
- c.error('right operand for `$node.op` is not a boolean', node.right.pos())
+ c.error('right operand for `${node.op}` is not a boolean', node.right.pos())
}
}
if mut node.left is ast.InfixExpr {
@@ -633,9 +640,9 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
// sum types can't have any infix operation except of `is`, `eq`, `ne`.
// `is` is checked before and doesn't reach this.
if c.table.type_kind(left_type) == .sum_type && !eq_ne {
- c.error('cannot use operator `$node.op` with `$left_sym.name`', node.pos)
+ c.error('cannot use operator `${node.op}` with `${left_sym.name}`', node.pos)
} else if c.table.type_kind(right_type) == .sum_type && !eq_ne {
- c.error('cannot use operator `$node.op` with `$right_sym.name`', node.pos)
+ c.error('cannot use operator `${node.op}` with `${right_sym.name}`', node.pos)
}
// TODO move this to symmetric_check? Right now it would break `return 0` for `fn()?int `
left_is_optional := left_type.has_flag(.optional)
@@ -665,14 +672,14 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
// pointer arithmetic is fine, it is checked in other places
return return_type
}
- c.error('infix expr: cannot use `$right_sym.name` (right expression) as `$left_sym.name`',
+ c.error('infix expr: cannot use `${right_sym.name}` (right expression) as `${left_sym.name}`',
left_right_pos)
} else if left_type.is_ptr() {
for_ptr_op := c.table.type_is_for_pointer_arithmetic(left_type)
if left_sym.language == .v && !c.pref.translated && !c.inside_unsafe && !for_ptr_op
&& right_type.is_int() {
sugg := ' (you can use it inside an `unsafe` block)'
- c.error('infix expr: cannot use `$right_sym.name` (right expression) as `$left_sym.name` $sugg',
+ c.error('infix expr: cannot use `${right_sym.name}` (right expression) as `${left_sym.name}` ${sugg}',
left_right_pos)
}
}
@@ -692,13 +699,13 @@ fn (mut c Checker) check_div_mod_by_zero(expr ast.Expr, op_kind token.Kind) {
ast.FloatLiteral {
if expr.val.f64() == 0.0 {
oper := if op_kind == .div { 'division' } else { 'modulo' }
- c.error('$oper by zero', expr.pos)
+ c.error('${oper} by zero', expr.pos)
}
}
ast.IntegerLiteral {
if expr.val.int() == 0 {
oper := if op_kind == .div { 'division' } else { 'modulo' }
- c.error('$oper by zero', expr.pos)
+ c.error('${oper} by zero', expr.pos)
}
}
ast.CastExpr {
@@ -711,5 +718,5 @@ fn (mut c Checker) check_div_mod_by_zero(expr ast.Expr, op_kind token.Kind) {
pub fn (mut c Checker) invalid_operator_error(op token.Kind, left_type ast.Type, right_type ast.Type, pos token.Pos) {
left_name := c.table.type_to_str(left_type)
right_name := c.table.type_to_str(right_type)
- c.error('invalid operator `$op` to `$left_name` and `$right_name`', pos)
+ c.error('invalid operator `${op}` to `${left_name}` and `${right_name}`', pos)
}
diff --git a/vlib/v/checker/interface.v b/vlib/v/checker/interface.v
index aafa640cc9..9913177dbd 100644
--- a/vlib/v/checker/interface.v
+++ b/vlib/v/checker/interface.v
@@ -36,7 +36,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
has_generic_types = true
}
if isym.kind != .interface_ {
- c.error('interface `$node.name` tries to embed `$isym.name`, but `$isym.name` is not an interface, but `$isym.kind`',
+ c.error('interface `${node.name}` tries to embed `${isym.name}`, but `${isym.name}` is not an interface, but `${isym.kind}`',
embed.pos)
continue
}
@@ -47,7 +47,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
for name in embed_generic_names {
if name !in node_generic_names {
interface_generic_names := node_generic_names.join(', ')
- c.error('generic type name `$name` is not mentioned in interface `$node.name<$interface_generic_names>`',
+ c.error('generic type name `${name}` is not mentioned in interface `${node.name}<${interface_generic_names}>`',
embed.pos)
}
}
@@ -80,7 +80,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
if ifield.typ != field.typ {
exp := c.table.type_to_str(ifield.typ)
got := c.table.type_to_str(field.typ)
- c.error('embedded interface `$embed_decl.name` conflicts existing field: `$ifield.name`, expecting type: `$exp`, got type: `$got`',
+ c.error('embedded interface `${embed_decl.name}` conflicts existing field: `${ifield.name}`, expecting type: `${exp}`, got type: `${got}`',
ifield.pos)
}
}
@@ -99,7 +99,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
if msg.len > 0 {
em_sig := c.table.fn_signature(em_fn, skip_receiver: true)
m_sig := c.table.fn_signature(m_fn, skip_receiver: true)
- c.error('embedded interface `$embed_decl.name` causes conflict: $msg, for interface method `$em_sig` vs `$m_sig`',
+ c.error('embedded interface `${embed_decl.name}` causes conflict: ${msg}, for interface method `${em_sig}` vs `${m_sig}`',
imethod.pos)
}
}
@@ -122,7 +122,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
if is_js {
mtyp := c.table.sym(method.return_type)
if !mtyp.is_js_compatible() {
- c.error('method $method.name returns non JS type', method.pos)
+ c.error('method ${method.name} returns non JS type', method.pos)
}
}
if method.return_type.has_flag(.generic) {
@@ -134,7 +134,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
for name in method_generic_names {
if name !in node_generic_names {
interface_generic_names := node_generic_names.join(', ')
- c.error('generic type name `$name` is not mentioned in interface `$node.name<$interface_generic_names>`',
+ c.error('generic type name `${name}` is not mentioned in interface `${node.name}<${interface_generic_names}>`',
method.return_type_pos)
}
}
@@ -149,7 +149,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
}
c.ensure_type_exists(param.typ, param.pos) or { return }
if reserved_type_names_chk.matches(param.name) {
- c.error('invalid use of reserved type `$param.name` as a parameter name',
+ c.error('invalid use of reserved type `${param.name}` as a parameter name',
param.pos)
}
// Ensure each generic type of the method was declared in the interface's definition
@@ -159,7 +159,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
for name in method_generic_names {
if name !in node_generic_names {
interface_generic_names := node_generic_names.join(', ')
- c.error('generic type name `$name` is not mentioned in interface `$node.name<$interface_generic_names>`',
+ c.error('generic type name `${name}` is not mentioned in interface `${node.name}<${interface_generic_names}>`',
param.type_pos)
}
}
@@ -168,7 +168,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
ptyp := c.table.sym(param.typ)
if !ptyp.is_js_compatible() && !(j == method.params.len - 1
&& method.is_variadic) {
- c.error('method `$method.name` accepts non JS type as parameter',
+ c.error('method `${method.name}` accepts non JS type as parameter',
method.pos)
}
}
@@ -176,13 +176,13 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
for field in node.fields {
field_sym := c.table.sym(field.typ)
if field.name == method.name && field_sym.kind == .function {
- c.error('type `$decl_sym.name` has both field and method named `$method.name`',
+ c.error('type `${decl_sym.name}` has both field and method named `${method.name}`',
method.pos)
}
}
for j in 0 .. i {
if method.name == node.methods[j].name {
- c.error('duplicate method name `$method.name`', method.pos)
+ c.error('duplicate method name `${method.name}`', method.pos)
}
}
}
@@ -197,7 +197,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
if is_js {
tsym := c.table.sym(field.typ)
if !tsym.is_js_compatible() {
- c.error('field `$field.name` uses non JS type', field.pos)
+ c.error('field `${field.name}` uses non JS type', field.pos)
}
}
if field.typ == node.typ && node.language != .js {
@@ -206,7 +206,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
}
for j in 0 .. i {
if field.name == node.fields[j].name {
- c.error('field name `$field.name` duplicate', field.pos)
+ c.error('field name `${field.name}` duplicate', field.pos)
}
}
}
@@ -239,7 +239,7 @@ fn (mut c Checker) resolve_generic_interface(typ ast.Type, interface_type ast.Ty
for imethod in inter_sym.info.methods {
method := typ_sym.find_method(imethod.name) or {
typ_sym.find_method_with_generic_parent(imethod.name) or {
- c.error('can not find method `$imethod.name` on `$typ_sym.name`, needed for interface: `$inter_sym.name`',
+ c.error('can not find method `${imethod.name}` on `${typ_sym.name}`, needed for interface: `${inter_sym.name}`',
pos)
return 0
}
@@ -249,13 +249,13 @@ fn (mut c Checker) resolve_generic_interface(typ ast.Type, interface_type ast.Ty
mret_sym := c.table.sym(method.return_type)
if method.return_type == ast.void_type
&& imethod.return_type != method.return_type {
- c.error('interface method `$imethod.name` returns `$imret_sym.name`, but implementation method `$method.name` returns no value',
+ c.error('interface method `${imethod.name}` returns `${imret_sym.name}`, but implementation method `${method.name}` returns no value',
pos)
return 0
}
if imethod.return_type == ast.void_type
&& imethod.return_type != method.return_type {
- c.error('interface method `$imethod.name` returns no value, but implementation method `$method.name` returns `$mret_sym.name`',
+ c.error('interface method `${imethod.name}` returns no value, but implementation method `${method.name}` returns `${mret_sym.name}`',
pos)
return 0
}
@@ -306,7 +306,7 @@ fn (mut c Checker) resolve_generic_interface(typ ast.Type, interface_type ast.Ty
}
}
if inferred_type == ast.void_type {
- c.error('could not infer generic type `$gt_name` in interface `$inter_sym.name`',
+ c.error('could not infer generic type `${gt_name}` in interface `${inter_sym.name}`',
pos)
return interface_type
}
diff --git a/vlib/v/checker/match.v b/vlib/v/checker/match.v
index 75834a821d..34cf4087ac 100644
--- a/vlib/v/checker/match.v
+++ b/vlib/v/checker/match.v
@@ -134,7 +134,7 @@ fn (mut c Checker) check_match_branch_last_stmt(last_stmt ast.ExprStmt, ret_type
return
}
}
- c.error('return type mismatch, it should be `$ret_sym.name`', last_stmt.pos)
+ c.error('return type mismatch, it should be `${ret_sym.name}`', last_stmt.pos)
}
}
}
@@ -164,7 +164,7 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
c.error('start value is higher than end value', branch.pos)
}
} else {
- c.error('mismatched range types - $expr.low is an integer, but $expr.high is not',
+ c.error('mismatched range types - ${expr.low} is an integer, but ${expr.high} is not',
low_expr.pos)
}
} else if low_expr is ast.CharLiteral {
@@ -176,23 +176,23 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
}
} else {
typ := c.table.type_to_str(c.expr(node.cond))
- c.error('mismatched range types - trying to match `$node.cond`, which has type `$typ`, against a range of `rune`',
+ c.error('mismatched range types - trying to match `${node.cond}`, which has type `${typ}`, against a range of `rune`',
low_expr.pos)
}
} else {
typ := c.table.type_to_str(c.expr(expr.low))
- c.error('cannot use type `$typ` in match range', branch.pos)
+ c.error('cannot use type `${typ}` in match range', branch.pos)
}
high_low_cutoff := 1000
if high - low > high_low_cutoff {
- c.warn('more than $high_low_cutoff possibilities ($low ... $high) in match range',
+ c.warn('more than ${high_low_cutoff} possibilities (${low} ... ${high}) in match range',
branch.pos)
}
for i in low .. high + 1 {
key = i.str()
val := if key in branch_exprs { branch_exprs[key] } else { 0 }
if val == 1 {
- c.error('match case `$key` is handled more than once', branch.pos)
+ c.error('match case `${key}` is handled more than once', branch.pos)
}
branch_exprs[key] = val + 1
}
@@ -212,7 +212,7 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
}
val := if key in branch_exprs { branch_exprs[key] } else { 0 }
if val == 1 {
- c.error('match case `$key` is handled more than once', branch.pos)
+ c.error('match case `${key}` is handled more than once', branch.pos)
}
c.expected_type = node.cond_type
expr_type := c.expr(expr)
@@ -242,17 +242,18 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
sumtype_variant_names := cond_type_sym.info.variants.map(c.table.type_to_str_using_aliases(it,
{}))
suggestion := util.new_suggestion(expr_str, sumtype_variant_names)
- c.error(suggestion.say('`$expect_str` has no variant `$expr_str`'),
+ c.error(suggestion.say('`${expect_str}` has no variant `${expr_str}`'),
expr.pos())
}
} else if cond_type_sym.info is ast.Alias && expr_type_sym.info is ast.Struct {
expr_str := c.table.type_to_str(expr_type)
expect_str := c.table.type_to_str(node.cond_type)
- c.error('cannot match alias type `$expect_str` with `$expr_str`', expr.pos())
+ c.error('cannot match alias type `${expect_str}` with `${expr_str}`',
+ expr.pos())
} else if !c.check_types(expr_type, node.cond_type) {
expr_str := c.table.type_to_str(expr_type)
expect_str := c.table.type_to_str(node.cond_type)
- c.error('cannot match `$expect_str` with `$expr_str`', expr.pos())
+ c.error('cannot match `${expect_str}` with `${expr_str}`', expr.pos())
}
branch_exprs[key] = val + 1
}
@@ -270,7 +271,7 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
agg_cname.write_string('___')
}
type_str := c.table.type_to_str(expr.typ)
- name := if c.is_builtin_mod { type_str } else { '${c.mod}.$type_str' }
+ name := if c.is_builtin_mod { type_str } else { '${c.mod}.${type_str}' }
agg_name.write_string(name)
agg_cname.write_string(util.no_dots(name))
}
@@ -310,7 +311,7 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
for v in variants {
if v !in branch_exprs {
is_exhaustive = false
- unhandled << '`$v`'
+ unhandled << '`${v}`'
}
}
} else {
@@ -320,7 +321,7 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
v_str := c.table.type_to_str(v)
if v_str !in branch_exprs {
is_exhaustive = false
- unhandled << '`$v_str`'
+ unhandled << '`${v_str}`'
}
}
}
@@ -329,7 +330,7 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
for v in cond_type_sym.info.vals {
if v !in branch_exprs {
is_exhaustive = false
- unhandled << '`.$v`'
+ unhandled << '`.${v}`'
}
}
}
@@ -367,7 +368,7 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
remaining := unhandled.len - c.match_exhaustive_cutoff_limit
err_details += unhandled[0..c.match_exhaustive_cutoff_limit].join(', ')
if remaining > 0 {
- err_details += ', and $remaining others ...'
+ err_details += ', and ${remaining} others ...'
}
}
err_details += ' or `else {}` at the end)'
diff --git a/vlib/v/checker/orm.v b/vlib/v/checker/orm.v
index 0392d24995..fafa002efd 100644
--- a/vlib/v/checker/orm.v
+++ b/vlib/v/checker/orm.v
@@ -18,7 +18,7 @@ fn (mut c Checker) sql_expr(mut node ast.SqlExpr) ast.Type {
c.cur_orm_ts = old_ts
}
if sym.info !is ast.Struct {
- c.error('The table symbol `$sym.name` has to be a struct', node.table_expr.pos)
+ c.error('The table symbol `${sym.name}` has to be a struct', node.table_expr.pos)
return ast.void_type
}
info := sym.info as ast.Struct
@@ -148,7 +148,7 @@ fn (mut c Checker) sql_stmt_line(mut node ast.SqlStmtLine) ast.Type {
c.cur_orm_ts = old_ts
}
if table_sym.info !is ast.Struct {
- c.error('unknown type `$table_sym.name`', node.pos)
+ c.error('unknown type `${table_sym.name}`', node.pos)
return ast.void_type
}
info := table_sym.info as ast.Struct
@@ -165,7 +165,7 @@ fn (mut c Checker) sql_stmt_line(mut node ast.SqlStmtLine) ast.Type {
} else {
ast.Type(0)
}
- mut object_var_name := '${node.object_var_name}.$f.name'
+ mut object_var_name := '${node.object_var_name}.${f.name}'
if typ != f.typ {
object_var_name = node.object_var_name
}
@@ -188,7 +188,7 @@ fn (mut c Checker) sql_stmt_line(mut node ast.SqlStmtLine) ast.Type {
for i, column in node.updated_columns {
x := node.fields.filter(it.name == column)
if x.len == 0 {
- c.error('type `$table_sym.name` has no field named `$column`', node.pos)
+ c.error('type `${table_sym.name}` has no field named `${column}`', node.pos)
continue
}
field := x[0]
@@ -214,11 +214,11 @@ fn (mut c Checker) fetch_and_verify_orm_fields(info ast.Struct, pos token.Pos, t
&& c.table.sym(c.table.sym(it.typ).array_info().elem_type).kind == .struct_))
&& !it.attrs.contains('skip'))
if fields.len == 0 {
- c.error('V orm: select: empty fields in `$table_name`', pos)
+ c.error('V orm: select: empty fields in `${table_name}`', pos)
return []ast.StructField{}
}
if fields[0].name != 'id' {
- c.error('V orm: `id int` must be the first field in `$table_name`', pos)
+ c.error('V orm: `id int` must be the first field in `${table_name}`', pos)
}
return fields
}
diff --git a/vlib/v/checker/return.v b/vlib/v/checker/return.v
index 520265b1c2..a1715dd1f5 100644
--- a/vlib/v/checker/return.v
+++ b/vlib/v/checker/return.v
@@ -20,7 +20,7 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
|| expected_type_sym.kind == .void) {
stype := c.table.type_to_str(expected_type)
arg := if expected_type_sym.kind == .multi_return { 'arguments' } else { 'argument' }
- c.error('expected `$stype` $arg', node.pos)
+ c.error('expected `${stype}` ${arg}', node.pos)
return
}
if node.exprs.len == 0 {
@@ -43,7 +43,7 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
return
}
if typ == ast.void_type {
- c.error('`$expr` used as value', node.pos)
+ c.error('`${expr}` used as value', node.pos)
return
}
// Unpack multi return types
@@ -113,7 +113,7 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
arg := if expected_types.len == 1 { 'argument' } else { 'arguments' }
midx := imax(0, imin(expected_types.len, expr_idxs.len - 1))
mismatch_pos := node.exprs[expr_idxs[midx]].pos()
- c.error('expected $expected_types.len $arg, but got $got_types.len', mismatch_pos)
+ c.error('expected ${expected_types.len} ${arg}, but got ${got_types.len}', mismatch_pos)
return
}
for i, exp_type in expected_types {
@@ -172,7 +172,7 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
} else {
got_typ_sym.name
}
- c.error('cannot use `$got_typ_name` as type `${c.table.type_to_str(exp_type)}` in return argument',
+ c.error('cannot use `${got_typ_name}` as type `${c.table.type_to_str(exp_type)}` in return argument',
pos)
}
}
@@ -182,7 +182,7 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
if node.exprs[expr_idxs[i]].is_auto_deref_var() {
continue
}
- c.error('fn `$c.table.cur_fn.name` expects you to return a non reference type `${c.table.type_to_str(exp_type)}`, but you are returning `${c.table.type_to_str(got_typ)}` instead',
+ c.error('fn `${c.table.cur_fn.name}` expects you to return a non reference type `${c.table.type_to_str(exp_type)}`, but you are returning `${c.table.type_to_str(got_typ)}` instead',
pos)
}
if (exp_type.is_ptr() || exp_type.is_pointer())
@@ -192,7 +192,7 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
if node.exprs[expr_idxs[i]].is_auto_deref_var() {
continue
}
- c.error('fn `$c.table.cur_fn.name` expects you to return a reference type `${c.table.type_to_str(exp_type)}`, but you are returning `${c.table.type_to_str(got_typ)}` instead',
+ c.error('fn `${c.table.cur_fn.name}` expects you to return a reference type `${c.table.type_to_str(exp_type)}`, but you are returning `${c.table.type_to_str(got_typ)}` instead',
pos)
}
if exp_type.is_ptr() && got_typ.is_ptr() {
@@ -207,11 +207,11 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
type_sym := c.table.sym(obj.typ.set_nr_muls(0))
if !type_sym.is_heap() && !c.pref.translated && !c.file.is_translated {
suggestion := if type_sym.kind == .struct_ {
- 'declaring `$type_sym.name` as `[heap]`'
+ 'declaring `${type_sym.name}` as `[heap]`'
} else {
- 'wrapping the `$type_sym.name` object in a `struct` declared as `[heap]`'
+ 'wrapping the `${type_sym.name}` object in a `struct` declared as `[heap]`'
}
- c.error('`$r_expr.name` cannot be returned outside `unsafe` blocks as it might refer to an object stored on stack. Consider ${suggestion}.',
+ c.error('`${r_expr.name}` cannot be returned outside `unsafe` blocks as it might refer to an object stored on stack. Consider ${suggestion}.',
r_expr.pos)
}
}
diff --git a/vlib/v/checker/str.v b/vlib/v/checker/str.v
index e51f45e103..c4014991c2 100644
--- a/vlib/v/checker/str.v
+++ b/vlib/v/checker/str.v
@@ -173,7 +173,7 @@ pub fn (mut c Checker) int_lit(mut node ast.IntegerLiteral) ast.Type {
lit := node.val.replace('_', '').all_after('-')
is_neg := node.val.starts_with('-')
limit := if is_neg { '9223372036854775808' } else { '18446744073709551615' }
- message := 'integer literal $node.val overflows int'
+ message := 'integer literal ${node.val} overflows int'
if lit.len > limit.len {
c.error(message, node.pos)
diff --git a/vlib/v/checker/struct.v b/vlib/v/checker/struct.v
index e3cda72f16..9077d1815f 100644
--- a/vlib/v/checker/struct.v
+++ b/vlib/v/checker/struct.v
@@ -18,7 +18,7 @@ pub fn (mut c Checker) struct_decl(mut node ast.StructDecl) {
}
embed_sym := c.table.sym(embed.typ)
if embed_sym.kind != .struct_ {
- c.error('`$embed_sym.name` is not a struct', embed.pos)
+ c.error('`${embed_sym.name}` is not a struct', embed.pos)
} else {
info := embed_sym.info as ast.Struct
if info.is_heap && !embed.typ.is_ptr() {
@@ -32,7 +32,7 @@ pub fn (mut c Checker) struct_decl(mut node ast.StructDecl) {
for name in embed_generic_names {
if name !in node_generic_names {
struct_generic_names := node_generic_names.join(', ')
- c.error('generic type name `$name` is not mentioned in struct `$node.name<$struct_generic_names>`',
+ c.error('generic type name `${name}` is not mentioned in struct `${node.name}<${struct_generic_names}>`',
embed.pos)
}
}
@@ -58,13 +58,13 @@ pub fn (mut c Checker) struct_decl(mut node ast.StructDecl) {
sym := c.table.sym(field.typ)
for j in 0 .. i {
if field.name == node.fields[j].name {
- c.error('field name `$field.name` duplicate', field.pos)
+ c.error('field name `${field.name}` duplicate', field.pos)
}
}
if field.typ != 0 {
if !field.typ.is_ptr() {
if c.table.unaliased_type(field.typ) == struct_typ_idx {
- c.error('field `$field.name` is part of `$node.name`, they can not both have the same type',
+ c.error('field `${field.name}` is part of `${node.name}`, they can not both have the same type',
field.type_pos)
}
}
@@ -76,7 +76,7 @@ pub fn (mut c Checker) struct_decl(mut node ast.StructDecl) {
}
if info.generic_types.len > 0 && !field.typ.has_flag(.generic)
&& info.concrete_types.len == 0 {
- c.error('field `$field.name` type is generic struct, must specify the generic type names, e.g. Foo, Foo',
+ c.error('field `${field.name}` type is generic struct, must specify the generic type names, e.g. Foo, Foo',
field.type_pos)
}
}
@@ -107,7 +107,7 @@ pub fn (mut c Checker) struct_decl(mut node ast.StructDecl) {
}
}
} else {
- c.error('incompatible initializer for field `$field.name`: $err.msg()',
+ c.error('incompatible initializer for field `${field.name}`: ${err.msg()}',
field.default_expr.pos())
}
}
@@ -151,7 +151,7 @@ pub fn (mut c Checker) struct_decl(mut node ast.StructDecl) {
for name in field_generic_names {
if name !in node_generic_names {
struct_generic_names := node_generic_names.join(', ')
- c.error('generic type name `$name` is not mentioned in struct `$node.name<$struct_generic_names>`',
+ c.error('generic type name `${name}` is not mentioned in struct `${node.name}<${struct_generic_names}>`',
field.type_pos)
}
}
@@ -240,7 +240,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
for ct in struct_sym.info.concrete_types {
ct_sym := c.table.sym(ct)
if ct_sym.kind == .placeholder {
- c.error('unknown type `$ct_sym.name`', node.pos)
+ c.error('unknown type `${ct_sym.name}`', node.pos)
}
}
if struct_sym.info.generic_types.len > 0 && struct_sym.info.concrete_types.len == 0
@@ -253,14 +253,14 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
node.pos)
} else if node.generic_types.len > 0
&& node.generic_types.len != struct_sym.info.generic_types.len {
- c.error('generic struct init expects $struct_sym.info.generic_types.len generic parameter, but got $node.generic_types.len',
+ c.error('generic struct init expects ${struct_sym.info.generic_types.len} generic parameter, but got ${node.generic_types.len}',
node.pos)
} else if node.generic_types.len > 0 && c.table.cur_fn != unsafe { nil } {
for gtyp in node.generic_types {
gtyp_name := c.table.sym(gtyp).name
if gtyp_name !in c.table.cur_fn.generic_names {
cur_generic_names := '(' + c.table.cur_fn.generic_names.join(',') + ')'
- c.error('generic struct init type parameter `$gtyp_name` must be within the parameters `$cur_generic_names` of the current generic function',
+ c.error('generic struct init type parameter `${gtyp_name}` must be within the parameters `${cur_generic_names}` of the current generic function',
node.pos)
break
}
@@ -268,7 +268,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
}
}
if node.generic_types.len > 0 && struct_sym.info.generic_types.len == 0 {
- c.error('a non generic struct `$node.typ_str` used like a generic struct',
+ c.error('a non generic struct `${node.typ_str}` used like a generic struct',
node.name_pos)
}
if node.generic_types.len > 0 && struct_sym.info.generic_types.len == node.generic_types.len
@@ -281,7 +281,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
if parent_sym.kind == .map {
alias_str := c.table.type_to_str(node.typ)
map_str := c.table.type_to_str(struct_sym.info.parent_type)
- c.error('direct map alias init is not possible, use `${alias_str}($map_str{})` instead',
+ c.error('direct map alias init is not possible, use `${alias_str}(${map_str}{})` instead',
node.pos)
return ast.void_type
}
@@ -304,43 +304,43 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
if !first_letter.is_capital()
&& (type_sym.kind != .struct_ || !(type_sym.info as ast.Struct).is_anon)
&& type_sym.kind != .placeholder {
- c.error('cannot initialize builtin type `$type_sym.name`', node.pos)
+ c.error('cannot initialize builtin type `${type_sym.name}`', node.pos)
}
}
if type_sym.kind == .sum_type && node.fields.len == 1 {
sexpr := node.fields[0].expr.str()
- c.error('cast to sum type using `${type_sym.name}($sexpr)` not `$type_sym.name{$sexpr}`',
+ c.error('cast to sum type using `${type_sym.name}(${sexpr})` not `${type_sym.name}{${sexpr}}`',
node.pos)
}
if type_sym.kind == .interface_ && type_sym.language != .js {
- c.error('cannot instantiate interface `$type_sym.name`', node.pos)
+ c.error('cannot instantiate interface `${type_sym.name}`', node.pos)
}
if type_sym.info is ast.Alias {
if type_sym.info.parent_type.is_number() {
- c.error('cannot instantiate number type alias `$type_sym.name`', node.pos)
+ c.error('cannot instantiate number type alias `${type_sym.name}`', node.pos)
return ast.void_type
}
}
// allow init structs from generic if they're private except the type is from builtin module
if !type_sym.is_pub && type_sym.kind != .placeholder && type_sym.language != .c
&& (type_sym.mod != c.mod && !(node.typ.has_flag(.generic) && type_sym.mod != 'builtin')) {
- c.error('type `$type_sym.name` is private', node.pos)
+ c.error('type `${type_sym.name}` is private', node.pos)
}
if type_sym.kind == .struct_ {
info := type_sym.info as ast.Struct
if info.attrs.len > 0 && info.attrs[0].name == 'noinit' && type_sym.mod != c.mod {
- c.error('struct `$type_sym.name` is declared with a `[noinit]` attribute, so ' +
- 'it cannot be initialized with `$type_sym.name{}`', node.pos)
+ c.error('struct `${type_sym.name}` is declared with a `[noinit]` attribute, so ' +
+ 'it cannot be initialized with `${type_sym.name}{}`', node.pos)
}
}
if type_sym.name.len == 1 && c.table.cur_fn != unsafe { nil }
&& c.table.cur_fn.generic_names.len == 0 {
- c.error('unknown struct `$type_sym.name`', node.pos)
+ c.error('unknown struct `${type_sym.name}`', node.pos)
return ast.void_type
}
match type_sym.kind {
.placeholder {
- c.error('unknown struct: $type_sym.name', node.pos)
+ c.error('unknown struct: ${type_sym.name}', node.pos)
return ast.void_type
}
.any {
@@ -357,13 +357,13 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
info_t := type_sym.info as ast.Alias
sym := c.table.sym(info_t.parent_type)
if sym.kind == .placeholder { // pending import symbol did not resolve
- c.error('unknown struct: $type_sym.name', node.pos)
+ c.error('unknown struct: ${type_sym.name}', node.pos)
return ast.void_type
}
if sym.kind == .struct_ {
info = sym.info as ast.Struct
} else {
- c.error('alias type name: $sym.name is not struct type', node.pos)
+ c.error('alias type name: ${sym.name} is not struct type', node.pos)
}
} else {
info = type_sym.info as ast.Struct
@@ -374,7 +374,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
if exp_len != got_len && !c.pref.translated {
// XTODO remove !translated check
amount := if exp_len < got_len { 'many' } else { 'few' }
- c.error('too $amount fields in `$type_sym.name` literal (expecting $exp_len, got $got_len)',
+ c.error('too ${amount} fields in `${type_sym.name}` literal (expecting ${exp_len}, got ${got_len})',
node.pos)
}
}
@@ -405,12 +405,12 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
}
if !exists {
existing_fields := c.table.struct_fields(type_sym).map(it.name)
- c.error(util.new_suggestion(field.name, existing_fields).say('unknown field `$field.name` in struct literal of type `$type_sym.name`'),
+ c.error(util.new_suggestion(field.name, existing_fields).say('unknown field `${field.name}` in struct literal of type `${type_sym.name}`'),
field.pos)
continue
}
if field_name in inited_fields {
- c.error('duplicate field name in struct literal: `$field_name`',
+ c.error('duplicate field name in struct literal: `${field_name}`',
field.pos)
continue
}
@@ -423,7 +423,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
c.expected_type = expected_type
expr_type = c.expr(field.expr)
if expr_type == ast.void_type {
- c.error('`$field.expr` (no value) used as value', field.pos)
+ c.error('`${field.expr}` (no value) used as value', field.pos)
}
if !field_info.typ.has_flag(.optional) && !field.typ.has_flag(.result) {
expr_type = c.check_expr_opt_call(field.expr, expr_type)
@@ -438,7 +438,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
}
} else if expr_type != ast.void_type && expr_type_sym.kind != .placeholder {
c.check_expected(c.unwrap_generic(expr_type), c.unwrap_generic(field_info.typ)) or {
- c.error('cannot assign to field `$field_info.name`: $err.msg()',
+ c.error('cannot assign to field `${field_info.name}`: ${err.msg()}',
field.pos)
}
}
@@ -458,11 +458,11 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
node.fields[i].expected_type = field_info.typ
if field_info.typ.has_flag(.optional) {
- c.error('field `$field_info.name` is optional, but initialization of optional fields currently unsupported',
+ c.error('field `${field_info.name}` is optional, but initialization of optional fields currently unsupported',
field.pos)
}
if field_info.typ.has_flag(.result) {
- c.error('field `$field_info.name` is result, but initialization of result fields currently unsupported',
+ c.error('field `${field_info.name}` is result, but initialization of result fields currently unsupported',
field.pos)
}
if expr_type.is_ptr() && expected_type.is_ptr() {
@@ -476,11 +476,11 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
sym := c.table.sym(obj.typ.set_nr_muls(0))
if !sym.is_heap() && !c.pref.translated && !c.file.is_translated {
suggestion := if sym.kind == .struct_ {
- 'declaring `$sym.name` as `[heap]`'
+ 'declaring `${sym.name}` as `[heap]`'
} else {
- 'wrapping the `$sym.name` object in a `struct` declared as `[heap]`'
+ 'wrapping the `${sym.name}` object in a `struct` declared as `[heap]`'
}
- c.error('`$field.expr.name` cannot be assigned outside `unsafe` blocks as it might refer to an object stored on stack. Consider ${suggestion}.',
+ c.error('`${field.expr.name}` cannot be assigned outside `unsafe` blocks as it might refer to an object stored on stack. Consider ${suggestion}.',
field.expr.pos)
}
}
@@ -512,7 +512,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
info.fields[i].default_expr_typ = ast.new_type(idx)
}
} else {
- if const_field := c.table.global_scope.find_const('$field.default_expr') {
+ if const_field := c.table.global_scope.find_const('${field.default_expr}') {
info.fields[i].default_expr_typ = const_field.typ
}
}
@@ -521,18 +521,18 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
}
if field.typ.is_ptr() && !field.typ.has_flag(.shared_f) && !node.has_update_expr
&& !c.pref.translated && !c.file.is_translated {
- c.warn('reference field `${type_sym.name}.$field.name` must be initialized',
+ c.warn('reference field `${type_sym.name}.${field.name}` must be initialized',
node.pos)
continue
}
if sym.kind == .struct_ {
- c.check_ref_fields_initialized(sym, mut checked_types, '${type_sym.name}.$field.name',
+ c.check_ref_fields_initialized(sym, mut checked_types, '${type_sym.name}.${field.name}',
node)
} else if sym.kind == .alias {
parent_sym := c.table.sym((sym.info as ast.Alias).parent_type)
if parent_sym.kind == .struct_ {
c.check_ref_fields_initialized(parent_sym, mut checked_types,
- '${type_sym.name}.$field.name', node)
+ '${type_sym.name}.${field.name}', node)
}
}
// Do not allow empty uninitialized interfaces
@@ -545,7 +545,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
}
if sym.kind == .interface_ && (!has_noinit && sym.language != .js) {
// TODO: should be an error instead, but first `ui` needs updating.
- c.note('interface field `${type_sym.name}.$field.name` must be initialized',
+ c.note('interface field `${type_sym.name}.${field.name}` must be initialized',
node.pos)
}
// Do not allow empty uninitialized sum types
@@ -566,7 +566,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
}
}
if !found {
- c.error('field `${type_sym.name}.$field.name` must be initialized',
+ c.error('field `${type_sym.name}.${field.name}` must be initialized',
node.pos)
}
}
@@ -580,7 +580,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
node.update_expr_type = update_type
if c.table.sym(update_type).kind != .struct_ {
s := c.table.type_to_str(update_type)
- c.error('expected struct, found `$s`', node.update_expr.pos())
+ c.error('expected struct, found `${s}`', node.update_expr.pos())
} else if update_type != node.typ {
from_sym := c.table.sym(update_type)
to_sym := c.table.sym(node.typ)
@@ -588,7 +588,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
to_info := to_sym.info as ast.Struct
// TODO this check is too strict
if !c.check_struct_signature(from_info, to_info) {
- c.error('struct `$from_sym.name` is not compatible with struct `$to_sym.name`',
+ c.error('struct `${from_sym.name}` is not compatible with struct `${to_sym.name}`',
node.update_expr.pos())
}
}
@@ -614,7 +614,7 @@ fn (mut c Checker) check_ref_fields_initialized(struct_sym &ast.TypeSymbol, mut
continue
}
if field.typ.is_ptr() && !field.typ.has_flag(.shared_f) && !field.has_default_expr {
- c.warn('reference field `${linked_name}.$field.name` must be initialized (part of struct `$struct_sym.name`)',
+ c.warn('reference field `${linked_name}.${field.name}` must be initialized (part of struct `${struct_sym.name}`)',
node.pos)
continue
}
@@ -626,13 +626,13 @@ fn (mut c Checker) check_ref_fields_initialized(struct_sym &ast.TypeSymbol, mut
continue
}
checked_types << field.typ
- c.check_ref_fields_initialized(sym, mut checked_types, '${linked_name}.$field.name',
+ c.check_ref_fields_initialized(sym, mut checked_types, '${linked_name}.${field.name}',
node)
} else if sym.kind == .alias {
psym := c.table.sym((sym.info as ast.Alias).parent_type)
if psym.kind == .struct_ {
checked_types << field.typ
- c.check_ref_fields_initialized(psym, mut checked_types, '${linked_name}.$field.name',
+ c.check_ref_fields_initialized(psym, mut checked_types, '${linked_name}.${field.name}',
node)
}
}
diff --git a/vlib/v/checker/tests/anon_structs_visibility/main.v b/vlib/v/checker/tests/anon_structs_visibility/main.v
index 99efe553e5..cbdfa96428 100644
--- a/vlib/v/checker/tests/anon_structs_visibility/main.v
+++ b/vlib/v/checker/tests/anon_structs_visibility/main.v
@@ -6,5 +6,5 @@ fn main() {
baz: 1
}
}
- println('foo.bar.baz == $foo.bar.baz')
+ println('foo.bar.baz == ${foo.bar.baz}')
}
diff --git a/vlib/v/compiler_errors_test.v b/vlib/v/compiler_errors_test.v
index 188e68904d..d088092ba4 100644
--- a/vlib/v/compiler_errors_test.v
+++ b/vlib/v/compiler_errors_test.v
@@ -80,10 +80,10 @@ fn test_all() {
checker_with_check_option_dir := 'vlib/v/checker/tests/with_check_option'
parser_dir := 'vlib/v/parser/tests'
scanner_dir := 'vlib/v/scanner/tests'
- module_dir := '$checker_dir/modules'
- global_dir := '$checker_dir/globals'
- global_run_dir := '$checker_dir/globals_run'
- run_dir := '$checker_dir/run'
+ module_dir := '${checker_dir}/modules'
+ global_dir := '${checker_dir}/globals'
+ global_run_dir := '${checker_dir}/globals_run'
+ run_dir := '${checker_dir}/run'
skip_unused_dir := 'vlib/v/tests/skip_unused'
//
checker_tests := get_tests_in_dir(checker_dir, false).filter(!it.contains('with_check_option'))
@@ -136,7 +136,7 @@ fn test_all() {
parallel_jobs: 1
label: 'comptime env tests'
}
- cte_dir := '$checker_dir/comptime_env'
+ cte_dir := '${checker_dir}/comptime_env'
files := get_tests_in_dir(cte_dir, false)
cte_tasks.add('', cte_dir, '-no-retry-compilation run', '.run.out', files, false)
cte_tasks.add_evars('VAR=/usr/include', '', cte_dir, '-no-retry-compilation run',
@@ -274,8 +274,8 @@ fn (mut tasks Tasks) run() {
bench.fail()
eprintln(bstep_message(mut bench, benchmark.b_fail, task.path, task.took))
println('============')
- println('failed cmd: $task.cli_cmd')
- println('expected_out_path: $task.expected_out_path')
+ println('failed cmd: ${task.cli_cmd}')
+ println('expected_out_path: ${task.expected_out_path}')
println('============')
println('expected:')
println(task.expected)
@@ -289,7 +289,7 @@ fn (mut tasks Tasks) run() {
bench.ok()
assert true
if tasks.show_cmd {
- eprintln(bstep_message(mut bench, benchmark.b_ok, '$task.cli_cmd', task.took))
+ eprintln(bstep_message(mut bench, benchmark.b_ok, '${task.cli_cmd}', task.took))
} else {
if github_job == '' {
// local mode:
@@ -328,8 +328,8 @@ fn (mut task TaskDescription) execute() {
return
}
program := task.path
- cmd_prefix := if task.evars.len > 0 { '$task.evars ' } else { '' }
- cli_cmd := '$cmd_prefix${os.quoted_path(task.vexe)} $task.voptions ${os.quoted_path(program)}'
+ cmd_prefix := if task.evars.len > 0 { '${task.evars} ' } else { '' }
+ cli_cmd := '${cmd_prefix}${os.quoted_path(task.vexe)} ${task.voptions} ${os.quoted_path(program)}'
res := os.execute(cli_cmd)
expected_out_path := program.replace('.vv', '') + task.result_extension
task.expected_out_path = expected_out_path
diff --git a/vlib/v/depgraph/depgraph.v b/vlib/v/depgraph/depgraph.v
index 0ffa9475a6..4efa5b434b 100644
--- a/vlib/v/depgraph/depgraph.v
+++ b/vlib/v/depgraph/depgraph.v
@@ -57,7 +57,7 @@ pub fn (o &OrderedDepMap) get(name string) []string {
pub fn (mut o OrderedDepMap) delete(name string) {
if name !in o.data {
- panic('delete: no such key: $name')
+ panic('delete: no such key: ${name}')
}
for i, _ in o.keys {
if o.keys[i] == name {
@@ -155,7 +155,7 @@ pub fn (graph &DepGraph) display() string {
mut out := []string{}
for node in graph.nodes {
for dep in node.deps {
- out << ' * $node.name -> $dep'
+ out << ' * ${node.name} -> ${dep}'
}
}
return out.join('\n')
@@ -219,7 +219,7 @@ fn (mut nn NodeNames) is_part_of_cycle(name string, already_seen []string) (bool
}
pub fn show(graph &DepGraph, path string) {
- mut dg := dotgraph.new('ModGraph', 'ModGraph for $path', 'blue')
+ mut dg := dotgraph.new('ModGraph', 'ModGraph for ${path}', 'blue')
mbuiltin := 'builtin'
for node in graph.nodes {
is_main := node.name == 'main'
diff --git a/vlib/v/doc/doc.v b/vlib/v/doc/doc.v
index c92738bbee..3dbb01c75c 100644
--- a/vlib/v/doc/doc.v
+++ b/vlib/v/doc/doc.v
@@ -69,7 +69,7 @@ pub fn platform_from_string(platform_str string) ?Platform {
'haiku' { return .haiku }
'nix' { return .linux }
'' { return .auto }
- else { return error('vdoc: invalid platform `$platform_str`') }
+ else { return error('vdoc: invalid platform `${platform_str}`') }
}
}
@@ -194,7 +194,7 @@ pub fn (mut d Doc) stmt(stmt ast.Stmt, filename string) ?DocNode {
platform: platform_from_filename(filename)
}
if (!node.is_pub && d.pub_only) || stmt is ast.GlobalDecl {
- return error('symbol $node.name not public')
+ return error('symbol ${node.name} not public')
}
if node.name.starts_with(d.orig_mod_name + '.') {
node.name = node.name.all_after(d.orig_mod_name + '.')
@@ -466,7 +466,7 @@ pub fn (mut d Doc) file_asts(file_asts []ast.File) ? {
// }
d.head = DocNode{
name: module_name
- content: 'module $module_name'
+ content: 'module ${module_name}'
kind: .none_
}
} else if file_ast.mod.name != d.orig_mod_name {
@@ -497,7 +497,7 @@ pub fn (mut d Doc) file_asts(file_asts []ast.File) ? {
if d.filter_symbol_names.len != 0 && d.contents.len != 0 {
for filter_name in d.filter_symbol_names {
if filter_name !in d.contents {
- return error('vdoc: `$filter_name` symbol in module `$d.orig_mod_name` not found')
+ return error('vdoc: `${filter_name}` symbol in module `${d.orig_mod_name}` not found')
}
}
}
@@ -508,7 +508,7 @@ pub fn (mut d Doc) file_asts(file_asts []ast.File) ? {
// instance of `Doc` if it is successful. Otherwise, it will throw an error.
pub fn generate(input_path string, pub_only bool, with_comments bool, platform Platform, filter_symbol_names ...string) ?Doc {
if platform == .js {
- return error('vdoc: Platform `$platform` is not supported.')
+ return error('vdoc: Platform `${platform}` is not supported.')
}
mut doc := new(input_path)
doc.pub_only = pub_only
diff --git a/vlib/v/doc/doc_test.v b/vlib/v/doc/doc_test.v
index cdb0ef8329..7e6cd126ea 100644
--- a/vlib/v/doc/doc_test.v
+++ b/vlib/v/doc/doc_test.v
@@ -12,7 +12,7 @@ fn test_generate_from_mod() {
doc.Doc{}
}
assert nested_mod_doc.head.name == nested_mod_name
- assert nested_mod_doc.head.content == 'module $nested_mod_name'
+ assert nested_mod_doc.head.content == 'module ${nested_mod_name}'
assert nested_mod_doc.contents.len == 3
assert nested_mod_doc.contents['ChunkScanner'].children.len == 3
}
diff --git a/vlib/v/doc/module.v b/vlib/v/doc/module.v
index 4c277d4f4b..f6e4151698 100644
--- a/vlib/v/doc/module.v
+++ b/vlib/v/doc/module.v
@@ -49,7 +49,7 @@ fn get_parent_mod(input_dir string) ?string {
}
parent_mod := get_parent_mod(base_dir) or { return input_dir_name }
if parent_mod.len > 0 {
- return '${parent_mod}.$file_ast.mod.name'
+ return '${parent_mod}.${file_ast.mod.name}'
}
return file_ast.mod.name
}
@@ -74,7 +74,7 @@ pub fn lookup_module_with_path(mod string, base_path string) ?string {
}
return path
}
- return error('module "$mod" not found.')
+ return error('module "${mod}" not found.')
}
// lookup_module returns the result of the `lookup_module_with_path`
diff --git a/vlib/v/doc/node.v b/vlib/v/doc/node.v
index 490817124a..068f2f1f30 100644
--- a/vlib/v/doc/node.v
+++ b/vlib/v/doc/node.v
@@ -62,7 +62,7 @@ pub fn (dc DocNode) merge_comments_without_examples() string {
if dc.comments[i].is_multi_line_example() {
i++
if i == dc.comments.len || !dc.comments[i].has_triple_backtick() {
- eprintln('$dc.file_path:$dc.pos.line_nr: warning: expected code block after empty example line:')
+ eprintln('${dc.file_path}:${dc.pos.line_nr}: warning: expected code block after empty example line:')
eprintln('// ```')
if i < dc.comments.len {
eprintln('Found:')
diff --git a/vlib/v/doc/utils.v b/vlib/v/doc/utils.v
index 0a641e8f01..b2e2462737 100644
--- a/vlib/v/doc/utils.v
+++ b/vlib/v/doc/utils.v
@@ -115,7 +115,7 @@ pub fn merge_doc_comments(comments []DocComment) string {
pub fn (mut d Doc) stmt_signature(stmt ast.Stmt) string {
match stmt {
ast.Module {
- return 'module $stmt.name'
+ return 'module ${stmt.name}'
}
ast.FnDecl {
return stmt.stringify(d.table, d.fmt.cur_mod, d.fmt.mod2alias)
diff --git a/vlib/v/dotgraph/dotgraph.v b/vlib/v/dotgraph/dotgraph.v
index 9366b316b1..ba846cd6f6 100644
--- a/vlib/v/dotgraph/dotgraph.v
+++ b/vlib/v/dotgraph/dotgraph.v
@@ -12,12 +12,12 @@ pub fn new(name string, label string, color string) &DotGraph {
mut res := &DotGraph{
sb: strings.new_builder(1024)
}
- res.writeln(' subgraph cluster_$name {')
+ res.writeln(' subgraph cluster_${name} {')
res.writeln('\tedge [fontname="Helvetica",fontsize="10",labelfontname="Helvetica",labelfontsize="10",style="solid",color="black"];')
res.writeln('\tnode [fontname="Helvetica",fontsize="10",style="filled",fontcolor="black",fillcolor="white",color="black",shape="box"];')
res.writeln('\trankdir="LR";')
- res.writeln('\tcolor="$color";')
- res.writeln('\tlabel="$label";')
+ res.writeln('\tcolor="${color}";')
+ res.writeln('\tlabel="${label}";')
// Node14 [shape="box",label="PrivateBase",URL="$classPrivateBase.html"];
// Node15 -> Node9 [dir=back,color="midnightblue",fontsize=10,style="solid"];
return res
@@ -48,9 +48,9 @@ pub fn (mut d DotGraph) new_node(nlabel string, cfg NewNodeConfig) {
nname = cfg.node_name
}
if cfg.should_highlight {
- d.writeln('\t$nname [label="$nlabel",color="blue",height=0.2,width=0.4,fillcolor="#00FF00",tooltip="$cfg.tooltip",shape=oval];')
+ d.writeln('\t${nname} [label="${nlabel}",color="blue",height=0.2,width=0.4,fillcolor="#00FF00",tooltip="${cfg.tooltip}",shape=oval];')
} else {
- d.writeln('\t$nname [shape="box",label="$nlabel"];')
+ d.writeln('\t${nname} [shape="box",label="${nlabel}"];')
}
}
@@ -66,9 +66,9 @@ pub fn (mut d DotGraph) new_edge(source string, target string, cfg NewEdgeConfig
nsource := cfg.name2node_fn(source, cfg.ctx)
ntarget := cfg.name2node_fn(target, cfg.ctx)
if cfg.should_highlight {
- d.writeln('\t$nsource -> $ntarget [color="blue"];')
+ d.writeln('\t${nsource} -> ${ntarget} [color="blue"];')
} else {
- d.writeln('\t$nsource -> $ntarget;')
+ d.writeln('\t${nsource} -> ${ntarget};')
}
}
diff --git a/vlib/v/embed_file/embed_file.v b/vlib/v/embed_file/embed_file.v
index 271739c476..1606e0b9fc 100644
--- a/vlib/v/embed_file/embed_file.v
+++ b/vlib/v/embed_file/embed_file.v
@@ -22,7 +22,7 @@ pub:
}
pub fn (ed EmbedFileData) str() string {
- return 'embed_file.EmbedFileData{ len: $ed.len, path: "$ed.path", apath: "$ed.apath", uncompressed: ${ptr_str(ed.uncompressed)} }'
+ return 'embed_file.EmbedFileData{ len: ${ed.len}, path: "${ed.path}", apath: "${ed.apath}", uncompressed: ${ptr_str(ed.uncompressed)} }'
}
[unsafe]
@@ -64,11 +64,11 @@ pub fn (mut ed EmbedFileData) data() &u8 {
}
if ed.uncompressed == unsafe { nil } && ed.compressed != unsafe { nil } {
decoder := g_embed_file_decoders.decoders[ed.compression_type] or {
- panic('EmbedFileData error: unknown compression of "$ed.path": "$ed.compression_type"')
+ panic('EmbedFileData error: unknown compression of "${ed.path}": "${ed.compression_type}"')
}
compressed := unsafe { ed.compressed.vbytes(ed.len) }
decompressed := decoder.decompress(compressed) or {
- panic('EmbedFileData error: decompression of "$ed.path" failed: $err')
+ panic('EmbedFileData error: decompression of "${ed.path}" failed: ${err}')
}
unsafe {
ed.uncompressed = &u8(memdup(decompressed.data, ed.len))
@@ -78,11 +78,11 @@ pub fn (mut ed EmbedFileData) data() &u8 {
if !os.is_file(path) {
path = ed.apath
if !os.is_file(path) {
- panic('EmbedFileData error: files "$ed.path" and "$ed.apath" do not exist')
+ panic('EmbedFileData error: files "${ed.path}" and "${ed.apath}" do not exist')
}
}
bytes := os.read_bytes(path) or {
- panic('EmbedFileData error: "$path" could not be read: $err')
+ panic('EmbedFileData error: "${path}" could not be read: ${err}')
}
ed.uncompressed = bytes.data
ed.free_uncompressed = true
@@ -112,7 +112,7 @@ pub fn find_index_entry_by_path(start voidptr, path string, algo string) &EmbedF
}
}
$if trace_embed_file ? {
- eprintln('>> v.embed_file find_index_entry_by_path ${ptr_str(start)}, id: $x.id, path: "$path", algo: "$algo" => ${ptr_str(x)}')
+ eprintln('>> v.embed_file find_index_entry_by_path ${ptr_str(start)}, id: ${x.id}, path: "${path}", algo: "${algo}" => ${ptr_str(x)}')
}
return x
}
diff --git a/vlib/v/embed_file/tests/embed_file_test.v b/vlib/v/embed_file/tests/embed_file_test.v
index dad2f1ddeb..5dc121dfef 100644
--- a/vlib/v/embed_file/tests/embed_file_test.v
+++ b/vlib/v/embed_file/tests/embed_file_test.v
@@ -10,10 +10,10 @@ fn test_const_embed_file_to_string() {
fn test_const_embed_file() {
mut file := const_file
- eprintln('file: $file')
+ eprintln('file: ${file}')
assert file.len == 603
fdata := file.data()
- eprintln('file after .data() call: $file')
+ eprintln('file after .data() call: ${file}')
assert file.path == 'v.png'
assert file.len == 603
unsafe {
@@ -23,10 +23,10 @@ fn test_const_embed_file() {
fn test_embed_file() {
mut file := $embed_file('v.png')
- eprintln('file: $file')
+ eprintln('file: ${file}')
assert file.len == 603
fdata := file.data()
- eprintln('file after .data() call: $file')
+ eprintln('file after .data() call: ${file}')
assert file.len == 603
unsafe {
assert fdata.vbytes(4) == [u8(0x89), `P`, `N`, `G`]
diff --git a/vlib/v/embed_file/tests/embed_file_with_import_test.v b/vlib/v/embed_file/tests/embed_file_with_import_test.v
index be0c20a826..3f34968080 100644
--- a/vlib/v/embed_file/tests/embed_file_with_import_test.v
+++ b/vlib/v/embed_file/tests/embed_file_with_import_test.v
@@ -2,10 +2,10 @@ import v.embed_file
fn test_embed_file_with_import() {
mut file := $embed_file('v.png')
- eprintln('file: $file')
+ eprintln('file: ${file}')
assert file.len == 603
fdata := file.data()
- eprintln('file after .data() call: $file')
+ eprintln('file after .data() call: ${file}')
assert file.len == 603
unsafe {
assert fdata.vbytes(4) == [u8(0x89), `P`, `N`, `G`]
diff --git a/vlib/v/eval/eval.v b/vlib/v/eval/eval.v
index 13e27de836..878a15c73a 100644
--- a/vlib/v/eval/eval.v
+++ b/vlib/v/eval/eval.v
@@ -63,7 +63,7 @@ pub fn (mut e Eval) run_func(func ast.FnDecl, _args ...Object) {
//
mut args := _args.clone()
if func.params.len != args.len && !func.is_variadic {
- e.error('mismatched parameter length for $func.name: got `$args.len`, expected `$func.params.len`')
+ e.error('mismatched parameter length for ${func.name}: got `${args.len}`, expected `${func.params.len}`')
}
if func.name in ['print', 'println', 'eprint', 'eprintln', 'panic'] {
@@ -189,7 +189,7 @@ pub fn (mut e Eval) register_symbol(stmt ast.Stmt, mod string, file string) {
}
for i, branch in x.branches {
mut do_if := false
- println('branch:$branch')
+ println('branch:${branch}')
match branch.cond {
ast.Ident {
match (branch.cond as ast.Ident).name {
@@ -213,12 +213,12 @@ pub fn (mut e Eval) register_symbol(stmt ast.Stmt, mod string, file string) {
}
}
else {
- e.error('unknown declaration expression statement $x.type_name()')
+ e.error('unknown declaration expression statement ${x.type_name()}')
}
}
}
else {
- e.error('unhandled declaration statement $stmt.type_name()')
+ e.error('unhandled declaration statement ${stmt.type_name()}')
}
}
}
@@ -231,8 +231,8 @@ fn (e Eval) error(msg string) {
fn (e Eval) panic(s string) {
commithash := unsafe { tos5(&char(C.V_CURRENT_COMMIT_HASH)) }
- eprintln('V panic: $s')
- eprintln('V hash: $commithash')
+ eprintln('V panic: ${s}')
+ eprintln('V hash: ${commithash}')
e.print_backtrace()
exit(1)
}
@@ -247,6 +247,6 @@ fn (e Eval) print_backtrace() {
}
fn_name := e.trace_function_names[t.fn_idx] or { t.fn_idx.str() }
word := if i == e.back_trace.len - 1 { 'at' } else { 'by' }
- eprintln('$file_path:${t.line + 1}: $word $fn_name')
+ eprintln('${file_path}:${t.line + 1}: ${word} ${fn_name}')
}
}
diff --git a/vlib/v/eval/expr.v b/vlib/v/eval/expr.v
index 9d49629f39..b123661f08 100644
--- a/vlib/v/eval/expr.v
+++ b/vlib/v/eval/expr.v
@@ -91,7 +91,7 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
// return Int{y, 32}
// }
else {
- e.error('unknown c function: `$expr.name`')
+ e.error('unknown c function: `${expr.name}`')
}
}
}
@@ -111,13 +111,13 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
return e.return_values
}
}
- e.error('unknown function: ${mod}.$name at line $expr.pos.line_nr')
+ e.error('unknown function: ${mod}.${name} at line ${expr.pos.line_nr}')
}
// .js {
// e.error('js is not supported')
// }
else {
- e.error('$expr.language is not supported as a call expression language')
+ e.error('${expr.language} is not supported as a call expression language')
}
}
}
@@ -157,7 +157,7 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
do_if = e.pref.prealloc
}
else {
- e.error('unknown compile time if: $branch.cond.name')
+ e.error('unknown compile time if: ${branch.cond.name}')
}
}
}
@@ -185,7 +185,7 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
break
}
} else {
- e.error('non-bool expression: $b.cond')
+ e.error('non-bool expression: ${b.cond}')
}
}
return empty
@@ -199,7 +199,7 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
ast.IntegerLiteral {
// return u64(strconv.parse_uint(expr.val, 0, 64)
return i64(strconv.parse_int(expr.val, 0, 64) or {
- e.error('invalid integer literal: $expr.val')
+ e.error('invalid integer literal: ${expr.val}')
}) // TODO: numbers larger than 2^63 (for u64)
}
ast.FloatLiteral {
@@ -222,7 +222,7 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
}[expr.name.all_after_last('.')] or { ast.EmptyStmt{} } as Object
}
else {
- e.error('unknown ident kind for `$expr.name`: $expr.kind')
+ e.error('unknown ident kind for `${expr.name}`: ${expr.kind}')
}
}
}
@@ -417,7 +417,7 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
return Int{exp.len, 32}
}
else {
- e.error('unknown selector to string: $expr.field_name')
+ e.error('unknown selector to string: ${expr.field_name}')
}
}
}
@@ -427,12 +427,12 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
return Int{exp.val.len, 32}
}
else {
- e.error('unknown selector to array: $expr.field_name')
+ e.error('unknown selector to array: ${expr.field_name}')
}
}
}
else {
- e.error('unknown selector expression: $exp.type_name()')
+ e.error('unknown selector expression: ${exp.type_name()}')
}
}
e.error(exp.str())
@@ -453,7 +453,7 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
val: []Object{}
}
} else {
- e.error('unknown array init combination; len: $expr.has_len, cap: $expr.has_cap, init: $expr.has_default')
+ e.error('unknown array init combination; len: ${expr.has_len}, cap: ${expr.has_cap}, init: ${expr.has_default}')
}
}
if expr.is_fixed || expr.has_val {
@@ -470,7 +470,7 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
}
ast.CharLiteral {
if expr.val.len !in [1, 2] {
- e.error('invalid size of char literal: $expr.val.len')
+ e.error('invalid size of char literal: ${expr.val.len}')
}
if expr.val[0] == `\\` { // is an escape
return e.get_escape(rune(expr.val[1]))
@@ -497,7 +497,7 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
}
}
else {
- e.error('unhandled prefix expression $expr.op')
+ e.error('unhandled prefix expression ${expr.op}')
}
}
}
@@ -512,7 +512,7 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
return e.expr(expr.expr, ast.i64_type_idx)
}
else {
- e.error('unhandled postfix expression $expr.op')
+ e.error('unhandled postfix expression ${expr.op}')
}
}
}
@@ -585,7 +585,7 @@ fn (e Eval) get_escape(r rune) rune {
}
}
if res == `e` {
- e.error('unknown escape: `$r`')
+ e.error('unknown escape: `${r}`')
}
return res
}
diff --git a/vlib/v/eval/gen/infix_gen.v b/vlib/v/eval/gen/infix_gen.v
index 9b42191054..29cb174ec1 100644
--- a/vlib/v/eval/gen/infix_gen.v
+++ b/vlib/v/eval/gen/infix_gen.v
@@ -37,47 +37,47 @@ fn main() {
b.write_string(header)
for enm, op in comparison {
- b.write_string('.$enm{match left{')
+ b.write_string('.${enm}{match left{')
for ct in compound_types {
- b.write_string('$ct {match right{')
+ b.write_string('${ct} {match right{')
for ct2 in compound_types {
- b.write_string('$ct2{return left.val${op}right.val}')
+ b.write_string('${ct2}{return left.val${op}right.val}')
}
for lt2 in literal_types {
- b.write_string('$lt2{return left.val${op}right}')
+ b.write_string('${lt2}{return left.val${op}right}')
}
- b.write_string("else{e.error('invalid operands to $op: $ct and \$right.type_name()')}}}")
+ b.write_string("else{e.error('invalid operands to ${op}: ${ct} and \$right.type_name()')}}}")
}
for lt in literal_types {
- b.write_string('$lt {match right{')
+ b.write_string('${lt} {match right{')
for ct2 in compound_types {
- b.write_string('$ct2{return left${op}right.val}')
+ b.write_string('${ct2}{return left${op}right.val}')
}
for lt2 in literal_types {
- b.write_string('$lt2{return left${op}right}')
+ b.write_string('${lt2}{return left${op}right}')
}
- b.write_string("else {e.error('invalid operands to $op: ")
+ b.write_string("else {e.error('invalid operands to ${op}: ")
b.write_string(if lt == 'i64' { 'int' } else { 'float' })
b.write_string(" literal and \$right.type_name()')}}}")
}
if op in ['==', '!='] {
- b.write_string('string{match right{string{return left${op}right}else{e.error(\'invalid operands to $op: string and \$right.type_name()\')}}}')
+ b.write_string('string{match right{string{return left${op}right}else{e.error(\'invalid operands to ${op}: string and \$right.type_name()\')}}}')
}
- b.write_string("else {e.error('invalid operands to $op: \$left.type_name() and \$right.type_name()')}}}")
+ b.write_string("else {e.error('invalid operands to ${op}: \$left.type_name() and \$right.type_name()')}}}")
}
for math, op in math_ops {
- b.write_string('.$math{match left{')
+ b.write_string('.${math}{match left{')
for ct in compound_types {
if op in ['<<', '>>'] && ct == 'Float' {
continue
}
- b.write_string('$ct {match right{')
+ b.write_string('${ct} {match right{')
for ct2 in compound_types {
if op in ['<<', '>>'] && ct2 == 'Float' {
continue
}
unsafe_start, unsafe_end := if op in ['<<', '>>'] { 'unsafe{', '}' } else { '', '' }
- b.write_string('$ct2{if expecting in ast.signed_integer_type_idxs{return Int{i64(left.val)${op}i64(right.val),i8(e.type_to_size(expecting))}}else if expecting in ast.unsigned_integer_type_idxs{return Uint{u64(left.val)${op}u64(right.val),i8(e.type_to_size(expecting))}}else if expecting==ast.int_literal_type_idx{${unsafe_start}return i64(i64(left.val)${op}i64(right.val))$unsafe_end}')
+ b.write_string('${ct2}{if expecting in ast.signed_integer_type_idxs{return Int{i64(left.val)${op}i64(right.val),i8(e.type_to_size(expecting))}}else if expecting in ast.unsigned_integer_type_idxs{return Uint{u64(left.val)${op}u64(right.val),i8(e.type_to_size(expecting))}}else if expecting==ast.int_literal_type_idx{${unsafe_start}return i64(i64(left.val)${op}i64(right.val))${unsafe_end}}')
if op !in ['<<', '>>'] {
b.write_string('else if expecting in ast.float_type_idxs{return Float{f64(left.val)${op}f64(right.val), i8(e.type_to_size(expecting))}}else if expecting==ast.float_literal_type_idx{return f64(f64(left.val)${op}f64(right.val))}')
}
@@ -88,25 +88,25 @@ fn main() {
continue
}
unsafe_start, unsafe_end := if op in ['<<', '>>'] { 'unsafe{', '}' } else { '', '' }
- b.write_string('$lt2{if expecting in ast.signed_integer_type_idxs{return Int{i64(left.val)${op}i64(right),i8(e.type_to_size(expecting))}}else if expecting in ast.unsigned_integer_type_idxs{return Uint{u64(left.val)${op}u64(right),i8(e.type_to_size(expecting))}}else if expecting==ast.int_literal_type_idx{${unsafe_start}return i64(i64(left.val)${op}i64(right))$unsafe_end}')
+ b.write_string('${lt2}{if expecting in ast.signed_integer_type_idxs{return Int{i64(left.val)${op}i64(right),i8(e.type_to_size(expecting))}}else if expecting in ast.unsigned_integer_type_idxs{return Uint{u64(left.val)${op}u64(right),i8(e.type_to_size(expecting))}}else if expecting==ast.int_literal_type_idx{${unsafe_start}return i64(i64(left.val)${op}i64(right))${unsafe_end}}')
if op !in ['<<', '>>'] {
b.write_string('else if expecting in ast.float_type_idxs{return Float{f64(left.val)${op}f64(right), i8(e.type_to_size(expecting))}}else if expecting==ast.float_literal_type_idx{return f64(f64(left.val)${op}f64(right))}')
}
b.write_string(uk_expect_footer)
}
- b.write_string("else {e.error('invalid operands to $op: $ct and \$right.type_name()')}}}")
+ b.write_string("else {e.error('invalid operands to ${op}: ${ct} and \$right.type_name()')}}}")
}
for lt in literal_types {
if op in ['<<', '>>'] && lt == 'f64' {
continue
}
- b.write_string('$lt{match right{')
+ b.write_string('${lt}{match right{')
for ct2 in compound_types {
if op in ['<<', '>>'] && ct2 == 'Float' {
continue
}
unsafe_start, unsafe_end := if op in ['<<', '>>'] { 'unsafe{', '}' } else { '', '' }
- b.write_string('$ct2{if expecting in ast.signed_integer_type_idxs{return Int{i64(left)${op}i64(right.val),i8(e.type_to_size(expecting))}}else if expecting in ast.unsigned_integer_type_idxs{return Uint{u64(left)${op}u64(right.val),i8(e.type_to_size(expecting))}}else if expecting==ast.int_literal_type_idx{${unsafe_start}return i64(i64(left)${op}i64(right.val))$unsafe_end}')
+ b.write_string('${ct2}{if expecting in ast.signed_integer_type_idxs{return Int{i64(left)${op}i64(right.val),i8(e.type_to_size(expecting))}}else if expecting in ast.unsigned_integer_type_idxs{return Uint{u64(left)${op}u64(right.val),i8(e.type_to_size(expecting))}}else if expecting==ast.int_literal_type_idx{${unsafe_start}return i64(i64(left)${op}i64(right.val))${unsafe_end}}')
if op !in ['<<', '>>'] {
b.write_string('else if expecting in ast.float_type_idxs{return Float{f64(left)${op}f64(right.val), i8(e.type_to_size(expecting))}}else if expecting==ast.float_literal_type_idx{return f64(f64(left)${op}f64(right.val))}')
}
@@ -117,17 +117,17 @@ fn main() {
continue
}
unsafe_start, unsafe_end := if op in ['<<', '>>'] { 'unsafe{', '}' } else { '', '' }
- b.write_string('$lt2{if expecting in ast.signed_integer_type_idxs{return Int{i64(left)${op}i64(right),i8(e.type_to_size(expecting))}}else if expecting in ast.unsigned_integer_type_idxs{return Uint{u64(left)${op}u64(right),i8(e.type_to_size(expecting))}}else if expecting==ast.int_literal_type_idx{${unsafe_start}return i64(i64(left)${op}i64(right))$unsafe_end}')
+ b.write_string('${lt2}{if expecting in ast.signed_integer_type_idxs{return Int{i64(left)${op}i64(right),i8(e.type_to_size(expecting))}}else if expecting in ast.unsigned_integer_type_idxs{return Uint{u64(left)${op}u64(right),i8(e.type_to_size(expecting))}}else if expecting==ast.int_literal_type_idx{${unsafe_start}return i64(i64(left)${op}i64(right))${unsafe_end}}')
if op !in ['<<', '>>'] {
b.write_string('else if expecting in ast.float_type_idxs{return Float{f64(left)${op}f64(right), i8(e.type_to_size(expecting))}}else if expecting==ast.float_literal_type_idx{return f64(f64(left)${op}f64(right))}')
}
b.write_string(uk_expect_footer)
}
- b.write_string("else {e.error('invalid operands to $op: ")
+ b.write_string("else {e.error('invalid operands to ${op}: ")
b.write_string(if lt == 'i64' { 'int' } else { 'float' })
b.write_string(" literal and \$right.type_name()')}}}")
}
- b.write_string("else {e.error('invalid operands to $op: \$left.type_name() and \$right.type_name()')}}}")
+ b.write_string("else {e.error('invalid operands to ${op}: \$left.type_name() and \$right.type_name()')}}}")
}
b.write_string(footer)
diff --git a/vlib/v/eval/infix.v b/vlib/v/eval/infix.v
index 782581ac41..2b0911af8b 100644
--- a/vlib/v/eval/infix.v
+++ b/vlib/v/eval/infix.v
@@ -15,7 +15,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val > right.val }
i64 { return left.val > right }
f64 { return left.val > right }
- else { e.error('invalid operands to >: Int and $right.type_name()') }
+ else { e.error('invalid operands to >: Int and ${right.type_name()}') }
}
}
Uint {
@@ -25,7 +25,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val > right.val }
i64 { return left.val > right }
f64 { return left.val > right }
- else { e.error('invalid operands to >: Uint and $right.type_name()') }
+ else { e.error('invalid operands to >: Uint and ${right.type_name()}') }
}
}
Float {
@@ -35,7 +35,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val > right.val }
i64 { return left.val > right }
f64 { return left.val > right }
- else { e.error('invalid operands to >: Float and $right.type_name()') }
+ else { e.error('invalid operands to >: Float and ${right.type_name()}') }
}
}
i64 {
@@ -45,7 +45,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left > right.val }
i64 { return left > right }
f64 { return left > right }
- else { e.error('invalid operands to >: int literal and $right.type_name()') }
+ else { e.error('invalid operands to >: int literal and ${right.type_name()}') }
}
}
f64 {
@@ -55,11 +55,11 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left > right.val }
i64 { return left > right }
f64 { return left > right }
- else { e.error('invalid operands to >: float literal and $right.type_name()') }
+ else { e.error('invalid operands to >: float literal and ${right.type_name()}') }
}
}
else {
- e.error('invalid operands to >: $left.type_name() and $right.type_name()')
+ e.error('invalid operands to >: ${left.type_name()} and ${right.type_name()}')
}
}
}
@@ -72,7 +72,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val < right.val }
i64 { return left.val < right }
f64 { return left.val < right }
- else { e.error('invalid operands to <: Int and $right.type_name()') }
+ else { e.error('invalid operands to <: Int and ${right.type_name()}') }
}
}
Uint {
@@ -82,7 +82,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val < right.val }
i64 { return left.val < right }
f64 { return left.val < right }
- else { e.error('invalid operands to <: Uint and $right.type_name()') }
+ else { e.error('invalid operands to <: Uint and ${right.type_name()}') }
}
}
Float {
@@ -92,7 +92,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val < right.val }
i64 { return left.val < right }
f64 { return left.val < right }
- else { e.error('invalid operands to <: Float and $right.type_name()') }
+ else { e.error('invalid operands to <: Float and ${right.type_name()}') }
}
}
i64 {
@@ -102,7 +102,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left < right.val }
i64 { return left < right }
f64 { return left < right }
- else { e.error('invalid operands to <: int literal and $right.type_name()') }
+ else { e.error('invalid operands to <: int literal and ${right.type_name()}') }
}
}
f64 {
@@ -112,11 +112,11 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left < right.val }
i64 { return left < right }
f64 { return left < right }
- else { e.error('invalid operands to <: float literal and $right.type_name()') }
+ else { e.error('invalid operands to <: float literal and ${right.type_name()}') }
}
}
else {
- e.error('invalid operands to <: $left.type_name() and $right.type_name()')
+ e.error('invalid operands to <: ${left.type_name()} and ${right.type_name()}')
}
}
}
@@ -129,7 +129,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val == right.val }
i64 { return left.val == right }
f64 { return left.val == right }
- else { e.error('invalid operands to ==: Int and $right.type_name()') }
+ else { e.error('invalid operands to ==: Int and ${right.type_name()}') }
}
}
Uint {
@@ -139,7 +139,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val == right.val }
i64 { return left.val == right }
f64 { return left.val == right }
- else { e.error('invalid operands to ==: Uint and $right.type_name()') }
+ else { e.error('invalid operands to ==: Uint and ${right.type_name()}') }
}
}
Float {
@@ -149,7 +149,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val == right.val }
i64 { return left.val == right }
f64 { return left.val == right }
- else { e.error('invalid operands to ==: Float and $right.type_name()') }
+ else { e.error('invalid operands to ==: Float and ${right.type_name()}') }
}
}
i64 {
@@ -159,7 +159,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left == right.val }
i64 { return left == right }
f64 { return left == right }
- else { e.error('invalid operands to ==: int literal and $right.type_name()') }
+ else { e.error('invalid operands to ==: int literal and ${right.type_name()}') }
}
}
f64 {
@@ -169,17 +169,17 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left == right.val }
i64 { return left == right }
f64 { return left == right }
- else { e.error('invalid operands to ==: float literal and $right.type_name()') }
+ else { e.error('invalid operands to ==: float literal and ${right.type_name()}') }
}
}
string {
match right {
string { return left == right }
- else { e.error('invalid operands to ==: string and $right.type_name()') }
+ else { e.error('invalid operands to ==: string and ${right.type_name()}') }
}
}
else {
- e.error('invalid operands to ==: $left.type_name() and $right.type_name()')
+ e.error('invalid operands to ==: ${left.type_name()} and ${right.type_name()}')
}
}
}
@@ -192,7 +192,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val != right.val }
i64 { return left.val != right }
f64 { return left.val != right }
- else { e.error('invalid operands to !=: Int and $right.type_name()') }
+ else { e.error('invalid operands to !=: Int and ${right.type_name()}') }
}
}
Uint {
@@ -202,7 +202,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val != right.val }
i64 { return left.val != right }
f64 { return left.val != right }
- else { e.error('invalid operands to !=: Uint and $right.type_name()') }
+ else { e.error('invalid operands to !=: Uint and ${right.type_name()}') }
}
}
Float {
@@ -212,7 +212,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left.val != right.val }
i64 { return left.val != right }
f64 { return left.val != right }
- else { e.error('invalid operands to !=: Float and $right.type_name()') }
+ else { e.error('invalid operands to !=: Float and ${right.type_name()}') }
}
}
i64 {
@@ -222,7 +222,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left != right.val }
i64 { return left != right }
f64 { return left != right }
- else { e.error('invalid operands to !=: int literal and $right.type_name()') }
+ else { e.error('invalid operands to !=: int literal and ${right.type_name()}') }
}
}
f64 {
@@ -232,17 +232,17 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
Float { return left != right.val }
i64 { return left != right }
f64 { return left != right }
- else { e.error('invalid operands to !=: float literal and $right.type_name()') }
+ else { e.error('invalid operands to !=: float literal and ${right.type_name()}') }
}
}
string {
match right {
string { return left != right }
- else { e.error('invalid operands to !=: string and $right.type_name()') }
+ else { e.error('invalid operands to !=: string and ${right.type_name()}') }
}
}
else {
- e.error('invalid operands to !=: $left.type_name() and $right.type_name()')
+ e.error('invalid operands to !=: ${left.type_name()} and ${right.type_name()}')
}
}
}
@@ -326,7 +326,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to +: Int and $right.type_name()')
+ e.error('invalid operands to +: Int and ${right.type_name()}')
}
}
}
@@ -408,7 +408,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to +: Uint and $right.type_name()')
+ e.error('invalid operands to +: Uint and ${right.type_name()}')
}
}
}
@@ -490,7 +490,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to +: Float and $right.type_name()')
+ e.error('invalid operands to +: Float and ${right.type_name()}')
}
}
}
@@ -572,7 +572,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to +: int literal and $right.type_name()')
+ e.error('invalid operands to +: int literal and ${right.type_name()}')
}
}
}
@@ -654,12 +654,12 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to +: float literal and $right.type_name()')
+ e.error('invalid operands to +: float literal and ${right.type_name()}')
}
}
}
else {
- e.error('invalid operands to +: $left.type_name() and $right.type_name()')
+ e.error('invalid operands to +: ${left.type_name()} and ${right.type_name()}')
}
}
}
@@ -743,7 +743,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to -: Int and $right.type_name()')
+ e.error('invalid operands to -: Int and ${right.type_name()}')
}
}
}
@@ -825,7 +825,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to -: Uint and $right.type_name()')
+ e.error('invalid operands to -: Uint and ${right.type_name()}')
}
}
}
@@ -907,7 +907,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to -: Float and $right.type_name()')
+ e.error('invalid operands to -: Float and ${right.type_name()}')
}
}
}
@@ -989,7 +989,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to -: int literal and $right.type_name()')
+ e.error('invalid operands to -: int literal and ${right.type_name()}')
}
}
}
@@ -1071,12 +1071,12 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to -: float literal and $right.type_name()')
+ e.error('invalid operands to -: float literal and ${right.type_name()}')
}
}
}
else {
- e.error('invalid operands to -: $left.type_name() and $right.type_name()')
+ e.error('invalid operands to -: ${left.type_name()} and ${right.type_name()}')
}
}
}
@@ -1160,7 +1160,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to *: Int and $right.type_name()')
+ e.error('invalid operands to *: Int and ${right.type_name()}')
}
}
}
@@ -1242,7 +1242,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to *: Uint and $right.type_name()')
+ e.error('invalid operands to *: Uint and ${right.type_name()}')
}
}
}
@@ -1324,7 +1324,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to *: Float and $right.type_name()')
+ e.error('invalid operands to *: Float and ${right.type_name()}')
}
}
}
@@ -1406,7 +1406,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to *: int literal and $right.type_name()')
+ e.error('invalid operands to *: int literal and ${right.type_name()}')
}
}
}
@@ -1488,12 +1488,12 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to *: float literal and $right.type_name()')
+ e.error('invalid operands to *: float literal and ${right.type_name()}')
}
}
}
else {
- e.error('invalid operands to *: $left.type_name() and $right.type_name()')
+ e.error('invalid operands to *: ${left.type_name()} and ${right.type_name()}')
}
}
}
@@ -1577,7 +1577,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to /: Int and $right.type_name()')
+ e.error('invalid operands to /: Int and ${right.type_name()}')
}
}
}
@@ -1659,7 +1659,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to /: Uint and $right.type_name()')
+ e.error('invalid operands to /: Uint and ${right.type_name()}')
}
}
}
@@ -1741,7 +1741,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to /: Float and $right.type_name()')
+ e.error('invalid operands to /: Float and ${right.type_name()}')
}
}
}
@@ -1823,7 +1823,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to /: int literal and $right.type_name()')
+ e.error('invalid operands to /: int literal and ${right.type_name()}')
}
}
}
@@ -1905,12 +1905,12 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to /: float literal and $right.type_name()')
+ e.error('invalid operands to /: float literal and ${right.type_name()}')
}
}
}
else {
- e.error('invalid operands to /: $left.type_name() and $right.type_name()')
+ e.error('invalid operands to /: ${left.type_name()} and ${right.type_name()}')
}
}
}
@@ -1958,7 +1958,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to >>: Int and $right.type_name()')
+ e.error('invalid operands to >>: Int and ${right.type_name()}')
}
}
}
@@ -2004,7 +2004,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to >>: Uint and $right.type_name()')
+ e.error('invalid operands to >>: Uint and ${right.type_name()}')
}
}
}
@@ -2050,12 +2050,12 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to >>: int literal and $right.type_name()')
+ e.error('invalid operands to >>: int literal and ${right.type_name()}')
}
}
}
else {
- e.error('invalid operands to >>: $left.type_name() and $right.type_name()')
+ e.error('invalid operands to >>: ${left.type_name()} and ${right.type_name()}')
}
}
}
@@ -2103,7 +2103,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to <<: Int and $right.type_name()')
+ e.error('invalid operands to <<: Int and ${right.type_name()}')
}
}
}
@@ -2149,7 +2149,7 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to <<: Uint and $right.type_name()')
+ e.error('invalid operands to <<: Uint and ${right.type_name()}')
}
}
}
@@ -2195,17 +2195,17 @@ fn (e Eval) infix_expr(left Object, right Object, op token.Kind, expecting ast.T
}
}
else {
- e.error('invalid operands to <<: int literal and $right.type_name()')
+ e.error('invalid operands to <<: int literal and ${right.type_name()}')
}
}
}
else {
- e.error('invalid operands to <<: $left.type_name() and $right.type_name()')
+ e.error('invalid operands to <<: ${left.type_name()} and ${right.type_name()}')
}
}
}
else {
- e.error('unknown infix expression: $op')
+ e.error('unknown infix expression: ${op}')
}
}
return empty // should e.error before this anyway
diff --git a/vlib/v/eval/interpret_test.v b/vlib/v/eval/interpret_test.v
index e3be905900..ad1e1ec7bd 100644
--- a/vlib/v/eval/interpret_test.v
+++ b/vlib/v/eval/interpret_test.v
@@ -31,19 +31,19 @@ fn test_interpret() {
res := os.execute(cmd)
if res.exit_code != 0 {
bench.fail()
- eprintln(bench.step_message_fail('$full_test_path failed to run'))
+ eprintln(bench.step_message_fail('${full_test_path} failed to run'))
eprintln(res.output)
continue
}
- mut expected := os.read_file('$dir/${test_name_without_postfix}.out')?
+ mut expected := os.read_file('${dir}/${test_name_without_postfix}.out')?
expected = normalise_line_endings(expected)
mut found := normalise_line_endings(res.output)
found = found.trim_space()
if expected != found {
println(term.red('FAIL'))
println('========================================================\n')
- println('============ expected len=$expected.len: "$expected"')
- println('============ found len=$found.len: "$found"')
+ println('============ expected len=${expected.len}: "${expected}"')
+ println('============ found len=${found.len}: "${found}"')
println('========================================================\n')
bench.fail()
continue
diff --git a/vlib/v/eval/stmt.v b/vlib/v/eval/stmt.v
index fa27622a49..116725943a 100644
--- a/vlib/v/eval/stmt.v
+++ b/vlib/v/eval/stmt.v
@@ -52,7 +52,7 @@ pub fn (mut e Eval) stmt(stmt ast.Stmt) {
}
}
else {
- e.error('unknown assign statment: $stmt.op')
+ e.error('unknown assign statment: ${stmt.op}')
}
}
}
@@ -105,7 +105,7 @@ pub fn (mut e Eval) stmt(stmt ast.Stmt) {
e.stmts(stmt.stmts)
}
else {
- e.error('unhandled statement $stmt.type_name()')
+ e.error('unhandled statement ${stmt.type_name()}')
}
}
}
diff --git a/vlib/v/eval/var.v b/vlib/v/eval/var.v
index 70226619ec..5a1e9f5c59 100644
--- a/vlib/v/eval/var.v
+++ b/vlib/v/eval/var.v
@@ -37,7 +37,7 @@ pub fn (mut e Eval) set(expr ast.Expr, val Object, init bool, typ ast.Type) {
}
}
ast.IndexExpr {
- panic('>>$expr.pos, $e.cur_file')
+ panic('>>${expr.pos}, ${e.cur_file}')
// if init {
// e.error('index init assignment')
@@ -49,7 +49,7 @@ pub fn (mut e Eval) set(expr ast.Expr, val Object, init bool, typ ast.Type) {
// }
}
else {
- panic('unknown left value to assign statment: $expr.type_name()')
+ panic('unknown left value to assign statment: ${expr.type_name()}')
}
}
}
@@ -62,7 +62,7 @@ pub fn (mut e Eval) add(expr ast.Expr, val Object) {
.plus, e.local_vars[expr.name].typ)
}
else {
- panic('unknown left value to add statment: $expr.type_name()')
+ panic('unknown left value to add statment: ${expr.type_name()}')
}
}
}
diff --git a/vlib/v/fmt/asm.v b/vlib/v/fmt/asm.v
index aa82fd0438..6ccc4c130d 100644
--- a/vlib/v/fmt/asm.v
+++ b/vlib/v/fmt/asm.v
@@ -12,7 +12,7 @@ fn (mut f Fmt) asm_stmt(stmt ast.AsmStmt) {
} else if stmt.is_goto {
f.write('goto ')
}
- f.writeln('$stmt.arch {')
+ f.writeln('${stmt.arch} {')
f.indent++
f.asm_templates(stmt.templates)
@@ -45,7 +45,7 @@ fn (mut f Fmt) asm_arg(arg ast.AsmArg) {
f.asm_reg(arg)
}
ast.AsmAlias {
- f.write('$arg.name')
+ f.write('${arg.name}')
}
ast.IntegerLiteral, ast.FloatLiteral, ast.CharLiteral {
f.write(arg.val)
@@ -80,7 +80,7 @@ fn (mut f Fmt) asm_arg(arg ast.AsmArg) {
}
.index_times_scale_plus_displacement {
f.asm_arg(index)
- f.write(' * $scale + ')
+ f.write(' * ${scale} + ')
f.asm_arg(displacement)
}
.base_plus_index_plus_displacement {
@@ -94,7 +94,7 @@ fn (mut f Fmt) asm_arg(arg ast.AsmArg) {
f.asm_arg(base)
f.write(' + ')
f.asm_arg(index)
- f.write(' * $scale + ')
+ f.write(' * ${scale} + ')
f.asm_arg(displacement)
}
.rip_plus_displacement {
@@ -129,7 +129,7 @@ fn (mut f Fmt) asm_templates(templates []ast.AsmTemplate) {
if template.is_directive {
f.write('.')
}
- f.write('$template.name')
+ f.write('${template.name}')
if template.is_label {
f.write(':')
} else if template.args.len > 0 {
@@ -170,7 +170,7 @@ fn (mut f Fmt) asm_ios(ios []ast.AsmIO) {
f.write(' ')
}
- f.write('$io.constraint ($io.expr)')
+ f.write('${io.constraint} (${io.expr})')
mut as_block := true
if io.expr is ast.Ident {
if io.expr.name == io.alias {
@@ -178,7 +178,7 @@ fn (mut f Fmt) asm_ios(ios []ast.AsmIO) {
}
}
if as_block && io.alias != '' {
- f.write(' as $io.alias')
+ f.write(' as ${io.alias}')
}
if io.comments.len == 0 {
f.writeln('')
diff --git a/vlib/v/fmt/attrs.v b/vlib/v/fmt/attrs.v
index 4b31fd1ea7..74a838a065 100644
--- a/vlib/v/fmt/attrs.v
+++ b/vlib/v/fmt/attrs.v
@@ -17,7 +17,7 @@ pub fn (mut f Fmt) attrs(attrs []ast.Attr) {
f.single_line_attrs(sorted_attrs[i..])
break
}
- f.writeln('[$attr]')
+ f.writeln('[${attr}]')
}
}
@@ -40,7 +40,7 @@ pub fn (mut f Fmt) single_line_attrs(attrs []ast.Attr, options AttrsOptions) {
if i > 0 {
f.write('; ')
}
- f.write('$attr')
+ f.write('${attr}')
}
f.write(']')
if !options.inline {
@@ -57,7 +57,7 @@ fn inline_attrs_len(attrs []ast.Attr) int {
if i > 0 {
n += 2 // '; '.len
}
- n += '$attr'.len
+ n += '${attr}'.len
}
n++ // ']'.len
return n
diff --git a/vlib/v/fmt/comments.v b/vlib/v/fmt/comments.v
index 43b6ffe0d8..456542d59d 100644
--- a/vlib/v/fmt/comments.v
+++ b/vlib/v/fmt/comments.v
@@ -51,7 +51,7 @@ pub fn (mut f Fmt) comment(node ast.Comment, options CommentsOptions) {
f.writeln(x)
f.write('*/')
} else {
- f.write('/* $x */')
+ f.write('/* ${x} */')
}
} else if !node.text.contains('\n') {
is_separate_line := !options.inline || node.text.starts_with('\x01')
diff --git a/vlib/v/fmt/fmt.v b/vlib/v/fmt/fmt.v
index 26f5ef319f..bb1339c62e 100644
--- a/vlib/v/fmt/fmt.v
+++ b/vlib/v/fmt/fmt.v
@@ -97,11 +97,11 @@ pub fn (mut f Fmt) process_file_imports(file &ast.File) {
for imp in file.imports {
f.mod2alias[imp.mod] = imp.alias
for sym in imp.syms {
- f.mod2alias['${imp.mod}.$sym.name'] = sym.name
- f.mod2alias['${imp.mod.all_after_last('.')}.$sym.name'] = sym.name
+ f.mod2alias['${imp.mod}.${sym.name}'] = sym.name
+ f.mod2alias['${imp.mod.all_after_last('.')}.${sym.name}'] = sym.name
f.mod2alias[sym.name] = sym.name
- f.mod2syms['${imp.mod}.$sym.name'] = sym.name
- f.mod2syms['${imp.mod.all_after_last('.')}.$sym.name'] = sym.name
+ f.mod2syms['${imp.mod}.${sym.name}'] = sym.name
+ f.mod2syms['${imp.mod.all_after_last('.')}.${sym.name}'] = sym.name
f.mod2syms[sym.name] = sym.name
f.import_syms_used[sym.name] = false
}
@@ -253,7 +253,7 @@ pub fn (mut f Fmt) short_module(name string) string {
mut res := '${f.short_module(generic_levels[0])}'
for i in 1 .. generic_levels.len {
genshorts := generic_levels[i].split(', ').map(f.short_module(it)).join(', ')
- res += '<$genshorts'
+ res += '<${genshorts}'
}
res += '>'
return res
@@ -275,9 +275,9 @@ pub fn (mut f Fmt) short_module(name string) string {
}
}
if aname == '' {
- return '$tprefix$symname'
+ return '${tprefix}${symname}'
}
- return '$tprefix${aname}.$symname'
+ return '${tprefix}${aname}.${symname}'
}
//=== Import-related methods ===//
@@ -359,7 +359,7 @@ pub fn (mut f Fmt) imports(imports []ast.Import) {
pub fn (f Fmt) imp_stmt_str(imp ast.Import) string {
mod := if imp.mod.len == 0 { imp.alias } else { imp.mod }
is_diff := imp.alias != mod && !mod.ends_with('.' + imp.alias)
- mut imp_alias_suffix := if is_diff { ' as $imp.alias' } else { '' }
+ mut imp_alias_suffix := if is_diff { ' as ${imp.alias}' } else { '' }
mut syms := imp.syms.map(it.name).filter(f.import_syms_used[it])
syms.sort()
if syms.len > 0 {
@@ -369,7 +369,7 @@ pub fn (f Fmt) imp_stmt_str(imp ast.Import) string {
' {\n\t' + syms.join(',\n\t') + ',\n}'
}
}
- return '$mod$imp_alias_suffix'
+ return '${mod}${imp_alias_suffix}'
}
//=== Node helpers ===//
@@ -459,7 +459,7 @@ pub fn (mut f Fmt) stmts(stmts []ast.Stmt) {
pub fn (mut f Fmt) stmt(node ast.Stmt) {
if f.is_debug {
- eprintln('stmt ${node.type_name():-20} | pos: $node.pos.line_str()')
+ eprintln('stmt ${node.type_name():-20} | pos: ${node.pos.line_str()}')
}
match node {
ast.EmptyStmt, ast.NodeError {}
@@ -562,7 +562,7 @@ fn stmt_is_single_line(stmt ast.Stmt) bool {
pub fn (mut f Fmt) expr(node_ ast.Expr) {
mut node := unsafe { node_ }
if f.is_debug {
- eprintln('expr ${node.type_name():-20} | pos: $node.pos().line_str() | $node.str()')
+ eprintln('expr ${node.type_name():-20} | pos: ${node.pos().line_str()} | ${node.str()}')
}
match mut node {
ast.NodeError {}
@@ -613,7 +613,7 @@ pub fn (mut f Fmt) expr(node_ ast.Expr) {
f.concat_expr(node)
}
ast.CTempVar {
- eprintln('ast.CTempVar of $node.orig.str() should be generated/used only in cgen')
+ eprintln('ast.CTempVar of ${node.orig.str()} should be generated/used only in cgen')
}
ast.DumpExpr {
f.dump_expr(node)
@@ -800,7 +800,7 @@ pub fn (mut f Fmt) assign_stmt(node ast.AssignStmt) {
}
}
f.is_assign = true
- f.write(' $node.op.str() ')
+ f.write(' ${node.op.str()} ')
for i, val in node.right {
f.expr(val)
if i < node.right.len - 1 {
@@ -832,7 +832,7 @@ pub fn (mut f Fmt) branch_stmt(node ast.BranchStmt) {
pub fn (mut f Fmt) comptime_for(node ast.ComptimeFor) {
typ := f.no_cur_mod(f.table.type_to_str_using_aliases(node.typ, f.mod2alias))
- f.write('\$for $node.val_var in ${typ}.$node.kind.str() {')
+ f.write('\$for ${node.val_var} in ${typ}.${node.kind.str()} {')
f.mark_types_import_as_used(node.typ)
if node.stmts.len > 0 || node.pos.line_nr < node.pos.last_line {
f.writeln('')
@@ -902,7 +902,7 @@ pub fn (mut f Fmt) const_decl(node ast.ConstDecl) {
f.writeln('')
}
name := field.name.after('.')
- f.write('$name ')
+ f.write('${name} ')
f.write(strings.repeat(` `, align_infos[align_idx].max - field.name.len))
f.write('= ')
f.expr(field.expr)
@@ -959,16 +959,16 @@ pub fn (mut f Fmt) enum_decl(node ast.EnumDecl) {
mut name := node.name.after('.')
if node.typ != ast.int_type {
senum_type := f.table.type_to_str_using_aliases(node.typ, f.mod2alias)
- name += ' as $senum_type'
+ name += ' as ${senum_type}'
}
if node.fields.len == 0 && node.pos.line_nr == node.pos.last_line {
- f.writeln('enum $name {}\n')
+ f.writeln('enum ${name} {}\n')
return
}
- f.writeln('enum $name {')
+ f.writeln('enum ${name} {')
f.comments(node.comments, inline: true, level: .indent)
for field in node.fields {
- f.write('\t$field.name')
+ f.write('\t${field.name}')
if field.has_expr {
f.write(' = ')
f.expr(field.expr)
@@ -1044,7 +1044,7 @@ fn (mut f Fmt) fn_body(node ast.FnDecl) {
pub fn (mut f Fmt) for_c_stmt(node ast.ForCStmt) {
if node.label.len > 0 {
- f.write('$node.label: ')
+ f.write('${node.label}: ')
}
if node.comments.len > 0 {
f.comments(node.comments)
@@ -1070,7 +1070,7 @@ pub fn (mut f Fmt) for_c_stmt(node ast.ForCStmt) {
pub fn (mut f Fmt) for_in_stmt(node ast.ForInStmt) {
if node.label.len > 0 {
- f.write('$node.label: ')
+ f.write('${node.label}: ')
}
if node.comments.len > 0 {
f.comments(node.comments)
@@ -1104,7 +1104,7 @@ pub fn (mut f Fmt) for_in_stmt(node ast.ForInStmt) {
pub fn (mut f Fmt) for_stmt(node ast.ForStmt) {
if node.label.len > 0 {
- f.write('$node.label: ')
+ f.write('${node.label}: ')
}
if node.comments.len > 0 {
f.comments(node.comments)
@@ -1148,7 +1148,7 @@ pub fn (mut f Fmt) global_decl(node ast.GlobalDecl) {
if field.is_volatile {
f.write('volatile ')
}
- f.write('$field.name ')
+ f.write('${field.name} ')
f.write(strings.repeat(` `, max - field.name.len))
if field.has_expr {
f.write('= ')
@@ -1176,15 +1176,15 @@ pub fn (mut f Fmt) go_expr(node ast.GoExpr) {
}
pub fn (mut f Fmt) goto_label(node ast.GotoLabel) {
- f.writeln('$node.name:')
+ f.writeln('${node.name}:')
}
pub fn (mut f Fmt) goto_stmt(node ast.GotoStmt) {
- f.writeln('goto $node.name')
+ f.writeln('goto ${node.name}')
}
pub fn (mut f Fmt) hash_stmt(node ast.HashStmt) {
- f.writeln('#$node.val')
+ f.writeln('#${node.val}')
}
pub fn (mut f Fmt) interface_decl(node ast.InterfaceDecl) {
@@ -1203,7 +1203,7 @@ pub fn (mut f Fmt) interface_decl(node ast.InterfaceDecl) {
}
f.comments_before_field(node.pre_comments)
for embed in node.embeds {
- f.write('\t$embed.name')
+ f.write('\t${embed.name}')
f.comments(embed.comments, inline: true, has_nl: false, level: .indent)
f.writeln('')
}
@@ -1245,7 +1245,7 @@ pub fn (mut f Fmt) interface_field(field ast.StructField) {
before_comments := field.comments.filter(it.pos.pos < field.pos.pos)
between_comments := field.comments[before_comments.len..].filter(it.pos.pos < end_pos)
after_type_comments := field.comments[(before_comments.len + between_comments.len)..]
- f.write('\t$field.name $ft')
+ f.write('\t${field.name} ${ft}')
if after_type_comments.len > 0 {
f.comments(after_type_comments, level: .indent)
} else {
@@ -1272,7 +1272,7 @@ pub fn (mut f Fmt) module_stmt(mod ast.Module) {
return
}
f.attrs(mod.attrs)
- f.writeln('module $mod.short_name\n')
+ f.writeln('module ${mod.short_name}\n')
if f.import_pos == 0 {
f.import_pos = f.out.len
}
@@ -1322,12 +1322,12 @@ pub fn (mut f Fmt) sql_stmt_line(node ast.SqlStmtLine) {
f.write('\t')
match node.kind {
.insert {
- f.writeln('insert $node.object_var_name into $table_name')
+ f.writeln('insert ${node.object_var_name} into ${table_name}')
}
.update {
- f.write('update $table_name set ')
+ f.write('update ${table_name} set ')
for i, col in node.updated_columns {
- f.write('$col = ')
+ f.write('${col} = ')
f.expr(node.update_exprs[i])
if i < node.updated_columns.len - 1 {
f.write(', ')
@@ -1341,15 +1341,15 @@ pub fn (mut f Fmt) sql_stmt_line(node ast.SqlStmtLine) {
f.writeln('')
}
.delete {
- f.write('delete from $table_name where ')
+ f.write('delete from ${table_name} where ')
f.expr(node.where_expr)
f.writeln('')
}
.create {
- f.writeln('create table $table_name')
+ f.writeln('create table ${table_name}')
}
.drop {
- f.writeln('drop table $table_name')
+ f.writeln('drop table ${table_name}')
}
}
}
@@ -1368,7 +1368,7 @@ pub fn (mut f Fmt) alias_type_decl(node ast.AliasTypeDecl) {
f.write('pub ')
}
ptype := f.table.type_to_str_using_aliases(node.parent_type, f.mod2alias)
- f.write('type $node.name = $ptype')
+ f.write('type ${node.name} = ${ptype}')
f.comments(node.comments, has_nl: false)
f.mark_types_import_as_used(node.parent_type)
@@ -1383,7 +1383,7 @@ pub fn (mut f Fmt) fn_type_decl(node ast.FnTypeDecl) {
fn_typ_info := typ_sym.info as ast.FnType
fn_info := fn_typ_info.func
fn_name := f.no_cur_mod(node.name)
- f.write('type $fn_name = fn (')
+ f.write('type ${fn_name} = fn (')
for i, arg in fn_info.params {
if arg.is_mut {
f.write(arg.typ.share().str() + ' ')
@@ -1417,7 +1417,7 @@ pub fn (mut f Fmt) fn_type_decl(node ast.FnTypeDecl) {
f.mark_types_import_as_used(fn_info.return_type)
ret_str := f.no_cur_mod(f.table.type_to_str_using_aliases(fn_info.return_type,
f.mod2alias))
- f.write(' $ret_str')
+ f.write(' ${ret_str}')
} else if fn_info.return_type.has_flag(.optional) {
f.write(' ?')
} else if fn_info.return_type.has_flag(.result) {
@@ -1434,7 +1434,7 @@ pub fn (mut f Fmt) sum_type_decl(node ast.SumTypeDecl) {
if node.is_pub {
f.write('pub ')
}
- f.write('type $node.name')
+ f.write('type ${node.name}')
f.write_generic_types(node.generic_types)
f.write(' = ')
@@ -1698,15 +1698,15 @@ pub fn (mut f Fmt) as_cast(node ast.AsCast) {
f.mark_types_import_as_used(node.typ)
type_str := f.table.type_to_str_using_aliases(node.typ, f.mod2alias)
f.expr(node.expr)
- f.write(' as $type_str')
+ f.write(' as ${type_str}')
}
pub fn (mut f Fmt) assoc(node ast.Assoc) {
f.writeln('{')
f.indent++
- f.writeln('...$node.var_name')
+ f.writeln('...${node.var_name}')
for i, field in node.fields {
- f.write('$field: ')
+ f.write('${field}: ')
f.expr(node.exprs[i])
f.writeln('')
}
@@ -1758,7 +1758,7 @@ pub fn (mut f Fmt) call_expr(node ast.CallExpr) {
} else {
mut name := f.short_module(node.name)
f.mark_import_as_used(name)
- f.write('$name')
+ f.write('${name}')
}
}
if node.mod == '' && node.name == '' {
@@ -1877,21 +1877,21 @@ pub fn (mut f Fmt) comptime_call(node ast.ComptimeCall) {
if node.method_name == 'html' {
f.write('\$vweb.html()')
} else {
- f.write("\$tmpl('$node.args_var')")
+ f.write("\$tmpl('${node.args_var}')")
}
} else {
if node.is_embed {
if node.embed_file.compression_type == 'none' {
f.write('\$embed_file(${node.args[0].expr})')
} else {
- f.write('\$embed_file(${node.args[0].expr}, .$node.embed_file.compression_type)')
+ f.write('\$embed_file(${node.args[0].expr}, .${node.embed_file.compression_type})')
}
} else if node.is_env {
- f.write("\$env('$node.args_var')")
+ f.write("\$env('${node.args_var}')")
} else if node.is_pkgconfig {
- f.write("\$pkgconfig('$node.args_var')")
+ f.write("\$pkgconfig('${node.args_var}')")
} else if node.method_name == 'compile_error' {
- f.write("\$compile_error('$node.args_var')")
+ f.write("\$compile_error('${node.args_var}')")
} else {
inner_args := if node.args_var != '' {
node.args_var
@@ -1899,19 +1899,19 @@ pub fn (mut f Fmt) comptime_call(node ast.ComptimeCall) {
node.args.map(it.str()).join(', ')
}
method_expr := if node.has_parens {
- '(${node.method_name}($inner_args))'
+ '(${node.method_name}(${inner_args}))'
} else {
- '${node.method_name}($inner_args)'
+ '${node.method_name}(${inner_args})'
}
f.expr(node.left)
- f.write('.$$method_expr')
+ f.write('.$${method_expr}')
}
}
}
pub fn (mut f Fmt) comptime_selector(node ast.ComptimeSelector) {
f.expr(node.left)
- f.write('.\$($node.field_expr)')
+ f.write('.\$(${node.field_expr})')
}
pub fn (mut f Fmt) concat_expr(node ast.ConcatExpr) {
@@ -2125,7 +2125,7 @@ pub fn (mut f Fmt) infix_expr(node ast.InfixExpr) {
} else if is_and {
f.write(' && ')
} else {
- f.write(' $node.op.str() ')
+ f.write(' ${node.op.str()} ')
}
if is_one_val_array_init {
// `var in [val]` => `var == val`
@@ -2171,20 +2171,20 @@ fn split_up_infix(infix_str string, ignore_paren bool, is_cond_infix bool) ([]st
for p in parts {
if is_cond_infix && p in ['&&', '||'] {
if inside_paren {
- conditions[ind] += '$p '
+ conditions[ind] += '${p} '
} else {
pen := if p == '||' { or_pen } else { 5 }
penalties << pen
- conditions << '$p '
+ conditions << '${p} '
ind++
}
} else if !is_cond_infix && p == '+' {
penalties << 5
- conditions[ind] += '$p '
+ conditions[ind] += '${p} '
conditions << ''
ind++
} else {
- conditions[ind] += '$p '
+ conditions[ind] += '${p} '
if ignore_paren {
continue
}
@@ -2426,7 +2426,7 @@ pub fn (mut f Fmt) match_expr(node ast.MatchExpr) {
}
pub fn (mut f Fmt) offset_of(node ast.OffsetOf) {
- f.write('__offsetof(${f.table.type_to_str_using_aliases(node.struct_type, f.mod2alias)}, $node.field)')
+ f.write('__offsetof(${f.table.type_to_str_using_aliases(node.struct_type, f.mod2alias)}, ${node.field})')
f.mark_types_import_as_used(node.struct_type)
}
@@ -2445,7 +2445,7 @@ pub fn (mut f Fmt) or_expr(node ast.OrExpr) {
// the control stmts (return/break/continue...) print a newline inside them,
// so, since this'll all be on one line, trim any possible whitespace
str := f.node_str(node.stmts[0]).trim_space()
- single_line := ' or { $str }'
+ single_line := ' or { ${str} }'
if single_line.len + f.line_len <= fmt.max_len.last() {
f.write(single_line)
return
@@ -2489,7 +2489,7 @@ pub fn (mut f Fmt) postfix_expr(node ast.PostfixExpr) {
if node.op == .question {
f.write(' ?')
} else {
- f.write('$node.op')
+ f.write('${node.op}')
}
if node.is_c2v_prefix {
f.write('$')
@@ -2611,7 +2611,7 @@ pub fn (mut f Fmt) sql_expr(node ast.SqlExpr) {
}
}
}
- f.write('from $table_name')
+ f.write('from ${table_name}')
if node.has_where {
f.write(' where ')
f.expr(node.where_expr)
@@ -2644,11 +2644,11 @@ pub fn (mut f Fmt) char_literal(node ast.CharLiteral) {
if node.val.len == 1 {
clit := node.val[0]
if clit < 32 || clit > 127 || clit == 92 || clit == 96 {
- f.write('`\\x$clit.hex()`')
+ f.write('`\\x${clit.hex()}`')
return
}
}
- f.write('`$node.val`')
+ f.write('`${node.val}`')
}
pub fn (mut f Fmt) string_literal(node ast.StringLiteral) {
@@ -2659,16 +2659,16 @@ pub fn (mut f Fmt) string_literal(node ast.StringLiteral) {
f.write('c')
}
if node.is_raw {
- f.write('$quote$node.val$quote')
+ f.write('${quote}${node.val}${quote}')
} else {
- unescaped_val := node.val.replace('$fmt.bs$fmt.bs', '\x01').replace_each([
- "$fmt.bs'",
+ unescaped_val := node.val.replace('${fmt.bs}${fmt.bs}', '\x01').replace_each([
+ "${fmt.bs}'",
"'",
- '$fmt.bs"',
+ '${fmt.bs}"',
'"',
])
- s := unescaped_val.replace_each(['\x01', '$fmt.bs$fmt.bs', quote, '$fmt.bs$quote'])
- f.write('$quote$s$quote')
+ s := unescaped_val.replace_each(['\x01', '${fmt.bs}${fmt.bs}', quote, '${fmt.bs}${quote}'])
+ f.write('${quote}${s}${quote}')
}
}
@@ -2695,14 +2695,14 @@ pub fn (mut f Fmt) string_inter_literal(node ast.StringInterLiteral) {
// work too different for the various exprs that are interpolated
f.write(quote)
for i, val in node.vals {
- unescaped_val := val.replace('$fmt.bs$fmt.bs', '\x01').replace_each([
- "$fmt.bs'",
+ unescaped_val := val.replace('${fmt.bs}${fmt.bs}', '\x01').replace_each([
+ "${fmt.bs}'",
"'",
- '$fmt.bs"',
+ '${fmt.bs}"',
'"',
])
- s := unescaped_val.replace_each(['\x01', '$fmt.bs$fmt.bs', quote, '$fmt.bs$quote'])
- f.write('$s')
+ s := unescaped_val.replace_each(['\x01', '${fmt.bs}${fmt.bs}', quote, '${fmt.bs}${quote}'])
+ f.write('${s}')
if i >= node.exprs.len {
break
}
@@ -2714,7 +2714,9 @@ pub fn (mut f Fmt) string_inter_literal(node ast.StringInterLiteral) {
f.write(fspec_str)
f.write('}')
} else {
+ f.write('{')
f.expr(node.exprs[i])
+ f.write('}')
}
}
f.write(quote)
@@ -2753,6 +2755,6 @@ pub fn (mut f Fmt) unsafe_expr(node ast.UnsafeExpr) {
fn (mut f Fmt) trace(fbase string, message string) {
if f.file.path_base == fbase {
- println('> f.trace | ${fbase:-10s} | $message')
+ println('> f.trace | ${fbase:-10s} | ${message}')
}
}
diff --git a/vlib/v/fmt/fmt_keep_test.v b/vlib/v/fmt/fmt_keep_test.v
index 56f0fd4c53..5f0787e4a8 100644
--- a/vlib/v/fmt/fmt_keep_test.v
+++ b/vlib/v/fmt/fmt_keep_test.v
@@ -50,7 +50,7 @@ fn test_fmt() {
opath := ipath
expected_ocontent := os.read_file(opath) or {
fmt_bench.fail()
- eprintln(fmt_bench.step_message_fail('cannot read from $vrelpath'))
+ eprintln(fmt_bench.step_message_fail('cannot read from ${vrelpath}'))
continue
}
table := ast.new_table()
@@ -58,7 +58,7 @@ fn test_fmt() {
result_ocontent := fmt.fmt(file_ast, table, fpref, false)
if expected_ocontent != result_ocontent {
fmt_bench.fail()
- eprintln(fmt_bench.step_message_fail('file $vrelpath after formatting, does not look as expected.'))
+ eprintln(fmt_bench.step_message_fail('file ${vrelpath} after formatting, does not look as expected.'))
if ipath.ends_with(b2v_keep_path) {
continue
}
@@ -66,7 +66,7 @@ fn test_fmt() {
eprintln('>> sorry, but no working "diff" CLI command can be found')
continue
}
- vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_$ifilename')
+ vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_${ifilename}')
os.write_file(vfmt_result_file, result_ocontent) or { panic(err) }
eprintln(diff.color_compare_files(diff_cmd, opath, vfmt_result_file))
continue
@@ -75,7 +75,7 @@ fn test_fmt() {
eprintln(fmt_bench.step_message_ok(vrelpath))
}
restore_bin2v_placeholder() or {
- eprintln('failed restoring vbin2v_keep.vv placeholder: $err.msg()')
+ eprintln('failed restoring vbin2v_keep.vv placeholder: ${err.msg()}')
}
fmt_bench.stop()
eprintln(term.h_divider('-'))
@@ -90,7 +90,7 @@ fn prepare_bin2v_file(mut fmt_bench benchmark.Benchmark) {
fmt_bench.step()
write_bin2v_keep_content() or {
fmt_bench.fail()
- eprintln(fmt_bench.step_message_fail('Failed preparing bin2v_keep.vv: $err.msg()'))
+ eprintln(fmt_bench.step_message_fail('Failed preparing bin2v_keep.vv: ${err.msg()}'))
return
}
fmt_bench.ok()
diff --git a/vlib/v/fmt/fmt_test.v b/vlib/v/fmt/fmt_test.v
index 88e71c065a..d7c319921d 100644
--- a/vlib/v/fmt/fmt_test.v
+++ b/vlib/v/fmt/fmt_test.v
@@ -31,7 +31,7 @@ fn test_fmt() {
diff_cmd := diff.find_working_diff_command() or { '' }
mut fmt_bench := benchmark.new_benchmark()
// Lookup the existing test _input.vv files:
- input_files := os.walk_ext('$vroot/vlib/v/fmt/tests', '_input.vv')
+ input_files := os.walk_ext('${vroot}/vlib/v/fmt/tests', '_input.vv')
fmt_bench.set_total_expected_steps(input_files.len)
for istep, ipath in input_files {
fmt_bench.cstep = istep
@@ -40,12 +40,12 @@ fn test_fmt() {
opath := ipath.replace('_input.vv', '_expected.vv')
if !os.exists(opath) {
fmt_bench.fail()
- eprintln(fmt_bench.step_message_fail('missing file $opath'))
+ eprintln(fmt_bench.step_message_fail('missing file ${opath}'))
continue
}
expected_ocontent := os.read_file(opath) or {
fmt_bench.fail()
- eprintln(fmt_bench.step_message_fail('cannot read from $opath'))
+ eprintln(fmt_bench.step_message_fail('cannot read from ${opath}'))
continue
}
table := ast.new_table()
@@ -53,18 +53,18 @@ fn test_fmt() {
result_ocontent := fmt.fmt(file_ast, table, fpref, false)
if expected_ocontent != result_ocontent {
fmt_bench.fail()
- eprintln(fmt_bench.step_message_fail('file $ipath after formatting, does not look as expected.'))
+ eprintln(fmt_bench.step_message_fail('file ${ipath} after formatting, does not look as expected.'))
if diff_cmd == '' {
eprintln('>> sorry, but no working "diff" CLI command can be found')
continue
}
- vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_$ifilename')
+ vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_${ifilename}')
os.write_file(vfmt_result_file, result_ocontent) or { panic(err) }
eprintln(diff.color_compare_files(diff_cmd, opath, vfmt_result_file))
continue
}
fmt_bench.ok()
- eprintln(fmt_bench.step_message_ok('$ipath'))
+ eprintln(fmt_bench.step_message_ok('${ipath}'))
}
fmt_bench.stop()
eprintln(term.h_divider('-'))
diff --git a/vlib/v/fmt/fmt_vlib_test.v b/vlib/v/fmt/fmt_vlib_test.v
index bbcc631cf9..315c020b50 100644
--- a/vlib/v/fmt/fmt_vlib_test.v
+++ b/vlib/v/fmt/fmt_vlib_test.v
@@ -43,7 +43,7 @@ fn test_vlib_fmt() {
opath := ipath
expected_ocontent := os.read_file(opath) or {
fmt_bench.fail()
- eprintln(fmt_bench.step_message_fail('cannot read from $opath'))
+ eprintln(fmt_bench.step_message_fail('cannot read from ${opath}'))
continue
}
table := ast.new_table()
@@ -51,18 +51,18 @@ fn test_vlib_fmt() {
result_ocontent := fmt.fmt(file_ast, table, fpref, false)
if expected_ocontent != result_ocontent {
fmt_bench.fail()
- eprintln(fmt_bench.step_message_fail('file $ipath after formatting, does not look as expected.'))
+ eprintln(fmt_bench.step_message_fail('file ${ipath} after formatting, does not look as expected.'))
if diff_cmd == '' {
eprintln('>> sorry, but no working "diff" CLI command can be found')
continue
}
- vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_$ifilename')
+ vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_${ifilename}')
os.write_file(vfmt_result_file, result_ocontent) or { panic(err) }
eprintln(diff.color_compare_files(diff_cmd, opath, vfmt_result_file))
continue
}
fmt_bench.ok()
- eprintln(fmt_bench.step_message_ok('$ipath'))
+ eprintln(fmt_bench.step_message_ok('${ipath}'))
}
fmt_bench.stop()
eprintln(term.h_divider('-'))
diff --git a/vlib/v/fmt/struct.v b/vlib/v/fmt/struct.v
index 7b63c8af4e..acbd995447 100644
--- a/vlib/v/fmt/struct.v
+++ b/vlib/v/fmt/struct.v
@@ -67,9 +67,9 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl, is_anon bool) {
f.comments_before_field(pre_comments)
if comments.len == 0 {
- f.writeln('\t$styp')
+ f.writeln('\t${styp}')
} else {
- f.write('\t$styp')
+ f.write('\t${styp}')
f.comments(comments, level: .indent)
}
}
@@ -120,7 +120,7 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl, is_anon bool) {
// Handle comments before the field
f.comments_before_field(before_comments)
volatile_prefix := if field.is_volatile { 'volatile ' } else { '' }
- f.write('\t$volatile_prefix$field.name ')
+ f.write('\t${volatile_prefix}${field.name} ')
// Handle comments between field name and type
before_len := f.line_len
f.comments(between_comments, iembed: true, has_nl: false)
@@ -223,16 +223,16 @@ pub fn (mut f Fmt) struct_init(node ast.StructInit) {
if node.fields.len == 0 && !node.has_update_expr {
// `Foo{}` on one line if there are no fields or comments
if node.pre_comments.len == 0 {
- f.write('$name{}')
+ f.write('${name}{}')
} else {
- f.writeln('$name{')
+ f.writeln('${name}{')
f.comments(node.pre_comments, inline: true, has_nl: true, level: .indent)
f.write('}')
}
f.mark_import_as_used(name)
} else if node.no_keys {
// `Foo{1,2,3}` (short syntax, no keys)
- f.write('$name{')
+ f.write('${name}{')
f.mark_import_as_used(name)
if node.has_update_expr {
f.write('...')
@@ -255,7 +255,7 @@ pub fn (mut f Fmt) struct_init(node ast.StructInit) {
single_line_fields = false
}
if !use_short_args || node.is_anon {
- f.write('$name{')
+ f.write('${name}{')
f.mark_import_as_used(name)
if single_line_fields {
f.write(' ')
@@ -289,7 +289,7 @@ pub fn (mut f Fmt) struct_init(node ast.StructInit) {
f.comments(node.update_expr_comments, inline: true, has_nl: true, level: .keep)
}
for i, field in node.fields {
- f.write('$field.name: ')
+ f.write('${field.name}: ')
f.expr(field.expr)
f.comments(field.comments, inline: true, has_nl: false, level: .indent)
if single_line_fields {
diff --git a/vlib/v/fmt/tests/anon_fn_expected.vv b/vlib/v/fmt/tests/anon_fn_expected.vv
index 35641ee8ea..a72956ae5b 100644
--- a/vlib/v/fmt/tests/anon_fn_expected.vv
+++ b/vlib/v/fmt/tests/anon_fn_expected.vv
@@ -3,17 +3,17 @@ fn has_anon_fn() {
println('Hello there !')
}
an_fn_w_param := fn (s string) {
- println('I received $s')
+ println('I received ${s}')
}
an_fn_w_multi_params := fn (s1 string, s2 string, s3 string) {
- println('I received $s1, $s2, $s3')
+ println('I received ${s1}, ${s2}, ${s3}')
}
an_fn_w_multi_params2 := fn (s string, i int) {
- println('I received $s, $i')
+ println('I received ${s}, ${i}')
}
fn_w_var_args := fn (ss ...string) {
for s in ss {
- println('yo $s')
+ println('yo ${s}')
}
}
an_fn()
diff --git a/vlib/v/fmt/tests/assert_extra_message_keep.vv b/vlib/v/fmt/tests/assert_extra_message_keep.vv
index 2e3755f2e2..815904acd5 100644
--- a/vlib/v/fmt/tests/assert_extra_message_keep.vv
+++ b/vlib/v/fmt/tests/assert_extra_message_keep.vv
@@ -2,5 +2,5 @@ fn main() {
i := 123
assert 4 == 2 * 2
assert 2 == 6 / 3, 'math works'
- assert 10 == i - 120, 'i: $i'
+ assert 10 == i - 120, 'i: ${i}'
}
diff --git a/vlib/v/fmt/tests/chan_init_keep.vv b/vlib/v/fmt/tests/chan_init_keep.vv
index 47eaac6b5a..b7ef78cd9a 100644
--- a/vlib/v/fmt/tests/chan_init_keep.vv
+++ b/vlib/v/fmt/tests/chan_init_keep.vv
@@ -22,6 +22,6 @@ fn produce_events(ch chan FSMEvent) {
fn main() {
ch_fsm_events := chan FSMEvent{cap: 1000}
- eprintln('ch_fsm_events.len: $ch_fsm_events.len')
- eprintln('ch_fsm_events.cap: $ch_fsm_events.cap')
+ eprintln('ch_fsm_events.len: ${ch_fsm_events.len}')
+ eprintln('ch_fsm_events.cap: ${ch_fsm_events.cap}')
}
diff --git a/vlib/v/fmt/tests/chan_or_keep.vv b/vlib/v/fmt/tests/chan_or_keep.vv
index 4d12505bab..70ca21ab48 100644
--- a/vlib/v/fmt/tests/chan_or_keep.vv
+++ b/vlib/v/fmt/tests/chan_or_keep.vv
@@ -5,7 +5,7 @@ fn f(ch chan int) {
for _ in 0 .. n {
s += <-ch or {
println('Something went wrong:')
- println('got $err')
+ println('got ${err}')
}
}
assert s == n * (n + 1) / 2
diff --git a/vlib/v/fmt/tests/comptime_keep.vv b/vlib/v/fmt/tests/comptime_keep.vv
index da5aa7884d..96206dddb9 100644
--- a/vlib/v/fmt/tests/comptime_keep.vv
+++ b/vlib/v/fmt/tests/comptime_keep.vv
@@ -17,14 +17,14 @@ pub mut:
fn comptime_for() {
println(@FN)
$for method in App.methods {
- println(' method: $method.name | $method')
+ println(' method: ${method.name} | ${method}')
}
}
fn comptime_for_with_if() {
println(@FN)
$for method in App.methods {
- println(' method: $method')
+ println(' method: ${method}')
$if method.typ is fn () {
assert method.name in ['run', 'method2']
}
@@ -40,7 +40,7 @@ fn comptime_for_with_if() {
fn comptime_for_fields() {
println(@FN)
$for field in App.fields {
- println(' field: $field.name | $field')
+ println(' field: ${field.name} | ${field}')
$if field.typ is string {
assert field.name in ['a', 'b', 'g']
}
@@ -62,7 +62,7 @@ fn comptime_for_fields() {
struct Result {}
fn (mut a App) my_method(p string) Result {
- println('>>>> ${@FN} | p: $p')
+ println('>>>> ${@FN} | p: ${p}')
return Result{}
}
diff --git a/vlib/v/fmt/tests/empty_lines_keep.vv b/vlib/v/fmt/tests/empty_lines_keep.vv
index a99b63bfba..f0272789a7 100644
--- a/vlib/v/fmt/tests/empty_lines_keep.vv
+++ b/vlib/v/fmt/tests/empty_lines_keep.vv
@@ -21,7 +21,7 @@ const (
// This comment doesn't really belong anywhere
_ = 'A multiline string with a StringInterLiteral...
- $foo
+ ${foo}
...and the ending brace on a newline had a wrong pos.last_line.
'
_ = 4
@@ -85,9 +85,9 @@ bar'
typ: params[0].typ.set_nr_muls(1)
}
env_value = environ()[env_lit] or {
- return error('the environment variable "$env_lit" does not exist.')
+ return error('the environment variable "${env_lit}" does not exist.')
}
- assert '$mr_one_two()' == "(One{
+ assert '${mr_one_two()}' == "(One{
value: 'one'
}, Two{
value: 'two'
diff --git a/vlib/v/fmt/tests/expressions_expected.vv b/vlib/v/fmt/tests/expressions_expected.vv
index 1498e2e88f..ad8cffd239 100644
--- a/vlib/v/fmt/tests/expressions_expected.vv
+++ b/vlib/v/fmt/tests/expressions_expected.vv
@@ -48,7 +48,7 @@ fn main() {
}
v_str := 'v'
s := []string{}
- s << ' `$v_str`'
+ s << ' `${v_str}`'
println(s)
println('this is quite a long string' +
' that is followd by an even longer part that should go to another line')
@@ -60,7 +60,7 @@ fn main() {
fn gen_str_for_multi_return(mut g gen.Gen, info table.MultiReturn, styp string, str_fn_name string) {
for i, _ in info.types {
- println('\tstrings__Builder_write(&sb, _STR("\'%.*s\\000\'", 2, a.arg$i));')
+ println('\tstrings__Builder_write(&sb, _STR("\'%.*s\\000\'", 2, a.arg${i}));')
}
}
diff --git a/vlib/v/fmt/tests/fn_trailing_arg_syntax_keep.vv b/vlib/v/fmt/tests/fn_trailing_arg_syntax_keep.vv
index dc76db48ce..97113a42b4 100644
--- a/vlib/v/fmt/tests/fn_trailing_arg_syntax_keep.vv
+++ b/vlib/v/fmt/tests/fn_trailing_arg_syntax_keep.vv
@@ -52,5 +52,5 @@ fn main() {
}
fn trailing_struct_with_update_expr() {
- c.error('duplicate const `$field.name`', Position{ ...field.pos, len: name_len })
+ c.error('duplicate const `${field.name}`', Position{ ...field.pos, len: name_len })
}
diff --git a/vlib/v/fmt/tests/integer_literal_keep.vv b/vlib/v/fmt/tests/integer_literal_keep.vv
index f26d917084..105bd71dc6 100644
--- a/vlib/v/fmt/tests/integer_literal_keep.vv
+++ b/vlib/v/fmt/tests/integer_literal_keep.vv
@@ -13,7 +13,7 @@ fn main() {
x := 0xdead_beef
u := 9_978_654_321
o := 0o66_4
- eprintln(' hex constant in decimal: $x')
- eprintln(' u constant in decimal: $u')
- eprintln('octal constant in decimal: $o')
+ eprintln(' hex constant in decimal: ${x}')
+ eprintln(' u constant in decimal: ${u}')
+ eprintln('octal constant in decimal: ${o}')
}
diff --git a/vlib/v/fmt/tests/interface_method_with_fntype_arg_keep.vv b/vlib/v/fmt/tests/interface_method_with_fntype_arg_keep.vv
index 7ac1abe2d9..62bbd8b611 100644
--- a/vlib/v/fmt/tests/interface_method_with_fntype_arg_keep.vv
+++ b/vlib/v/fmt/tests/interface_method_with_fntype_arg_keep.vv
@@ -15,9 +15,9 @@ fn main() {
t := Test1{}
t.test(fn [t] (t1 Test) {
- println('$t, $t1')
+ println('${t}, ${t1}')
t.test(fn [t] (t2 Test) {
- println('$t, $t2')
+ println('${t}, ${t2}')
})
})
}
diff --git a/vlib/v/fmt/tests/or_keep.vv b/vlib/v/fmt/tests/or_keep.vv
index a1d1f65d6b..ca2f26ba6f 100644
--- a/vlib/v/fmt/tests/or_keep.vv
+++ b/vlib/v/fmt/tests/or_keep.vv
@@ -17,9 +17,9 @@ fn (f Foo) method_with_or() int {
fn unwrapped_single_line_if() {
namefound := publisher.name_fix_check(name_to_find, state.site.id, ispage) or {
if err.contains('Could not find') {
- state.error('cannot find link: $name_to_find')
+ state.error('cannot find link: ${name_to_find}')
} else {
- state.error('cannot find link: $name_to_find\n$err')
+ state.error('cannot find link: ${name_to_find}\n${err}')
}
println('Another stmt')
}
diff --git a/vlib/v/fmt/tests/orm_keep.vv b/vlib/v/fmt/tests/orm_keep.vv
index 316f55d3c2..c4f30c9489 100644
--- a/vlib/v/fmt/tests/orm_keep.vv
+++ b/vlib/v/fmt/tests/orm_keep.vv
@@ -20,7 +20,7 @@ fn main() {
nr_customers := sql db {
select count from Customer
}
- println('number of all customers: $nr_customers')
+ println('number of all customers: ${nr_customers}')
// V syntax can be used to build queries
// db.select returns an array
uk_customers := sql db {
@@ -28,7 +28,7 @@ fn main() {
}
println(uk_customers.len)
for customer in uk_customers {
- println('$customer.id - $customer.name')
+ println('${customer.id} - ${customer.name}')
}
// by adding `limit 1` we tell V that there will be only one object
customer := sql db {
@@ -40,7 +40,7 @@ fn main() {
second_best := sql db {
select from UCustomerser order by nr_orders desc limit 1 offset 1
}
- println('$customer.id - $customer.name')
+ println('${customer.id} - ${customer.name}')
// insert a new customer
new_customer := Customer{
name: 'Bob'
diff --git a/vlib/v/fmt/tests/orm_or_keep.vv b/vlib/v/fmt/tests/orm_or_keep.vv
index a0f60c3f03..0e2aa47911 100644
--- a/vlib/v/fmt/tests/orm_or_keep.vv
+++ b/vlib/v/fmt/tests/orm_or_keep.vv
@@ -13,7 +13,7 @@ fn main() {
sql db {
insert user into User
} or {
- println('user should have been inserted, but could not, err: $err')
+ println('user should have been inserted, but could not, err: ${err}')
exit(1)
}
}
diff --git a/vlib/v/fmt/tests/star__amp_int__cast_keep.vv b/vlib/v/fmt/tests/star__amp_int__cast_keep.vv
index 73312cd5e1..f2984513d5 100644
--- a/vlib/v/fmt/tests/star__amp_int__cast_keep.vv
+++ b/vlib/v/fmt/tests/star__amp_int__cast_keep.vv
@@ -1,5 +1,5 @@
fn main() {
body := [1, 2, 3]
size := *&int(body.data)
- eprintln('size: $size')
+ eprintln('size: ${size}')
}
diff --git a/vlib/v/fmt/tests/string_interpolation_complex_keep.vv b/vlib/v/fmt/tests/string_interpolation_complex_keep.vv
index 470ee54141..b7912fffa6 100644
--- a/vlib/v/fmt/tests/string_interpolation_complex_keep.vv
+++ b/vlib/v/fmt/tests/string_interpolation_complex_keep.vv
@@ -5,5 +5,5 @@ struct Container {
container := Container{}
docker_pubkey := '1234657890'
-cmd := "docker exec $container.id sh -c 'echo \"$docker_pubkey\" >> ~/.ssh/authorized_keys'"
+cmd := "docker exec ${container.id} sh -c 'echo \"${docker_pubkey}\" >> ~/.ssh/authorized_keys'"
println(cmd)
diff --git a/vlib/v/fmt/tests/string_interpolation_expected.vv b/vlib/v/fmt/tests/string_interpolation_expected.vv
index 4b0944b11a..f5b88ef556 100644
--- a/vlib/v/fmt/tests/string_interpolation_expected.vv
+++ b/vlib/v/fmt/tests/string_interpolation_expected.vv
@@ -23,10 +23,10 @@ fn main() {
ar := Cc{[Aa{3}, Aa{-4}, Aa{12}]}
aa := Aa{-13}
z := -14.75
- println('$st.a.xy ${ar.a[2].xy} $aa.xy $z')
- println('$st.a.xy${ar.a[2].xy}$aa.xy$z')
+ println('${st.a.xy} ${ar.a[2].xy} ${aa.xy} ${z}')
+ println('${st.a.xy}${ar.a[2].xy}${aa.xy}${z}')
println('${st.a.xy}ya ${ar.a[2].xy}X2 ${aa.xy}.b ${z}3')
println('${z:-5} ${z:+5.3} ${z:+09.3f} ${z:-7.2} ${z:+09} ${z:08.3f}')
- println('$ar.f() ${ar.g(1, 2)} ${ar.a}() ${z}(')
+ println('${ar.f()} ${ar.g(1, 2)} ${ar.a}() ${z}(')
println('${z > 12.3 * z - 3} ${@VEXE} ${4 * 5}')
}
diff --git a/vlib/v/fmt/tests/string_interpolation_keep.vv b/vlib/v/fmt/tests/string_interpolation_keep.vv
index 62c82a1814..242e240acc 100644
--- a/vlib/v/fmt/tests/string_interpolation_keep.vv
+++ b/vlib/v/fmt/tests/string_interpolation_keep.vv
@@ -1,17 +1,17 @@
import os
fn main() {
- println('Hello world, args: $os.args')
+ println('Hello world, args: ${os.args}')
i := 123
a := 'abc'
b := 'xyz'
- e := 'a: $a b: $b i: $i'
+ e := 'a: ${a} b: ${b} i: ${i}'
d := 'a: ${a:5s} b: ${b:-5s} i: ${i:20d}'
f := 'a byte string'.bytes()
- println('a: $a $b xxx')
- eprintln('e: $e')
+ println('a: ${a} ${b} xxx')
+ eprintln('e: ${e}')
_ = ' ${foo.method(bar).str()} '
- println('(${some_struct.@type}, $some_struct.y)')
+ println('(${some_struct.@type}, ${some_struct.y})')
_ := 'CastExpr ${int(d.e).str()}'
println('${f[0..4].bytestr()}')
_ := '${generic_fn()}'
diff --git a/vlib/v/fmt/tests/struct_init_with_custom_len_keep.vv b/vlib/v/fmt/tests/struct_init_with_custom_len_keep.vv
index 57d916d8e1..9375a32926 100644
--- a/vlib/v/fmt/tests/struct_init_with_custom_len_keep.vv
+++ b/vlib/v/fmt/tests/struct_init_with_custom_len_keep.vv
@@ -13,5 +13,5 @@ fn main() {
f := &Foo{
a: []int{len: int(size)}
}
- println('f.a: $f.a')
+ println('f.a: ${f.a}')
}
diff --git a/vlib/v/fmt/tests/to_string_2_forms_keep.vv b/vlib/v/fmt/tests/to_string_2_forms_keep.vv
index adb49bd3b0..c3ab72cc20 100644
--- a/vlib/v/fmt/tests/to_string_2_forms_keep.vv
+++ b/vlib/v/fmt/tests/to_string_2_forms_keep.vv
@@ -27,7 +27,7 @@ fn main() {
assert 'abc' == def()
abc_str1 := ptr_str(abc().str)
abc_str2 := ptr_str(abc().str)
- println('abc_str1: $abc_str1')
- println('abc_str2: $abc_str2')
+ println('abc_str1: ${abc_str1}')
+ println('abc_str2: ${abc_str2}')
assert abc_str1 != abc_str2
}
diff --git a/vlib/v/fmt/tests/vargs_reference_param_keep.vv b/vlib/v/fmt/tests/vargs_reference_param_keep.vv
index cef84b368a..8053738471 100644
--- a/vlib/v/fmt/tests/vargs_reference_param_keep.vv
+++ b/vlib/v/fmt/tests/vargs_reference_param_keep.vv
@@ -23,5 +23,5 @@ fn main() {
foo22 := arr_stuff([foo1, foo2])
println(foo22)
- assert '$foo11' == '$foo22'
+ assert '${foo11}' == '${foo22}'
}
diff --git a/vlib/v/fmt/tests/void_optional_keep.vv b/vlib/v/fmt/tests/void_optional_keep.vv
index 97ed7db7aa..60bc5f58f0 100644
--- a/vlib/v/fmt/tests/void_optional_keep.vv
+++ b/vlib/v/fmt/tests/void_optional_keep.vv
@@ -3,5 +3,5 @@ fn tt() ? {
}
fn main() {
- tt() or { panic('$err') }
+ tt() or { panic('${err}') }
}
diff --git a/vlib/v/gen/c/array.v b/vlib/v/gen/c/array.v
index 76a85fd518..135368b0f1 100644
--- a/vlib/v/gen/c/array.v
+++ b/vlib/v/gen/c/array.v
@@ -17,10 +17,10 @@ fn (mut g Gen) array_init(node ast.ArrayInit, var_name string) {
}
if g.is_shared {
shared_styp = g.typ(array_type.typ.set_flag(.shared_f))
- g.writeln('($shared_styp*)__dup_shared_array(&($shared_styp){.mtx = {0}, .val =')
+ g.writeln('(${shared_styp}*)__dup_shared_array(&(${shared_styp}){.mtx = {0}, .val =')
} else if is_amp {
array_styp = g.typ(array_type.typ)
- g.write('HEAP($array_styp, ')
+ g.write('HEAP(${array_styp}, ')
}
len := node.exprs.len
if array_type.unaliased_sym.kind == .array_fixed {
@@ -33,11 +33,11 @@ fn (mut g Gen) array_init(node ast.ArrayInit, var_name string) {
elem_styp := g.typ(elem_type.typ)
noscan := g.check_noscan(elem_type.typ)
if elem_type.unaliased_sym.kind == .function {
- g.write('new_array_from_c_array($len, $len, sizeof(voidptr), _MOV((voidptr[$len]){')
+ g.write('new_array_from_c_array(${len}, ${len}, sizeof(voidptr), _MOV((voidptr[${len}]){')
} else if g.is_empty_struct(elem_type) {
- g.write('new_array_from_c_array${noscan}($len, $len, sizeof(voidptr), _MOV(($elem_styp[$len]){')
+ g.write('new_array_from_c_array${noscan}(${len}, ${len}, sizeof(voidptr), _MOV((${elem_styp}[${len}]){')
} else {
- g.write('new_array_from_c_array${noscan}($len, $len, sizeof($elem_styp), _MOV(($elem_styp[$len]){')
+ g.write('new_array_from_c_array${noscan}(${len}, ${len}, sizeof(${elem_styp}), _MOV((${elem_styp}[${len}]){')
}
if len > 8 {
g.writeln('')
@@ -63,7 +63,7 @@ fn (mut g Gen) array_init(node ast.ArrayInit, var_name string) {
}
g.write('}))')
if g.is_shared {
- g.write('}, sizeof($shared_styp))')
+ g.write('}, sizeof(${shared_styp}))')
} else if is_amp {
g.write(')')
}
@@ -86,7 +86,7 @@ fn (mut g Gen) fixed_array_init(node ast.ArrayInit, array_type Type, var_name st
elem_typ := g.typ(node.elem_type)
g.empty_line = true
if var_name.len == 0 {
- g.write('$ret_typ $tmp =')
+ g.write('${ret_typ} ${tmp} =')
}
g.write('{')
if node.has_val {
@@ -113,8 +113,8 @@ fn (mut g Gen) fixed_array_init(node ast.ArrayInit, array_type Type, var_name st
g.writeln(';')
g.writeln('{')
g.indent++
- g.writeln('$elem_typ* pelem = ($elem_typ*)$tmp;')
- g.writeln('int _len = (int)sizeof($tmp) / sizeof($elem_typ);')
+ g.writeln('${elem_typ}* pelem = (${elem_typ}*)${tmp};')
+ g.writeln('int _len = (int)sizeof(${tmp}) / sizeof(${elem_typ});')
g.writeln('for(int it=0; it<_len; it++, pelem++) {')
g.indent++
g.write('*pelem = ')
@@ -143,7 +143,7 @@ fn (mut g Gen) fixed_array_init(node ast.ArrayInit, array_type Type, var_name st
stmt_str = g.go_before_stmt(0)
ret_typ := g.typ(node.typ)
g.empty_line = true
- g.write('$ret_typ $tmp_var = ')
+ g.write('${ret_typ} ${tmp_var} = ')
}
g.write('{')
if node.has_val {
@@ -195,7 +195,7 @@ fn (mut g Gen) array_init_with_fields(node ast.ArrayInit, elem_type Type, is_amp
elem_typ := g.typ(node.elem_type)
g.empty_line = true
if var_name.len == 0 {
- g.write('$ret_typ $tmp =')
+ g.write('${ret_typ} ${tmp} =')
}
if is_default_array {
g.write('__new_array_with_array_default${noscan}(')
@@ -219,32 +219,32 @@ fn (mut g Gen) array_init_with_fields(node ast.ArrayInit, elem_type Type, is_amp
if elem_type.unaliased_sym.kind == .function || g.is_empty_struct(elem_type) {
g.write('sizeof(voidptr), ')
} else {
- g.write('sizeof($elem_styp), ')
+ g.write('sizeof(${elem_styp}), ')
}
if is_default_array {
- g.write('($elem_styp[]){')
+ g.write('(${elem_styp}[]){')
g.write(g.type_default(node.elem_type))
g.write('}[0])')
} else if node.has_len && node.elem_type == ast.string_type {
- g.write('&($elem_styp[]){')
+ g.write('&(${elem_styp}[]){')
g.write('_SLIT("")')
g.write('})')
} else if node.has_len && elem_type.unaliased_sym.kind in [.array, .map] {
- g.write('(voidptr)&($elem_styp[]){')
+ g.write('(voidptr)&(${elem_styp}[]){')
g.write(g.type_default(node.elem_type))
g.write('}[0])')
} else {
g.write('0)')
}
if g.is_shared {
- g.write('}, sizeof($shared_styp))')
+ g.write('}, sizeof(${shared_styp}))')
} else if is_amp {
g.write(')')
}
g.writeln(';')
g.writeln('{')
g.indent++
- g.writeln('$elem_typ* pelem = ($elem_typ*)${tmp}.data;')
+ g.writeln('${elem_typ}* pelem = (${elem_typ}*)${tmp}.data;')
g.writeln('for(int it=0; it<${tmp}.len; it++, pelem++) {')
g.set_current_pos_as_last_stmt_pos()
g.indent++
@@ -289,29 +289,29 @@ fn (mut g Gen) array_init_with_fields(node ast.ArrayInit, elem_type Type, is_amp
if elem_type.unaliased_sym.kind == .function || g.is_empty_struct(elem_type) {
g.write('sizeof(voidptr), ')
} else {
- g.write('sizeof($elem_styp), ')
+ g.write('sizeof(${elem_styp}), ')
}
if is_default_array || is_default_map {
- g.write('($elem_styp[]){')
+ g.write('(${elem_styp}[]){')
g.expr(node.default_expr)
g.write('}[0])')
} else if node.has_default {
- g.write('&($elem_styp[]){')
+ g.write('&(${elem_styp}[]){')
g.expr_with_cast(node.default_expr, node.default_type, node.elem_type)
g.write('})')
} else if node.has_len && node.elem_type == ast.string_type {
- g.write('&($elem_styp[]){')
+ g.write('&(${elem_styp}[]){')
g.write('_SLIT("")')
g.write('})')
} else if node.has_len && elem_type.unaliased_sym.kind in [.array, .map] {
- g.write('(voidptr)&($elem_styp[]){')
+ g.write('(voidptr)&(${elem_styp}[]){')
g.write(g.type_default(node.elem_type))
g.write('}[0])')
} else {
g.write('0)')
}
if g.is_shared {
- g.write('}, sizeof($shared_styp))')
+ g.write('}, sizeof(${shared_styp}))')
} else if is_amp {
g.write(')')
}
@@ -339,9 +339,9 @@ fn (mut g Gen) gen_array_map(node ast.CallExpr) {
g.empty_line = true
noscan := g.check_noscan(ret_info.elem_type)
has_infix_left_var_name := g.write_prepared_tmp_value(tmp, node, ret_typ, '{0}')
- g.writeln('$tmp = __new_array${noscan}(0, ${tmp}_len, sizeof($ret_elem_type));\n')
+ g.writeln('${tmp} = __new_array${noscan}(0, ${tmp}_len, sizeof(${ret_elem_type}));\n')
i := g.new_tmp_var()
- g.writeln('for (int $i = 0; $i < ${tmp}_len; ++$i) {')
+ g.writeln('for (int ${i} = 0; ${i} < ${tmp}_len; ++${i}) {')
g.indent++
g.write_prepared_it(inp_info, inp_elem_type, tmp, i)
g.set_current_pos_as_last_stmt_pos()
@@ -349,12 +349,12 @@ fn (mut g Gen) gen_array_map(node ast.CallExpr) {
mut expr := node.args[0].expr
match mut expr {
ast.AnonFn {
- g.write('$ret_elem_type ti = ')
+ g.write('${ret_elem_type} ti = ')
g.gen_anon_fn_decl(mut expr)
g.write('${expr.decl.name}(it)')
}
ast.Ident {
- g.write('$ret_elem_type ti = ')
+ g.write('${ret_elem_type} ti = ')
if expr.kind == .function {
g.write('${c_name(expr.name)}(it)')
} else if expr.kind == .variable {
@@ -374,16 +374,16 @@ fn (mut g Gen) gen_array_map(node ast.CallExpr) {
is_embed_map_filter = true
g.set_current_pos_as_last_stmt_pos()
}
- g.write('$ret_elem_type ti = ')
+ g.write('${ret_elem_type} ti = ')
g.expr(node.args[0].expr)
}
else {
- g.write('$ret_elem_type ti = ')
+ g.write('${ret_elem_type} ti = ')
g.expr(node.args[0].expr)
}
}
g.writeln(';')
- g.writeln('array_push${noscan}((array*)&$tmp, &ti);')
+ g.writeln('array_push${noscan}((array*)&${tmp}, &ti);')
g.indent--
g.writeln('}')
if !is_embed_map_filter {
@@ -481,15 +481,15 @@ fn (mut g Gen) gen_array_sort(node ast.CallExpr) {
}
stype_arg := g.typ(info.elem_type)
- g.definitions.writeln('VV_LOCAL_SYMBOL $g.static_modifier int ${compare_fn}($stype_arg* a, $stype_arg* b) {')
+ g.definitions.writeln('VV_LOCAL_SYMBOL ${g.static_modifier} int ${compare_fn}(${stype_arg}* a, ${stype_arg}* b) {')
c_condition := if comparison_type.sym.has_method('<') {
- '${g.typ(comparison_type.typ)}__lt($left_expr, $right_expr)'
+ '${g.typ(comparison_type.typ)}__lt(${left_expr}, ${right_expr})'
} else if comparison_type.unaliased_sym.has_method('<') {
- '${g.typ(comparison_type.unaliased)}__lt($left_expr, $right_expr)'
+ '${g.typ(comparison_type.unaliased)}__lt(${left_expr}, ${right_expr})'
} else {
- '$left_expr < $right_expr'
+ '${left_expr} < ${right_expr}'
}
- g.definitions.writeln('\tif ($c_condition) return -1;')
+ g.definitions.writeln('\tif (${c_condition}) return -1;')
g.definitions.writeln('\telse return 1;')
g.definitions.writeln('}\n')
@@ -514,7 +514,7 @@ fn (mut g Gen) gen_array_sort_call(node ast.CallExpr, compare_fn string) {
g.expr(node.left)
g.write('${deref_field}len, ')
g.expr(node.left)
- g.write('${deref_field}element_size, (int (*)(const void *, const void *))&$compare_fn)')
+ g.write('${deref_field}element_size, (int (*)(const void *, const void *))&${compare_fn})')
}
// `nums.filter(it % 2 == 0)`
@@ -534,9 +534,9 @@ fn (mut g Gen) gen_array_filter(node ast.CallExpr) {
g.empty_line = true
noscan := g.check_noscan(info.elem_type)
has_infix_left_var_name := g.write_prepared_tmp_value(tmp, node, styp, '{0}')
- g.writeln('$tmp = __new_array${noscan}(0, ${tmp}_len, sizeof($elem_type_str));\n')
+ g.writeln('${tmp} = __new_array${noscan}(0, ${tmp}_len, sizeof(${elem_type_str}));\n')
i := g.new_tmp_var()
- g.writeln('for (int $i = 0; $i < ${tmp}_len; ++$i) {')
+ g.writeln('for (int ${i} = 0; ${i} < ${tmp}_len; ++${i}) {')
g.indent++
g.write_prepared_it(info, elem_type_str, tmp, i)
g.set_current_pos_as_last_stmt_pos()
@@ -578,7 +578,7 @@ fn (mut g Gen) gen_array_filter(node ast.CallExpr) {
}
}
g.writeln(') {')
- g.writeln('\tarray_push${noscan}((array*)&$tmp, &it);')
+ g.writeln('\tarray_push${noscan}((array*)&${tmp}, &it);')
g.writeln('}')
g.indent--
g.writeln('}')
@@ -607,9 +607,9 @@ fn (mut g Gen) gen_array_insert(node ast.CallExpr) {
noscan := g.check_noscan(left_info.elem_type)
addr := if node.left_type.is_ptr() { '' } else { '&' }
if is_arg2_array {
- g.write('array_insert_many${noscan}($addr')
+ g.write('array_insert_many${noscan}(${addr}')
} else {
- g.write('array_insert${noscan}($addr')
+ g.write('array_insert${noscan}(${addr}')
}
g.expr(node.left)
g.write(', ')
@@ -621,7 +621,7 @@ fn (mut g Gen) gen_array_insert(node ast.CallExpr) {
g.expr(node.args[1].expr)
g.write('.len)')
} else {
- g.write(', &($elem_type_str[]){')
+ g.write(', &(${elem_type_str}[]){')
if left_info.elem_type == ast.string_type {
g.write('string_clone(')
}
@@ -643,9 +643,9 @@ fn (mut g Gen) gen_array_prepend(node ast.CallExpr) {
noscan := g.check_noscan(left_info.elem_type)
addr := if node.left_type.is_ptr() { '' } else { '&' }
if is_arg_array {
- g.write('array_prepend_many${noscan}($addr')
+ g.write('array_prepend_many${noscan}(${addr}')
} else {
- g.write('array_prepend${noscan}($addr')
+ g.write('array_prepend${noscan}(${addr}')
}
g.expr(node.left)
if is_arg_array {
@@ -655,7 +655,7 @@ fn (mut g Gen) gen_array_prepend(node ast.CallExpr) {
g.expr(node.args[0].expr)
g.write('.len)')
} else {
- g.write(', &($elem_type_str[]){')
+ g.write(', &(${elem_type_str}[]){')
g.expr_with_cast(node.args[0].expr, node.args[0].typ, left_info.elem_type)
g.write('})')
}
@@ -688,33 +688,33 @@ fn (mut g Gen) gen_array_contains_methods() {
left_type_str = 'Array_voidptr'
elem_type_str = 'voidptr'
}
- g.type_definitions.writeln('static bool ${fn_name}($left_type_str a, $elem_type_str v); // auto')
- fn_builder.writeln('static bool ${fn_name}($left_type_str a, $elem_type_str v) {')
+ g.type_definitions.writeln('static bool ${fn_name}(${left_type_str} a, ${elem_type_str} v); // auto')
+ fn_builder.writeln('static bool ${fn_name}(${left_type_str} a, ${elem_type_str} v) {')
fn_builder.writeln('\tfor (int i = 0; i < a.len; ++i) {')
if elem_kind == .string {
fn_builder.writeln('\t\tif (fast_string_eq(((string*)a.data)[i], v)) {')
} else if elem_kind == .array && elem_is_not_ptr {
ptr_typ := g.equality_fn(elem_type)
- fn_builder.writeln('\t\tif (${ptr_typ}_arr_eq((($elem_type_str*)a.data)[i], v)) {')
+ fn_builder.writeln('\t\tif (${ptr_typ}_arr_eq(((${elem_type_str}*)a.data)[i], v)) {')
} else if elem_kind == .function {
fn_builder.writeln('\t\tif (((voidptr*)a.data)[i] == v) {')
} else if elem_kind == .map && elem_is_not_ptr {
ptr_typ := g.equality_fn(elem_type)
- fn_builder.writeln('\t\tif (${ptr_typ}_map_eq((($elem_type_str*)a.data)[i], v)) {')
+ fn_builder.writeln('\t\tif (${ptr_typ}_map_eq(((${elem_type_str}*)a.data)[i], v)) {')
} else if elem_kind == .struct_ && elem_is_not_ptr {
ptr_typ := g.equality_fn(elem_type)
- fn_builder.writeln('\t\tif (${ptr_typ}_struct_eq((($elem_type_str*)a.data)[i], v)) {')
+ fn_builder.writeln('\t\tif (${ptr_typ}_struct_eq(((${elem_type_str}*)a.data)[i], v)) {')
} else if elem_kind == .interface_ && elem_is_not_ptr {
ptr_typ := g.equality_fn(elem_type)
- fn_builder.writeln('\t\tif (${ptr_typ}_interface_eq((($elem_type_str*)a.data)[i], v)) {')
+ fn_builder.writeln('\t\tif (${ptr_typ}_interface_eq(((${elem_type_str}*)a.data)[i], v)) {')
} else if elem_kind == .sum_type && elem_is_not_ptr {
ptr_typ := g.equality_fn(elem_type)
- fn_builder.writeln('\t\tif (${ptr_typ}_sumtype_eq((($elem_type_str*)a.data)[i], v)) {')
+ fn_builder.writeln('\t\tif (${ptr_typ}_sumtype_eq(((${elem_type_str}*)a.data)[i], v)) {')
} else if elem_kind == .alias && elem_is_not_ptr {
ptr_typ := g.equality_fn(elem_type)
- fn_builder.writeln('\t\tif (${ptr_typ}_alias_eq((($elem_type_str*)a.data)[i], v)) {')
+ fn_builder.writeln('\t\tif (${ptr_typ}_alias_eq(((${elem_type_str}*)a.data)[i], v)) {')
} else {
- fn_builder.writeln('\t\tif ((($elem_type_str*)a.data)[i] == v) {')
+ fn_builder.writeln('\t\tif (((${elem_type_str}*)a.data)[i] == v) {')
}
} else if left_final_sym.kind == .array_fixed {
left_info := left_final_sym.info as ast.ArrayFixed
@@ -727,9 +727,9 @@ fn (mut g Gen) gen_array_contains_methods() {
left_type_str = 'Array_voidptr'
elem_type_str = 'voidptr'
}
- g.type_definitions.writeln('static bool ${fn_name}($left_type_str a, $elem_type_str v); // auto')
- fn_builder.writeln('static bool ${fn_name}($left_type_str a, $elem_type_str v) {')
- fn_builder.writeln('\tfor (int i = 0; i < $size; ++i) {')
+ g.type_definitions.writeln('static bool ${fn_name}(${left_type_str} a, ${elem_type_str} v); // auto')
+ fn_builder.writeln('static bool ${fn_name}(${left_type_str} a, ${elem_type_str} v) {')
+ fn_builder.writeln('\tfor (int i = 0; i < ${size}; ++i) {')
if elem_kind == .string {
fn_builder.writeln('\t\tif (fast_string_eq(a[i], v)) {')
} else if elem_kind == .array && elem_is_not_ptr {
@@ -808,10 +808,10 @@ fn (mut g Gen) gen_array_index_methods() {
left_type_str = 'Array_voidptr'
elem_type_str = 'voidptr'
}
- g.type_definitions.writeln('static int ${fn_name}($left_type_str a, $elem_type_str v); // auto')
+ g.type_definitions.writeln('static int ${fn_name}(${left_type_str} a, ${elem_type_str} v); // auto')
mut fn_builder := strings.new_builder(512)
- fn_builder.writeln('static int ${fn_name}($left_type_str a, $elem_type_str v) {')
- fn_builder.writeln('\t$elem_type_str* pelem = a.data;')
+ fn_builder.writeln('static int ${fn_name}(${left_type_str} a, ${elem_type_str} v) {')
+ fn_builder.writeln('\t${elem_type_str}* pelem = a.data;')
fn_builder.writeln('\tfor (int i = 0; i < a.len; ++i, ++pelem) {')
if elem_sym.kind == .string {
fn_builder.writeln('\t\tif (fast_string_eq(*pelem, v)) {')
@@ -895,7 +895,7 @@ fn (mut g Gen) gen_array_any(node ast.CallExpr) {
g.empty_line = true
has_infix_left_var_name := g.write_prepared_tmp_value(tmp, node, 'bool', 'false')
i := g.new_tmp_var()
- g.writeln('for (int $i = 0; $i < ${tmp}_len; ++$i) {')
+ g.writeln('for (int ${i} = 0; ${i} < ${tmp}_len; ++${i}) {')
g.indent++
g.write_prepared_it(info, elem_type_str, tmp, i)
g.set_current_pos_as_last_stmt_pos()
@@ -937,7 +937,7 @@ fn (mut g Gen) gen_array_any(node ast.CallExpr) {
}
}
g.writeln(') {')
- g.writeln('\t$tmp = true;')
+ g.writeln('\t${tmp} = true;')
g.writeln('\tbreak;')
g.writeln('}')
g.indent--
@@ -969,7 +969,7 @@ fn (mut g Gen) gen_array_all(node ast.CallExpr) {
g.empty_line = true
has_infix_left_var_name := g.write_prepared_tmp_value(tmp, node, 'bool', 'true')
i := g.new_tmp_var()
- g.writeln('for (int $i = 0; $i < ${tmp}_len; ++$i) {')
+ g.writeln('for (int ${i} = 0; ${i} < ${tmp}_len; ++${i}) {')
g.indent++
g.write_prepared_it(info, elem_type_str, tmp, i)
g.empty_line = true
@@ -1012,7 +1012,7 @@ fn (mut g Gen) gen_array_all(node ast.CallExpr) {
}
}
g.writeln(')) {')
- g.writeln('\t$tmp = false;')
+ g.writeln('\t${tmp} = false;')
g.writeln('\tbreak;')
g.writeln('}')
g.indent--
@@ -1033,10 +1033,10 @@ fn (mut g Gen) gen_array_all(node ast.CallExpr) {
}
fn (mut g Gen) write_prepared_tmp_value(tmp string, node &ast.CallExpr, tmp_stype string, initial_value string) bool {
- g.writeln('$tmp_stype $tmp = $initial_value;')
+ g.writeln('${tmp_stype} ${tmp} = ${initial_value};')
has_infix_left_var_name := g.infix_left_var_name.len > 0
if has_infix_left_var_name {
- g.writeln('if ($g.infix_left_var_name) {')
+ g.writeln('if (${g.infix_left_var_name}) {')
g.infix_left_var_name = ''
g.indent++
}
@@ -1062,9 +1062,9 @@ fn (mut g Gen) write_prepared_tmp_value(tmp string, node &ast.CallExpr, tmp_styp
fn (mut g Gen) write_prepared_it(inp_info ast.Array, inp_elem_type string, tmp string, i string) {
if g.table.sym(inp_info.elem_type).kind == .array_fixed {
- g.writeln('$inp_elem_type it;')
- g.writeln('memcpy(&it, (($inp_elem_type*) ${tmp}_orig.data)[$i], sizeof($inp_elem_type));')
+ g.writeln('${inp_elem_type} it;')
+ g.writeln('memcpy(&it, ((${inp_elem_type}*) ${tmp}_orig.data)[${i}], sizeof(${inp_elem_type}));')
} else {
- g.writeln('$inp_elem_type it = (($inp_elem_type*) ${tmp}_orig.data)[$i];')
+ g.writeln('${inp_elem_type} it = ((${inp_elem_type}*) ${tmp}_orig.data)[${i}];')
}
}
diff --git a/vlib/v/gen/c/assert.v b/vlib/v/gen/c/assert.v
index 3456e1c0de..83420cb660 100644
--- a/vlib/v/gen/c/assert.v
+++ b/vlib/v/gen/c/assert.v
@@ -27,10 +27,10 @@ fn (mut g Gen) assert_stmt(original_assert_statement ast.AssertStmt) {
g.decrement_inside_ternary()
g.writeln(' {')
metaname_ok := g.gen_assert_metainfo(node)
- g.writeln('\tmain__TestRunner_name_table[test_runner._typ]._method_assert_pass(test_runner._object, &$metaname_ok);')
+ g.writeln('\tmain__TestRunner_name_table[test_runner._typ]._method_assert_pass(test_runner._object, &${metaname_ok});')
g.writeln('} else {')
metaname_fail := g.gen_assert_metainfo(node)
- g.writeln('\tmain__TestRunner_name_table[test_runner._typ]._method_assert_fail(test_runner._object, &$metaname_fail);')
+ g.writeln('\tmain__TestRunner_name_table[test_runner._typ]._method_assert_fail(test_runner._object, &${metaname_fail});')
g.gen_assert_postfailure_mode(node)
g.writeln('}')
} else {
@@ -40,7 +40,7 @@ fn (mut g Gen) assert_stmt(original_assert_statement ast.AssertStmt) {
g.decrement_inside_ternary()
g.writeln(' {')
metaname_panic := g.gen_assert_metainfo(node)
- g.writeln('\t__print_assert_failure(&$metaname_panic);')
+ g.writeln('\t__print_assert_failure(&${metaname_panic});')
g.gen_assert_postfailure_mode(node)
g.writeln('}')
}
@@ -110,21 +110,21 @@ fn (mut g Gen) gen_assert_metainfo(node ast.AssertStmt) string {
src += ', ' + node.extra.str()
}
src = cestring(src)
- metaname := 'v_assert_meta_info_$g.new_tmp_var()'
- g.writeln('\tVAssertMetaInfo $metaname = {0};')
+ metaname := 'v_assert_meta_info_${g.new_tmp_var()}'
+ g.writeln('\tVAssertMetaInfo ${metaname} = {0};')
g.writeln('\t${metaname}.fpath = ${ctoslit(mod_path)};')
- g.writeln('\t${metaname}.line_nr = $line_nr;')
+ g.writeln('\t${metaname}.line_nr = ${line_nr};')
g.writeln('\t${metaname}.fn_name = ${ctoslit(fn_name)};')
metasrc := cnewlines(ctoslit(src))
- g.writeln('\t${metaname}.src = $metasrc;')
+ g.writeln('\t${metaname}.src = ${metasrc};')
match node.expr {
ast.InfixExpr {
expr_op_str := ctoslit(node.expr.op.str())
expr_left_str := cnewlines(ctoslit(node.expr.left.str()))
expr_right_str := cnewlines(ctoslit(node.expr.right.str()))
- g.writeln('\t${metaname}.op = $expr_op_str;')
- g.writeln('\t${metaname}.llabel = $expr_left_str;')
- g.writeln('\t${metaname}.rlabel = $expr_right_str;')
+ g.writeln('\t${metaname}.op = ${expr_op_str};')
+ g.writeln('\t${metaname}.llabel = ${expr_left_str};')
+ g.writeln('\t${metaname}.rlabel = ${expr_right_str};')
g.write('\t${metaname}.lvalue = ')
g.gen_assert_single_expr(node.expr.left, node.expr.left_type)
g.writeln(';')
@@ -175,7 +175,7 @@ fn (mut g Gen) gen_assert_single_expr(expr ast.Expr, typ ast.Type) {
}
ast.TypeNode {
sym := g.table.sym(g.unwrap_generic(typ))
- g.write(ctoslit('$sym.name'))
+ g.write(ctoslit('${sym.name}'))
}
else {
mut should_clone := true
diff --git a/vlib/v/gen/c/assign.v b/vlib/v/gen/c/assign.v
index 90959daa5a..7db743ff72 100644
--- a/vlib/v/gen/c/assign.v
+++ b/vlib/v/gen/c/assign.v
@@ -50,15 +50,15 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
}
}
if ok {
- sref_name = '_sref$node.pos.pos'
- g.write('$type_to_free $sref_name = (') // TODO we are copying the entire string here, optimize
+ sref_name = '_sref${node.pos.pos}'
+ g.write('${type_to_free} ${sref_name} = (') // TODO we are copying the entire string here, optimize
// we can't just do `.str` since we need the extra data from the string struct
// doing `&string` is also not an option since the stack memory with the data will be overwritten
g.expr(left0) // node.left[0])
- g.writeln('); // free $type_to_free on re-assignment2')
+ g.writeln('); // free ${type_to_free} on re-assignment2')
defer {
if af {
- g.writeln('${type_to_free}_free(&$sref_name);')
+ g.writeln('${type_to_free}_free(&${sref_name});')
}
}
} else {
@@ -211,7 +211,7 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
if left_sym.kind == .function {
g.write('{void* _ = ')
} else {
- g.write('{$styp _ = ')
+ g.write('{${styp} _ = ')
}
g.expr(val)
g.writeln(';}')
@@ -224,7 +224,7 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
if is_fixed_array_init {
right := val as ast.ArrayInit
v_var = g.new_tmp_var()
- g.write('$arr_typ $v_var = ')
+ g.write('${arr_typ} ${v_var} = ')
g.expr(right)
g.writeln(';')
} else {
@@ -236,7 +236,7 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
if g.is_arraymap_set && g.arraymap_set_pos > 0 {
g.go_back_to(g.arraymap_set_pos)
- g.write(', &$v_var)')
+ g.write(', &${v_var})')
g.is_arraymap_set = false
g.arraymap_set_pos = 0
} else {
@@ -244,10 +244,10 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
is_var_mut := !is_decl && left.is_auto_deref_var()
addr_left := if is_var_mut { '' } else { '&' }
g.writeln('')
- g.write('memcpy($addr_left')
+ g.write('memcpy(${addr_left}')
g.expr(left)
addr_val := if is_fixed_array_var { '' } else { '&' }
- g.writeln(', $addr_val$v_var, sizeof($arr_typ));')
+ g.writeln(', ${addr_val}${v_var}, sizeof(${arr_typ}));')
}
g.is_assign_lhs = false
} else {
@@ -303,7 +303,7 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
g.write(' = ${styp}_${util.replace_op(extracted_op)}(')
method := g.table.find_method(left_sym, extracted_op) or {
// the checker will most likely have found this, already...
- g.error('assignment operator `$extracted_op=` used but no `$extracted_op` method defined',
+ g.error('assignment operator `${extracted_op}=` used but no `${extracted_op}` method defined',
node.pos)
ast.Fn{}
}
@@ -325,26 +325,26 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
match attr.name {
'callconv' {
if g.is_cc_msvc {
- msvc_call_conv = '__$attr.arg '
+ msvc_call_conv = '__${attr.arg} '
} else {
- call_conv = '$attr.arg'
+ call_conv = '${attr.arg}'
}
}
else {}
}
}
call_conv_attribute_suffix := if call_conv.len != 0 {
- '__attribute__(($call_conv))'
+ '__attribute__((${call_conv}))'
} else {
''
}
fn_name := c_name(g.get_ternary_name(ident.name))
- g.write('$ret_styp ($msvc_call_conv*$fn_name) (')
+ g.write('${ret_styp} (${msvc_call_conv}*${fn_name}) (')
def_pos := g.definitions.len
g.fn_decl_params(func.func.params, unsafe { nil }, false)
g.definitions.go_back(g.definitions.len - def_pos)
- g.write(')$call_conv_attribute_suffix')
+ g.write(')${call_conv_attribute_suffix}')
} else {
if is_decl {
if is_inside_ternary {
@@ -357,19 +357,19 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
if val_sym.info.generic_types.len > 0 {
if val is ast.StructInit {
var_styp := g.typ(val.typ)
- g.write('$var_styp ')
+ g.write('${var_styp} ')
is_used_var_styp = true
} else if val is ast.PrefixExpr {
if val.op == .amp && val.right is ast.StructInit {
var_styp := g.typ(val.right.typ.ref())
- g.write('$var_styp ')
+ g.write('${var_styp} ')
is_used_var_styp = true
}
}
}
}
if !is_used_var_styp {
- g.write('$styp ')
+ g.write('${styp} ')
}
if is_auto_heap {
g.write('*')
@@ -394,7 +394,7 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
}
}
if is_inside_ternary && is_decl {
- g.write(';\n$cur_line')
+ g.write(';\n${cur_line}')
g.out.write_string(util.tabs(g.indent))
g.expr(left)
}
@@ -404,7 +404,7 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
g.writeln(';')
}
} else if !g.is_arraymap_set && !str_add && !op_overloaded {
- g.write(' $op ')
+ g.write(' ${op} ')
} else if str_add || op_overloaded {
g.write(', ')
}
@@ -439,11 +439,11 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
// and don't generate memcpy &
typ_str := g.typ(val_type).trim('*')
ref_str := if val_type.is_ptr() { '' } else { '&' }
- g.write('memcpy(($typ_str*)')
+ g.write('memcpy((${typ_str}*)')
g.expr(left)
- g.write(', (byte*)$ref_str')
+ g.write(', (byte*)${ref_str}')
g.expr(val)
- g.write(', sizeof($typ_str))')
+ g.write(', sizeof(${typ_str}))')
} else if is_decl {
g.is_shared = var_type.has_flag(.shared_f)
if is_fixed_array_init && !has_val {
@@ -454,7 +454,7 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
}
} else {
if is_auto_heap {
- g.write('HEAP($styp, (')
+ g.write('HEAP(${styp}, (')
}
if val.is_auto_deref_var() {
g.write('*')
@@ -507,9 +507,9 @@ fn (mut g Gen) gen_multi_return_assign(node &ast.AssignStmt, return_type ast.Typ
// multi return
// TODO Handle in if_expr
is_opt := return_type.has_flag(.optional) || return_type.has_flag(.result)
- mr_var_name := 'mr_$node.pos.pos'
+ mr_var_name := 'mr_${node.pos.pos}'
mr_styp := g.typ(return_type.clear_flag(.optional).clear_flag(.result))
- g.write('$mr_styp $mr_var_name = ')
+ g.write('${mr_styp} ${mr_var_name} = ')
g.expr(node.right[0])
g.writeln(';')
for i, lx in node.left {
@@ -528,7 +528,7 @@ fn (mut g Gen) gen_multi_return_assign(node &ast.AssignStmt, return_type ast.Typ
}
styp := if ident.name in g.defer_vars { '' } else { g.typ(node.left_types[i]) }
if node.op == .decl_assign {
- g.write('$styp ')
+ g.write('${styp} ')
}
if lx.is_auto_deref_var() {
g.write('*')
@@ -539,30 +539,30 @@ fn (mut g Gen) gen_multi_return_assign(node &ast.AssignStmt, return_type ast.Typ
if is_opt {
mr_base_styp := g.base_type(return_type)
if is_auto_heap {
- g.writeln('HEAP${noscan}($mr_base_styp, ${mr_var_name}.arg$i) });')
+ g.writeln('HEAP${noscan}(${mr_base_styp}, ${mr_var_name}.arg${i}) });')
} else {
- g.writeln('${mr_var_name}.arg$i });')
+ g.writeln('${mr_var_name}.arg${i} });')
}
} else {
if is_auto_heap {
- g.writeln('HEAP${noscan}($styp, ${mr_var_name}.arg$i) });')
+ g.writeln('HEAP${noscan}(${styp}, ${mr_var_name}.arg${i}) });')
} else {
- g.writeln('${mr_var_name}.arg$i });')
+ g.writeln('${mr_var_name}.arg${i} });')
}
}
} else {
if is_opt {
mr_base_styp := g.base_type(return_type)
if is_auto_heap {
- g.writeln(' = HEAP${noscan}($mr_base_styp, ${mr_var_name}.arg$i);')
+ g.writeln(' = HEAP${noscan}(${mr_base_styp}, ${mr_var_name}.arg${i});')
} else {
- g.writeln(' = ${mr_var_name}.arg$i;')
+ g.writeln(' = ${mr_var_name}.arg${i};')
}
} else {
if is_auto_heap {
- g.writeln(' = HEAP${noscan}($styp, ${mr_var_name}.arg$i);')
+ g.writeln(' = HEAP${noscan}(${styp}, ${mr_var_name}.arg${i});')
} else {
- g.writeln(' = ${mr_var_name}.arg$i;')
+ g.writeln(' = ${mr_var_name}.arg${i};')
}
}
}
@@ -620,13 +620,13 @@ fn (mut g Gen) gen_cross_var_assign(node &ast.AssignStmt) {
ast.Ident {
left_typ := node.left_types[i]
left_sym := g.table.sym(left_typ)
- anon_ctx := if g.anon_fn { '$closure_ctx->' } else { '' }
+ anon_ctx := if g.anon_fn { '${closure_ctx}->' } else { '' }
if left_sym.kind == .function {
- g.write_fn_ptr_decl(left_sym.info as ast.FnType, '_var_$left.pos.pos')
- g.writeln(' = $anon_ctx${c_name(left.name)};')
+ g.write_fn_ptr_decl(left_sym.info as ast.FnType, '_var_${left.pos.pos}')
+ g.writeln(' = ${anon_ctx}${c_name(left.name)};')
} else {
styp := g.typ(left_typ)
- g.writeln('$styp _var_$left.pos.pos = $anon_ctx${c_name(left.name)};')
+ g.writeln('${styp} _var_${left.pos.pos} = ${anon_ctx}${c_name(left.name)};')
}
}
ast.IndexExpr {
@@ -637,11 +637,11 @@ fn (mut g Gen) gen_cross_var_assign(node &ast.AssignStmt) {
if elem_typ.kind == .function {
left_typ := node.left_types[i]
left_sym := g.table.sym(left_typ)
- g.write_fn_ptr_decl(left_sym.info as ast.FnType, '_var_$left.pos.pos')
+ g.write_fn_ptr_decl(left_sym.info as ast.FnType, '_var_${left.pos.pos}')
g.write(' = *(voidptr*)array_get(')
} else {
styp := g.typ(info.elem_type)
- g.write('$styp _var_$left.pos.pos = *($styp*)array_get(')
+ g.write('${styp} _var_${left.pos.pos} = *(${styp}*)array_get(')
}
if left.left_type.is_ptr() {
g.write('*')
@@ -663,11 +663,11 @@ fn (mut g Gen) gen_cross_var_assign(node &ast.AssignStmt) {
if elem_typ.kind == .function {
left_typ := node.left_types[i]
left_sym := g.table.sym(left_typ)
- g.write_fn_ptr_decl(left_sym.info as ast.FnType, '_var_$left.pos.pos')
+ g.write_fn_ptr_decl(left_sym.info as ast.FnType, '_var_${left.pos.pos}')
g.write(' = *(voidptr*)')
} else {
styp := g.typ(info.elem_type)
- g.write('$styp _var_$left.pos.pos = ')
+ g.write('${styp} _var_${left.pos.pos} = ')
}
if left.left_type.is_ptr() {
g.write('*')
@@ -690,10 +690,10 @@ fn (mut g Gen) gen_cross_var_assign(node &ast.AssignStmt) {
if val_typ.kind == .function {
left_type := node.left_types[i]
left_sym := g.table.sym(left_type)
- g.write_fn_ptr_decl(left_sym.info as ast.FnType, '_var_$left.pos.pos')
+ g.write_fn_ptr_decl(left_sym.info as ast.FnType, '_var_${left.pos.pos}')
g.write(' = *(voidptr*)map_get(')
} else {
- g.write('$styp _var_$left.pos.pos = *($styp*)map_get(')
+ g.write('${styp} _var_${left.pos.pos} = *(${styp}*)map_get(')
}
if !left.left_type.is_ptr() {
g.write('ADDR(map, ')
@@ -702,19 +702,19 @@ fn (mut g Gen) gen_cross_var_assign(node &ast.AssignStmt) {
} else {
g.expr(left.left)
}
- g.write(', &($skeytyp[]){')
+ g.write(', &(${skeytyp}[]){')
g.expr(left.index)
g.write('}')
if val_typ.kind == .function {
- g.writeln(', &(voidptr[]){ $zero });')
+ g.writeln(', &(voidptr[]){ ${zero} });')
} else {
- g.writeln(', &($styp[]){ $zero });')
+ g.writeln(', &(${styp}[]){ ${zero} });')
}
}
}
ast.SelectorExpr {
styp := g.typ(left.typ)
- g.write('$styp _var_$left.pos.pos = ')
+ g.write('${styp} _var_${left.pos.pos} = ')
g.expr(left.expr)
mut sel := '.'
if left.expr_type.is_ptr() {
@@ -724,7 +724,7 @@ fn (mut g Gen) gen_cross_var_assign(node &ast.AssignStmt) {
sel = '->'
}
}
- g.writeln('$sel$left.field_name;')
+ g.writeln('${sel}${left.field_name};')
}
else {}
}
diff --git a/vlib/v/gen/c/auto_eq_methods.v b/vlib/v/gen/c/auto_eq_methods.v
index 76b998e238..62aeb61230 100644
--- a/vlib/v/gen/c/auto_eq_methods.v
+++ b/vlib/v/gen/c/auto_eq_methods.v
@@ -43,7 +43,7 @@ fn (mut g Gen) gen_equality_fns() {
g.gen_interface_equality_fn(needed_typ)
}
else {
- verror('could not generate equality function for type $sym.kind')
+ verror('could not generate equality function for type ${sym.kind}')
}
}
}
@@ -59,39 +59,39 @@ fn (mut g Gen) gen_sumtype_equality_fn(left_type ast.Type) string {
g.generated_eq_fns << left_type
info := left.sym.sumtype_info()
- g.definitions.writeln('static bool ${ptr_styp}_sumtype_eq($ptr_styp a, $ptr_styp b); // auto')
+ g.definitions.writeln('static bool ${ptr_styp}_sumtype_eq(${ptr_styp} a, ${ptr_styp} b); // auto')
mut fn_builder := strings.new_builder(512)
- fn_builder.writeln('static bool ${ptr_styp}_sumtype_eq($ptr_styp a, $ptr_styp b) {')
+ fn_builder.writeln('static bool ${ptr_styp}_sumtype_eq(${ptr_styp} a, ${ptr_styp} b) {')
fn_builder.writeln('\tif (a._typ != b._typ) { return false; }')
for typ in info.variants {
variant := g.unwrap(typ)
- fn_builder.writeln('\tif (a._typ == $variant.typ.idx()) {')
- name := '_$variant.sym.cname'
+ fn_builder.writeln('\tif (a._typ == ${variant.typ.idx()}) {')
+ name := '_${variant.sym.cname}'
if variant.sym.kind == .string {
- fn_builder.writeln('\t\treturn string__eq(*a.$name, *b.$name);')
+ fn_builder.writeln('\t\treturn string__eq(*a.${name}, *b.${name});')
} else if variant.sym.kind == .sum_type && !typ.is_ptr() {
eq_fn := g.gen_sumtype_equality_fn(typ)
- fn_builder.writeln('\t\treturn ${eq_fn}_sumtype_eq(*a.$name, *b.$name);')
+ fn_builder.writeln('\t\treturn ${eq_fn}_sumtype_eq(*a.${name}, *b.${name});')
} else if variant.sym.kind == .struct_ && !typ.is_ptr() {
eq_fn := g.gen_struct_equality_fn(typ)
- fn_builder.writeln('\t\treturn ${eq_fn}_struct_eq(*a.$name, *b.$name);')
+ fn_builder.writeln('\t\treturn ${eq_fn}_struct_eq(*a.${name}, *b.${name});')
} else if variant.sym.kind == .array && !typ.is_ptr() {
eq_fn := g.gen_array_equality_fn(typ)
- fn_builder.writeln('\t\treturn ${eq_fn}_arr_eq(*a.$name, *b.$name);')
+ fn_builder.writeln('\t\treturn ${eq_fn}_arr_eq(*a.${name}, *b.${name});')
} else if variant.sym.kind == .array_fixed && !typ.is_ptr() {
eq_fn := g.gen_fixed_array_equality_fn(typ)
- fn_builder.writeln('\t\treturn ${eq_fn}_arr_eq(*a.$name, *b.$name);')
+ fn_builder.writeln('\t\treturn ${eq_fn}_arr_eq(*a.${name}, *b.${name});')
} else if variant.sym.kind == .map && !typ.is_ptr() {
eq_fn := g.gen_map_equality_fn(typ)
- fn_builder.writeln('\t\treturn ${eq_fn}_map_eq(*a.$name, *b.$name);')
+ fn_builder.writeln('\t\treturn ${eq_fn}_map_eq(*a.${name}, *b.${name});')
} else if variant.sym.kind == .alias && !typ.is_ptr() {
eq_fn := g.gen_alias_equality_fn(typ)
- fn_builder.writeln('\t\treturn ${eq_fn}_alias_eq(*a.$name, *b.$name);')
+ fn_builder.writeln('\t\treturn ${eq_fn}_alias_eq(*a.${name}, *b.${name});')
} else if variant.sym.kind == .function {
- fn_builder.writeln('\t\treturn *((voidptr*)(*a.$name)) == *((voidptr*)(*b.$name));')
+ fn_builder.writeln('\t\treturn *((voidptr*)(*a.${name})) == *((voidptr*)(*b.${name}));')
} else {
- fn_builder.writeln('\t\treturn *a.$name == *b.$name;')
+ fn_builder.writeln('\t\treturn *a.${name} == *b.${name};')
}
fn_builder.writeln('\t}')
}
@@ -110,13 +110,13 @@ fn (mut g Gen) gen_struct_equality_fn(left_type ast.Type) string {
}
g.generated_eq_fns << left_type
info := left.sym.struct_info()
- g.definitions.writeln('static bool ${fn_name}_struct_eq($ptr_styp a, $ptr_styp b); // auto')
+ g.definitions.writeln('static bool ${fn_name}_struct_eq(${ptr_styp} a, ${ptr_styp} b); // auto')
mut fn_builder := strings.new_builder(512)
defer {
g.auto_fn_definitions << fn_builder.str()
}
- fn_builder.writeln('static bool ${fn_name}_struct_eq($ptr_styp a, $ptr_styp b) {')
+ fn_builder.writeln('static bool ${fn_name}_struct_eq(${ptr_styp} a, ${ptr_styp} b) {')
// overloaded
if left.sym.has_method('==') {
@@ -134,33 +134,33 @@ fn (mut g Gen) gen_struct_equality_fn(left_type ast.Type) string {
field_type := g.unwrap(field.typ)
field_name := c_name(field.name)
if field_type.sym.kind == .string {
- fn_builder.write_string('string__eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('string__eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .sum_type && !field.typ.is_ptr() {
eq_fn := g.gen_sumtype_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_sumtype_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_sumtype_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .struct_ && !field.typ.is_ptr() {
eq_fn := g.gen_struct_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_struct_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_struct_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .array && !field.typ.is_ptr() {
eq_fn := g.gen_array_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_arr_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_arr_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .array_fixed && !field.typ.is_ptr() {
eq_fn := g.gen_fixed_array_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_arr_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_arr_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .map && !field.typ.is_ptr() {
eq_fn := g.gen_map_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_map_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_map_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .alias && !field.typ.is_ptr() {
eq_fn := g.gen_alias_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_alias_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_alias_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .function {
- fn_builder.write_string('*((voidptr*)(a.$field_name)) == *((voidptr*)(b.$field_name))')
+ fn_builder.write_string('*((voidptr*)(a.${field_name})) == *((voidptr*)(b.${field_name}))')
} else if field_type.sym.kind == .interface_ {
ptr := if field.typ.is_ptr() { '*'.repeat(field.typ.nr_muls()) } else { '' }
eq_fn := g.gen_interface_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_interface_eq(${ptr}a.$field_name, ${ptr}b.$field_name)')
+ fn_builder.write_string('${eq_fn}_interface_eq(${ptr}a.${field_name}, ${ptr}b.${field_name})')
} else {
- fn_builder.write_string('a.$field_name == b.$field_name')
+ fn_builder.write_string('a.${field_name} == b.${field_name}')
}
}
} else {
@@ -179,10 +179,10 @@ fn (mut g Gen) gen_alias_equality_fn(left_type ast.Type) string {
}
g.generated_eq_fns << left_type
info := left.sym.info as ast.Alias
- g.definitions.writeln('static bool ${ptr_styp}_alias_eq($ptr_styp a, $ptr_styp b); // auto')
+ g.definitions.writeln('static bool ${ptr_styp}_alias_eq(${ptr_styp} a, ${ptr_styp} b); // auto')
mut fn_builder := strings.new_builder(512)
- fn_builder.writeln('static bool ${ptr_styp}_alias_eq($ptr_styp a, $ptr_styp b) {')
+ fn_builder.writeln('static bool ${ptr_styp}_alias_eq(${ptr_styp} a, ${ptr_styp} b) {')
sym := g.table.sym(info.parent_type)
if sym.kind == .string {
fn_builder.writeln('\treturn string__eq(a, b);')
@@ -223,42 +223,42 @@ fn (mut g Gen) gen_array_equality_fn(left_type ast.Type) string {
g.generated_eq_fns << left_type
elem := g.unwrap(left.sym.array_info().elem_type)
ptr_elem_styp := g.typ(elem.typ)
- g.definitions.writeln('static bool ${ptr_styp}_arr_eq($ptr_styp a, $ptr_styp b); // auto')
+ g.definitions.writeln('static bool ${ptr_styp}_arr_eq(${ptr_styp} a, ${ptr_styp} b); // auto')
mut fn_builder := strings.new_builder(512)
- fn_builder.writeln('static bool ${ptr_styp}_arr_eq($ptr_styp a, $ptr_styp b) {')
+ fn_builder.writeln('static bool ${ptr_styp}_arr_eq(${ptr_styp} a, ${ptr_styp} b) {')
fn_builder.writeln('\tif (a.len != b.len) {')
fn_builder.writeln('\t\treturn false;')
fn_builder.writeln('\t}')
fn_builder.writeln('\tfor (int i = 0; i < a.len; ++i) {')
// compare every pair of elements of the two arrays
if elem.sym.kind == .string {
- fn_builder.writeln('\t\tif (!string__eq(*(($ptr_elem_styp*)((byte*)a.data+(i*a.element_size))), *(($ptr_elem_styp*)((byte*)b.data+(i*b.element_size))))) {')
+ fn_builder.writeln('\t\tif (!string__eq(*((${ptr_elem_styp}*)((byte*)a.data+(i*a.element_size))), *((${ptr_elem_styp}*)((byte*)b.data+(i*b.element_size))))) {')
} else if elem.sym.kind == .sum_type && !elem.typ.is_ptr() {
eq_fn := g.gen_sumtype_equality_fn(elem.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_sumtype_eq((($ptr_elem_styp*)a.data)[i], (($ptr_elem_styp*)b.data)[i])) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_sumtype_eq(((${ptr_elem_styp}*)a.data)[i], ((${ptr_elem_styp}*)b.data)[i])) {')
} else if elem.sym.kind == .struct_ && !elem.typ.is_ptr() {
eq_fn := g.gen_struct_equality_fn(elem.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_struct_eq((($ptr_elem_styp*)a.data)[i], (($ptr_elem_styp*)b.data)[i])) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_struct_eq(((${ptr_elem_styp}*)a.data)[i], ((${ptr_elem_styp}*)b.data)[i])) {')
} else if elem.sym.kind == .interface_ && !elem.typ.is_ptr() {
eq_fn := g.gen_interface_equality_fn(elem.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_interface_eq((($ptr_elem_styp*)a.data)[i], (($ptr_elem_styp*)b.data)[i])) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_interface_eq(((${ptr_elem_styp}*)a.data)[i], ((${ptr_elem_styp}*)b.data)[i])) {')
} else if elem.sym.kind == .array && !elem.typ.is_ptr() {
eq_fn := g.gen_array_equality_fn(elem.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_arr_eq((($ptr_elem_styp*)a.data)[i], (($ptr_elem_styp*)b.data)[i])) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_arr_eq(((${ptr_elem_styp}*)a.data)[i], ((${ptr_elem_styp}*)b.data)[i])) {')
} else if elem.sym.kind == .array_fixed && !elem.typ.is_ptr() {
eq_fn := g.gen_fixed_array_equality_fn(elem.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_arr_eq((($ptr_elem_styp*)a.data)[i], (($ptr_elem_styp*)b.data)[i])) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_arr_eq(((${ptr_elem_styp}*)a.data)[i], ((${ptr_elem_styp}*)b.data)[i])) {')
} else if elem.sym.kind == .map && !elem.typ.is_ptr() {
eq_fn := g.gen_map_equality_fn(elem.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_map_eq((($ptr_elem_styp*)a.data)[i], (($ptr_elem_styp*)b.data)[i])) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_map_eq(((${ptr_elem_styp}*)a.data)[i], ((${ptr_elem_styp}*)b.data)[i])) {')
} else if elem.sym.kind == .alias && !elem.typ.is_ptr() {
eq_fn := g.gen_alias_equality_fn(elem.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_alias_eq((($ptr_elem_styp*)a.data)[i], (($ptr_elem_styp*)b.data)[i])) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_alias_eq(((${ptr_elem_styp}*)a.data)[i], ((${ptr_elem_styp}*)b.data)[i])) {')
} else if elem.sym.kind == .function {
fn_builder.writeln('\t\tif (*((voidptr*)((byte*)a.data+(i*a.element_size))) != *((voidptr*)((byte*)b.data+(i*b.element_size)))) {')
} else {
- fn_builder.writeln('\t\tif (*(($ptr_elem_styp*)((byte*)a.data+(i*a.element_size))) != *(($ptr_elem_styp*)((byte*)b.data+(i*b.element_size)))) {')
+ fn_builder.writeln('\t\tif (*((${ptr_elem_styp}*)((byte*)a.data+(i*a.element_size))) != *((${ptr_elem_styp}*)((byte*)b.data+(i*b.element_size)))) {')
}
fn_builder.writeln('\t\t\treturn false;')
fn_builder.writeln('\t\t}')
@@ -279,11 +279,11 @@ fn (mut g Gen) gen_fixed_array_equality_fn(left_type ast.Type) string {
elem_info := left.sym.array_fixed_info()
elem := g.unwrap(elem_info.elem_type)
size := elem_info.size
- g.definitions.writeln('static bool ${ptr_styp}_arr_eq($ptr_styp a, $ptr_styp b); // auto')
+ g.definitions.writeln('static bool ${ptr_styp}_arr_eq(${ptr_styp} a, ${ptr_styp} b); // auto')
mut fn_builder := strings.new_builder(512)
- fn_builder.writeln('static bool ${ptr_styp}_arr_eq($ptr_styp a, $ptr_styp b) {')
- fn_builder.writeln('\tfor (int i = 0; i < $size; ++i) {')
+ fn_builder.writeln('static bool ${ptr_styp}_arr_eq(${ptr_styp} a, ${ptr_styp} b) {')
+ fn_builder.writeln('\tfor (int i = 0; i < ${size}; ++i) {')
// compare every pair of elements of the two fixed arrays
if elem.sym.kind == .string {
fn_builder.writeln('\t\tif (!string__eq(a[i], b[i])) {')
@@ -331,10 +331,10 @@ fn (mut g Gen) gen_map_equality_fn(left_type ast.Type) string {
g.generated_eq_fns << left_type
value := g.unwrap(left.sym.map_info().value_type)
ptr_value_styp := g.typ(value.typ)
- g.definitions.writeln('static bool ${ptr_styp}_map_eq($ptr_styp a, $ptr_styp b); // auto')
+ g.definitions.writeln('static bool ${ptr_styp}_map_eq(${ptr_styp} a, ${ptr_styp} b); // auto')
mut fn_builder := strings.new_builder(512)
- fn_builder.writeln('static bool ${ptr_styp}_map_eq($ptr_styp a, $ptr_styp b) {')
+ fn_builder.writeln('static bool ${ptr_styp}_map_eq(${ptr_styp} a, ${ptr_styp} b) {')
fn_builder.writeln('\tif (a.len != b.len) {')
fn_builder.writeln('\t\treturn false;')
fn_builder.writeln('\t}')
@@ -347,9 +347,9 @@ fn (mut g Gen) gen_map_equality_fn(left_type ast.Type) string {
info := value.sym.info as ast.FnType
sig := g.fn_var_signature(info.func.return_type, info.func.params.map(it.typ),
'v')
- fn_builder.writeln('\t\t$sig = *(voidptr*)map_get(&a, k, &(voidptr[]){ 0 });')
+ fn_builder.writeln('\t\t${sig} = *(voidptr*)map_get(&a, k, &(voidptr[]){ 0 });')
} else {
- fn_builder.writeln('\t\t$ptr_value_styp v = *($ptr_value_styp*)map_get(&a, k, &($ptr_value_styp[]){ 0 });')
+ fn_builder.writeln('\t\t${ptr_value_styp} v = *(${ptr_value_styp}*)map_get(&a, k, &(${ptr_value_styp}[]){ 0 });')
}
match kind {
.string {
@@ -357,37 +357,37 @@ fn (mut g Gen) gen_map_equality_fn(left_type ast.Type) string {
}
.sum_type {
eq_fn := g.gen_sumtype_equality_fn(value.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_sumtype_eq(*($ptr_value_styp*)map_get(&b, k, &($ptr_value_styp[]){ 0 }), v)) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_sumtype_eq(*(${ptr_value_styp}*)map_get(&b, k, &(${ptr_value_styp}[]){ 0 }), v)) {')
}
.struct_ {
eq_fn := g.gen_struct_equality_fn(value.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_struct_eq(*($ptr_value_styp*)map_get(&b, k, &($ptr_value_styp[]){ 0 }), v)) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_struct_eq(*(${ptr_value_styp}*)map_get(&b, k, &(${ptr_value_styp}[]){ 0 }), v)) {')
}
.interface_ {
eq_fn := g.gen_interface_equality_fn(value.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_interface_eq(*($ptr_value_styp*)map_get(&b, k, &($ptr_value_styp[]){ 0 }), v)) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_interface_eq(*(${ptr_value_styp}*)map_get(&b, k, &(${ptr_value_styp}[]){ 0 }), v)) {')
}
.array {
eq_fn := g.gen_array_equality_fn(value.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_arr_eq(*($ptr_value_styp*)map_get(&b, k, &($ptr_value_styp[]){ 0 }), v)) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_arr_eq(*(${ptr_value_styp}*)map_get(&b, k, &(${ptr_value_styp}[]){ 0 }), v)) {')
}
.array_fixed {
eq_fn := g.gen_fixed_array_equality_fn(value.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_arr_eq(*($ptr_value_styp*)map_get(&b, k, &($ptr_value_styp[]){ 0 }), v)) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_arr_eq(*(${ptr_value_styp}*)map_get(&b, k, &(${ptr_value_styp}[]){ 0 }), v)) {')
}
.map {
eq_fn := g.gen_map_equality_fn(value.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_map_eq(*($ptr_value_styp*)map_get(&b, k, &($ptr_value_styp[]){ 0 }), v)) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_map_eq(*(${ptr_value_styp}*)map_get(&b, k, &(${ptr_value_styp}[]){ 0 }), v)) {')
}
.alias {
eq_fn := g.gen_alias_equality_fn(value.typ)
- fn_builder.writeln('\t\tif (!${eq_fn}_alias_eq(*($ptr_value_styp*)map_get(&b, k, &($ptr_value_styp[]){ 0 }), v)) {')
+ fn_builder.writeln('\t\tif (!${eq_fn}_alias_eq(*(${ptr_value_styp}*)map_get(&b, k, &(${ptr_value_styp}[]){ 0 }), v)) {')
}
.function {
fn_builder.writeln('\t\tif (*(voidptr*)map_get(&b, k, &(voidptr[]){ 0 }) != v) {')
}
else {
- fn_builder.writeln('\t\tif (*($ptr_value_styp*)map_get(&b, k, &($ptr_value_styp[]){ 0 }) != v) {')
+ fn_builder.writeln('\t\tif (*(${ptr_value_styp}*)map_get(&b, k, &(${ptr_value_styp}[]){ 0 }) != v) {')
}
}
fn_builder.writeln('\t\t\treturn false;')
@@ -408,47 +408,47 @@ fn (mut g Gen) gen_interface_equality_fn(left_type ast.Type) string {
}
g.generated_eq_fns << left_type
info := left.sym.info
- g.definitions.writeln('static bool ${ptr_styp}_interface_eq($ptr_styp a, $ptr_styp b); // auto')
+ g.definitions.writeln('static bool ${ptr_styp}_interface_eq(${ptr_styp} a, ${ptr_styp} b); // auto')
mut fn_builder := strings.new_builder(512)
defer {
g.auto_fn_definitions << fn_builder.str()
}
fn_builder.writeln('static int v_typeof_interface_idx_${ptr_styp}(int sidx); // for auto eq method')
- fn_builder.writeln('static bool ${fn_name}_interface_eq($ptr_styp a, $ptr_styp b) {')
+ fn_builder.writeln('static bool ${fn_name}_interface_eq(${ptr_styp} a, ${ptr_styp} b) {')
fn_builder.writeln('\tif (a._typ == b._typ) {')
fn_builder.writeln('\t\tint idx = v_typeof_interface_idx_${ptr_styp}(a._typ);')
if info is ast.Interface {
for typ in info.types {
- fn_builder.writeln('\t\tif (idx == $typ.idx()) {')
+ fn_builder.writeln('\t\tif (idx == ${typ.idx()}) {')
fn_builder.write_string('\t\t\treturn ')
match g.table.type_kind(typ) {
.struct_ {
eq_fn := g.gen_struct_equality_fn(typ)
- fn_builder.write_string('${eq_fn}_struct_eq(*(a._$eq_fn), *(b._$eq_fn))')
+ fn_builder.write_string('${eq_fn}_struct_eq(*(a._${eq_fn}), *(b._${eq_fn}))')
}
.string {
fn_builder.write_string('string__eq(*(a._string), *(b._string))')
}
.sum_type {
eq_fn := g.gen_sumtype_equality_fn(typ)
- fn_builder.write_string('${eq_fn}_sumtype_eq(*(a._$eq_fn), *(b._$eq_fn))')
+ fn_builder.write_string('${eq_fn}_sumtype_eq(*(a._${eq_fn}), *(b._${eq_fn}))')
}
.array {
eq_fn := g.gen_array_equality_fn(typ)
- fn_builder.write_string('${eq_fn}_arr_eq(*(a._$eq_fn), *(b._$eq_fn))')
+ fn_builder.write_string('${eq_fn}_arr_eq(*(a._${eq_fn}), *(b._${eq_fn}))')
}
.array_fixed {
eq_fn := g.gen_fixed_array_equality_fn(typ)
- fn_builder.write_string('${eq_fn}_arr_eq(*(a._$eq_fn), *(b._$eq_fn))')
+ fn_builder.write_string('${eq_fn}_arr_eq(*(a._${eq_fn}), *(b._${eq_fn}))')
}
.map {
eq_fn := g.gen_map_equality_fn(typ)
- fn_builder.write_string('${eq_fn}_map_eq(*(a._$eq_fn), *(b._$eq_fn))')
+ fn_builder.write_string('${eq_fn}_map_eq(*(a._${eq_fn}), *(b._${eq_fn}))')
}
.alias {
eq_fn := g.gen_alias_equality_fn(typ)
- fn_builder.write_string('${eq_fn}_alias_eq(*(a._$eq_fn), *(b._$eq_fn))')
+ fn_builder.write_string('${eq_fn}_alias_eq(*(a._${eq_fn}), *(b._${eq_fn}))')
}
else {
fn_builder.write_string('true')
diff --git a/vlib/v/gen/c/auto_free_methods.v b/vlib/v/gen/c/auto_free_methods.v
index a9dd14c9e4..5680677bbe 100644
--- a/vlib/v/gen/c/auto_free_methods.v
+++ b/vlib/v/gen/c/auto_free_methods.v
@@ -58,19 +58,19 @@ fn (mut g Gen) gen_free_method(typ ast.Type) string {
}
else {
println(g.table.type_str(typ))
- verror("could not generate free method '$fn_name' for type '$styp'")
+ verror("could not generate free method '${fn_name}' for type '${styp}'")
}
}
return fn_name
}
fn (mut g Gen) gen_free_for_struct(info ast.Struct, styp string, fn_name string) {
- g.definitions.writeln('$g.static_modifier void ${fn_name}($styp* it); // auto')
+ g.definitions.writeln('${g.static_modifier} void ${fn_name}(${styp}* it); // auto')
mut fn_builder := strings.new_builder(128)
defer {
g.auto_fn_definitions << fn_builder.str()
}
- fn_builder.writeln('$g.static_modifier void ${fn_name}($styp* it) {')
+ fn_builder.writeln('${g.static_modifier} void ${fn_name}(${styp}* it) {')
for field in info.fields {
field_name := c_name(field.name)
sym := g.table.sym(g.unwrap_generic(field.typ))
@@ -89,21 +89,21 @@ fn (mut g Gen) gen_free_for_struct(info ast.Struct, styp string, fn_name string)
g.gen_free_method(field.typ)
}
if is_shared {
- fn_builder.writeln('\t${field_styp_fn_name}(&(it->$field_name->val));')
+ fn_builder.writeln('\t${field_styp_fn_name}(&(it->${field_name}->val));')
} else {
- fn_builder.writeln('\t${field_styp_fn_name}(&(it->$field_name));')
+ fn_builder.writeln('\t${field_styp_fn_name}(&(it->${field_name}));')
}
}
fn_builder.writeln('}')
}
fn (mut g Gen) gen_free_for_array(info ast.Array, styp string, fn_name string) {
- g.definitions.writeln('$g.static_modifier void ${fn_name}($styp* it); // auto')
+ g.definitions.writeln('${g.static_modifier} void ${fn_name}(${styp}* it); // auto')
mut fn_builder := strings.new_builder(128)
defer {
g.auto_fn_definitions << fn_builder.str()
}
- fn_builder.writeln('$g.static_modifier void ${fn_name}($styp* it) {')
+ fn_builder.writeln('${g.static_modifier} void ${fn_name}(${styp}* it) {')
sym := g.table.sym(g.unwrap_generic(info.elem_type))
if sym.kind in [.string, .array, .map, .struct_] {
@@ -115,7 +115,7 @@ fn (mut g Gen) gen_free_for_array(info ast.Array, styp string, fn_name string) {
} else {
g.gen_free_method(info.elem_type)
}
- fn_builder.writeln('\t\t${elem_styp_fn_name}(&((($elem_styp*)it->data)[i]));')
+ fn_builder.writeln('\t\t${elem_styp_fn_name}(&(((${elem_styp}*)it->data)[i]));')
fn_builder.writeln('\t}')
}
fn_builder.writeln('\tarray_free(it);')
@@ -123,12 +123,12 @@ fn (mut g Gen) gen_free_for_array(info ast.Array, styp string, fn_name string) {
}
fn (mut g Gen) gen_free_for_map(info ast.Map, styp string, fn_name string) {
- g.definitions.writeln('$g.static_modifier void ${fn_name}($styp* it); // auto')
+ g.definitions.writeln('${g.static_modifier} void ${fn_name}(${styp}* it); // auto')
mut fn_builder := strings.new_builder(128)
defer {
g.auto_fn_definitions << fn_builder.str()
}
- fn_builder.writeln('$g.static_modifier void ${fn_name}($styp* it) {')
+ fn_builder.writeln('${g.static_modifier} void ${fn_name}(${styp}* it) {')
fn_builder.writeln('\tmap_free(it);')
fn_builder.writeln('}')
diff --git a/vlib/v/gen/c/auto_str_methods.v b/vlib/v/gen/c/auto_str_methods.v
index e6d84ab575..71298687f5 100644
--- a/vlib/v/gen/c/auto_str_methods.v
+++ b/vlib/v/gen/c/auto_str_methods.v
@@ -14,7 +14,7 @@ const (
fn (mut g Gen) gen_str_default(sym ast.TypeSymbol, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_default: $sym.name | $styp | str_fn_name')
+ eprintln('> gen_str_default: ${sym.name} | ${styp} | str_fn_name')
}
mut convertor := ''
mut typename_ := ''
@@ -31,14 +31,14 @@ fn (mut g Gen) gen_str_default(sym ast.TypeSymbol, styp string, str_fn_name stri
convertor = 'bool'
typename_ = 'bool'
} else {
- verror('could not generate string method for type `$styp`')
+ verror('could not generate string method for type `${styp}`')
}
- g.definitions.writeln('string ${str_fn_name}($styp it); // auto')
- g.auto_str_funcs.writeln('string ${str_fn_name}($styp it) {')
+ g.definitions.writeln('string ${str_fn_name}(${styp} it); // auto')
+ g.auto_str_funcs.writeln('string ${str_fn_name}(${styp} it) {')
if convertor == 'bool' {
- g.auto_str_funcs.writeln('\tstring tmp1 = string__plus(_SLIT("${styp}("), ($convertor)it ? _SLIT("true") : _SLIT("false"));')
+ g.auto_str_funcs.writeln('\tstring tmp1 = string__plus(_SLIT("${styp}("), (${convertor})it ? _SLIT("true") : _SLIT("false"));')
} else {
- g.auto_str_funcs.writeln('\tstring tmp1 = string__plus(_SLIT("${styp}("), tos3(${typename_}_str(($convertor)it).str));')
+ g.auto_str_funcs.writeln('\tstring tmp1 = string__plus(_SLIT("${styp}("), tos3(${typename_}_str((${convertor})it).str));')
}
g.auto_str_funcs.writeln('\tstring tmp2 = string__plus(tmp1, _SLIT(")"));')
g.auto_str_funcs.writeln('\tstring_free(&tmp1);')
@@ -54,7 +54,7 @@ mut:
fn (mut g Gen) get_str_fn(typ ast.Type) string {
$if trace_autostr ? {
- eprintln('> get_str_fn: $typ.debug()')
+ eprintln('> get_str_fn: ${typ.debug()}')
}
mut unwrapped := g.unwrap_generic(typ).set_nr_muls(0).clear_flag(.variadic)
if g.pref.nofloat {
@@ -99,7 +99,7 @@ fn (mut g Gen) final_gen_str(typ StrType) {
return
}
$if trace_autostr ? {
- eprintln('> final_gen_str: $typ')
+ eprintln('> final_gen_str: ${typ}')
}
g.generated_str_fns << typ
sym := g.table.sym(typ.typ)
@@ -160,7 +160,7 @@ fn (mut g Gen) final_gen_str(typ StrType) {
}
else {
if sym.name != 'nil' {
- verror('could not generate string method `$str_fn_name` for type `$styp`')
+ verror('could not generate string method `${str_fn_name}` for type `${styp}`')
}
}
}
@@ -168,31 +168,31 @@ fn (mut g Gen) final_gen_str(typ StrType) {
fn (mut g Gen) gen_str_for_option(typ ast.Type, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_option: $typ.debug() | $styp | $str_fn_name')
+ eprintln('> gen_str_for_option: ${typ.debug()} | ${styp} | ${str_fn_name}')
}
parent_type := typ.clear_flag(.optional)
sym := g.table.sym(parent_type)
sym_has_str_method, _, _ := sym.str_method_info()
parent_str_fn_name := g.get_str_fn(parent_type)
- g.definitions.writeln('string ${str_fn_name}($styp it); // auto')
- g.auto_str_funcs.writeln('string ${str_fn_name}($styp it) { return indent_${str_fn_name}(it, 0); }')
- g.definitions.writeln('string indent_${str_fn_name}($styp it, int indent_count); // auto')
- g.auto_str_funcs.writeln('string indent_${str_fn_name}($styp it, int indent_count) {')
+ g.definitions.writeln('string ${str_fn_name}(${styp} it); // auto')
+ g.auto_str_funcs.writeln('string ${str_fn_name}(${styp} it) { return indent_${str_fn_name}(it, 0); }')
+ g.definitions.writeln('string indent_${str_fn_name}(${styp} it, int indent_count); // auto')
+ g.auto_str_funcs.writeln('string indent_${str_fn_name}(${styp} it, int indent_count) {')
g.auto_str_funcs.writeln('\tstring res;')
g.auto_str_funcs.writeln('\tif (it.state == 0) {')
if sym.kind == .string {
- tmp_res := '${parent_str_fn_name}(*($sym.cname*)it.data)'
+ tmp_res := '${parent_str_fn_name}(*(${sym.cname}*)it.data)'
g.auto_str_funcs.writeln('\t\tres = ${str_intp_sq(tmp_res)};')
} else if should_use_indent_func(sym.kind) && !sym_has_str_method {
- g.auto_str_funcs.writeln('\t\tres = indent_${parent_str_fn_name}(*($sym.cname*)it.data, indent_count);')
+ g.auto_str_funcs.writeln('\t\tres = indent_${parent_str_fn_name}(*(${sym.cname}*)it.data, indent_count);')
} else {
- g.auto_str_funcs.writeln('\t\tres = ${parent_str_fn_name}(*($sym.cname*)it.data);')
+ g.auto_str_funcs.writeln('\t\tres = ${parent_str_fn_name}(*(${sym.cname}*)it.data);')
}
g.auto_str_funcs.writeln('\t} else {')
tmp_str := str_intp_sub('error: %%', 'IError_str(it.err)')
- g.auto_str_funcs.writeln('\t\tres = $tmp_str;')
+ g.auto_str_funcs.writeln('\t\tres = ${tmp_str};')
g.auto_str_funcs.writeln('\t}')
g.auto_str_funcs.writeln('\treturn ${str_intp_sub('Option(%%)', 'res')};')
@@ -201,31 +201,31 @@ fn (mut g Gen) gen_str_for_option(typ ast.Type, styp string, str_fn_name string)
fn (mut g Gen) gen_str_for_result(typ ast.Type, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_result: $typ.debug() | $styp | $str_fn_name')
+ eprintln('> gen_str_for_result: ${typ.debug()} | ${styp} | ${str_fn_name}')
}
parent_type := typ.clear_flag(.result)
sym := g.table.sym(parent_type)
sym_has_str_method, _, _ := sym.str_method_info()
parent_str_fn_name := g.get_str_fn(parent_type)
- g.definitions.writeln('string ${str_fn_name}($styp it); // auto')
- g.auto_str_funcs.writeln('string ${str_fn_name}($styp it) { return indent_${str_fn_name}(it, 0); }')
- g.definitions.writeln('string indent_${str_fn_name}($styp it, int indent_count); // auto')
- g.auto_str_funcs.writeln('string indent_${str_fn_name}($styp it, int indent_count) {')
+ g.definitions.writeln('string ${str_fn_name}(${styp} it); // auto')
+ g.auto_str_funcs.writeln('string ${str_fn_name}(${styp} it) { return indent_${str_fn_name}(it, 0); }')
+ g.definitions.writeln('string indent_${str_fn_name}(${styp} it, int indent_count); // auto')
+ g.auto_str_funcs.writeln('string indent_${str_fn_name}(${styp} it, int indent_count) {')
g.auto_str_funcs.writeln('\tstring res;')
g.auto_str_funcs.writeln('\tif (!it.is_error) {')
if sym.kind == .string {
- tmp_res := '${parent_str_fn_name}(*($sym.cname*)it.data)'
+ tmp_res := '${parent_str_fn_name}(*(${sym.cname}*)it.data)'
g.auto_str_funcs.writeln('\t\tres = ${str_intp_sq(tmp_res)};')
} else if should_use_indent_func(sym.kind) && !sym_has_str_method {
- g.auto_str_funcs.writeln('\t\tres = indent_${parent_str_fn_name}(*($sym.cname*)it.data, indent_count);')
+ g.auto_str_funcs.writeln('\t\tres = indent_${parent_str_fn_name}(*(${sym.cname}*)it.data, indent_count);')
} else {
- g.auto_str_funcs.writeln('\t\tres = ${parent_str_fn_name}(*($sym.cname*)it.data);')
+ g.auto_str_funcs.writeln('\t\tres = ${parent_str_fn_name}(*(${sym.cname}*)it.data);')
}
g.auto_str_funcs.writeln('\t} else {')
tmp_str := str_intp_sub('error: %%', 'IError_str(it.err)')
- g.auto_str_funcs.writeln('\t\tres = $tmp_str;')
+ g.auto_str_funcs.writeln('\t\tres = ${tmp_str};')
g.auto_str_funcs.writeln('\t}')
g.auto_str_funcs.writeln('\treturn ${str_intp_sub('result(%%)', 'res')};')
@@ -235,18 +235,18 @@ fn (mut g Gen) gen_str_for_result(typ ast.Type, styp string, str_fn_name string)
fn (mut g Gen) gen_str_for_alias(info ast.Alias, styp string, str_fn_name string) {
parent_str_fn_name := g.get_str_fn(info.parent_type)
$if trace_autostr ? {
- eprintln('> gen_str_for_alias: $parent_str_fn_name | $styp | $str_fn_name')
+ eprintln('> gen_str_for_alias: ${parent_str_fn_name} | ${styp} | ${str_fn_name}')
}
mut clean_type_v_type_name := util.strip_main_name(styp.replace('__', '.'))
- g.definitions.writeln('static string ${str_fn_name}($styp it); // auto')
- g.auto_str_funcs.writeln('static string ${str_fn_name}($styp it) { return indent_${str_fn_name}(it, 0); }')
- g.definitions.writeln('static string indent_${str_fn_name}($styp it, int indent_count); // auto')
- g.auto_str_funcs.writeln('static string indent_${str_fn_name}($styp it, int indent_count) {')
+ g.definitions.writeln('static string ${str_fn_name}(${styp} it); // auto')
+ g.auto_str_funcs.writeln('static string ${str_fn_name}(${styp} it) { return indent_${str_fn_name}(it, 0); }')
+ g.definitions.writeln('static string indent_${str_fn_name}(${styp} it, int indent_count); // auto')
+ g.auto_str_funcs.writeln('static string indent_${str_fn_name}(${styp} it, int indent_count) {')
g.auto_str_funcs.writeln('\tstring indents = string_repeat(_SLIT(" "), indent_count);')
g.auto_str_funcs.writeln('\tstring tmp_ds = ${parent_str_fn_name}(it);')
g.auto_str_funcs.writeln('\tstring res = str_intp(3, _MOV((StrIntpData[]){
- {_SLIT0, $c.si_s_code, {.d_s = indents }},
- {_SLIT("${clean_type_v_type_name}("), $c.si_s_code, {.d_s = tmp_ds }},
+ {_SLIT0, ${c.si_s_code}, {.d_s = indents }},
+ {_SLIT("${clean_type_v_type_name}("), ${c.si_s_code}, {.d_s = tmp_ds }},
{_SLIT(")"), 0, {.d_c = 0 }}
}));')
g.auto_str_funcs.writeln('\tstring_free(&indents);')
@@ -257,12 +257,12 @@ fn (mut g Gen) gen_str_for_alias(info ast.Alias, styp string, str_fn_name string
fn (mut g Gen) gen_str_for_multi_return(info ast.MultiReturn, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_multi_return: $info.types | $styp | $str_fn_name')
+ eprintln('> gen_str_for_multi_return: ${info.types} | ${styp} | ${str_fn_name}')
}
- g.definitions.writeln('static string ${str_fn_name}($styp a); // auto')
+ g.definitions.writeln('static string ${str_fn_name}(${styp} a); // auto')
mut fn_builder := strings.new_builder(512)
- fn_builder.writeln('static string ${str_fn_name}($styp a) {')
- fn_builder.writeln('\tstrings__Builder sb = strings__new_builder($info.types.len * 10);')
+ fn_builder.writeln('static string ${str_fn_name}(${styp} a) {')
+ fn_builder.writeln('\tstrings__Builder sb = strings__new_builder(${info.types.len} * 10);')
fn_builder.writeln('\tstrings__Builder_write_string(&sb, _SLIT("("));')
for i, typ in info.types {
sym := g.table.sym(typ)
@@ -271,24 +271,24 @@ fn (mut g Gen) gen_str_for_multi_return(info ast.MultiReturn, styp string, str_f
arg_str_fn_name := g.get_str_fn(typ)
if should_use_indent_func(sym.kind) && !sym_has_str_method {
- fn_builder.writeln('\tstrings__Builder_write_string(&sb, ${arg_str_fn_name}(a.arg$i));')
+ fn_builder.writeln('\tstrings__Builder_write_string(&sb, ${arg_str_fn_name}(a.arg${i}));')
} else if sym.kind in [.f32, .f64] {
if sym.kind == .f32 {
- tmp_val := str_intp_g32('a.arg$i')
- fn_builder.writeln('\tstrings__Builder_write_string(&sb, $tmp_val);')
+ tmp_val := str_intp_g32('a.arg${i}')
+ fn_builder.writeln('\tstrings__Builder_write_string(&sb, ${tmp_val});')
} else {
- tmp_val := str_intp_g64('a.arg$i')
- fn_builder.writeln('\tstrings__Builder_write_string(&sb, $tmp_val);')
+ tmp_val := str_intp_g64('a.arg${i}')
+ fn_builder.writeln('\tstrings__Builder_write_string(&sb, ${tmp_val});')
}
} else if sym.kind == .string {
- tmp_str := str_intp_sq('a.arg$i')
- fn_builder.writeln('\tstrings__Builder_write_string(&sb, $tmp_str);')
+ tmp_str := str_intp_sq('a.arg${i}')
+ fn_builder.writeln('\tstrings__Builder_write_string(&sb, ${tmp_str});')
} else if sym.kind == .function {
fn_builder.writeln('\tstrings__Builder_write_string(&sb, ${arg_str_fn_name}());')
} else {
deref, deref_label := deref_kind(str_method_expects_ptr, is_arg_ptr, typ)
- fn_builder.writeln('\t\tstrings__Builder_write_string(&sb, _SLIT("$deref_label"));')
- fn_builder.writeln('\tstrings__Builder_write_string(&sb, ${arg_str_fn_name}( $deref a.arg$i));')
+ fn_builder.writeln('\t\tstrings__Builder_write_string(&sb, _SLIT("${deref_label}"));')
+ fn_builder.writeln('\tstrings__Builder_write_string(&sb, ${arg_str_fn_name}( ${deref} a.arg${i}));')
}
if i != info.types.len - 1 {
fn_builder.writeln('\tstrings__Builder_write_string(&sb, _SLIT(", "));')
@@ -304,20 +304,20 @@ fn (mut g Gen) gen_str_for_multi_return(info ast.MultiReturn, styp string, str_f
fn (mut g Gen) gen_str_for_enum(info ast.Enum, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_enum: $info | $styp | $str_fn_name')
+ eprintln('> gen_str_for_enum: ${info} | ${styp} | ${str_fn_name}')
}
s := util.no_dots(styp)
- g.definitions.writeln('static string ${str_fn_name}($styp it); // auto')
- g.auto_str_funcs.writeln('static string ${str_fn_name}($styp it) { /* gen_str_for_enum */')
+ g.definitions.writeln('static string ${str_fn_name}(${styp} it); // auto')
+ g.auto_str_funcs.writeln('static string ${str_fn_name}(${styp} it) { /* gen_str_for_enum */')
// Enums tagged with `[flag]` are special in that they can be a combination of enum values
if info.is_flag {
clean_name := util.strip_main_name(styp.replace('__', '.'))
- g.auto_str_funcs.writeln('\tstring ret = _SLIT("$clean_name{");')
+ g.auto_str_funcs.writeln('\tstring ret = _SLIT("${clean_name}{");')
g.auto_str_funcs.writeln('\tint first = 1;')
g.auto_str_funcs.writeln('\tu64 zit = (u64)it;')
for i, val in info.vals {
mask := u64(1) << i
- g.auto_str_funcs.writeln('\tif (zit & 0x${mask:016x}U) {if (!first) {ret = string__plus(ret, _SLIT(" | "));} ret = string__plus(ret, _SLIT(".$val")); first = 0;}')
+ g.auto_str_funcs.writeln('\tif (zit & 0x${mask:016x}U) {if (!first) {ret = string__plus(ret, _SLIT(" | "));} ret = string__plus(ret, _SLIT(".${val}")); first = 0;}')
}
g.auto_str_funcs.writeln('\tret = string__plus(ret, _SLIT("}"));')
g.auto_str_funcs.writeln('\treturn ret;')
@@ -331,7 +331,7 @@ fn (mut g Gen) gen_str_for_enum(info ast.Enum, styp string, str_fn_name string)
} else if info.is_multi_allowed {
seen << val
}
- g.auto_str_funcs.writeln('\t\tcase ${s}__$val: return _SLIT("$val");')
+ g.auto_str_funcs.writeln('\t\tcase ${s}__${val}: return _SLIT("${val}");')
}
g.auto_str_funcs.writeln('\t\tdefault: return _SLIT("unknown enum value");')
g.auto_str_funcs.writeln('\t}')
@@ -341,12 +341,12 @@ fn (mut g Gen) gen_str_for_enum(info ast.Enum, styp string, str_fn_name string)
fn (mut g Gen) gen_str_for_interface(info ast.Interface, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_interface: $info.types | $styp | $str_fn_name')
+ eprintln('> gen_str_for_interface: ${info.types} | ${styp} | ${str_fn_name}')
}
// _str() functions should have a single argument, the indenting ones take 2:
- g.definitions.writeln('static string ${str_fn_name}($styp x); // auto')
- g.auto_str_funcs.writeln('static string ${str_fn_name}($styp x) { return indent_${str_fn_name}(x, 0); }')
- g.definitions.writeln('static string indent_${str_fn_name}($styp x, int indent_count); // auto')
+ g.definitions.writeln('static string ${str_fn_name}(${styp} x); // auto')
+ g.auto_str_funcs.writeln('static string ${str_fn_name}(${styp} x) { return indent_${str_fn_name}(x, 0); }')
+ g.definitions.writeln('static string indent_${str_fn_name}(${styp} x, int indent_count); // auto')
mut fn_builder := strings.new_builder(512)
mut clean_interface_v_type_name := styp.replace('__', '.')
if styp.ends_with('*') {
@@ -358,41 +358,41 @@ fn (mut g Gen) gen_str_for_interface(info ast.Interface, styp string, str_fn_nam
'>'
}
clean_interface_v_type_name = util.strip_main_name(clean_interface_v_type_name)
- fn_builder.writeln('static string indent_${str_fn_name}($styp x, int indent_count) { /* gen_str_for_interface */')
+ fn_builder.writeln('static string indent_${str_fn_name}(${styp} x, int indent_count) { /* gen_str_for_interface */')
for typ in info.types {
sub_sym := g.table.sym(ast.mktyp(typ))
mut func_name := g.get_str_fn(typ)
sym_has_str_method, str_method_expects_ptr, _ := sub_sym.str_method_info()
if should_use_indent_func(sub_sym.kind) && !sym_has_str_method {
- func_name = 'indent_$func_name'
+ func_name = 'indent_${func_name}'
}
// str_intp
deref := if sym_has_str_method && str_method_expects_ptr { ' ' } else { '*' }
if typ == ast.string_type {
- mut val := '${func_name}(${deref}($sub_sym.cname*)x._$sub_sym.cname'
+ mut val := '${func_name}(${deref}(${sub_sym.cname}*)x._${sub_sym.cname}'
if should_use_indent_func(sub_sym.kind) && !sym_has_str_method {
val += ', indent_count'
}
val += ')'
res := 'str_intp(2, _MOV((StrIntpData[]){
- {_SLIT("${clean_interface_v_type_name}(\'"), $c.si_s_code, {.d_s = $val}},
+ {_SLIT("${clean_interface_v_type_name}(\'"), ${c.si_s_code}, {.d_s = ${val}}},
{_SLIT("\')"), 0, {.d_c = 0 }}
}))'
fn_builder.write_string('\tif (x._typ == _${styp}_${sub_sym.cname}_index)')
- fn_builder.write_string(' return $res;')
+ fn_builder.write_string(' return ${res};')
} else {
- mut val := '${func_name}(${deref}($sub_sym.cname*)x._$sub_sym.cname'
+ mut val := '${func_name}(${deref}(${sub_sym.cname}*)x._${sub_sym.cname}'
if should_use_indent_func(sub_sym.kind) && !sym_has_str_method {
val += ', indent_count'
}
val += ')'
res := 'str_intp(2, _MOV((StrIntpData[]){
- {_SLIT("${clean_interface_v_type_name}("), $c.si_s_code, {.d_s = $val}},
+ {_SLIT("${clean_interface_v_type_name}("), ${c.si_s_code}, {.d_s = ${val}}},
{_SLIT(")"), 0, {.d_c = 0 }}
}))'
fn_builder.write_string('\tif (x._typ == _${styp}_${sub_sym.cname}_index)')
- fn_builder.write_string(' return $res;\n')
+ fn_builder.write_string(' return ${res};\n')
}
}
fn_builder.writeln('\treturn _SLIT("unknown interface value");')
@@ -402,14 +402,14 @@ fn (mut g Gen) gen_str_for_interface(info ast.Interface, styp string, str_fn_nam
fn (mut g Gen) gen_str_for_union_sum_type(info ast.SumType, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_union_sum_type: $info.variants | $styp | $str_fn_name')
+ eprintln('> gen_str_for_union_sum_type: ${info.variants} | ${styp} | ${str_fn_name}')
}
// _str() functions should have a single argument, the indenting ones take 2:
- g.definitions.writeln('static string ${str_fn_name}($styp x); // auto')
- g.auto_str_funcs.writeln('static string ${str_fn_name}($styp x) { return indent_${str_fn_name}(x, 0); }')
- g.definitions.writeln('static string indent_${str_fn_name}($styp x, int indent_count); // auto')
+ g.definitions.writeln('static string ${str_fn_name}(${styp} x); // auto')
+ g.auto_str_funcs.writeln('static string ${str_fn_name}(${styp} x) { return indent_${str_fn_name}(x, 0); }')
+ g.definitions.writeln('static string indent_${str_fn_name}(${styp} x, int indent_count); // auto')
mut fn_builder := strings.new_builder(512)
- fn_builder.writeln('static string indent_${str_fn_name}($styp x, int indent_count) {')
+ fn_builder.writeln('static string indent_${str_fn_name}(${styp} x, int indent_count) {')
mut clean_sum_type_v_type_name := ''
if info.is_anon {
variant_names := info.variants.map(util.strip_main_name(g.table.sym(it).name))
@@ -434,32 +434,32 @@ fn (mut g Gen) gen_str_for_union_sum_type(info ast.SumType, styp string, str_fn_
sym_has_str_method, str_method_expects_ptr, _ := sym.str_method_info()
deref := if sym_has_str_method && str_method_expects_ptr { ' ' } else { '*' }
if should_use_indent_func(sym.kind) && !sym_has_str_method {
- func_name = 'indent_$func_name'
+ func_name = 'indent_${func_name}'
}
// str_intp
if typ == ast.string_type {
- mut val := '${func_name}(${deref}($typ_str*)x._$sym.cname'
+ mut val := '${func_name}(${deref}(${typ_str}*)x._${sym.cname}'
if should_use_indent_func(sym.kind) && !sym_has_str_method {
val += ', indent_count'
}
val += ')'
res := 'str_intp(2, _MOV((StrIntpData[]){
- {_SLIT("${clean_sum_type_v_type_name}(\'"), $c.si_s_code, {.d_s = $val}},
+ {_SLIT("${clean_sum_type_v_type_name}(\'"), ${c.si_s_code}, {.d_s = ${val}}},
{_SLIT("\')"), 0, {.d_c = 0 }}
}))'
- fn_builder.write_string('\t\tcase $typ.idx(): return $res;\n')
+ fn_builder.write_string('\t\tcase ${typ.idx()}: return ${res};\n')
} else {
- mut val := '${func_name}(${deref}($typ_str*)x._$sym.cname'
+ mut val := '${func_name}(${deref}(${typ_str}*)x._${sym.cname}'
if should_use_indent_func(sym.kind) && !sym_has_str_method {
val += ', indent_count'
}
val += ')'
res := 'str_intp(2, _MOV((StrIntpData[]){
- {_SLIT("${clean_sum_type_v_type_name}("), $c.si_s_code, {.d_s = $val}},
+ {_SLIT("${clean_sum_type_v_type_name}("), ${c.si_s_code}, {.d_s = ${val}}},
{_SLIT(")"), 0, {.d_c = 0 }}
}))'
- fn_builder.write_string('\t\tcase $typ.idx(): return $res;\n')
+ fn_builder.write_string('\t\tcase ${typ.idx()}: return ${res};\n')
}
}
fn_builder.writeln('\t\tdefault: return _SLIT("unknown sum type value");')
@@ -487,9 +487,9 @@ fn (mut g Gen) fn_decl_str(info ast.FnType) string {
} else if info.func.return_type != ast.void_type {
x := util.strip_main_name(g.table.get_type_name(g.unwrap_generic(info.func.return_type)))
if info.func.return_type.has_flag(.optional) {
- fn_str += ' ?$x'
+ fn_str += ' ?${x}'
} else {
- fn_str += ' $x'
+ fn_str += ' ${x}'
}
}
return fn_str
@@ -497,7 +497,7 @@ fn (mut g Gen) fn_decl_str(info ast.FnType) string {
fn (mut g Gen) gen_str_for_fn_type(info ast.FnType, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_fn_type: $info.func.name | $styp | $str_fn_name')
+ eprintln('> gen_str_for_fn_type: ${info.func.name} | ${styp} | ${str_fn_name}')
}
g.definitions.writeln('static string ${str_fn_name}(); // auto')
g.auto_str_funcs.writeln('static string ${str_fn_name}() { return _SLIT("${g.fn_decl_str(info)}");}')
@@ -505,20 +505,20 @@ fn (mut g Gen) gen_str_for_fn_type(info ast.FnType, styp string, str_fn_name str
fn (mut g Gen) gen_str_for_chan(info ast.Chan, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_chan: $info.elem_type.debug() | $styp | $str_fn_name')
+ eprintln('> gen_str_for_chan: ${info.elem_type.debug()} | ${styp} | ${str_fn_name}')
}
elem_type_name := util.strip_main_name(g.table.get_type_name(g.unwrap_generic(info.elem_type)))
- g.definitions.writeln('static string ${str_fn_name}($styp x); // auto')
- g.auto_str_funcs.writeln('static string ${str_fn_name}($styp x) { return sync__Channel_auto_str(x, _SLIT("$elem_type_name")); }')
+ g.definitions.writeln('static string ${str_fn_name}(${styp} x); // auto')
+ g.auto_str_funcs.writeln('static string ${str_fn_name}(${styp} x) { return sync__Channel_auto_str(x, _SLIT("${elem_type_name}")); }')
}
fn (mut g Gen) gen_str_for_thread(info ast.Thread, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_thread: $info.return_type.debug() | $styp | $str_fn_name')
+ eprintln('> gen_str_for_thread: ${info.return_type.debug()} | ${styp} | ${str_fn_name}')
}
ret_type_name := util.strip_main_name(g.table.get_type_name(info.return_type))
- g.definitions.writeln('static string ${str_fn_name}($styp _); // auto}')
- g.auto_str_funcs.writeln('static string ${str_fn_name}($styp _) { return _SLIT("thread($ret_type_name)");}')
+ g.definitions.writeln('static string ${str_fn_name}(${styp} _); // auto}')
+ g.auto_str_funcs.writeln('static string ${str_fn_name}(${styp} _) { return _SLIT("thread(${ret_type_name})");}')
}
[inline]
@@ -540,7 +540,7 @@ fn deref_kind(str_method_expects_ptr bool, is_elem_ptr bool, typ ast.Type) (stri
fn (mut g Gen) gen_str_for_array(info ast.Array, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_array: $info.elem_type.debug() | $styp | $str_fn_name')
+ eprintln('> gen_str_for_array: ${info.elem_type.debug()} | ${styp} | ${str_fn_name}')
}
mut typ := info.elem_type
mut sym := g.table.sym(info.elem_type)
@@ -553,10 +553,10 @@ fn (mut g Gen) gen_str_for_array(info ast.Array, styp string, str_fn_name string
sym_has_str_method, str_method_expects_ptr, _ := sym.str_method_info()
elem_str_fn_name := g.get_str_fn(typ)
- g.definitions.writeln('static string ${str_fn_name}($styp a); // auto')
- g.auto_str_funcs.writeln('static string ${str_fn_name}($styp a) { return indent_${str_fn_name}(a, 0);}')
- g.definitions.writeln('static string indent_${str_fn_name}($styp a, int indent_count); // auto')
- g.auto_str_funcs.writeln('static string indent_${str_fn_name}($styp a, int indent_count) {')
+ g.definitions.writeln('static string ${str_fn_name}(${styp} a); // auto')
+ g.auto_str_funcs.writeln('static string ${str_fn_name}(${styp} a) { return indent_${str_fn_name}(a, 0);}')
+ g.definitions.writeln('static string indent_${str_fn_name}(${styp} a, int indent_count); // auto')
+ g.auto_str_funcs.writeln('static string indent_${str_fn_name}(${styp} a, int indent_count) {')
g.auto_str_funcs.writeln('\tstrings__Builder sb = strings__new_builder(a.len * 10);')
g.auto_str_funcs.writeln('\tstrings__Builder_write_string(&sb, _SLIT("["));')
g.auto_str_funcs.writeln('\tfor (int i = 0; i < a.len; ++i) {')
@@ -564,10 +564,10 @@ fn (mut g Gen) gen_str_for_array(info ast.Array, styp string, str_fn_name string
g.auto_str_funcs.writeln('\t\tstring x = ${elem_str_fn_name}();')
} else {
if sym.kind == .array_fixed {
- g.auto_str_funcs.writeln('\t\t$field_styp it;')
- g.auto_str_funcs.writeln('\t\tmemcpy(*($field_styp*)it, (byte*)array_get(a, i), sizeof($field_styp));')
+ g.auto_str_funcs.writeln('\t\t${field_styp} it;')
+ g.auto_str_funcs.writeln('\t\tmemcpy(*(${field_styp}*)it, (byte*)array_get(a, i), sizeof(${field_styp}));')
} else {
- g.auto_str_funcs.writeln('\t\t$field_styp it = *($field_styp*)array_get(a, i);')
+ g.auto_str_funcs.writeln('\t\t${field_styp} it = *(${field_styp}*)array_get(a, i);')
}
if should_use_indent_func(sym.kind) && !sym_has_str_method {
if is_elem_ptr {
@@ -583,12 +583,12 @@ fn (mut g Gen) gen_str_for_array(info ast.Array, styp string, str_fn_name string
}
} else if sym.kind == .rune {
// Rune are managed at this level as strings
- g.auto_str_funcs.writeln('\t\tstring x = str_intp(2, _MOV((StrIntpData[]){{_SLIT("\`"), $c.si_s_code, {.d_s = ${elem_str_fn_name}(it) }}, {_SLIT("\`"), 0, {.d_c = 0 }}}));\n')
+ g.auto_str_funcs.writeln('\t\tstring x = str_intp(2, _MOV((StrIntpData[]){{_SLIT("\`"), ${c.si_s_code}, {.d_s = ${elem_str_fn_name}(it) }}, {_SLIT("\`"), 0, {.d_c = 0 }}}));\n')
} else if sym.kind == .string {
if is_elem_ptr {
- g.auto_str_funcs.writeln('\t\tstring x = str_intp(2, _MOV((StrIntpData[]){{_SLIT("&\'"), $c.si_s_code, {.d_s = *it }}, {_SLIT("\'"), 0, {.d_c = 0 }}}));\n')
+ g.auto_str_funcs.writeln('\t\tstring x = str_intp(2, _MOV((StrIntpData[]){{_SLIT("&\'"), ${c.si_s_code}, {.d_s = *it }}, {_SLIT("\'"), 0, {.d_c = 0 }}}));\n')
} else {
- g.auto_str_funcs.writeln('\t\tstring x = str_intp(2, _MOV((StrIntpData[]){{_SLIT("\'"), $c.si_s_code, {.d_s = it }}, {_SLIT("\'"), 0, {.d_c = 0 }}}));\n')
+ g.auto_str_funcs.writeln('\t\tstring x = str_intp(2, _MOV((StrIntpData[]){{_SLIT("\'"), ${c.si_s_code}, {.d_s = it }}, {_SLIT("\'"), 0, {.d_c = 0 }}}));\n')
}
} else {
// There is a custom .str() method, so use it.
@@ -598,11 +598,11 @@ fn (mut g Gen) gen_str_for_array(info ast.Array, styp string, str_fn_name string
if is_elem_ptr {
g.auto_str_funcs.writeln('\t\tstring x = _SLIT("nil");')
g.auto_str_funcs.writeln('\t\tif (it != 0) {')
- g.auto_str_funcs.writeln('\t\t\tstrings__Builder_write_string(&sb, _SLIT("$deref_label"));')
+ g.auto_str_funcs.writeln('\t\t\tstrings__Builder_write_string(&sb, _SLIT("${deref_label}"));')
g.auto_str_funcs.writeln('\t\t\tx = ${elem_str_fn_name}(${deref}it);')
g.auto_str_funcs.writeln('\t\t}')
} else {
- g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, _SLIT("$deref_label"));')
+ g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, _SLIT("${deref_label}"));')
g.auto_str_funcs.writeln('\t\tstring x = ${elem_str_fn_name}(${deref}it);')
}
}
@@ -625,7 +625,7 @@ fn (mut g Gen) gen_str_for_array(info ast.Array, styp string, str_fn_name string
fn (mut g Gen) gen_str_for_array_fixed(info ast.ArrayFixed, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_array_fixed: $info.elem_type.debug() | $styp | $str_fn_name')
+ eprintln('> gen_str_for_array_fixed: ${info.elem_type.debug()} | ${styp} | ${str_fn_name}')
}
mut typ := info.elem_type
mut sym := g.table.sym(info.elem_type)
@@ -637,13 +637,13 @@ fn (mut g Gen) gen_str_for_array_fixed(info ast.ArrayFixed, styp string, str_fn_
sym_has_str_method, str_method_expects_ptr, _ := sym.str_method_info()
elem_str_fn_name := g.get_str_fn(typ)
- g.definitions.writeln('static string ${str_fn_name}($styp a); // auto')
- g.auto_str_funcs.writeln('static string ${str_fn_name}($styp a) { return indent_${str_fn_name}(a, 0);}')
- g.definitions.writeln('static string indent_${str_fn_name}($styp a, int indent_count); // auto')
- g.auto_str_funcs.writeln('static string indent_${str_fn_name}($styp a, int indent_count) {')
- g.auto_str_funcs.writeln('\tstrings__Builder sb = strings__new_builder($info.size * 10);')
+ g.definitions.writeln('static string ${str_fn_name}(${styp} a); // auto')
+ g.auto_str_funcs.writeln('static string ${str_fn_name}(${styp} a) { return indent_${str_fn_name}(a, 0);}')
+ g.definitions.writeln('static string indent_${str_fn_name}(${styp} a, int indent_count); // auto')
+ g.auto_str_funcs.writeln('static string indent_${str_fn_name}(${styp} a, int indent_count) {')
+ g.auto_str_funcs.writeln('\tstrings__Builder sb = strings__new_builder(${info.size} * 10);')
g.auto_str_funcs.writeln('\tstrings__Builder_write_string(&sb, _SLIT("["));')
- g.auto_str_funcs.writeln('\tfor (int i = 0; i < $info.size; ++i) {')
+ g.auto_str_funcs.writeln('\tfor (int i = 0; i < ${info.size}; ++i) {')
if sym.kind == .function {
g.auto_str_funcs.writeln('\t\tstring x = ${elem_str_fn_name}();')
g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, x);')
@@ -651,14 +651,14 @@ fn (mut g Gen) gen_str_for_array_fixed(info ast.ArrayFixed, styp string, str_fn_
deref, deref_label := deref_kind(str_method_expects_ptr, is_elem_ptr, typ)
if should_use_indent_func(sym.kind) && !sym_has_str_method {
if is_elem_ptr {
- g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, _SLIT("$deref_label"));')
+ g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, _SLIT("${deref_label}"));')
g.auto_str_funcs.writeln('\t\tif ( 0 == a[i] ) {')
g.auto_str_funcs.writeln('\t\t\tstrings__Builder_write_string(&sb, _SLIT("0"));')
g.auto_str_funcs.writeln('\t\t}else{')
- g.auto_str_funcs.writeln('\t\t\tstrings__Builder_write_string(&sb, ${elem_str_fn_name}( $deref a[i]) );')
+ g.auto_str_funcs.writeln('\t\t\tstrings__Builder_write_string(&sb, ${elem_str_fn_name}( ${deref} a[i]) );')
g.auto_str_funcs.writeln('\t\t}')
} else {
- g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${elem_str_fn_name}( $deref a[i]) );')
+ g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${elem_str_fn_name}( ${deref} a[i]) );')
}
} else if sym.kind in [.f32, .f64] {
if sym.kind == .f32 {
@@ -669,10 +669,10 @@ fn (mut g Gen) gen_str_for_array_fixed(info ast.ArrayFixed, styp string, str_fn_
} else if sym.kind == .string {
g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${str_intp_sq('a[i]')});')
} else if sym.kind == .rune {
- tmp_str := str_intp_rune('${elem_str_fn_name}( $deref a[i])')
- g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, $tmp_str);')
+ tmp_str := str_intp_rune('${elem_str_fn_name}( ${deref} a[i])')
+ g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${tmp_str});')
} else {
- g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${elem_str_fn_name}( $deref a[i]));')
+ g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${elem_str_fn_name}( ${deref} a[i]));')
}
}
g.auto_str_funcs.writeln('\t\tif (i < ${info.size - 1}) {')
@@ -688,7 +688,7 @@ fn (mut g Gen) gen_str_for_array_fixed(info ast.ArrayFixed, styp string, str_fn_
fn (mut g Gen) gen_str_for_map(info ast.Map, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_map: $info.key_type.debug() -> $info.value_type.debug() | $styp | $str_fn_name')
+ eprintln('> gen_str_for_map: ${info.key_type.debug()} -> ${info.value_type.debug()} | ${styp} | ${str_fn_name}')
}
mut key_typ := info.key_type
mut key_sym := g.table.sym(key_typ)
@@ -714,10 +714,10 @@ fn (mut g Gen) gen_str_for_map(info ast.Map, styp string, str_fn_name string) {
g.get_str_fn(val_typ)
}
- g.definitions.writeln('static string ${str_fn_name}($styp m); // auto')
- g.auto_str_funcs.writeln('static string ${str_fn_name}($styp m) { return indent_${str_fn_name}(m, 0);}')
- g.definitions.writeln('static string indent_${str_fn_name}($styp m, int indent_count); // auto')
- g.auto_str_funcs.writeln('static string indent_${str_fn_name}($styp m, int indent_count) { /* gen_str_for_map */')
+ g.definitions.writeln('static string ${str_fn_name}(${styp} m); // auto')
+ g.auto_str_funcs.writeln('static string ${str_fn_name}(${styp} m) { return indent_${str_fn_name}(m, 0);}')
+ g.definitions.writeln('static string indent_${str_fn_name}(${styp} m, int indent_count); // auto')
+ g.auto_str_funcs.writeln('static string indent_${str_fn_name}(${styp} m, int indent_count) { /* gen_str_for_map */')
g.auto_str_funcs.writeln('\tstrings__Builder sb = strings__new_builder(m.key_values.len*10);')
g.auto_str_funcs.writeln('\tstrings__Builder_write_string(&sb, _SLIT("{"));')
g.auto_str_funcs.writeln('\tfor (int i = 0; i < m.key_values.len; ++i) {')
@@ -726,13 +726,13 @@ fn (mut g Gen) gen_str_for_map(info ast.Map, styp string, str_fn_name string) {
if key_sym.kind == .string {
g.auto_str_funcs.writeln('\t\tstring key = *(string*)DenseArray_key(&m.key_values, i);')
} else {
- g.auto_str_funcs.writeln('\t\t$key_styp key = *($key_styp*)DenseArray_key(&m.key_values, i);')
+ g.auto_str_funcs.writeln('\t\t${key_styp} key = *(${key_styp}*)DenseArray_key(&m.key_values, i);')
}
if key_sym.kind == .string {
g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${str_intp_sq('key')});')
} else if key_sym.kind == .rune {
tmp_str := str_intp_rune('${key_str_fn_name}(key)')
- g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, $tmp_str);')
+ g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${tmp_str});')
} else {
g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${key_str_fn_name}(key));')
}
@@ -740,23 +740,23 @@ fn (mut g Gen) gen_str_for_map(info ast.Map, styp string, str_fn_name string) {
if val_sym.kind == .function {
g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${elem_str_fn_name}());')
} else if val_sym.kind == .string {
- tmp_str := str_intp_sq('*($val_styp*)DenseArray_value(&m.key_values, i)')
- g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, $tmp_str);')
+ tmp_str := str_intp_sq('*(${val_styp}*)DenseArray_value(&m.key_values, i)')
+ g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${tmp_str});')
} else if should_use_indent_func(val_sym.kind) && !val_sym.has_method('str') {
ptr_str := '*'.repeat(val_typ.nr_muls())
- g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, indent_${elem_str_fn_name}(*${ptr_str}($val_styp*)DenseArray_value(&m.key_values, i), indent_count));')
+ g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, indent_${elem_str_fn_name}(*${ptr_str}(${val_styp}*)DenseArray_value(&m.key_values, i), indent_count));')
} else if val_sym.kind in [.f32, .f64] {
- tmp_val := '*($val_styp*)DenseArray_value(&m.key_values, i)'
+ tmp_val := '*(${val_styp}*)DenseArray_value(&m.key_values, i)'
if val_sym.kind == .f32 {
g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${str_intp_g32(tmp_val)});')
} else {
g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${str_intp_g64(tmp_val)});')
}
} else if val_sym.kind == .rune {
- tmp_str := str_intp_rune('${elem_str_fn_name}(*($val_styp*)DenseArray_value(&m.key_values, i))')
- g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, $tmp_str);')
+ tmp_str := str_intp_rune('${elem_str_fn_name}(*(${val_styp}*)DenseArray_value(&m.key_values, i))')
+ g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${tmp_str});')
} else {
- g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${elem_str_fn_name}(*($val_styp*)DenseArray_value(&m.key_values, i)));')
+ g.auto_str_funcs.writeln('\t\tstrings__Builder_write_string(&sb, ${elem_str_fn_name}(*(${val_styp}*)DenseArray_value(&m.key_values, i)));')
}
g.auto_str_funcs.writeln('\t\tif (i != m.key_values.len-1) {')
g.auto_str_funcs.writeln('\t\t\tstrings__Builder_write_string(&sb, _SLIT(", "));')
@@ -815,17 +815,17 @@ fn (g &Gen) type_to_fmt(typ ast.Type) StrIntpType {
fn (mut g Gen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name string) {
$if trace_autostr ? {
- eprintln('> gen_str_for_struct: $info.parent_type.debug() | $styp | $str_fn_name')
+ eprintln('> gen_str_for_struct: ${info.parent_type.debug()} | ${styp} | ${str_fn_name}')
}
// _str() functions should have a single argument, the indenting ones take 2:
- g.definitions.writeln('static string ${str_fn_name}($styp it); // auto')
- g.auto_str_funcs.writeln('static string ${str_fn_name}($styp it) { return indent_${str_fn_name}(it, 0);}')
- g.definitions.writeln('static string indent_${str_fn_name}($styp it, int indent_count); // auto')
+ g.definitions.writeln('static string ${str_fn_name}(${styp} it); // auto')
+ g.auto_str_funcs.writeln('static string ${str_fn_name}(${styp} it) { return indent_${str_fn_name}(it, 0);}')
+ g.definitions.writeln('static string indent_${str_fn_name}(${styp} it, int indent_count); // auto')
mut fn_builder := strings.new_builder(512)
defer {
g.auto_fn_definitions << fn_builder.str()
}
- fn_builder.writeln('static string indent_${str_fn_name}($styp it, int indent_count) {')
+ fn_builder.writeln('static string indent_${str_fn_name}(${styp} it, int indent_count) {')
mut clean_struct_v_type_name := styp.replace('__', '.')
if clean_struct_v_type_name.contains('_T_') {
// TODO: this is a bit hacky. styp shouldn't be even parsed with _T_
@@ -837,7 +837,7 @@ fn (mut g Gen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name stri
clean_struct_v_type_name = util.strip_main_name(clean_struct_v_type_name)
// generate ident / indent length = 4 spaces
if info.fields.len == 0 {
- fn_builder.writeln('\treturn _SLIT("$clean_struct_v_type_name{}");')
+ fn_builder.writeln('\treturn _SLIT("${clean_struct_v_type_name}{}");')
fn_builder.writeln('}')
return
}
@@ -864,7 +864,7 @@ fn (mut g Gen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name stri
}
}
fn_body.writeln('\tstring res = str_intp( ${(info.fields.len - field_skips.len) * 4 + 3}, _MOV((StrIntpData[]){')
- fn_body.writeln('\t\t{_SLIT("$clean_struct_v_type_name{\\n"), 0, {.d_c=0}},')
+ fn_body.writeln('\t\t{_SLIT("${clean_struct_v_type_name}{\\n"), 0, {.d_c=0}},')
mut is_first := true
for i, field in info.fields {
// Skip `str:skip` fields
@@ -896,10 +896,10 @@ fn (mut g Gen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name stri
if is_first {
// first field doesn't need \n
- fn_body.write_string('\t\t{_SLIT0, $c.si_s_code, {.d_s=indents}}, {_SLIT(" $field.name: $ptr_amp$prefix"), 0, {.d_c=0}}, ')
+ fn_body.write_string('\t\t{_SLIT0, ${c.si_s_code}, {.d_s=indents}}, {_SLIT(" ${field.name}: ${ptr_amp}${prefix}"), 0, {.d_c=0}}, ')
is_first = false
} else {
- fn_body.write_string('\t\t{_SLIT("\\n"), $c.si_s_code, {.d_s=indents}}, {_SLIT(" $field.name: $ptr_amp$prefix"), 0, {.d_c=0}}, ')
+ fn_body.write_string('\t\t{_SLIT("\\n"), ${c.si_s_code}, {.d_s=indents}}, {_SLIT(" ${field.name}: ${ptr_amp}${prefix}"), 0, {.d_c=0}}, ')
}
// custom methods management
@@ -918,10 +918,10 @@ fn (mut g Gen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name stri
// with floats we use always the g representation:
if sym.kind !in [.f32, .f64] {
- fn_body.write_string('{_SLIT("$quote_str"), ${int(base_fmt)}, {.${data_str(base_fmt)}=')
+ fn_body.write_string('{_SLIT("${quote_str}"), ${int(base_fmt)}, {.${data_str(base_fmt)}=')
} else {
g_fmt := '0x' + (u32(base_fmt) | u32(0x7F) << 9).hex()
- fn_body.write_string('{_SLIT("$quote_str"), $g_fmt, {.${data_str(base_fmt)}=')
+ fn_body.write_string('{_SLIT("${quote_str}"), ${g_fmt}, {.${data_str(base_fmt)}=')
}
mut funcprefix := ''
@@ -929,7 +929,7 @@ fn (mut g Gen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name stri
field.name, sym_has_str_method, str_method_expects_ptr)
ftyp_nr_muls := field.typ.nr_muls()
if ftyp_nr_muls > 1 || field.typ in ast.cptr_types {
- func = '(voidptr) it.$field.name'
+ func = '(voidptr) it.${field.name}'
caller_should_free = false
} else if ftyp_noshared.is_ptr() {
// reference types can be "nil"
@@ -947,7 +947,7 @@ fn (mut g Gen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name stri
} else {
// manage C charptr
if field.typ in ast.charptr_types {
- fn_body.write_string('tos4((byteptr)$func)')
+ fn_body.write_string('tos4((byteptr)${func})')
} else {
if field.typ.is_ptr() && sym.kind == .struct_ {
funcprefix += '(indent_count > 25)? _SLIT("") : '
@@ -955,7 +955,8 @@ fn (mut g Gen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name stri
// eprintln('>>> caller_should_free: ${caller_should_free:6s} | funcprefix: $funcprefix | func: $func')
if caller_should_free {
tmpvar := g.new_tmp_var()
- fn_body_surrounder.add('\tstring $tmpvar = $funcprefix$func;', '\tstring_free(&$tmpvar);')
+ fn_body_surrounder.add('\tstring ${tmpvar} = ${funcprefix}${func};',
+ '\tstring_free(&${tmpvar});')
fn_body.write_string(tmpvar)
} else {
fn_body.write_string(funcprefix)
@@ -964,15 +965,15 @@ fn (mut g Gen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name stri
}
}
- fn_body.writeln('}}, {_SLIT("$quote_str"), 0, {.d_c=0}},')
+ fn_body.writeln('}}, {_SLIT("${quote_str}"), 0, {.d_c=0}},')
}
- fn_body.writeln('\t\t{_SLIT("\\n"), $c.si_s_code, {.d_s=indents}}, {_SLIT("}"), 0, {.d_c=0}},')
+ fn_body.writeln('\t\t{_SLIT("\\n"), ${c.si_s_code}, {.d_s=indents}}, {_SLIT("}"), 0, {.d_c=0}},')
fn_body.writeln('\t}));')
}
fn struct_auto_str_func(sym &ast.TypeSymbol, _field_type ast.Type, fn_name string, field_name string, has_custom_str bool, expects_ptr bool) (string, bool) {
$if trace_autostr ? {
- eprintln('> struct_auto_str_func: $sym.name | field_type.debug() | $fn_name | $field_name | $has_custom_str | $expects_ptr')
+ eprintln('> struct_auto_str_func: ${sym.name} | ${_field_type.debug()} | ${fn_name} | ${field_name} | ${has_custom_str} | ${expects_ptr}')
}
field_type := if _field_type.has_flag(.shared_f) { _field_type.deref() } else { _field_type }
sufix := if field_type.has_flag(.shared_f) { '->val' } else { '' }
@@ -980,42 +981,42 @@ fn struct_auto_str_func(sym &ast.TypeSymbol, _field_type ast.Type, fn_name strin
if sym.kind == .enum_ {
return '${fn_name}(${deref}(it.${c_name(field_name)}))', true
} else if should_use_indent_func(sym.kind) {
- obj := '${deref}it.${c_name(field_name)}$sufix'
+ obj := '${deref}it.${c_name(field_name)}${sufix}'
if has_custom_str {
- return '${fn_name}($obj)', true
+ return '${fn_name}(${obj})', true
}
- return 'indent_${fn_name}($obj, indent_count + 1)', true
+ return 'indent_${fn_name}(${obj}, indent_count + 1)', true
} else if sym.kind in [.array, .array_fixed, .map, .sum_type] {
- obj := '${deref}it.${c_name(field_name)}$sufix'
+ obj := '${deref}it.${c_name(field_name)}${sufix}'
if has_custom_str {
- return '${fn_name}($obj)', true
+ return '${fn_name}(${obj})', true
}
- return 'indent_${fn_name}($obj, indent_count + 1)', true
+ return 'indent_${fn_name}(${obj}, indent_count + 1)', true
} else if sym.kind == .function {
return '${fn_name}()', true
} else if sym.kind == .chan {
- return '${fn_name}(${deref}it.${c_name(field_name)}$sufix)', true
+ return '${fn_name}(${deref}it.${c_name(field_name)}${sufix})', true
} else {
mut method_str := 'it.${c_name(field_name)}'
if sym.kind == .bool {
- return '$method_str ? _SLIT("true") : _SLIT("false")', false
+ return '${method_str} ? _SLIT("true") : _SLIT("false")', false
} else if (field_type.is_int_valptr() || field_type.is_float_valptr())
&& field_type.is_ptr() && !expects_ptr {
// ptr int can be "nil", so this needs to be casted to a string
if sym.kind == .f32 {
return 'str_intp(1, _MOV((StrIntpData[]){
- {_SLIT0, $si_g32_code, {.d_f32 = *$method_str }}
+ {_SLIT0, ${si_g32_code}, {.d_f32 = *${method_str} }}
}))', true
} else if sym.kind == .f64 {
return 'str_intp(1, _MOV((StrIntpData[]){
- {_SLIT0, $si_g64_code, {.d_f64 = *$method_str }}
+ {_SLIT0, ${si_g64_code}, {.d_f64 = *${method_str} }}
}))', true
} else if sym.kind == .u64 {
fmt_type := StrIntpType.si_u64
- return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, ${u32(fmt_type) | 0xfe00}, {.d_u64 = *$method_str }}}))', true
+ return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, ${u32(fmt_type) | 0xfe00}, {.d_u64 = *${method_str} }}}))', true
}
fmt_type := StrIntpType.si_i32
- return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, ${u32(fmt_type) | 0xfe00}, {.d_i32 = *$method_str }}}))', true
+ return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, ${u32(fmt_type) | 0xfe00}, {.d_i32 = *${method_str} }}}))', true
}
return method_str, false
}
diff --git a/vlib/v/gen/c/cgen.v b/vlib/v/gen/c/cgen.v
index 69f78a39c5..4c47165b23 100644
--- a/vlib/v/gen/c/cgen.v
+++ b/vlib/v/gen/c/cgen.v
@@ -456,7 +456,7 @@ pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) (string,
if idx in [0, 30] {
continue
}
- g.definitions.writeln('int _v_type_idx_${sym.cname}() { return $idx; };')
+ g.definitions.writeln('int _v_type_idx_${sym.cname}() { return ${idx}; };')
}
}
//
@@ -590,7 +590,7 @@ pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) (string,
$if trace_all_generic_fn_keys ? {
gkeys := g.table.fn_generic_types.keys()
for gkey in gkeys {
- eprintln('>> g.table.fn_generic_types key: $gkey')
+ eprintln('>> g.table.fn_generic_types key: ${gkey}')
}
}
out_fn_start_pos := g.out_fn_start_pos.clone()
@@ -637,7 +637,7 @@ fn cgen_process_one_file_cb(mut p pool.PoolProcessor, idx int, wid int) &Gen {
module_built: global_g.module_built
timers: util.new_timers(
should_print: global_g.timers_should_print
- label: 'cgen_process_one_file_cb idx: $idx, wid: $wid'
+ label: 'cgen_process_one_file_cb idx: ${idx}, wid: ${wid}'
)
inner_loop: &ast.empty_stmt
field_data_type: ast.Type(global_g.table.find_type_idx('FieldData'))
@@ -695,7 +695,7 @@ pub fn (mut g Gen) free_builders() {
}
pub fn (mut g Gen) gen_file() {
- g.timers.start('cgen_file $g.file.path')
+ g.timers.start('cgen_file ${g.file.path}')
g.unique_file_path_hash = fnv1a.sum64_string(g.file.path)
if g.pref.is_vlines {
g.vlines_path = util.vlines_escape_path(g.file.path, g.pref.ccompiler)
@@ -710,7 +710,7 @@ pub fn (mut g Gen) gen_file() {
g.embedded_files << path
}
}
- g.timers.show('cgen_file $g.file.path')
+ g.timers.show('cgen_file ${g.file.path}')
}
pub fn (g &Gen) hashes() string {
@@ -780,7 +780,7 @@ pub fn (mut g Gen) init() {
g.comptime_definitions.writeln('// Turned ON custom defines: ' +
g.pref.compile_defines.join(','))
for cdefine in g.pref.compile_defines {
- g.comptime_definitions.writeln('#define CUSTOM_DEFINE_$cdefine')
+ g.comptime_definitions.writeln('#define CUSTOM_DEFINE_${cdefine}')
}
g.comptime_definitions.writeln('')
}
@@ -828,7 +828,7 @@ pub fn (mut g Gen) init() {
if f.mod != 'main' && key != 'main' { // !key.starts_with('main.') {
continue
}
- g.obf_table[key] = '_f$i'
+ g.obf_table[key] = '_f${i}'
i++
}
// methods
@@ -837,7 +837,7 @@ pub fn (mut g Gen) init() {
continue
}
for method in type_sym.methods {
- g.obf_table[type_sym.name + '.' + method.name] = '_f$i'
+ g.obf_table[type_sym.name + '.' + method.name] = '_f${i}'
i++
}
}
@@ -878,7 +878,7 @@ pub fn (mut g Gen) write_typeof_functions() {
if sum_info.is_generic {
continue
}
- g.writeln('${static_prefix}char * v_typeof_sumtype_${sym.cname}(int sidx) { /* $sym.name */ ')
+ g.writeln('${static_prefix}char * v_typeof_sumtype_${sym.cname}(int sidx) { /* ${sym.name} */ ')
// g.writeln('static char * v_typeof_sumtype_${sym.cname}(int sidx) { /* $sym.name */ ')
if g.pref.build_mode == .build_module {
g.writeln('\t\tif( sidx == _v_type_idx_${sym.cname}() ) return "${util.strip_main_name(sym.name)}";')
@@ -890,10 +890,10 @@ pub fn (mut g Gen) write_typeof_functions() {
} else {
tidx := g.table.find_type_idx(sym.name)
g.writeln('\tswitch(sidx) {')
- g.writeln('\t\tcase $tidx: return "${util.strip_main_name(sym.name)}";')
+ g.writeln('\t\tcase ${tidx}: return "${util.strip_main_name(sym.name)}";')
for v in sum_info.variants {
subtype := g.table.sym(v)
- g.writeln('\t\tcase $v.idx(): return "${util.strip_main_name(subtype.name)}";')
+ g.writeln('\t\tcase ${v.idx()}: return "${util.strip_main_name(subtype.name)}";')
}
g.writeln('\t\tdefault: return "unknown ${util.strip_main_name(sym.name)}";')
g.writeln('\t}')
@@ -901,7 +901,7 @@ pub fn (mut g Gen) write_typeof_functions() {
g.writeln('}')
g.writeln('')
// g.writeln('static int v_typeof_sumtype_idx_${sym.cname}(int sidx) { /* $sym.name */ ')
- g.writeln('${static_prefix}int v_typeof_sumtype_idx_${sym.cname}(int sidx) { /* $sym.name */ ')
+ g.writeln('${static_prefix}int v_typeof_sumtype_idx_${sym.cname}(int sidx) { /* ${sym.name} */ ')
if g.pref.build_mode == .build_module {
g.writeln('\t\tif( sidx == _v_type_idx_${sym.cname}() ) return ${int(ityp)};')
for v in sum_info.variants {
@@ -912,9 +912,9 @@ pub fn (mut g Gen) write_typeof_functions() {
} else {
tidx := g.table.find_type_idx(sym.name)
g.writeln('\tswitch(sidx) {')
- g.writeln('\t\tcase $tidx: return ${int(ityp)};')
+ g.writeln('\t\tcase ${tidx}: return ${int(ityp)};')
for v in sum_info.variants {
- g.writeln('\t\tcase $v.idx(): return ${int(v)};')
+ g.writeln('\t\tcase ${v.idx()}: return ${int(v)};')
}
g.writeln('\t\tdefault: return ${int(ityp)};')
g.writeln('\t}')
@@ -929,7 +929,7 @@ pub fn (mut g Gen) write_typeof_functions() {
continue
}
g.definitions.writeln('static char * v_typeof_interface_${sym.cname}(int sidx);')
- g.writeln('static char * v_typeof_interface_${sym.cname}(int sidx) { /* $sym.name */ ')
+ g.writeln('static char * v_typeof_interface_${sym.cname}(int sidx) { /* ${sym.name} */ ')
for t in inter_info.types {
sub_sym := g.table.sym(ast.mktyp(t))
g.writeln('\tif (sidx == _${sym.cname}_${sub_sym.cname}_index) return "${util.strip_main_name(sub_sym.name)}";')
@@ -937,7 +937,7 @@ pub fn (mut g Gen) write_typeof_functions() {
g.writeln('\treturn "unknown ${util.strip_main_name(sym.name)}";')
g.writeln('}')
g.writeln('')
- g.writeln('static int v_typeof_interface_idx_${sym.cname}(int sidx) { /* $sym.name */ ')
+ g.writeln('static int v_typeof_interface_idx_${sym.cname}(int sidx) { /* ${sym.name} */ ')
for t in inter_info.types {
sub_sym := g.table.sym(ast.mktyp(t))
g.writeln('\tif (sidx == _${sym.cname}_${sub_sym.cname}_index) return ${int(t)};')
@@ -1040,7 +1040,7 @@ fn (mut g Gen) optional_type_name(t ast.Type) (string, string) {
if sym.language == .c && sym.kind == .struct_ {
styp = '${c.option_name}_${base.replace(' ', '_')}'
} else {
- styp = '${c.option_name}_$base'
+ styp = '${c.option_name}_${base}'
}
if t.is_ptr() {
styp = styp.replace('*', '_ptr')
@@ -1055,7 +1055,7 @@ fn (mut g Gen) result_type_name(t ast.Type) (string, string) {
if sym.language == .c && sym.kind == .struct_ {
styp = '${c.result_name}_${base.replace(' ', '_')}'
} else {
- styp = '${c.result_name}_$base'
+ styp = '${c.result_name}_${base}'
}
if t.is_ptr() {
styp = styp.replace('*', '_ptr')
@@ -1070,12 +1070,12 @@ fn (g Gen) optional_type_text(styp string, base string) string {
} else if base.starts_with('anon_fn') {
'void*'
} else {
- if base.starts_with('struct ') && !base.ends_with('*') { '$base*' } else { base }
+ if base.starts_with('struct ') && !base.ends_with('*') { '${base}*' } else { base }
}
- ret := 'struct $styp {
+ ret := 'struct ${styp} {
byte state;
IError err;
- byte data[sizeof($size) > 1 ? sizeof($size) : 1];
+ byte data[sizeof(${size}) > 1 ? sizeof(${size}) : 1];
}'
return ret
}
@@ -1087,12 +1087,12 @@ fn (g Gen) result_type_text(styp string, base string) string {
} else if base.starts_with('anon_fn') {
'void*'
} else {
- if base.starts_with('struct ') && !base.ends_with('*') { '$base*' } else { base }
+ if base.starts_with('struct ') && !base.ends_with('*') { '${base}*' } else { base }
}
- ret := 'struct $styp {
+ ret := 'struct ${styp} {
bool is_error;
IError err;
- byte data[sizeof($size) > 1 ? sizeof($size) : 1];
+ byte data[sizeof(${size}) > 1 ? sizeof(${size}) : 1];
}'
return ret
}
@@ -1119,7 +1119,7 @@ fn (mut g Gen) write_optionals() {
continue
}
done << base
- g.typedefs.writeln('typedef struct $styp $styp;')
+ g.typedefs.writeln('typedef struct ${styp} ${styp};')
if base in g.options_forward {
g.out_options_forward.write_string(g.optional_type_text(styp, base) + ';\n\n')
} else {
@@ -1135,7 +1135,7 @@ fn (mut g Gen) write_results() {
continue
}
done << base
- g.typedefs.writeln('typedef struct $styp $styp;')
+ g.typedefs.writeln('typedef struct ${styp} ${styp};')
if base in g.results_forward {
g.out_results_forward.write_string(g.result_type_text(styp, base) + ';\n\n')
} else {
@@ -1144,13 +1144,13 @@ fn (mut g Gen) write_results() {
}
for k, _ in g.table.anon_struct_names {
ck := c_name(k)
- g.typedefs.writeln('typedef struct $ck $ck;')
+ g.typedefs.writeln('typedef struct ${ck} ${ck};')
}
}
fn (mut g Gen) find_or_register_shared(t ast.Type, base string) string {
g.shareds[t.idx()] = base
- return '__shared__$base'
+ return '__shared__${base}'
}
fn (mut g Gen) write_shareds() {
@@ -1160,18 +1160,18 @@ fn (mut g Gen) write_shareds() {
continue
}
done_types << typ
- sh_typ := '__shared__$base'
+ sh_typ := '__shared__${base}'
mtx_typ := 'sync__RwMutex'
- g.shared_types.writeln('struct $sh_typ {')
- g.shared_types.writeln('\t$mtx_typ mtx;')
- g.shared_types.writeln('\t$base val;')
+ g.shared_types.writeln('struct ${sh_typ} {')
+ g.shared_types.writeln('\t${mtx_typ} mtx;')
+ g.shared_types.writeln('\t${base} val;')
g.shared_types.writeln('};')
g.shared_functions.writeln('static inline voidptr __dup${sh_typ}(voidptr src, int sz) {')
- g.shared_functions.writeln('\t$sh_typ* dest = memdup(src, sz);')
+ g.shared_functions.writeln('\t${sh_typ}* dest = memdup(src, sz);')
g.shared_functions.writeln('\tsync__RwMutex_init(&dest->mtx);')
g.shared_functions.writeln('\treturn dest;')
g.shared_functions.writeln('}')
- g.typedefs.writeln('typedef struct $sh_typ $sh_typ;')
+ g.typedefs.writeln('typedef struct ${sh_typ} ${sh_typ};')
}
}
@@ -1197,9 +1197,9 @@ fn (mut g Gen) register_thread_void_wait_call() {
fn (mut g Gen) register_thread_array_wait_call(eltyp string) string {
is_void := eltyp == 'void'
- thread_typ := if is_void { '__v_thread' } else { '__v_thread_$eltyp' }
- ret_typ := if is_void { 'void' } else { 'Array_$eltyp' }
- thread_arr_typ := 'Array_$thread_typ'
+ thread_typ := if is_void { '__v_thread' } else { '__v_thread_${eltyp}' }
+ ret_typ := if is_void { 'void' } else { 'Array_${eltyp}' }
+ thread_arr_typ := 'Array_${thread_typ}'
fn_name := '${thread_arr_typ}_wait'
mut should_register := false
lock g.waiter_fns {
@@ -1212,25 +1212,25 @@ fn (mut g Gen) register_thread_array_wait_call(eltyp string) string {
if is_void {
g.register_thread_void_wait_call()
g.gowrappers.writeln('
-void ${fn_name}($thread_arr_typ a) {
+void ${fn_name}(${thread_arr_typ} a) {
for (int i = 0; i < a.len; ++i) {
- $thread_typ t = (($thread_typ*)a.data)[i];
+ ${thread_typ} t = ((${thread_typ}*)a.data)[i];
if (t == 0) continue;
__v_thread_wait(t);
}
}')
} else {
g.gowrappers.writeln('
-$ret_typ ${fn_name}($thread_arr_typ a) {
- $ret_typ res = __new_array_with_default(a.len, a.len, sizeof($eltyp), 0);
+${ret_typ} ${fn_name}(${thread_arr_typ} a) {
+ ${ret_typ} res = __new_array_with_default(a.len, a.len, sizeof(${eltyp}), 0);
for (int i = 0; i < a.len; ++i) {
- $thread_typ t = (($thread_typ*)a.data)[i];')
+ ${thread_typ} t = ((${thread_typ}*)a.data)[i];')
if g.pref.os == .windows {
g.gowrappers.writeln('\t\tif (t.handle == 0) continue;')
} else {
g.gowrappers.writeln('\t\tif (t == 0) continue;')
}
- g.gowrappers.writeln('\t\t(($eltyp*)res.data)[i] = __v_thread_${eltyp}_wait(t);
+ g.gowrappers.writeln('\t\t((${eltyp}*)res.data)[i] = __v_thread_${eltyp}_wait(t);
}
return res;
}')
@@ -1251,10 +1251,10 @@ fn (mut g Gen) write_chan_pop_optional_fns() {
}
done << opt_el_type
g.channel_definitions.writeln('
-static inline $opt_el_type __Option_${styp}_popval($styp ch) {
- $opt_el_type _tmp = {0};
+static inline ${opt_el_type} __Option_${styp}_popval(${styp} ch) {
+ ${opt_el_type} _tmp = {0};
if (sync__Channel_try_pop_priv(ch, _tmp.data, false)) {
- return ($opt_el_type){ .state = 2, .err = _v_error(_SLIT("channel closed")), .data = {EMPTY_STRUCT_INITIALIZATION} };
+ return (${opt_el_type}){ .state = 2, .err = _v_error(_SLIT("channel closed")), .data = {EMPTY_STRUCT_INITIALIZATION} };
}
return _tmp;
}')
@@ -1274,7 +1274,7 @@ fn (mut g Gen) write_chan_push_optional_fns() {
done << styp
g.register_optional(ast.void_type.set_flag(.optional))
g.channel_definitions.writeln('
-static inline ${c.option_name}_void __Option_${styp}_pushval($styp ch, $el_type e) {
+static inline ${c.option_name}_void __Option_${styp}_pushval(${styp} ch, ${el_type} e) {
if (sync__Channel_try_push_priv(ch, &e, false)) {
return (${c.option_name}_void){ .state = 2, .err = _v_error(_SLIT("channel closed")), .data = {EMPTY_STRUCT_INITIALIZATION} };
}
@@ -1294,7 +1294,7 @@ fn (mut g Gen) cc_type(typ ast.Type, is_prefix_struct bool) string {
mut sgtyps := '_T'
for gt in sym.info.generic_types {
gts := g.table.sym(g.unwrap_generic(gt))
- sgtyps += '_$gts.cname'
+ sgtyps += '_${gts.cname}'
}
styp += sgtyps
}
@@ -1306,7 +1306,7 @@ fn (mut g Gen) cc_type(typ ast.Type, is_prefix_struct bool) string {
if sym.kind == .struct_ {
info := sym.info as ast.Struct
if !info.is_typedef {
- styp = 'struct $styp'
+ styp = 'struct ${styp}'
}
}
}
@@ -1333,7 +1333,7 @@ pub fn (mut g Gen) write_typedef_types() {
info := sym.info as ast.Array
elem_sym := g.table.sym(info.elem_type)
if elem_sym.kind != .placeholder && !info.elem_type.has_flag(.generic) {
- g.type_definitions.writeln('typedef array $sym.cname;')
+ g.type_definitions.writeln('typedef array ${sym.cname};')
}
}
.array_fixed {
@@ -1351,11 +1351,11 @@ pub fn (mut g Gen) write_typedef_types() {
g.write_fn_ptr_decl(&elem_sym.info, '')
fixed = g.out.cut_to(pos)
// g.out_parallel[g.out_idx].cut_to(pos2)
- mut def_str := 'typedef $fixed;'
- def_str = def_str.replace_once('(*)', '(*$styp[$len])')
+ mut def_str := 'typedef ${fixed};'
+ def_str = def_str.replace_once('(*)', '(*${styp}[${len}])')
g.type_definitions.writeln(def_str)
} else {
- g.type_definitions.writeln('typedef $fixed $styp [$len];')
+ g.type_definitions.writeln('typedef ${fixed} ${styp} [${len}];')
base := g.typ(info.elem_type.clear_flag(.optional).clear_flag(.result))
if info.elem_type.has_flag(.optional) && base !in g.options_forward {
g.options_forward << base
@@ -1367,26 +1367,26 @@ pub fn (mut g Gen) write_typedef_types() {
}
.chan {
if sym.name != 'chan' {
- g.type_definitions.writeln('typedef chan $sym.cname;')
+ g.type_definitions.writeln('typedef chan ${sym.cname};')
chan_inf := sym.chan_info()
chan_elem_type := chan_inf.elem_type
if !chan_elem_type.has_flag(.generic) {
el_stype := g.typ(chan_elem_type)
g.channel_definitions.writeln('
-static inline $el_stype __${sym.cname}_popval($sym.cname ch) {
- $el_stype val;
+static inline ${el_stype} __${sym.cname}_popval(${sym.cname} ch) {
+ ${el_stype} val;
sync__Channel_try_pop_priv(ch, &val, false);
return val;
}')
g.channel_definitions.writeln('
-static inline void __${sym.cname}_pushval($sym.cname ch, $el_stype val) {
+static inline void __${sym.cname}_pushval(${sym.cname} ch, ${el_stype} val) {
sync__Channel_try_push_priv(ch, &val, false);
}')
}
}
}
.map {
- g.type_definitions.writeln('typedef map $sym.cname;')
+ g.type_definitions.writeln('typedef map ${sym.cname};')
}
else {
continue
@@ -1449,15 +1449,15 @@ pub fn (mut g Gen) write_alias_typesymbol_declaration(sym ast.TypeSymbol) {
return
}
if is_fixed_array_of_non_builtin {
- g.alias_definitions.writeln('typedef $parent_styp $sym.cname;')
+ g.alias_definitions.writeln('typedef ${parent_styp} ${sym.cname};')
} else {
- g.type_definitions.writeln('typedef $parent_styp $sym.cname;')
+ g.type_definitions.writeln('typedef ${parent_styp} ${sym.cname};')
}
}
pub fn (mut g Gen) write_interface_typedef(sym ast.TypeSymbol) {
struct_name := c_name(sym.cname)
- g.typedefs.writeln('typedef struct $struct_name $struct_name;')
+ g.typedefs.writeln('typedef struct ${struct_name} ${struct_name};')
}
pub fn (mut g Gen) write_interface_typesymbol_declaration(sym ast.TypeSymbol) {
@@ -1469,7 +1469,7 @@ pub fn (mut g Gen) write_interface_typesymbol_declaration(sym ast.TypeSymbol) {
return
}
struct_name := c_name(sym.cname)
- g.type_definitions.writeln('struct $struct_name {')
+ g.type_definitions.writeln('struct ${struct_name} {')
g.type_definitions.writeln('\tunion {')
g.type_definitions.writeln('\t\tvoid* _object;')
for variant in info.types {
@@ -1478,14 +1478,14 @@ pub fn (mut g Gen) write_interface_typesymbol_declaration(sym ast.TypeSymbol) {
continue
}
vcname := g.table.sym(mk_typ).cname
- g.type_definitions.writeln('\t\t$vcname* _$vcname;')
+ g.type_definitions.writeln('\t\t${vcname}* _${vcname};')
}
g.type_definitions.writeln('\t};')
g.type_definitions.writeln('\tint _typ;')
for field in info.fields {
styp := g.typ(field.typ)
cname := c_name(field.name)
- g.type_definitions.writeln('\t$styp* $cname;')
+ g.type_definitions.writeln('\t${styp}* ${cname};')
}
g.type_definitions.writeln('};')
}
@@ -1512,28 +1512,28 @@ pub fn (mut g Gen) write_fn_typesymbol_declaration(sym ast.TypeSymbol) {
match attr.name {
'callconv' {
if g.is_cc_msvc {
- msvc_call_conv = '__$attr.arg '
+ msvc_call_conv = '__${attr.arg} '
} else {
- call_conv = '$attr.arg'
+ call_conv = '${attr.arg}'
}
}
else {}
}
}
call_conv_attribute_suffix := if call_conv.len != 0 {
- '__attribute__(($call_conv))'
+ '__attribute__((${call_conv}))'
} else {
''
}
- g.type_definitions.write_string('typedef ${g.typ(func.return_type)} ($msvc_call_conv*$fn_name)(')
+ g.type_definitions.write_string('typedef ${g.typ(func.return_type)} (${msvc_call_conv}*${fn_name})(')
for i, param in func.params {
g.type_definitions.write_string(g.typ(param.typ))
if i < func.params.len - 1 {
g.type_definitions.write_string(',')
}
}
- g.type_definitions.writeln(')$call_conv_attribute_suffix;')
+ g.type_definitions.writeln(')${call_conv_attribute_suffix};')
}
}
@@ -1552,8 +1552,8 @@ pub fn (mut g Gen) write_multi_return_types() {
if info.types.filter(it.has_flag(.generic)).len > 0 {
continue
}
- g.typedefs.writeln('typedef struct $sym.cname $sym.cname;')
- g.type_definitions.writeln('struct $sym.cname {')
+ g.typedefs.writeln('typedef struct ${sym.cname} ${sym.cname};')
+ g.type_definitions.writeln('struct ${sym.cname} {')
for i, mr_typ in info.types {
type_name := g.typ(mr_typ)
if mr_typ.has_flag(.optional) {
@@ -1566,13 +1566,13 @@ pub fn (mut g Gen) write_multi_return_types() {
g.done_optionals << base
last_text := g.type_definitions.after(start_pos).clone()
g.type_definitions.go_back_to(start_pos)
- g.typedefs.writeln('typedef struct $styp $styp;')
+ g.typedefs.writeln('typedef struct ${styp} ${styp};')
g.type_definitions.writeln('${g.optional_type_text(styp, base)};')
g.type_definitions.write_string(last_text)
}
}
}
- g.type_definitions.writeln('\t$type_name arg$i;')
+ g.type_definitions.writeln('\t${type_name} arg${i};')
}
g.type_definitions.writeln('};\n')
}
@@ -1582,21 +1582,21 @@ pub fn (mut g Gen) write_multi_return_types() {
pub fn (mut g Gen) new_tmp_var() string {
g.tmp_count++
- return '_t$g.tmp_count'
+ return '_t${g.tmp_count}'
}
pub fn (mut g Gen) new_global_tmp_var() string {
g.global_tmp_count++
- return '_t$g.global_tmp_count'
+ return '_t${g.global_tmp_count}'
}
pub fn (mut g Gen) new_tmp_declaration_name() string {
g.tmp_count_declarations++
- return '_d$g.tmp_count_declarations'
+ return '_d${g.tmp_count_declarations}'
}
pub fn (mut g Gen) current_tmp_var() string {
- return '_t$g.tmp_count'
+ return '_t${g.tmp_count}'
}
/*
@@ -1665,15 +1665,15 @@ fn (mut g Gen) stmts_with_tmp_var(stmts []ast.Stmt, tmp_var string) bool {
}
if stmt.typ.has_flag(.optional) {
g.writeln('')
- g.write('$tmp_var = ')
+ g.write('${tmp_var} = ')
g.expr(stmt.expr)
g.writeln(';')
} else {
ret_typ := g.fn_decl.return_type.clear_flag(.optional)
styp = g.base_type(ret_typ)
- g.write('_option_ok(&($styp[]) { ')
+ g.write('_option_ok(&(${styp}[]) { ')
g.expr_with_cast(stmt.expr, stmt.typ, ret_typ)
- g.writeln(' }, ($c.option_name*)(&$tmp_var), sizeof($styp));')
+ g.writeln(' }, (${c.option_name}*)(&${tmp_var}), sizeof(${styp}));')
}
}
}
@@ -1697,15 +1697,15 @@ fn (mut g Gen) stmts_with_tmp_var(stmts []ast.Stmt, tmp_var string) bool {
}
if stmt.typ.has_flag(.result) {
g.writeln('')
- g.write('$tmp_var = ')
+ g.write('${tmp_var} = ')
g.expr(stmt.expr)
g.writeln(';')
} else {
ret_typ := g.fn_decl.return_type.clear_flag(.result)
styp = g.base_type(ret_typ)
- g.write('_result_ok(&($styp[]) { ')
+ g.write('_result_ok(&(${styp}[]) { ')
g.expr_with_cast(stmt.expr, stmt.typ, ret_typ)
- g.writeln(' }, ($c.result_name*)(&$tmp_var), sizeof($styp));')
+ g.writeln(' }, (${c.result_name}*)(&${tmp_var}), sizeof(${styp}));')
}
}
}
@@ -1717,7 +1717,7 @@ fn (mut g Gen) stmts_with_tmp_var(stmts []ast.Stmt, tmp_var string) bool {
is_noreturn = is_noreturn_callexpr(stmt.expr)
}
if !is_noreturn {
- g.write('$tmp_var = ')
+ g.write('${tmp_var} = ')
}
g.stmt(stmt)
if !g.out.last_n(2).contains(';') {
@@ -1765,7 +1765,7 @@ fn (mut g Gen) stmts_with_tmp_var(stmts []ast.Stmt, tmp_var string) bool {
}
if stmt_pos.pos == 0 {
$if trace_autofree ? {
- println('autofree: first stmt pos = 0. $stmt.type_name()')
+ println('autofree: first stmt pos = 0. ${stmt.type_name()}')
}
return last_stmt_was_return
}
@@ -1780,7 +1780,7 @@ fn (mut g Gen) stmts_with_tmp_var(stmts []ast.Stmt, tmp_var string) bool {
fn (mut g Gen) write_v_source_line_info(pos token.Pos) {
if g.inside_ternary == 0 && g.pref.is_vlines && g.is_vlines_enabled {
nline := pos.line_nr + 1
- lineinfo := '\n#line $nline "$g.vlines_path"'
+ lineinfo := '\n#line ${nline} "${g.vlines_path}"'
$if trace_gen_source_line_info ? {
eprintln('> lineinfo: ${lineinfo.replace('\n', '')}')
}
@@ -1821,7 +1821,7 @@ fn (mut g Gen) stmt(node ast.Stmt) {
if node.label != '' {
x := g.labeled_loops[node.label] or {
- panic('$node.label doesn\'t exist $g.file.path, $node.pos')
+ panic('${node.label} doesn\'t exist ${g.file.path}, ${node.pos}')
}
match x {
ast.ForCStmt {
@@ -1874,7 +1874,7 @@ fn (mut g Gen) stmt(node ast.Stmt) {
g.autofree_scope_vars_stop(node.pos.pos - 1, node.pos.line_nr, true,
g.branch_parent_pos)
}
- g.writeln('$node.kind;')
+ g.writeln('${node.kind};')
}
}
ast.ConstDecl {
@@ -1996,7 +1996,7 @@ fn (mut g Gen) stmt(node ast.Stmt) {
}
// #include etc
if node.kind == 'include' {
- mut missing_message := 'Header file $node.main, needed for module `$node.mod` was not found.'
+ mut missing_message := 'Header file ${node.main}, needed for module `${node.mod}` was not found.'
if node.msg != '' {
missing_message += ' ${node.msg}.'
} else {
@@ -2005,35 +2005,35 @@ fn (mut g Gen) stmt(node ast.Stmt) {
mut guarded_include := get_guarded_include_text(node.main, missing_message)
if node.main == '' {
// fails with musl-gcc and msvc; but an unguarded include works:
- guarded_include = '#include $node.main'
+ guarded_include = '#include ${node.main}'
}
if node.main.contains('.m') {
g.definitions.writeln('\n')
if ct_condition.len > 0 {
- g.definitions.writeln('#if $ct_condition')
+ g.definitions.writeln('#if ${ct_condition}')
}
// Objective C code import, include it after V types, so that e.g. `string` is
// available there
- g.definitions.writeln('// added by module `$node.mod`, file: ${os.file_name(node.source_file)}:$line_nr:')
+ g.definitions.writeln('// added by module `${node.mod}`, file: ${os.file_name(node.source_file)}:${line_nr}:')
g.definitions.writeln(guarded_include)
if ct_condition.len > 0 {
- g.definitions.writeln('#endif // \$if $ct_condition')
+ g.definitions.writeln('#endif // \$if ${ct_condition}')
}
g.definitions.writeln('\n')
} else {
g.includes.writeln('\n')
if ct_condition.len > 0 {
- g.includes.writeln('#if $ct_condition')
+ g.includes.writeln('#if ${ct_condition}')
}
- g.includes.writeln('// added by module `$node.mod`, file: ${os.file_name(node.source_file)}:$line_nr:')
+ g.includes.writeln('// added by module `${node.mod}`, file: ${os.file_name(node.source_file)}:${line_nr}:')
g.includes.writeln(guarded_include)
if ct_condition.len > 0 {
- g.includes.writeln('#endif // \$if $ct_condition')
+ g.includes.writeln('#endif // \$if ${ct_condition}')
}
g.includes.writeln('\n')
}
} else if node.kind == 'preinclude' {
- mut missing_message := 'Header file $node.main, needed for module `$node.mod` was not found.'
+ mut missing_message := 'Header file ${node.main}, needed for module `${node.mod}` was not found.'
if node.msg != '' {
missing_message += ' ${node.msg}.'
} else {
@@ -2042,52 +2042,52 @@ fn (mut g Gen) stmt(node ast.Stmt) {
mut guarded_include := get_guarded_include_text(node.main, missing_message)
if node.main == '' {
// fails with musl-gcc and msvc; but an unguarded include works:
- guarded_include = '#include $node.main'
+ guarded_include = '#include ${node.main}'
}
if node.main.contains('.m') {
// Might need to support '#preinclude' for .m files as well but for the moment
// this does the same as '#include' for them
g.definitions.writeln('\n')
if ct_condition.len > 0 {
- g.definitions.writeln('#if $ct_condition')
+ g.definitions.writeln('#if ${ct_condition}')
}
// Objective C code import, include it after V types, so that e.g. `string` is
// available there
- g.definitions.writeln('// added by module `$node.mod`, file: ${os.file_name(node.source_file)}:$line_nr:')
+ g.definitions.writeln('// added by module `${node.mod}`, file: ${os.file_name(node.source_file)}:${line_nr}:')
g.definitions.writeln(guarded_include)
if ct_condition.len > 0 {
- g.definitions.writeln('#endif // \$if $ct_condition')
+ g.definitions.writeln('#endif // \$if ${ct_condition}')
}
g.definitions.writeln('\n')
} else {
g.preincludes.writeln('\n')
if ct_condition.len > 0 {
- g.preincludes.writeln('#if $ct_condition')
+ g.preincludes.writeln('#if ${ct_condition}')
}
- g.preincludes.writeln('// added by module `$node.mod`, file: ${os.file_name(node.source_file)}:$line_nr:')
+ g.preincludes.writeln('// added by module `${node.mod}`, file: ${os.file_name(node.source_file)}:${line_nr}:')
g.preincludes.writeln(guarded_include)
if ct_condition.len > 0 {
- g.preincludes.writeln('#endif // \$if $ct_condition')
+ g.preincludes.writeln('#endif // \$if ${ct_condition}')
}
g.preincludes.writeln('\n')
}
} else if node.kind == 'insert' {
if ct_condition.len > 0 {
- g.includes.writeln('#if $ct_condition')
+ g.includes.writeln('#if ${ct_condition}')
}
- g.includes.writeln('// inserted by module `$node.mod`, file: ${os.file_name(node.source_file)}:$line_nr:')
+ g.includes.writeln('// inserted by module `${node.mod}`, file: ${os.file_name(node.source_file)}:${line_nr}:')
g.includes.writeln(node.val)
if ct_condition.len > 0 {
- g.includes.writeln('#endif // \$if $ct_condition')
+ g.includes.writeln('#endif // \$if ${ct_condition}')
}
} else if node.kind == 'define' {
if ct_condition.len > 0 {
- g.includes.writeln('#if $ct_condition')
+ g.includes.writeln('#if ${ct_condition}')
}
- g.includes.writeln('// defined by module `$node.mod`')
- g.includes.writeln('#define $node.main')
+ g.includes.writeln('// defined by module `${node.mod}`')
+ g.includes.writeln('#define ${node.main}')
if ct_condition.len > 0 {
- g.includes.writeln('#endif // \$if $ct_condition')
+ g.includes.writeln('#endif // \$if ${ct_condition}')
}
}
}
@@ -2143,9 +2143,9 @@ fn (mut g Gen) stmt(node ast.Stmt) {
return
}
if node.is_union {
- g.typedefs.writeln('typedef union $name $name;')
+ g.typedefs.writeln('typedef union ${name} ${name};')
} else {
- g.typedefs.writeln('typedef struct $name $name;')
+ g.typedefs.writeln('typedef struct ${name} ${name};')
}
}
ast.TypeDecl {
@@ -2202,7 +2202,7 @@ fn (mut g Gen) get_sumtype_casting_fn(got_ ast.Type, exp_ ast.Type) string {
got, exp := got_.idx(), exp_.idx()
i := got | int(u32(exp) << 16)
got_cname, exp_cname := g.table.sym(got).cname, g.table.sym(exp).cname
- fn_name := '${got_cname}_to_sumtype_$exp_cname'
+ fn_name := '${got_cname}_to_sumtype_${exp_cname}'
if got == exp || g.sumtype_definitions[i] {
return fn_name
}
@@ -2227,14 +2227,14 @@ fn (mut g Gen) write_sumtype_casting_fn(fun SumtypeCastingFn) {
got_name := 'fn ${g.table.fn_type_source_signature(got_sym.info.func)}'
got_cname = 'anon_fn_${g.table.fn_type_signature(got_sym.info.func)}'
type_idx = g.table.type_idxs[got_name].str()
- sb.writeln('static inline $exp_cname ${fun.fn_name}($got_cname x) {')
- sb.writeln('\t$got_cname ptr = x;')
+ sb.writeln('static inline ${exp_cname} ${fun.fn_name}(${got_cname} x) {')
+ sb.writeln('\t${got_cname} ptr = x;')
is_anon_fn = true
}
}
if !is_anon_fn {
- sb.writeln('static inline $exp_cname ${fun.fn_name}($got_cname* x) {')
- sb.writeln('\t$got_cname* ptr = memdup(x, sizeof($got_cname));')
+ sb.writeln('static inline ${exp_cname} ${fun.fn_name}(${got_cname}* x) {')
+ sb.writeln('\t${got_cname}* ptr = memdup(x, sizeof(${got_cname}));')
}
for embed_hierarchy in g.table.get_embeds(got_sym) {
// last embed in the hierarchy
@@ -2251,9 +2251,9 @@ fn (mut g Gen) write_sumtype_casting_fn(fun SumtypeCastingFn) {
accessor += embed_name
}
// if the variable is not used, the C compiler will optimize it away
- sb.writeln('\t$embed_cname* ${embed_name}_ptr = memdup($accessor, sizeof($embed_cname));')
+ sb.writeln('\t${embed_cname}* ${embed_name}_ptr = memdup(${accessor}, sizeof(${embed_cname}));')
}
- sb.write_string('\treturn ($exp_cname){ ._$got_cname = ptr, ._typ = $type_idx')
+ sb.write_string('\treturn (${exp_cname}){ ._${got_cname} = ptr, ._typ = ${type_idx}')
for field in (exp_sym.info as ast.SumType).fields {
mut ptr := 'ptr'
mut type_cname := got_cname
@@ -2268,9 +2268,9 @@ fn (mut g Gen) write_sumtype_casting_fn(fun SumtypeCastingFn) {
field_styp := g.typ(field.typ)
if got_sym.kind in [.sum_type, .interface_] {
// the field is already a wrapped pointer; we shouldn't wrap it once again
- sb.write_string(', .$field.name = ptr->$field.name')
+ sb.write_string(', .${field.name} = ptr->${field.name}')
} else {
- sb.write_string(', .$field.name = ($field_styp*)((char*)$ptr + __offsetof_ptr($ptr, $type_cname, $field.name))')
+ sb.write_string(', .${field.name} = (${field_styp}*)((char*)${ptr} + __offsetof_ptr(${ptr}, ${type_cname}, ${field.name}))')
}
}
sb.writeln('};\n}')
@@ -2280,7 +2280,7 @@ fn (mut g Gen) write_sumtype_casting_fn(fun SumtypeCastingFn) {
fn (mut g Gen) call_cfn_for_casting_expr(fname string, expr ast.Expr, exp_is_ptr bool, exp_styp string, got_is_ptr bool, got_is_fn bool, got_styp string) {
mut rparen_n := 1
if exp_is_ptr {
- g.write('HEAP($exp_styp, ')
+ g.write('HEAP(${exp_styp}, ')
rparen_n++
}
g.write('${fname}(')
@@ -2290,7 +2290,7 @@ fn (mut g Gen) call_cfn_for_casting_expr(fname string, expr ast.Expr, exp_is_ptr
// Note: the `_to_sumtype_` family of functions do call memdup internally, making
// another duplicate with the HEAP macro is redundant, so use ADDR instead:
promotion_macro_name := if fname.contains('_to_sumtype_') { 'ADDR' } else { 'HEAP' }
- g.write('${promotion_macro_name}($got_styp, (')
+ g.write('${promotion_macro_name}(${got_styp}, (')
rparen_n += 2
} else {
g.write('&')
@@ -2331,7 +2331,7 @@ fn (mut g Gen) expr_with_cast(expr ast.Expr, got_type_raw ast.Type, expected_typ
got_styp := g.cc_type(got_type.ref(), true)
// TODO: why does cc_type even add this in the first place?
exp_styp := exp_sym.cname
- mut fname := 'I_${got_styp}_to_Interface_$exp_styp'
+ mut fname := 'I_${got_styp}_to_Interface_${exp_styp}'
if exp_sym.info.is_generic {
fname = g.generic_fn_name(exp_sym.info.concrete_types, fname)
}
@@ -2341,17 +2341,17 @@ fn (mut g Gen) expr_with_cast(expr ast.Expr, got_type_raw ast.Type, expected_typ
} else {
got_styp := g.cc_type(got_type, true)
got_is_shared := got_type.has_flag(.shared_f)
- exp_styp := if got_is_shared { '__shared__$exp_sym.cname' } else { exp_sym.cname }
+ exp_styp := if got_is_shared { '__shared__${exp_sym.cname}' } else { exp_sym.cname }
// If it's shared, we need to use the other caster:
mut fname := if got_is_shared {
- 'I___shared__${got_styp}_to_shared_Interface_$exp_styp'
+ 'I___shared__${got_styp}_to_shared_Interface_${exp_styp}'
} else {
- 'I_${got_styp}_to_Interface_$exp_styp'
+ 'I_${got_styp}_to_Interface_${exp_styp}'
}
lock g.referenced_fns {
g.referenced_fns[fname] = true
}
- fname = '/*$exp_sym*/$fname'
+ fname = '/*${exp_sym}*/${fname}'
if exp_sym.info.is_generic {
fname = g.generic_fn_name(exp_sym.info.concrete_types, fname)
}
@@ -2422,17 +2422,17 @@ fn (mut g Gen) expr_with_cast(expr ast.Expr, got_type_raw ast.Type, expected_typ
g.error('cannot convert reference to `shared`', expr.pos())
}
if exp_sym.kind == .array {
- g.writeln('($shared_styp*)__dup_shared_array(&($shared_styp){.mtx = {0}, .val =')
+ g.writeln('(${shared_styp}*)__dup_shared_array(&(${shared_styp}){.mtx = {0}, .val =')
} else if exp_sym.kind == .map {
- g.writeln('($shared_styp*)__dup_shared_map(&($shared_styp){.mtx = {0}, .val =')
+ g.writeln('(${shared_styp}*)__dup_shared_map(&(${shared_styp}){.mtx = {0}, .val =')
} else {
- g.writeln('($shared_styp*)__dup${shared_styp}(&($shared_styp){.mtx = {0}, .val =')
+ g.writeln('(${shared_styp}*)__dup${shared_styp}(&(${shared_styp}){.mtx = {0}, .val =')
}
old_is_shared := g.is_shared
g.is_shared = false
g.expr(expr)
g.is_shared = old_is_shared
- g.writeln('}, sizeof($shared_styp))')
+ g.writeln('}, sizeof(${shared_styp}))')
return
} else if got_type_raw.has_flag(.shared_f) && !expected_type.has_flag(.shared_f) {
if expected_type.is_ptr() {
@@ -2504,7 +2504,7 @@ fn (mut g Gen) gen_attrs(attrs []ast.Attr) {
return
}
for attr in attrs {
- g.writeln('// Attr: [$attr.name]')
+ g.writeln('// Attr: [${attr.name}]')
}
}
@@ -2597,37 +2597,37 @@ fn (mut g Gen) asm_arg(arg ast.AsmArg, stmt ast.AsmStmt) {
if name in stmt.local_labels || name in stmt.global_labels
|| name in g.file.global_labels || stmt.is_basic
|| (name !in stmt.input.map(it.alias) && name !in stmt.output.map(it.alias)) {
- asm_formatted_name := if name in stmt.global_labels { '%l[$name]' } else { name }
+ asm_formatted_name := if name in stmt.global_labels { '%l[${name}]' } else { name }
g.write(asm_formatted_name)
} else {
- g.write('%[$name]')
+ g.write('%[${name}]')
}
}
ast.CharLiteral {
- g.write("'$arg.val'")
+ g.write("'${arg.val}'")
}
ast.IntegerLiteral {
- g.write('\$$arg.val')
+ g.write('\$${arg.val}')
}
ast.FloatLiteral {
if g.pref.nofloat {
- g.write('\$$arg.val.int()')
+ g.write('\$${arg.val.int()}')
} else {
- g.write('\$$arg.val')
+ g.write('\$${arg.val}')
}
}
ast.BoolLiteral {
- g.write('\$$arg.val.str()')
+ g.write('\$${arg.val.str()}')
}
ast.AsmRegister {
if !stmt.is_basic {
g.write('%') // escape percent with percent in extended assembly
}
- g.write('%$arg.name')
+ g.write('%${arg.name}')
}
ast.AsmAddressing {
if arg.segment != '' {
- g.write('%%$arg.segment:')
+ g.write('%%${arg.segment}:')
}
base := arg.base
index := arg.index
@@ -2657,10 +2657,10 @@ fn (mut g Gen) asm_arg(arg ast.AsmArg, stmt ast.AsmStmt) {
g.asm_arg(displacement, stmt)
g.write(',')
} else {
- panic('unexpected $displacement.type_name()')
+ panic('unexpected ${displacement.type_name()}')
}
g.asm_arg(index, stmt)
- g.write(',$scale)')
+ g.write(',${scale})')
}
.base_plus_index_plus_displacement {
g.asm_arg(displacement, stmt)
@@ -2676,7 +2676,7 @@ fn (mut g Gen) asm_arg(arg ast.AsmArg, stmt ast.AsmStmt) {
g.asm_arg(base, stmt)
g.write(',')
g.asm_arg(index, stmt)
- g.write(',$scale)')
+ g.write(',${scale})')
}
.rip_plus_displacement {
g.asm_arg(displacement, stmt)
@@ -2701,9 +2701,9 @@ fn (mut g Gen) asm_arg(arg ast.AsmArg, stmt ast.AsmStmt) {
fn (mut g Gen) gen_asm_ios(ios []ast.AsmIO) {
for i, io in ios {
if io.alias != '' {
- g.write('[$io.alias] ')
+ g.write('[${io.alias}] ')
}
- g.write('"$io.constraint" (')
+ g.write('"${io.constraint}" (')
g.expr(io.expr)
g.write(')')
if i + 1 < ios.len {
@@ -2720,11 +2720,11 @@ fn cnewlines(s string) string {
fn (mut g Gen) write_fn_ptr_decl(func &ast.FnType, ptr_name string) {
ret_styp := g.typ(func.func.return_type)
- g.write('$ret_styp (*$ptr_name) (')
+ g.write('${ret_styp} (*${ptr_name}) (')
arg_len := func.func.params.len
for i, arg in func.func.params {
arg_styp := g.typ(arg.typ)
- g.write('$arg_styp $arg.name')
+ g.write('${arg_styp} ${arg.name}')
if i < arg_len - 1 {
g.write(', ')
}
@@ -2765,7 +2765,7 @@ fn (mut g Gen) gen_clone_assignment(val ast.Expr, typ ast.Type, add_eq bool) boo
// `arr1 = arr2` => `arr1 = arr2.clone()`
shared_styp := g.typ(typ.set_nr_muls(0))
if typ.share() == .shared_t {
- g.write('($shared_styp*)__dup_shared_array(&($shared_styp){.mtx = {0}, .val =')
+ g.write('(${shared_styp}*)__dup_shared_array(&(${shared_styp}){.mtx = {0}, .val =')
}
g.write(' array_clone_static_to_depth(')
g.expr(val)
@@ -2774,9 +2774,9 @@ fn (mut g Gen) gen_clone_assignment(val ast.Expr, typ ast.Type, add_eq bool) boo
}
elem_type := (right_sym.info as ast.Array).elem_type
array_depth := g.get_array_depth(elem_type)
- g.write(', $array_depth)')
+ g.write(', ${array_depth})')
if typ.share() == .shared_t {
- g.write('}, sizeof($shared_styp))')
+ g.write('}, sizeof(${shared_styp}))')
}
} else if right_sym.kind == .string {
// `str1 = str2` => `str1 = str2.clone()`
@@ -2807,7 +2807,7 @@ fn (mut g Gen) autofree_scope_vars_stop(pos int, line_nr int, free_parent_scopes
// TODO why can scope.pos be 0? (only outside fns?)
return
}
- g.trace_autofree('// autofree_scope_vars(pos=$pos line_nr=$line_nr scope.pos=$scope.start_pos scope.end_pos=$scope.end_pos)')
+ g.trace_autofree('// autofree_scope_vars(pos=${pos} line_nr=${line_nr} scope.pos=${scope.start_pos} scope.end_pos=${scope.end_pos})')
g.autofree_scope_vars2(scope, scope.start_pos, scope.end_pos, line_nr, free_parent_scopes,
stop_pos)
}
@@ -2825,7 +2825,7 @@ fn (mut g Gen) autofree_scope_vars2(scope &ast.Scope, start_pos int, end_pos int
for _, obj in scope.objects {
match obj {
ast.Var {
- g.trace_autofree('// var "$obj.name" var.pos=$obj.pos.pos var.line_nr=$obj.pos.line_nr')
+ g.trace_autofree('// var "${obj.name}" var.pos=${obj.pos.pos} var.line_nr=${obj.pos.line_nr}')
if obj.name == g.returned_var_name {
g.trace_autofree('// skipping returned var')
continue
@@ -2833,16 +2833,16 @@ fn (mut g Gen) autofree_scope_vars2(scope &ast.Scope, start_pos int, end_pos int
if obj.is_or {
// Skip vars inited with the `or {}`, since they are generated
// after the or block in C.
- g.trace_autofree('// skipping `or{}` var "$obj.name"')
+ g.trace_autofree('// skipping `or{}` var "${obj.name}"')
continue
}
if obj.is_tmp {
// Skip for loop vars
- g.trace_autofree('// skipping tmp var "$obj.name"')
+ g.trace_autofree('// skipping tmp var "${obj.name}"')
continue
}
if obj.is_inherited {
- g.trace_autofree('// skipping inherited var "$obj.name"')
+ g.trace_autofree('// skipping inherited var "${obj.name}"')
continue
}
// if var.typ == 0 {
@@ -2982,9 +2982,9 @@ fn (mut g Gen) autofree_var_call(free_fn_name string, v ast.Var) {
return
}
if v.is_auto_heap {
- af.writeln('\t${free_fn_name}(${c_name(v.name)}); // autofreed heap var $g.cur_mod.name $g.is_builtin_mod')
+ af.writeln('\t${free_fn_name}(${c_name(v.name)}); // autofreed heap var ${g.cur_mod.name} ${g.is_builtin_mod}')
} else {
- af.writeln('\t${free_fn_name}(&${c_name(v.name)}); // autofreed var $g.cur_mod.name $g.is_builtin_mod')
+ af.writeln('\t${free_fn_name}(&${c_name(v.name)}); // autofreed var ${g.cur_mod.name} ${g.is_builtin_mod}')
}
}
g.autofree_scope_stmts << af.str()
@@ -3092,11 +3092,11 @@ fn (mut g Gen) expr(node_ ast.Expr) {
shared_typ := ret_type.set_flag(.shared_f)
shared_styp = g.typ(shared_typ)
if ret_sym.kind == .array {
- g.writeln('($shared_styp*)__dup_shared_array(&($shared_styp){.mtx = {0}, .val =')
+ g.writeln('(${shared_styp}*)__dup_shared_array(&(${shared_styp}){.mtx = {0}, .val =')
} else if ret_sym.kind == .map {
- g.writeln('($shared_styp*)__dup_shared_map(&($shared_styp){.mtx = {0}, .val =')
+ g.writeln('(${shared_styp}*)__dup_shared_map(&(${shared_styp}){.mtx = {0}, .val =')
} else {
- g.writeln('($shared_styp*)__dup${shared_styp}(&($shared_styp){.mtx = {0}, .val =')
+ g.writeln('(${shared_styp}*)__dup${shared_styp}(&(${shared_styp}){.mtx = {0}, .val =')
}
}
last_stmt_pos := if g.stmt_path_pos.len > 0 { g.stmt_path_pos.last() } else { 0 }
@@ -3117,7 +3117,7 @@ fn (mut g Gen) expr(node_ ast.Expr) {
// println('pos=$node.pos.pos')
}
if g.is_shared && !ret_type.has_flag(.shared_f) && !g.inside_or_block {
- g.writeln('}, sizeof($shared_styp))')
+ g.writeln('}, sizeof(${shared_styp}))')
}
// if g.autofree && node.autofree_pregen != '' { // g.strs_to_free0.len != 0 {
/*
@@ -3225,10 +3225,10 @@ fn (mut g Gen) expr(node_ ast.Expr) {
node_typ := g.unwrap_generic(typ)
sym := g.table.sym(node_typ)
if sym.language == .v && sym.kind in [.placeholder, .any] {
- g.error('unknown type `$sym.name`', node.pos)
+ g.error('unknown type `${sym.name}`', node.pos)
}
is_ref_type := g.contains_ptr(node_typ)
- g.write('/*IsRefType*/ $is_ref_type')
+ g.write('/*IsRefType*/ ${is_ref_type}')
}
ast.Likely {
if node.is_likely {
@@ -3258,7 +3258,7 @@ fn (mut g Gen) expr(node_ ast.Expr) {
}
ast.OffsetOf {
styp := g.typ(node.struct_type)
- g.write('/*OffsetOf*/ (u32)(__offsetof(${util.no_dots(styp)}, $node.field))')
+ g.write('/*OffsetOf*/ (u32)(__offsetof(${util.no_dots(styp)}, ${node.field}))')
}
ast.OrExpr {
// this should never appear here
@@ -3270,7 +3270,7 @@ fn (mut g Gen) expr(node_ ast.Expr) {
}
ast.PostfixExpr {
if node.auto_locked != '' {
- g.writeln('sync__RwMutex_lock(&$node.auto_locked->mtx);')
+ g.writeln('sync__RwMutex_lock(&${node.auto_locked}->mtx);')
}
g.inside_map_postfix = true
if node.is_c2v_prefix {
@@ -3289,7 +3289,7 @@ fn (mut g Gen) expr(node_ ast.Expr) {
}
if node.auto_locked != '' {
g.writeln(';')
- g.write('sync__RwMutex_unlock(&$node.auto_locked->mtx)')
+ g.write('sync__RwMutex_unlock(&${node.auto_locked}->mtx)')
}
}
ast.PrefixExpr {
@@ -3314,7 +3314,7 @@ fn (mut g Gen) expr(node_ ast.Expr) {
if gen_or {
opt_elem_type := g.typ(elem_type.set_flag(.optional))
g.register_chan_pop_optional_call(opt_elem_type, styp)
- g.write('$opt_elem_type $tmp_opt = __Option_${styp}_popval(')
+ g.write('${opt_elem_type} ${tmp_opt} = __Option_${styp}_popval(')
} else {
g.write('__${styp}_popval(')
}
@@ -3327,7 +3327,7 @@ fn (mut g Gen) expr(node_ ast.Expr) {
}
if is_gen_or_and_assign_rhs {
elem_styp := g.typ(elem_type)
- g.write(';\n$cur_line*($elem_styp*)${tmp_opt}.data')
+ g.write(';\n${cur_line}*(${elem_styp}*)${tmp_opt}.data')
}
}
} else {
@@ -3381,7 +3381,7 @@ fn (mut g Gen) expr(node_ ast.Expr) {
sym := g.table.sym(typ)
sidx := g.type_sidx(typ)
// g.write('$type_idx /* $sym.name */')
- g.write('$sidx /* $sym.name */')
+ g.write('${sidx} /* ${sym.name} */')
}
ast.TypeOf {
g.typeof_expr(node)
@@ -3401,7 +3401,7 @@ fn (mut g Gen) char_literal(node ast.CharLiteral) {
}
// TODO: optimize use L-char instead of u32 when possible
if node.val.len_utf8() < node.val.len {
- g.write('((rune)0x$node.val.utf32_code().hex() /* `$node.val` */)')
+ g.write('((rune)0x${node.val.utf32_code().hex()} /* `${node.val}` */)')
return
}
if node.val.len == 1 {
@@ -3413,7 +3413,7 @@ fn (mut g Gen) char_literal(node ast.CharLiteral) {
return
}
}
- g.write("'$node.val'")
+ g.write("'${node.val}'")
}
// T.name, typeof(expr).name
@@ -3443,13 +3443,13 @@ fn (mut g Gen) typeof_expr(node ast.TypeOf) {
if sym.kind == .sum_type {
// When encountering a .sum_type, typeof() should be done at runtime,
// because the subtype of the expression may change:
- g.write('charptr_vstring_literal( /* $sym.name */ v_typeof_sumtype_${sym.cname}( (')
+ g.write('charptr_vstring_literal( /* ${sym.name} */ v_typeof_sumtype_${sym.cname}( (')
g.expr(node.expr)
g.write(')._typ ))')
} else if sym.kind == .array_fixed {
fixed_info := sym.info as ast.ArrayFixed
typ_name := g.table.get_type_name(fixed_info.elem_type)
- g.write('_SLIT("[$fixed_info.size]${util.strip_main_name(typ_name)}")')
+ g.write('_SLIT("[${fixed_info.size}]${util.strip_main_name(typ_name)}")')
} else if sym.kind == .function {
info := sym.info as ast.FnType
g.write('_SLIT("${g.fn_decl_str(info)}")')
@@ -3459,7 +3459,7 @@ fn (mut g Gen) typeof_expr(node ast.TypeOf) {
} else {
x := g.table.type_to_str(typ)
y := util.strip_main_name(x)
- g.write('_SLIT("$y")')
+ g.write('_SLIT("${y}")')
}
}
@@ -3510,7 +3510,7 @@ fn (mut g Gen) selector_expr(node ast.SelectorExpr) {
&& (node.expr_type.has_flag(.optional) || node.expr_type.has_flag(.result))
if is_opt_or_res {
opt_base_typ := g.base_type(node.expr_type)
- g.writeln('(*($opt_base_typ*)')
+ g.writeln('(*(${opt_base_typ}*)')
}
if sym.kind in [.interface_, .sum_type] {
g.write('(*(')
@@ -3520,7 +3520,7 @@ fn (mut g Gen) selector_expr(node ast.SelectorExpr) {
g.error('field_name should be `len`', node.pos)
}
info := sym.info as ast.ArrayFixed
- g.write('$info.size')
+ g.write('${info.size}')
return
} else if sym.kind == .chan && (node.field_name == 'len' || node.field_name == 'closed') {
g.write('sync__Channel_${node.field_name}(')
@@ -3549,18 +3549,18 @@ fn (mut g Gen) selector_expr(node ast.SelectorExpr) {
if field_sym.kind == .interface_ && cast_sym.kind == .interface_ {
ptr := '*'.repeat(field.typ.nr_muls())
dot := if node.expr_type.is_ptr() { '->' } else { '.' }
- g.write('I_${field_sym.cname}_as_I_${cast_sym.cname}($ptr$node.expr$dot$node.field_name))')
+ g.write('I_${field_sym.cname}_as_I_${cast_sym.cname}(${ptr}${node.expr}${dot}${node.field_name}))')
return
} else {
if i != 0 {
dot := if field.typ.is_ptr() { '->' } else { '.' }
- sum_type_deref_field += ')$dot'
+ sum_type_deref_field += ')${dot}'
}
if cast_sym.info is ast.Aggregate {
agg_sym := g.table.sym(cast_sym.info.types[g.aggregate_type_idx])
- sum_type_deref_field += '_$agg_sym.cname'
+ sum_type_deref_field += '_${agg_sym.cname}'
} else {
- sum_type_deref_field += '_$cast_sym.cname'
+ sum_type_deref_field += '_${cast_sym.cname}'
}
}
}
@@ -3576,24 +3576,24 @@ fn (mut g Gen) selector_expr(node ast.SelectorExpr) {
}
if !has_embeds {
if !node.has_hidden_receiver {
- g.write('${g.typ(node.expr_type.idx())}_$m.name')
+ g.write('${g.typ(node.expr_type.idx())}_${m.name}')
return
}
receiver := m.params[0]
expr_styp := g.typ(node.expr_type.idx())
data_styp := g.typ(receiver.typ.idx())
mut sb := strings.new_builder(256)
- name := '_V_closure_${expr_styp}_${m.name}_$node.pos.pos'
+ name := '_V_closure_${expr_styp}_${m.name}_${node.pos.pos}'
sb.write_string('${g.typ(m.return_type)} ${name}(')
for i in 1 .. m.params.len {
param := m.params[i]
if i != 1 {
sb.write_string(', ')
}
- sb.write_string('${g.typ(param.typ)} a$i')
+ sb.write_string('${g.typ(param.typ)} a${i}')
}
sb.writeln(') {')
- sb.writeln('\t$data_styp* a0 = __CLOSURE_GET_DATA();')
+ sb.writeln('\t${data_styp}* a0 = __CLOSURE_GET_DATA();')
if m.return_type != ast.void_type {
sb.write_string('\treturn ')
} else {
@@ -3607,7 +3607,7 @@ fn (mut g Gen) selector_expr(node ast.SelectorExpr) {
if i != 0 {
sb.write_string(', ')
}
- sb.write_string('a$i')
+ sb.write_string('a${i}')
}
sb.writeln(');')
sb.writeln('}')
@@ -3615,7 +3615,7 @@ fn (mut g Gen) selector_expr(node ast.SelectorExpr) {
g.anon_fn_definitions << sb.str()
g.nr_closures++
- g.write('__closure_create($name, ')
+ g.write('__closure_create(${name}, ')
if !receiver.typ.is_ptr() {
g.write('memdup_uncollectable(')
}
@@ -3624,7 +3624,7 @@ fn (mut g Gen) selector_expr(node ast.SelectorExpr) {
}
g.expr(node.expr)
if !receiver.typ.is_ptr() {
- g.write(', sizeof($expr_styp))')
+ g.write(', sizeof(${expr_styp}))')
}
g.write(')')
return
@@ -3670,12 +3670,12 @@ fn (mut g Gen) selector_expr(node ast.SelectorExpr) {
g.write('val.')
}
if node.expr_type == 0 {
- verror('cgen: SelectorExpr | expr_type: 0 | it.expr: `$node.expr` | field: `$node.field_name` | file: $g.file.path | line: $node.pos.line_nr')
+ verror('cgen: SelectorExpr | expr_type: 0 | it.expr: `${node.expr}` | field: `${node.field_name}` | file: ${g.file.path} | line: ${node.pos.line_nr}')
}
field_name := if sym.language == .v { c_name(node.field_name) } else { node.field_name }
g.write(field_name)
if sum_type_deref_field != '' {
- g.write('$sum_type_dot$sum_type_deref_field)')
+ g.write('${sum_type_dot}${sum_type_deref_field})')
}
if sym.kind in [.interface_, .sum_type] {
g.write('))')
@@ -3688,9 +3688,9 @@ fn (mut g Gen) enum_decl(node ast.EnumDecl) {
if g.pref.ccompiler == 'msvc' {
mut last_value := '0'
enum_typ_name := g.table.get_type_name(node.typ)
- g.enum_typedefs.writeln('typedef $enum_typ_name $enum_name;')
+ g.enum_typedefs.writeln('typedef ${enum_typ_name} ${enum_name};')
for i, field in node.fields {
- g.enum_typedefs.write_string('\t#define ${enum_name}__$field.name ')
+ g.enum_typedefs.write_string('\t#define ${enum_name}__${field.name} ')
g.enum_typedefs.write_string('(')
if is_flag {
g.enum_typedefs.write_string((u64(1) << i).str())
@@ -3716,7 +3716,7 @@ fn (mut g Gen) enum_decl(node ast.EnumDecl) {
mut cur_enum_expr := ''
mut cur_enum_offset := 0
for i, field in node.fields {
- g.enum_typedefs.write_string('\t${enum_name}__$field.name')
+ g.enum_typedefs.write_string('\t${enum_name}__${field.name}')
if field.has_expr {
g.enum_typedefs.write_string(' = ')
expr_str := g.expr_string(field.expr)
@@ -3725,21 +3725,21 @@ fn (mut g Gen) enum_decl(node ast.EnumDecl) {
cur_enum_offset = 0
} else if is_flag {
g.enum_typedefs.write_string(' = ')
- cur_enum_expr = 'u64(1) << $i'
+ cur_enum_expr = 'u64(1) << ${i}'
g.enum_typedefs.write_string((u64(1) << i).str())
g.enum_typedefs.write_string('U')
cur_enum_offset = 0
}
cur_value := if cur_enum_offset > 0 {
- '$cur_enum_expr+$cur_enum_offset'
+ '${cur_enum_expr}+${cur_enum_offset}'
} else {
cur_enum_expr
}
- g.enum_typedefs.writeln(', // $cur_value')
+ g.enum_typedefs.writeln(', // ${cur_value}')
cur_enum_offset++
}
packed_attribute := if node.typ != ast.int_type { '__attribute__((packed))' } else { '' }
- g.enum_typedefs.writeln('} $packed_attribute $enum_name;')
+ g.enum_typedefs.writeln('} ${packed_attribute} ${enum_name};')
if node.typ != ast.int_type {
g.enum_typedefs.writeln('#pragma pack(pop)\n')
}
@@ -3768,7 +3768,7 @@ fn (mut g Gen) lock_expr(node ast.LockExpr) {
if node.is_expr {
styp := g.typ(node.typ)
cur_line = g.go_before_stmt(0)
- g.writeln('$styp $tmp_result;')
+ g.writeln('${styp} ${tmp_result};')
}
mut mtxs := ''
if node.lockeds.len == 0 {
@@ -3780,45 +3780,45 @@ fn (mut g Gen) lock_expr(node ast.LockExpr) {
g.writeln('->mtx);')
} else {
mtxs = g.new_tmp_var()
- g.writeln('uintptr_t _arr_$mtxs[$node.lockeds.len];')
- g.writeln('bool _isrlck_$mtxs[$node.lockeds.len];')
+ g.writeln('uintptr_t _arr_${mtxs}[${node.lockeds.len}];')
+ g.writeln('bool _isrlck_${mtxs}[${node.lockeds.len}];')
mut j := 0
for i, is_rlock in node.is_rlock {
if !is_rlock {
- g.write('_arr_$mtxs[$j] = (uintptr_t)&')
+ g.write('_arr_${mtxs}[${j}] = (uintptr_t)&')
g.expr(node.lockeds[i])
g.writeln('->mtx;')
- g.writeln('_isrlck_$mtxs[$j] = false;')
+ g.writeln('_isrlck_${mtxs}[${j}] = false;')
j++
}
}
for i, is_rlock in node.is_rlock {
if is_rlock {
- g.write('_arr_$mtxs[$j] = (uintptr_t)&')
+ g.write('_arr_${mtxs}[${j}] = (uintptr_t)&')
g.expr(node.lockeds[i])
g.writeln('->mtx;')
- g.writeln('_isrlck_$mtxs[$j] = true;')
+ g.writeln('_isrlck_${mtxs}[${j}] = true;')
j++
}
}
if node.lockeds.len == 2 {
- g.writeln('if (_arr_$mtxs[0] > _arr_$mtxs[1]) {')
- g.writeln('\tuintptr_t _ptr_$mtxs = _arr_$mtxs[0];')
- g.writeln('\t_arr_$mtxs[0] = _arr_$mtxs[1];')
- g.writeln('\t_arr_$mtxs[1] = _ptr_$mtxs;')
- g.writeln('\tbool _bool_$mtxs = _isrlck_$mtxs[0];')
- g.writeln('\t_isrlck_$mtxs[0] = _isrlck_$mtxs[1];')
- g.writeln('\t_isrlck_$mtxs[1] = _bool_$mtxs;')
+ g.writeln('if (_arr_${mtxs}[0] > _arr_${mtxs}[1]) {')
+ g.writeln('\tuintptr_t _ptr_${mtxs} = _arr_${mtxs}[0];')
+ g.writeln('\t_arr_${mtxs}[0] = _arr_${mtxs}[1];')
+ g.writeln('\t_arr_${mtxs}[1] = _ptr_${mtxs};')
+ g.writeln('\tbool _bool_${mtxs} = _isrlck_${mtxs}[0];')
+ g.writeln('\t_isrlck_${mtxs}[0] = _isrlck_${mtxs}[1];')
+ g.writeln('\t_isrlck_${mtxs}[1] = _bool_${mtxs};')
g.writeln('}')
} else {
- g.writeln('__sort_ptr(_arr_$mtxs, _isrlck_$mtxs, $node.lockeds.len);')
+ g.writeln('__sort_ptr(_arr_${mtxs}, _isrlck_${mtxs}, ${node.lockeds.len});')
}
- g.writeln('for (int $mtxs=0; $mtxs<$node.lockeds.len; $mtxs++) {')
- g.writeln('\tif ($mtxs && _arr_$mtxs[$mtxs] == _arr_$mtxs[$mtxs-1]) continue;')
- g.writeln('\tif (_isrlck_$mtxs[$mtxs])')
- g.writeln('\t\tsync__RwMutex_rlock((sync__RwMutex*)_arr_$mtxs[$mtxs]);')
+ g.writeln('for (int ${mtxs}=0; ${mtxs}<${node.lockeds.len}; ${mtxs}++) {')
+ g.writeln('\tif (${mtxs} && _arr_${mtxs}[${mtxs}] == _arr_${mtxs}[${mtxs}-1]) continue;')
+ g.writeln('\tif (_isrlck_${mtxs}[${mtxs}])')
+ g.writeln('\t\tsync__RwMutex_rlock((sync__RwMutex*)_arr_${mtxs}[${mtxs}]);')
g.writeln('\telse')
- g.writeln('\t\tsync__RwMutex_lock((sync__RwMutex*)_arr_$mtxs[$mtxs]);')
+ g.writeln('\t\tsync__RwMutex_lock((sync__RwMutex*)_arr_${mtxs}[${mtxs}]);')
g.writeln('}')
}
g.mtxs = mtxs
@@ -3835,7 +3835,7 @@ fn (mut g Gen) lock_expr(node ast.LockExpr) {
if node.is_expr {
g.writeln('')
g.write(cur_line)
- g.write('$tmp_result')
+ g.write('${tmp_result}')
}
}
@@ -3847,12 +3847,12 @@ fn (mut g Gen) unlock_locks() {
g.expr(g.cur_lock.lockeds[0])
g.write('->mtx);')
} else {
- g.writeln('for (int $g.mtxs=${g.cur_lock.lockeds.len - 1}; $g.mtxs>=0; $g.mtxs--) {')
- g.writeln('\tif ($g.mtxs && _arr_$g.mtxs[$g.mtxs] == _arr_$g.mtxs[$g.mtxs-1]) continue;')
- g.writeln('\tif (_isrlck_$g.mtxs[$g.mtxs])')
- g.writeln('\t\tsync__RwMutex_runlock((sync__RwMutex*)_arr_$g.mtxs[$g.mtxs]);')
+ g.writeln('for (int ${g.mtxs}=${g.cur_lock.lockeds.len - 1}; ${g.mtxs}>=0; ${g.mtxs}--) {')
+ g.writeln('\tif (${g.mtxs} && _arr_${g.mtxs}[${g.mtxs}] == _arr_${g.mtxs}[${g.mtxs}-1]) continue;')
+ g.writeln('\tif (_isrlck_${g.mtxs}[${g.mtxs}])')
+ g.writeln('\t\tsync__RwMutex_runlock((sync__RwMutex*)_arr_${g.mtxs}[${g.mtxs}]);')
g.writeln('\telse')
- g.writeln('\t\tsync__RwMutex_unlock((sync__RwMutex*)_arr_$g.mtxs[$g.mtxs]);')
+ g.writeln('\t\tsync__RwMutex_unlock((sync__RwMutex*)_arr_${g.mtxs}[${g.mtxs}]);')
g.write('}')
}
}
@@ -3876,10 +3876,10 @@ fn (mut g Gen) map_init(node ast.MapInit) {
if g.is_shared {
mut shared_typ := node.typ.set_flag(.shared_f)
shared_styp = g.typ(shared_typ)
- g.writeln('($shared_styp*)__dup_shared_map(&($shared_styp){.mtx = {0}, .val =')
+ g.writeln('(${shared_styp}*)__dup_shared_map(&(${shared_styp}){.mtx = {0}, .val =')
} else if is_amp {
styp = g.typ(node.typ)
- g.write('($styp*)memdup(ADDR($styp, ')
+ g.write('(${styp}*)memdup(ADDR(${styp}, ')
}
noscan_key := g.check_noscan(node.key_type)
noscan_value := g.check_noscan(node.value_type)
@@ -3894,11 +3894,11 @@ fn (mut g Gen) map_init(node ast.MapInit) {
}
if size > 0 {
if value_sym.kind == .function {
- g.writeln('new_map_init${noscan}($hash_fn, $key_eq_fn, $clone_fn, $free_fn, $size, sizeof($key_typ_str), sizeof(voidptr),')
+ g.writeln('new_map_init${noscan}(${hash_fn}, ${key_eq_fn}, ${clone_fn}, ${free_fn}, ${size}, sizeof(${key_typ_str}), sizeof(voidptr),')
} else {
- g.writeln('new_map_init${noscan}($hash_fn, $key_eq_fn, $clone_fn, $free_fn, $size, sizeof($key_typ_str), sizeof($value_typ_str),')
+ g.writeln('new_map_init${noscan}(${hash_fn}, ${key_eq_fn}, ${clone_fn}, ${free_fn}, ${size}, sizeof(${key_typ_str}), sizeof(${value_typ_str}),')
}
- g.writeln('\t\t_MOV(($key_typ_str[$size]){')
+ g.writeln('\t\t_MOV((${key_typ_str}[${size}]){')
for expr in node.keys {
g.write('\t\t\t')
g.expr(expr)
@@ -3906,9 +3906,9 @@ fn (mut g Gen) map_init(node ast.MapInit) {
}
g.writeln('\t\t}),')
if value_sym.kind == .function {
- g.writeln('\t\t_MOV((voidptr[$size]){')
+ g.writeln('\t\t_MOV((voidptr[${size}]){')
} else {
- g.writeln('\t\t_MOV(($value_typ_str[$size]){')
+ g.writeln('\t\t_MOV((${value_typ_str}[${size}]){')
}
for i, expr in node.vals {
g.write('\t\t\t')
@@ -3925,13 +3925,13 @@ fn (mut g Gen) map_init(node ast.MapInit) {
g.writeln('\t\t})')
g.writeln('\t)')
} else {
- g.write('new_map${noscan}(sizeof($key_typ_str), sizeof($value_typ_str), $hash_fn, $key_eq_fn, $clone_fn, $free_fn)')
+ g.write('new_map${noscan}(sizeof(${key_typ_str}), sizeof(${value_typ_str}), ${hash_fn}, ${key_eq_fn}, ${clone_fn}, ${free_fn})')
}
g.writeln('')
if g.is_shared {
- g.write('}, sizeof($shared_styp))')
+ g.write('}, sizeof(${shared_styp}))')
} else if is_amp {
- g.write('), sizeof($styp))')
+ g.write('), sizeof(${styp}))')
}
}
@@ -3978,7 +3978,7 @@ fn (mut g Gen) select_expr(node ast.SelectExpr) {
tmp_obj := g.new_tmp_var()
tmp_objs << tmp_obj
el_stype := g.typ(ast.mktyp(expr.right_type))
- g.writeln('$el_stype $tmp_obj;')
+ g.writeln('${el_stype} ${tmp_obj};')
}
is_push << true
}
@@ -3997,7 +3997,7 @@ fn (mut g Gen) select_expr(node ast.SelectExpr) {
} else {
''
}
- g.writeln('$el_stype $tmp_obj;')
+ g.writeln('${el_stype} ${tmp_obj};')
} else {
tmp_objs << ''
elem_types << ''
@@ -4009,7 +4009,7 @@ fn (mut g Gen) select_expr(node ast.SelectExpr) {
}
}
chan_array := g.new_tmp_var()
- g.write('Array_sync__Channel_ptr $chan_array = new_array_from_c_array($n_channels, $n_channels, sizeof(sync__Channel*), _MOV((sync__Channel*[$n_channels]){')
+ g.write('Array_sync__Channel_ptr ${chan_array} = new_array_from_c_array(${n_channels}, ${n_channels}, sizeof(sync__Channel*), _MOV((sync__Channel*[${n_channels}]){')
for i in 0 .. n_channels {
if i > 0 {
g.write(', ')
@@ -4020,7 +4020,7 @@ fn (mut g Gen) select_expr(node ast.SelectExpr) {
}
g.writeln('}));\n')
directions_array := g.new_tmp_var()
- g.write('Array_sync__Direction $directions_array = new_array_from_c_array($n_channels, $n_channels, sizeof(sync__Direction), _MOV((sync__Direction[$n_channels]){')
+ g.write('Array_sync__Direction ${directions_array} = new_array_from_c_array(${n_channels}, ${n_channels}, sizeof(sync__Direction), _MOV((sync__Direction[${n_channels}]){')
for i in 0 .. n_channels {
if i > 0 {
g.write(', ')
@@ -4033,7 +4033,7 @@ fn (mut g Gen) select_expr(node ast.SelectExpr) {
}
g.writeln('}));\n')
objs_array := g.new_tmp_var()
- g.write('Array_voidptr $objs_array = new_array_from_c_array($n_channels, $n_channels, sizeof(voidptr), _MOV((voidptr[$n_channels]){')
+ g.write('Array_voidptr ${objs_array} = new_array_from_c_array(${n_channels}, ${n_channels}, sizeof(voidptr), _MOV((voidptr[${n_channels}]){')
for i in 0 .. n_channels {
if i > 0 {
g.write(', &')
@@ -4048,7 +4048,7 @@ fn (mut g Gen) select_expr(node ast.SelectExpr) {
}
g.writeln('}));\n')
select_result := g.new_tmp_var()
- g.write('int $select_result = sync__channel_select(&/*arr*/$chan_array, $directions_array, &/*arr*/$objs_array, ')
+ g.write('int ${select_result} = sync__channel_select(&/*arr*/${chan_array}, ${directions_array}, &/*arr*/${objs_array}, ')
if has_timeout {
g.expr(timeout_expr)
} else if has_else {
@@ -4058,19 +4058,19 @@ fn (mut g Gen) select_expr(node ast.SelectExpr) {
}
g.writeln(');')
// free the temps that were created
- g.writeln('array_free(&$objs_array);')
- g.writeln('array_free(&$directions_array);')
- g.writeln('array_free(&$chan_array);')
+ g.writeln('array_free(&${objs_array});')
+ g.writeln('array_free(&${directions_array});')
+ g.writeln('array_free(&${chan_array});')
mut i := 0
for j in 0 .. node.branches.len {
if j > 0 {
g.write('} else ')
}
- g.write('if ($select_result == ')
+ g.write('if (${select_result} == ')
if j == exception_branch {
g.writeln('-1) {')
} else {
- g.writeln('$i) {')
+ g.writeln('${i}) {')
if !is_push[i] && tmp_objs[i] != '' {
g.write('\t${elem_types[i]}')
g.expr(objs[i])
@@ -4084,7 +4084,7 @@ fn (mut g Gen) select_expr(node ast.SelectExpr) {
if is_expr {
g.empty_line = false
g.write(cur_line)
- g.write('($select_result != -2)')
+ g.write('(${select_result} != -2)')
}
}
@@ -4124,7 +4124,7 @@ fn (mut g Gen) ident(node ast.Ident) {
if node.info.is_optional && !(g.is_assign_lhs && g.right_is_opt) {
g.write('/*opt*/')
styp := g.base_type(node.info.typ)
- g.write('(*($styp*)${name}.data)')
+ g.write('(*(${styp}*)${name}.data)')
return
}
if !g.is_assign_lhs && node.info.share == .shared_t {
@@ -4150,7 +4150,7 @@ fn (mut g Gen) ident(node ast.Ident) {
cast_sym := g.table.sym(g.unwrap_generic(typ))
if obj_sym.kind == .interface_ && cast_sym.kind == .interface_ {
ptr := '*'.repeat(node.obj.typ.nr_muls())
- g.write('I_${obj_sym.cname}_as_I_${cast_sym.cname}($ptr$node.name)')
+ g.write('I_${obj_sym.cname}_as_I_${cast_sym.cname}(${ptr}${node.name})')
} else {
mut is_ptr := false
if i == 0 {
@@ -4162,9 +4162,9 @@ fn (mut g Gen) ident(node ast.Ident) {
dot := if is_ptr || is_auto_heap { '->' } else { '.' }
if cast_sym.info is ast.Aggregate {
sym := g.table.sym(cast_sym.info.types[g.aggregate_type_idx])
- g.write('${dot}_$sym.cname')
+ g.write('${dot}_${sym.cname}')
} else {
- g.write('${dot}_$cast_sym.cname')
+ g.write('${dot}_${cast_sym.cname}')
}
}
g.write(')')
@@ -4191,9 +4191,9 @@ fn (mut g Gen) ident(node ast.Ident) {
}
if g.pref.obfuscate && g.cur_mod.name == 'main' && name.starts_with('main__') {
key := node.name
- g.write('/* obf identfn: $key */')
+ g.write('/* obf identfn: ${key} */')
name = g.obf_table[key] or {
- panic('cgen: obf name "$key" not found, this should never happen')
+ panic('cgen: obf name "${key}" not found, this should never happen')
}
}
}
@@ -4211,7 +4211,7 @@ fn (mut g Gen) cast_expr(node ast.CastExpr) {
} else if sym.kind == .struct_ && !node.typ.is_ptr() && !(sym.info as ast.Struct).is_typedef {
// deprecated, replaced by Struct{...exr}
styp := g.typ(node.typ)
- g.write('*(($styp *)(&')
+ g.write('*((${styp} *)(&')
g.expr(node.expr)
g.write('))')
} else if sym.kind == .alias && g.table.final_sym(node.typ).kind == .array_fixed {
@@ -4221,7 +4221,7 @@ fn (mut g Gen) cast_expr(node ast.CastExpr) {
g.expr(node.expr)
} else if node.expr_type == ast.bool_type && node.typ.is_int() {
styp := g.typ(node_typ)
- g.write('($styp[]){(')
+ g.write('(${styp}[]){(')
g.expr(node.expr)
g.write(')?1:0}[0]')
} else {
@@ -4239,7 +4239,7 @@ fn (mut g Gen) cast_expr(node ast.CastExpr) {
// `ast.string_type` is done for MSVC's bug
if sym.kind != .alias
|| (sym.info as ast.Alias).parent_type !in [node.expr_type, ast.string_type] {
- cast_label = '($styp)'
+ cast_label = '(${styp})'
}
if node.typ.has_flag(.optional) && node.expr is ast.None {
g.gen_optional_error(node.typ, node.expr)
@@ -4276,9 +4276,9 @@ fn (mut g Gen) concat_expr(node ast.ConcatExpr) {
if !is_multi {
g.expr(node.vals[0])
} else {
- g.write('($styp){')
+ g.write('(${styp}){')
for i, expr in node.vals {
- g.write('.arg$i=')
+ g.write('.arg${i}=')
g.expr(expr)
if i < node.vals.len - 1 {
g.write(',')
@@ -4298,7 +4298,7 @@ fn (g &Gen) expr_is_multi_return_call(expr ast.Expr) bool {
fn (mut g Gen) gen_result_error(target_type ast.Type, expr ast.Expr) {
styp := g.typ(target_type)
- g.write('($styp){ .is_error=true, .err=')
+ g.write('(${styp}){ .is_error=true, .err=')
g.expr(expr)
g.write(', .data={EMPTY_STRUCT_INITIALIZATION} }')
}
@@ -4306,7 +4306,7 @@ fn (mut g Gen) gen_result_error(target_type ast.Type, expr ast.Expr) {
// NB: remove this when optional has no errors anymore
fn (mut g Gen) gen_optional_error(target_type ast.Type, expr ast.Expr) {
styp := g.typ(target_type)
- g.write('($styp){ .state=2, .err=')
+ g.write('(${styp}){ .state=2, .err=')
g.expr(expr)
g.write(', .data={EMPTY_STRUCT_INITIALIZATION} }')
}
@@ -4340,7 +4340,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
g.write_defer_stmts_when_needed()
if fn_return_is_optional || fn_return_is_result {
styp := g.typ(g.fn_decl.return_type)
- g.writeln('return ($styp){0};')
+ g.writeln('return (${styp}){0};')
} else {
if g.is_autofree {
g.trace_autofree('// free before return (no values returned)')
@@ -4362,7 +4362,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
if optional_none || is_regular_option || node.types[0] == ast.error_type_idx {
if g.fn_decl != unsafe { nil } && g.fn_decl.is_test {
test_error_var := g.new_tmp_var()
- g.write('$ret_typ $test_error_var = ')
+ g.write('${ret_typ} ${test_error_var} = ')
g.gen_optional_error(g.fn_decl.return_type, node.exprs[0])
g.writeln(';')
g.write_defer_stmts_when_needed()
@@ -4370,7 +4370,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
return
}
if use_tmp_var {
- g.write('$ret_typ $tmpvar = ')
+ g.write('${ret_typ} ${tmpvar} = ')
} else {
g.write('return ')
}
@@ -4378,7 +4378,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
g.writeln(';')
if use_tmp_var {
g.write_defer_stmts_when_needed()
- g.writeln('return $tmpvar;')
+ g.writeln('return ${tmpvar};')
}
return
}
@@ -4390,7 +4390,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
if is_regular_result || node.types[0] == ast.error_type_idx {
if g.fn_decl != unsafe { nil } && g.fn_decl.is_test {
test_error_var := g.new_tmp_var()
- g.write('$ret_typ $test_error_var = ')
+ g.write('${ret_typ} ${test_error_var} = ')
g.gen_result_error(g.fn_decl.return_type, node.exprs[0])
g.writeln(';')
g.write_defer_stmts_when_needed()
@@ -4398,7 +4398,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
return
}
if use_tmp_var {
- g.write('$ret_typ $tmpvar = ')
+ g.write('${ret_typ} ${tmpvar} = ')
} else {
g.write('return ')
}
@@ -4406,7 +4406,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
g.writeln(';')
if use_tmp_var {
g.write_defer_stmts_when_needed()
- g.writeln('return $tmpvar;')
+ g.writeln('return ${tmpvar};')
}
return
}
@@ -4415,27 +4415,27 @@ fn (mut g Gen) return_stmt(node ast.Return) {
if fn_return_is_multi && node.exprs.len > 0 && !g.expr_is_multi_return_call(node.exprs[0]) {
if node.exprs.len == 1 && (node.exprs[0] is ast.IfExpr || node.exprs[0] is ast.MatchExpr) {
// use a temporary for `return if cond { x,y } else { a,b }` or `return match expr { abc { x, y } else { z, w } }`
- g.write('$ret_typ $tmpvar = ')
+ g.write('${ret_typ} ${tmpvar} = ')
g.expr(node.exprs[0])
g.writeln(';')
g.write_defer_stmts_when_needed()
- g.writeln('return $tmpvar;')
+ g.writeln('return ${tmpvar};')
return
}
typ_sym := g.table.sym(g.fn_decl.return_type)
mr_info := typ_sym.info as ast.MultiReturn
mut styp := ''
if fn_return_is_optional {
- g.writeln('$ret_typ $tmpvar;')
+ g.writeln('${ret_typ} ${tmpvar};')
styp = g.base_type(g.fn_decl.return_type)
- g.write('_option_ok(&($styp[]) { ')
+ g.write('_option_ok(&(${styp}[]) { ')
} else if fn_return_is_result {
- g.writeln('$ret_typ $tmpvar;')
+ g.writeln('${ret_typ} ${tmpvar};')
styp = g.base_type(g.fn_decl.return_type)
- g.write('_result_ok(&($styp[]) { ')
+ g.write('_result_ok(&(${styp}[]) { ')
} else {
if use_tmp_var {
- g.write('$ret_typ $tmpvar = ')
+ g.write('${ret_typ} ${tmpvar} = ')
} else {
g.write('return ')
}
@@ -4443,7 +4443,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
}
// Use this to keep the tmp assignments in order
mut multi_unpack := ''
- g.write('($styp){')
+ g.write('(${styp}){')
mut arg_idx := 0
for i, expr in node.exprs {
// Check if we are dealing with a multi return and handle it seperately
@@ -4455,7 +4455,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
&& !call_expr.return_type.has_flag(.result) {
line := g.go_before_stmt(0)
expr_styp := g.typ(call_expr.return_type)
- g.write('$expr_styp $tmp=')
+ g.write('${expr_styp} ${tmp}=')
g.expr(expr)
g.writeln(';')
multi_unpack += g.go_before_stmt(0)
@@ -4467,11 +4467,11 @@ fn (mut g Gen) return_stmt(node ast.Return) {
multi_unpack += g.go_before_stmt(0)
g.write(line)
expr_styp := g.base_type(call_expr.return_type)
- tmp = ('(*($expr_styp*)${tmp}.data)')
+ tmp = ('(*(${expr_styp}*)${tmp}.data)')
}
expr_types := expr_sym.mr_info().types
for j, _ in expr_types {
- g.write('.arg$arg_idx=${tmp}.arg$j')
+ g.write('.arg${arg_idx}=${tmp}.arg${j}')
if j < expr_types.len || i < node.exprs.len - 1 {
g.write(',')
}
@@ -4479,7 +4479,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
}
continue
}
- g.write('.arg$arg_idx=')
+ g.write('.arg${arg_idx}=')
if expr.is_auto_deref_var() {
g.write('*')
}
@@ -4495,13 +4495,13 @@ fn (mut g Gen) return_stmt(node ast.Return) {
}
g.write('}')
if fn_return_is_optional {
- g.writeln(' }, ($c.option_name*)(&$tmpvar), sizeof($styp));')
+ g.writeln(' }, (${c.option_name}*)(&${tmpvar}), sizeof(${styp}));')
g.write_defer_stmts_when_needed()
- g.write('return $tmpvar')
+ g.write('return ${tmpvar}')
} else if fn_return_is_result {
- g.writeln(' }, ($c.result_name*)(&$tmpvar), sizeof($styp));')
+ g.writeln(' }, (${c.result_name}*)(&${tmpvar}), sizeof(${styp}));')
g.write_defer_stmts_when_needed()
- g.write('return $tmpvar')
+ g.write('return ${tmpvar}')
}
// Make sure to add our unpacks
if multi_unpack.len > 0 {
@@ -4512,12 +4512,12 @@ fn (mut g Gen) return_stmt(node ast.Return) {
g.writeln(';')
}
g.write_defer_stmts_when_needed()
- g.writeln('return $tmpvar;')
+ g.writeln('return ${tmpvar};')
has_semicolon = true
}
} else if node.exprs.len >= 1 {
if node.types.len == 0 {
- g.checker_bug('node.exprs.len == $node.exprs.len && node.types.len == 0',
+ g.checker_bug('node.exprs.len == ${node.exprs.len} && node.types.len == 0',
node.pos)
}
// normal return
@@ -4534,8 +4534,8 @@ fn (mut g Gen) return_stmt(node ast.Return) {
}
if fn_return_is_optional && !expr_type_is_opt && return_sym.name != c.option_name {
styp := g.base_type(g.fn_decl.return_type)
- g.writeln('$ret_typ $tmpvar;')
- g.write('_option_ok(&($styp[]) { ')
+ g.writeln('${ret_typ} ${tmpvar};')
+ g.write('_option_ok(&(${styp}[]) { ')
if !g.fn_decl.return_type.is_ptr() && node.types[0].is_ptr() {
if !(node.exprs[0] is ast.Ident && !g.is_amp) {
g.write('*')
@@ -4547,10 +4547,10 @@ fn (mut g Gen) return_stmt(node ast.Return) {
g.write(', ')
}
}
- g.writeln(' }, ($c.option_name*)(&$tmpvar), sizeof($styp));')
+ g.writeln(' }, (${c.option_name}*)(&${tmpvar}), sizeof(${styp}));')
g.write_defer_stmts_when_needed()
g.autofree_scope_vars(node.pos.pos - 1, node.pos.line_nr, true)
- g.writeln('return $tmpvar;')
+ g.writeln('return ${tmpvar};')
return
}
expr_type_is_result := match expr0 {
@@ -4563,8 +4563,8 @@ fn (mut g Gen) return_stmt(node ast.Return) {
}
if fn_return_is_result && !expr_type_is_result && return_sym.name != c.result_name {
styp := g.base_type(g.fn_decl.return_type)
- g.writeln('$ret_typ $tmpvar;')
- g.write('_result_ok(&($styp[]) { ')
+ g.writeln('${ret_typ} ${tmpvar};')
+ g.write('_result_ok(&(${styp}[]) { ')
if !g.fn_decl.return_type.is_ptr() && node.types[0].is_ptr() {
if !(node.exprs[0] is ast.Ident && !g.is_amp) {
g.write('*')
@@ -4576,10 +4576,10 @@ fn (mut g Gen) return_stmt(node ast.Return) {
g.write(', ')
}
}
- g.writeln(' }, ($c.result_name*)(&$tmpvar), sizeof($styp));')
+ g.writeln(' }, (${c.result_name}*)(&${tmpvar}), sizeof(${styp}));')
g.write_defer_stmts_when_needed()
g.autofree_scope_vars(node.pos.pos - 1, node.pos.line_nr, true)
- g.writeln('return $tmpvar;')
+ g.writeln('return ${tmpvar};')
return
}
// autofree before `return`
@@ -4602,7 +4602,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
// be modified.
if node.exprs[0] !is ast.Ident || use_tmp_var {
use_tmp_var = true
- g.write('$ret_typ $tmpvar = ')
+ g.write('${ret_typ} ${tmpvar} = ')
} else {
use_tmp_var = false
if !g.is_builtin_mod {
@@ -4634,7 +4634,7 @@ fn (mut g Gen) return_stmt(node ast.Return) {
if !g.is_builtin_mod {
g.autofree_scope_vars(node.pos.pos - 1, node.pos.line_nr, true)
}
- g.write('return $tmpvar')
+ g.write('return ${tmpvar}')
has_semicolon = false
}
} else { // if node.exprs.len == 0 {
@@ -4655,7 +4655,7 @@ fn (mut g Gen) const_decl(node ast.ConstDecl) {
if g.pref.skip_unused {
if field.name !in g.table.used_consts {
$if trace_skip_unused_consts ? {
- eprintln('>> skipping unused const name: $field.name')
+ eprintln('>> skipping unused const name: ${field.name}')
}
continue
}
@@ -4675,7 +4675,7 @@ fn (mut g Gen) const_decl(node ast.ConstDecl) {
val := g.expr_string(field.expr)
g.global_const_defs[util.no_dots(field.name)] = GlobalConstDef{
mod: field.mod
- def: '$styp $const_name = $val; // fixed array const'
+ def: '${styp} ${const_name} = ${val}; // fixed array const'
dep_names: g.table.dependent_names_in_expr(field_expr)
}
} else {
@@ -4686,8 +4686,8 @@ fn (mut g Gen) const_decl(node ast.ConstDecl) {
val := g.expr_string(field.expr)
g.global_const_defs[util.no_dots(field.name)] = GlobalConstDef{
mod: field.mod
- def: 'string $const_name; // a string literal, inited later'
- init: '\t$const_name = $val;'
+ def: 'string ${const_name}; // a string literal, inited later'
+ init: '\t${const_name} = ${val};'
order: -1
}
}
@@ -4734,7 +4734,7 @@ fn (mut g Gen) const_decl(node ast.ConstDecl) {
val := g.expr_string(field.expr.expr)
g.global_const_defs[util.no_dots(field.name)] = GlobalConstDef{
mod: field.mod
- def: '$styp $const_name = $val; // fixed array const'
+ def: '${styp} ${const_name} = ${val}; // fixed array const'
dep_names: g.table.dependent_names_in_expr(field_expr)
}
continue
@@ -4751,9 +4751,9 @@ fn (mut g Gen) const_decl(node ast.ConstDecl) {
fn (mut g Gen) const_decl_precomputed(mod string, name string, field_name string, ct_value ast.ComptTimeConstValue, typ ast.Type) bool {
mut styp := g.typ(typ)
- cname := if g.pref.translated && !g.is_builtin_mod { name } else { '_const_$name' }
+ cname := if g.pref.translated && !g.is_builtin_mod { name } else { '_const_${name}' }
$if trace_const_precomputed ? {
- eprintln('> styp: $styp | cname: $cname | ct_value: $ct_value | $ct_value.type_name()')
+ eprintln('> styp: ${styp} | cname: ${cname} | ct_value: ${ct_value} | ${ct_value.type_name()}')
}
match ct_value {
i8 {
@@ -4809,7 +4809,7 @@ fn (mut g Gen) const_decl_precomputed(mod string, name string, field_name string
return false
}
escval := util.smart_quote(u8(rune_code).ascii_str(), false)
- g.const_decl_write_precomputed(mod, styp, cname, field_name, "'$escval'")
+ g.const_decl_write_precomputed(mod, styp, cname, field_name, "'${escval}'")
} else {
g.const_decl_write_precomputed(mod, styp, cname, field_name, u32(ct_value).str())
}
@@ -4822,16 +4822,16 @@ fn (mut g Gen) const_decl_precomputed(mod string, name string, field_name string
// so fall back to the delayed initialisation scheme:
g.global_const_defs[util.no_dots(field_name)] = GlobalConstDef{
mod: mod
- def: '$styp $cname; // str inited later'
- init: '\t$cname = _SLIT("$escaped_val");'
+ def: '${styp} ${cname}; // str inited later'
+ init: '\t${cname} = _SLIT("${escaped_val}");'
order: -1
}
if g.is_autofree {
- g.cleanups[mod].writeln('\tstring_free(&$cname);')
+ g.cleanups[mod].writeln('\tstring_free(&${cname});')
}
}
voidptr {
- g.const_decl_write_precomputed(mod, styp, cname, field_name, '(voidptr)(0x$ct_value)')
+ g.const_decl_write_precomputed(mod, styp, cname, field_name, '(voidptr)(0x${ct_value})')
}
ast.EmptyExpr {
return false
@@ -4848,14 +4848,14 @@ fn (mut g Gen) const_decl_write_precomputed(mod string, styp string, cname strin
// If you change it, please also test with `v -live run examples/hot_reload/graph.v` which uses `math.pi` .
g.global_const_defs[util.no_dots(field_name)] = GlobalConstDef{
mod: mod
- def: '#define $cname $ct_value // precomputed3, -live mode'
+ def: '#define ${cname} ${ct_value} // precomputed3, -live mode'
order: -1
}
return
}
g.global_const_defs[util.no_dots(field_name)] = GlobalConstDef{
mod: mod
- def: '$g.static_modifier const $styp $cname = $ct_value; // precomputed2'
+ def: '${g.static_modifier} const ${styp} ${cname} = ${ct_value}; // precomputed2'
// is_precomputed: true
}
}
@@ -4873,18 +4873,18 @@ fn (mut g Gen) const_decl_simple_define(mod string, name string, val string) {
x = x['main__'.len..]
}
} else {
- x = '_const_$x'
+ x = '_const_${x}'
}
if g.pref.translated {
g.global_const_defs[util.no_dots(name)] = GlobalConstDef{
mod: mod
- def: 'const int $x = $val;'
+ def: 'const int ${x} = ${val};'
order: -1
}
} else {
g.global_const_defs[util.no_dots(name)] = GlobalConstDef{
mod: mod
- def: '#define $x $val'
+ def: '#define ${x} ${val}'
order: -1
}
}
@@ -4894,7 +4894,7 @@ fn (mut g Gen) const_decl_init_later(mod string, name string, expr ast.Expr, typ
// Initialize more complex consts in `void _vinit/2{}`
// (C doesn't allow init expressions that can't be resolved at compile time).
mut styp := g.typ(typ)
- cname := if g.pref.translated && !g.is_builtin_mod { name } else { '_const_$name' }
+ cname := if g.pref.translated && !g.is_builtin_mod { name } else { '_const_${name}' }
mut init := strings.new_builder(100)
if cname == '_const_os__args' {
if g.pref.os == .windows {
@@ -4905,15 +4905,15 @@ fn (mut g Gen) const_decl_init_later(mod string, name string, expr ast.Expr, typ
} else {
if unwrap_option {
init.writeln('{')
- init.writeln(g.expr_string_surround('\t$cname = *($styp*)', expr, '.data;'))
+ init.writeln(g.expr_string_surround('\t${cname} = *(${styp}*)', expr, '.data;'))
init.writeln('}')
} else {
- init.writeln(g.expr_string_surround('\t$cname = ', expr, ';'))
+ init.writeln(g.expr_string_surround('\t${cname} = ', expr, ';'))
}
}
g.global_const_defs[util.no_dots(name)] = GlobalConstDef{
mod: mod
- def: '$styp $cname; // inited later'
+ def: '${styp} ${cname}; // inited later'
init: init.str().trim_right('\n')
dep_names: g.table.dependent_names_in_expr(expr)
}
@@ -4921,16 +4921,16 @@ fn (mut g Gen) const_decl_init_later(mod string, name string, expr ast.Expr, typ
sym := g.table.sym(typ)
if styp.starts_with('Array_') {
if sym.has_method_with_generic_parent('free') {
- g.cleanup.writeln('\t${styp}_free(&$cname);')
+ g.cleanup.writeln('\t${styp}_free(&${cname});')
} else {
- g.cleanup.writeln('\tarray_free(&$cname);')
+ g.cleanup.writeln('\tarray_free(&${cname});')
}
} else if styp == 'string' {
- g.cleanup.writeln('\tstring_free(&$cname);')
+ g.cleanup.writeln('\tstring_free(&${cname});')
} else if sym.kind == .map {
- g.cleanup.writeln('\tmap_free(&$cname);')
+ g.cleanup.writeln('\tmap_free(&${cname});')
} else if styp == 'IError' {
- g.cleanup.writeln('\tIError_free(&$cname);')
+ g.cleanup.writeln('\tIError_free(&${cname});')
}
}
}
@@ -4945,7 +4945,7 @@ fn (mut g Gen) global_decl(node ast.GlobalDecl) {
'extern '
} else {
//''
- '$g.static_modifier ' // TODO used to be '' before parallel_cc, may cause issues
+ '${g.static_modifier} ' // TODO used to be '' before parallel_cc, may cause issues
}
// should the global be initialized now, not later in `vinit()`
cinit := node.attrs.contains('cinit')
@@ -4960,7 +4960,7 @@ fn (mut g Gen) global_decl(node ast.GlobalDecl) {
if g.pref.skip_unused {
if field.name !in g.table.used_globals {
$if trace_skip_unused_globals ? {
- eprintln('>> skipping unused global name: $field.name')
+ eprintln('>> skipping unused global name: ${field.name}')
}
continue
}
@@ -4972,7 +4972,7 @@ fn (mut g Gen) global_decl(node ast.GlobalDecl) {
fn_type_name := g.get_anon_fn_type_name(mut anon_fn_expr, field.name)
g.global_const_defs[util.no_dots(fn_type_name)] = GlobalConstDef{
mod: node.mod
- def: '$fn_type_name = ${g.table.sym(field.typ).name}; // global2'
+ def: '${fn_type_name} = ${g.table.sym(field.typ).name}; // global2'
order: -1
}
continue
@@ -4981,7 +4981,7 @@ fn (mut g Gen) global_decl(node ast.GlobalDecl) {
mut init := ''
extern := if cextern { 'extern ' } else { '' }
modifier := if field.is_volatile { ' volatile ' } else { '' }
- def_builder.write_string('$extern$visibility_kw$modifier$styp $attributes $field.name')
+ def_builder.write_string('${extern}${visibility_kw}${modifier}${styp} ${attributes} ${field.name}')
if cextern {
def_builder.writeln('; // global5')
g.global_const_defs[util.no_dots(field.name)] = GlobalConstDef{
@@ -5013,7 +5013,7 @@ fn (mut g Gen) global_decl(node ast.GlobalDecl) {
} else {
// More complex expressions need to be moved to `_vinit()`
// e.g. `__global ( mygblobal = 'hello ' + world' )`
- init = '\t$field.name = ${g.expr_string(field.expr)}; // 3global'
+ init = '\t${field.name} = ${g.expr_string(field.expr)}; // 3global'
}
} else if !g.pref.translated { // don't zero globals from C code
default_initializer := g.type_default(field.typ)
@@ -5021,7 +5021,7 @@ fn (mut g Gen) global_decl(node ast.GlobalDecl) {
def_builder.write_string(' = {0}')
} else {
if field.name !in ['as_cast_type_indexes', 'g_memory_block', 'global_allocator'] {
- init = '\t$field.name = *($styp*)&(($styp[]){${g.type_default(field.typ)}}[0]); // global'
+ init = '\t${field.name} = *(${styp}*)&((${styp}[]){${g.type_default(field.typ)}}[0]); // global'
}
}
}
@@ -5041,7 +5041,7 @@ fn (mut g Gen) assoc(node ast.Assoc) {
return
}
styp := g.typ(node.typ)
- g.writeln('($styp){')
+ g.writeln('(${styp}){')
mut inited_fields := map[string]int{}
for i, field in node.fields {
inited_fields[field] = i
@@ -5052,16 +5052,16 @@ fn (mut g Gen) assoc(node ast.Assoc) {
for field in info.fields {
field_name := c_name(field.name)
if field.name in inited_fields {
- g.write('\t.$field_name = ')
+ g.write('\t.${field_name} = ')
g.expr(node.exprs[inited_fields[field.name]])
g.writeln(', ')
} else {
- g.writeln('\t.$field_name = ${node.var_name}.$field_name,')
+ g.writeln('\t.${field_name} = ${node.var_name}.${field_name},')
}
}
g.write('}')
if g.is_amp {
- g.write(', sizeof($styp))')
+ g.write(', sizeof(${styp}))')
}
}
@@ -5077,7 +5077,7 @@ fn (g &Gen) error(s string, pos token.Pos) {
}
fn (g &Gen) checker_bug(s string, pos token.Pos) {
- g.error('checker bug; $s', pos)
+ g.error('checker bug; ${s}', pos)
}
fn (mut g Gen) write_init_function() {
@@ -5128,7 +5128,7 @@ fn (mut g Gen) write_init_function() {
if var.mod == mod_name && var.init.len > 0 {
if is_empty {
is_empty = false
- g.writeln('\t// Initializations for module $mod_name')
+ g.writeln('\t// Initializations for module ${mod_name}')
}
g.writeln(var.init)
}
@@ -5138,7 +5138,7 @@ fn (mut g Gen) write_init_function() {
if initfn := g.table.find_fn(init_fn_name) {
if initfn.return_type == ast.void_type && initfn.params.len == 0 {
if is_empty {
- g.writeln('\t// Initializations for module $mod_name')
+ g.writeln('\t// Initializations for module ${mod_name}')
}
mod_c_name := util.no_dots(mod_name)
init_fn_c_name := '${mod_c_name}__init'
@@ -5160,7 +5160,7 @@ fn (mut g Gen) write_init_function() {
// g.writeln('puts("cleaning up...");')
reversed_table_modules := g.table.modules.reverse()
for mod_name in reversed_table_modules {
- g.writeln('\t// Cleanups for module $mod_name :')
+ g.writeln('\t// Cleanups for module ${mod_name} :')
g.writeln(g.cleanups[mod_name].str())
}
g.writeln('\tarray_free(&as_cast_type_indexes);')
@@ -5257,17 +5257,17 @@ fn (mut g Gen) write_types(symbols []&ast.TypeSymbol) {
if !g.pref.is_bare && !g.pref.no_builtin {
if g.pref.os == .windows {
if name == '__v_thread' {
- g.thread_definitions.writeln('typedef HANDLE $name;')
+ g.thread_definitions.writeln('typedef HANDLE ${name};')
} else {
// Windows can only return `u32` (no void*) from a thread, so the
// V gohandle must maintain a pointer to the return value
g.thread_definitions.writeln('typedef struct {')
g.thread_definitions.writeln('\tvoid* ret_ptr;')
g.thread_definitions.writeln('\tHANDLE handle;')
- g.thread_definitions.writeln('} $name;')
+ g.thread_definitions.writeln('} ${name};')
}
} else {
- g.thread_definitions.writeln('typedef pthread_t $name;')
+ g.thread_definitions.writeln('typedef pthread_t ${name};')
}
}
}
@@ -5275,13 +5275,13 @@ fn (mut g Gen) write_types(symbols []&ast.TypeSymbol) {
if sym.info.is_generic {
continue
}
- g.typedefs.writeln('typedef struct $name $name;')
+ g.typedefs.writeln('typedef struct ${name} ${name};')
g.type_definitions.writeln('')
- g.type_definitions.writeln('// Union sum type $name = ')
+ g.type_definitions.writeln('// Union sum type ${name} = ')
for variant in sym.info.variants {
g.type_definitions.writeln('// | ${variant:4d} = ${g.typ(variant.idx()):-20s}')
}
- g.type_definitions.writeln('struct $name {')
+ g.type_definitions.writeln('struct ${name} {')
g.type_definitions.writeln('\tunion {')
for variant in sym.info.variants {
variant_sym := g.table.sym(variant)
@@ -5291,14 +5291,14 @@ fn (mut g Gen) write_types(symbols []&ast.TypeSymbol) {
var = variant
}
}
- g.type_definitions.writeln('\t\t${g.typ(var)} _$variant_sym.cname;')
+ g.type_definitions.writeln('\t\t${g.typ(var)} _${variant_sym.cname};')
}
g.type_definitions.writeln('\t};')
g.type_definitions.writeln('\tint _typ;')
if sym.info.fields.len > 0 {
g.writeln('\t// pointers to common sumtype fields')
for field in sym.info.fields {
- g.type_definitions.writeln('\t${g.typ(field.typ.ref())} $field.name;')
+ g.type_definitions.writeln('\t${g.typ(field.typ.ref())} ${field.name};')
}
}
g.type_definitions.writeln('};')
@@ -5324,11 +5324,11 @@ fn (mut g Gen) write_types(symbols []&ast.TypeSymbol) {
pos := g.out.len
g.write_fn_ptr_decl(&elem_sym.info, '')
fixed_elem_name = g.out.cut_to(pos)
- mut def_str := 'typedef $fixed_elem_name;'
- def_str = def_str.replace_once('(*)', '(*$styp[$len])')
+ mut def_str := 'typedef ${fixed_elem_name};'
+ def_str = def_str.replace_once('(*)', '(*${styp}[${len}])')
g.type_definitions.writeln(def_str)
} else {
- g.type_definitions.writeln('typedef $fixed_elem_name $styp [$len];')
+ g.type_definitions.writeln('typedef ${fixed_elem_name} ${styp} [${len}];')
}
}
}
@@ -5553,7 +5553,7 @@ fn (mut g Gen) or_block(var_name string, or_block ast.OrExpr, return_type ast.Ty
}
}
} else {
- g.write('*($mr_styp*) ${cvar_name}.data = ')
+ g.write('*(${mr_styp}*) ${cvar_name}.data = ')
old_inside_opt_data := g.inside_opt_data
g.inside_opt_data = true
g.expr_with_cast(expr_stmt.expr, expr_stmt.typ, return_type.clear_flag(.optional).clear_flag(.result))
@@ -5580,9 +5580,9 @@ fn (mut g Gen) or_block(var_name string, or_block ast.OrExpr, return_type ast.Ty
err_msg := 'IError_name_table[${cvar_name}.err._typ]._method_msg(${cvar_name}.err._object)'
if g.pref.is_debug {
paline, pafile, pamod, pafn := g.panic_debug_info(or_block.pos)
- g.writeln('panic_debug($paline, tos3("$pafile"), tos3("$pamod"), tos3("$pafn"), $err_msg);')
+ g.writeln('panic_debug(${paline}, tos3("${pafile}"), tos3("${pamod}"), tos3("${pafn}"), ${err_msg});')
} else {
- g.writeln('\tpanic_result_not_set($err_msg);')
+ g.writeln('\tpanic_result_not_set(${err_msg});')
}
} else if g.fn_decl != unsafe { nil } && g.fn_decl.is_test {
g.gen_failing_error_propagation_for_test_fn(or_block, cvar_name)
@@ -5598,9 +5598,9 @@ fn (mut g Gen) or_block(var_name string, or_block ast.OrExpr, return_type ast.Ty
} else {
styp := g.typ(g.fn_decl.return_type)
err_obj := g.new_tmp_var()
- g.writeln('\t$styp $err_obj;')
- g.writeln('\tmemcpy(&$err_obj, &$cvar_name, sizeof($c.result_name));')
- g.writeln('\treturn $err_obj;')
+ g.writeln('\t${styp} ${err_obj};')
+ g.writeln('\tmemcpy(&${err_obj}, &${cvar_name}, sizeof(${c.result_name}));')
+ g.writeln('\treturn ${err_obj};')
}
}
} else if or_block.kind == .propagate_option {
@@ -5609,9 +5609,9 @@ fn (mut g Gen) or_block(var_name string, or_block ast.OrExpr, return_type ast.Ty
err_msg := 'IError_name_table[${cvar_name}.err._typ]._method_msg(${cvar_name}.err._object)'
if g.pref.is_debug {
paline, pafile, pamod, pafn := g.panic_debug_info(or_block.pos)
- g.writeln('panic_debug($paline, tos3("$pafile"), tos3("$pamod"), tos3("$pafn"), $err_msg );')
+ g.writeln('panic_debug(${paline}, tos3("${pafile}"), tos3("${pamod}"), tos3("${pafn}"), ${err_msg} );')
} else {
- g.writeln('\tpanic_optional_not_set( $err_msg );')
+ g.writeln('\tpanic_optional_not_set( ${err_msg} );')
}
} else if g.fn_decl != unsafe { nil } && g.fn_decl.is_test {
g.gen_failing_error_propagation_for_test_fn(or_block, cvar_name)
@@ -5627,9 +5627,9 @@ fn (mut g Gen) or_block(var_name string, or_block ast.OrExpr, return_type ast.Ty
} else {
styp := g.typ(g.fn_decl.return_type)
err_obj := g.new_tmp_var()
- g.writeln('\t$styp $err_obj;')
- g.writeln('\tmemcpy(&$err_obj, &$cvar_name, sizeof(_option));')
- g.writeln('\treturn $err_obj;')
+ g.writeln('\t${styp} ${err_obj};')
+ g.writeln('\tmemcpy(&${err_obj}, &${cvar_name}, sizeof(_option));')
+ g.writeln('\treturn ${err_obj};')
}
}
}
@@ -5641,7 +5641,7 @@ fn (mut g Gen) or_block(var_name string, or_block ast.OrExpr, return_type ast.Ty
fn c_name(name_ string) string {
name := util.no_dots(name_)
if c.c_reserved_chk.matches(name) {
- return '_v_$name'
+ return '_v_${name}'
}
return name
}
@@ -5674,7 +5674,7 @@ fn (mut g Gen) type_default(typ_ ast.Type) string {
elem_type := sym.chan_info().elem_type
elemtypstr := g.typ(elem_type)
noscan := g.check_noscan(elem_type)
- return 'sync__new_channel_st${noscan}(0, sizeof($elemtypstr))'
+ return 'sync__new_channel_st${noscan}(0, sizeof(${elemtypstr}))'
}
.array {
elem_typ := sym.array_info().elem_type
@@ -5684,10 +5684,10 @@ fn (mut g Gen) type_default(typ_ ast.Type) string {
elem_type_str = elem_type_str[3..]
}
noscan := g.check_noscan(elem_typ)
- init_str := '__new_array${noscan}(0, 0, sizeof($elem_type_str))'
+ init_str := '__new_array${noscan}(0, 0, sizeof(${elem_type_str}))'
if typ.has_flag(.shared_f) {
- atyp := '__shared__$sym.cname'
- return '($atyp*)__dup_shared_array(&($atyp){.mtx = {0}, .val =$init_str}, sizeof($atyp))'
+ atyp := '__shared__${sym.cname}'
+ return '(${atyp}*)__dup_shared_array(&(${atyp}){.mtx = {0}, .val =${init_str}}, sizeof(${atyp}))'
} else {
return init_str
}
@@ -5707,10 +5707,10 @@ fn (mut g Gen) type_default(typ_ ast.Type) string {
noscan += '_value'
}
}
- init_str := 'new_map${noscan}(sizeof(${g.typ(info.key_type)}), sizeof(${g.typ(info.value_type)}), $hash_fn, $key_eq_fn, $clone_fn, $free_fn)'
+ init_str := 'new_map${noscan}(sizeof(${g.typ(info.key_type)}), sizeof(${g.typ(info.value_type)}), ${hash_fn}, ${key_eq_fn}, ${clone_fn}, ${free_fn})'
if typ.has_flag(.shared_f) {
mtyp := '__shared__Map_${key_typ.cname}_${g.table.sym(info.value_type).cname}'
- return '($mtyp*)__dup_shared_map(&($mtyp){.mtx = {0}, .val =$init_str}, sizeof($mtyp))'
+ return '(${mtyp}*)__dup_shared_map(&(${mtyp}){.mtx = {0}, .val =${init_str}}, sizeof(${mtyp}))'
} else {
return init_str
}
@@ -5733,7 +5733,7 @@ fn (mut g Gen) type_default(typ_ ast.Type) string {
} else {
expr_str = g.expr_string(field.default_expr)
}
- init_str += '.$field_name = $expr_str,'
+ init_str += '.${field_name} = ${expr_str},'
} else {
mut zero_str := g.type_default(field.typ)
if zero_str == '{0}' {
@@ -5744,7 +5744,7 @@ fn (mut g Gen) type_default(typ_ ast.Type) string {
}
}
}
- init_str += '.$field_name = $zero_str,'
+ init_str += '.${field_name} = ${zero_str},'
}
has_none_zero = true
}
@@ -5767,7 +5767,7 @@ fn (mut g Gen) type_default(typ_ ast.Type) string {
}
if typ_is_shared_f {
styp := '__shared__${g.table.sym(typ).cname}'
- return '($styp*)__dup${styp}(&($styp){.mtx = {0}, .val = $init_str}, sizeof($styp))'
+ return '(${styp}*)__dup${styp}(&(${styp}){.mtx = {0}, .val = ${init_str}}, sizeof(${styp}))'
} else {
return init_str
}
@@ -5812,11 +5812,11 @@ fn (mut g Gen) size_of(node ast.SizeOf) {
node_typ := g.unwrap_generic(typ)
sym := g.table.sym(node_typ)
if sym.language == .v && sym.kind in [.placeholder, .any] {
- g.error('unknown type `$sym.name`', node.pos)
+ g.error('unknown type `${sym.name}`', node.pos)
}
if node.expr is ast.StringLiteral {
if node.expr.language == .c {
- g.write('sizeof("$node.expr.val")')
+ g.write('sizeof("${node.expr.val}")')
return
}
}
@@ -5833,9 +5833,9 @@ fn (mut g Gen) enum_val(node ast.EnumVal) {
// Mostly in translated code, when C enums are used as ints in switches
// sym := g.table.sym(node.typ)
// g.write('/* $node enum val is_number $node.mod styp=$styp sym=$sym*/_const_main__$node.val')
- g.write('_const_main__$node.val')
+ g.write('_const_main__${node.val}')
} else {
- g.write('${styp}__$node.val')
+ g.write('${styp}__${node.val}')
}
}
@@ -5850,22 +5850,22 @@ fn (mut g Gen) as_cast(node ast.AsCast) {
if mut expr_type_sym.info is ast.SumType {
dot := if node.expr_type.is_ptr() { '->' } else { '.' }
if sym.info is ast.FnType {
- g.write('/* as */ ($styp)__as_cast(')
+ g.write('/* as */ (${styp})__as_cast(')
} else {
- g.write('/* as */ *($styp*)__as_cast(')
+ g.write('/* as */ *(${styp}*)__as_cast(')
}
g.write('(')
g.expr(node.expr)
g.write(')')
g.write(dot)
- g.write('_$sym.cname,')
+ g.write('_${sym.cname},')
g.write('(')
g.expr(node.expr)
g.write(')')
g.write(dot)
// g.write('typ, /*expected:*/$node.typ)')
sidx := g.type_sidx(unwrapped_node_typ)
- g.write('_typ, $sidx) /*expected idx: $sidx, name: $sym.name */ ')
+ g.write('_typ, ${sidx}) /*expected idx: ${sidx}, name: ${sym.name} */ ')
// fill as cast name table
for variant in expr_type_sym.info.variants {
@@ -5902,10 +5902,10 @@ fn (g Gen) as_cast_name_table() string {
}
mut name_ast := strings.new_builder(1024)
casts_len := g.as_cast_type_names.len + 1
- name_ast.writeln('new_array_from_c_array($casts_len, $casts_len, sizeof(VCastTypeIndexName), _MOV((VCastTypeIndexName[$casts_len]){')
+ name_ast.writeln('new_array_from_c_array(${casts_len}, ${casts_len}, sizeof(VCastTypeIndexName), _MOV((VCastTypeIndexName[${casts_len}]){')
name_ast.writeln('\t\t (VCastTypeIndexName){.tindex = 0, .tname = _SLIT("unknown")}')
for key, value in g.as_cast_type_names {
- name_ast.writeln('\t\t, (VCastTypeIndexName){.tindex = $key, .tname = _SLIT("$value")}')
+ name_ast.writeln('\t\t, (VCastTypeIndexName){.tindex = ${key}, .tname = _SLIT("${value}")}')
}
name_ast.writeln('\t}));\n')
return name_ast.str()
@@ -5941,16 +5941,16 @@ fn (mut g Gen) interface_table() string {
// generate a struct that references interface methods
methods_struct_name := 'struct _${interface_name}_interface_methods'
mut methods_struct_def := strings.new_builder(100)
- methods_struct_def.writeln('$methods_struct_name {')
+ methods_struct_def.writeln('${methods_struct_name} {')
mut methodidx := map[string]int{}
for k, method in inter_info.methods {
methodidx[method.name] = k
ret_styp := g.typ(method.return_type)
- methods_struct_def.write_string('\t$ret_styp (*_method_${c_name(method.name)})(void* _')
+ methods_struct_def.write_string('\t${ret_styp} (*_method_${c_name(method.name)})(void* _')
// the first param is the receiver, it's handled by `void*` above
for i in 1 .. method.params.len {
arg := method.params[i]
- methods_struct_def.write_string(', ${g.typ(arg.typ)} $arg.name')
+ methods_struct_def.write_string(', ${g.typ(arg.typ)} ${arg.name}')
}
// TODO g.fn_args(method.args[1..])
methods_struct_def.writeln(');')
@@ -5963,17 +5963,17 @@ fn (mut g Gen) interface_table() string {
iname_table_length := inter_info.types.len
if iname_table_length == 0 {
// msvc can not process `static struct x[0] = {};`
- methods_struct.writeln('$g.static_modifier $methods_struct_name ${interface_name}_name_table[1];')
+ methods_struct.writeln('${g.static_modifier} ${methods_struct_name} ${interface_name}_name_table[1];')
} else {
if g.pref.build_mode != .build_module {
- methods_struct.writeln('$g.static_modifier $methods_struct_name ${interface_name}_name_table[$iname_table_length] = {')
+ methods_struct.writeln('${g.static_modifier} ${methods_struct_name} ${interface_name}_name_table[${iname_table_length}] = {')
} else {
- methods_struct.writeln('$g.static_modifier $methods_struct_name ${interface_name}_name_table[$iname_table_length];')
+ methods_struct.writeln('${g.static_modifier} ${methods_struct_name} ${interface_name}_name_table[${iname_table_length}];')
}
}
mut cast_functions := strings.new_builder(100)
mut methods_wrapper := strings.new_builder(100)
- methods_wrapper.writeln('// Methods wrapper for interface "$interface_name"')
+ methods_wrapper.writeln('// Methods wrapper for interface "${interface_name}"')
mut already_generated_mwrappers := map[string]int{}
iinidx_minimum_base := 1000 // Note: NOT 0, to avoid map entries set to 0 later, so `if already_generated_mwrappers[name] > 0 {` works.
mut current_iinidx := iinidx_minimum_base
@@ -5983,7 +5983,7 @@ fn (mut g Gen) interface_table() string {
// i.e. cctype is always just Cat, not Cat_ptr:
cctype := g.cc_type(ast.mktyp(st), true)
$if debug_interface_table ? {
- eprintln('>> interface name: $isym.name | concrete type: $st.debug() | st symname: $st_sym.name')
+ eprintln('>> interface name: ${isym.name} | concrete type: ${st.debug()} | st symname: ${st_sym.name}')
}
// Speaker_Cat_index = 0
interface_index_name := '_${interface_name}_${cctype}_index'
@@ -5994,23 +5994,23 @@ fn (mut g Gen) interface_table() string {
current_iinidx++
if isym.name != 'vweb.DbInterface' { // TODO remove this
// eprintln('>>> current_iinidx: ${current_iinidx-iinidx_minimum_base} | interface_index_name: $interface_index_name')
- sb.writeln('static $interface_name I_${cctype}_to_Interface_${interface_name}($cctype* x);')
+ sb.writeln('static ${interface_name} I_${cctype}_to_Interface_${interface_name}(${cctype}* x);')
mut cast_struct := strings.new_builder(100)
- cast_struct.writeln('($interface_name) {')
- cast_struct.writeln('\t\t._$cctype = x,')
- cast_struct.writeln('\t\t._typ = $interface_index_name,')
+ cast_struct.writeln('(${interface_name}) {')
+ cast_struct.writeln('\t\t._${cctype} = x,')
+ cast_struct.writeln('\t\t._typ = ${interface_index_name},')
for field in inter_info.fields {
cname := c_name(field.name)
field_styp := g.typ(field.typ)
if _ := st_sym.find_field(field.name) {
- cast_struct.writeln('\t\t.$cname = ($field_styp*)((char*)x + __offsetof_ptr(x, $cctype, $cname)),')
+ cast_struct.writeln('\t\t.${cname} = (${field_styp}*)((char*)x + __offsetof_ptr(x, ${cctype}, ${cname})),')
} else if st_sym.kind == .array
&& field.name in ['element_size', 'data', 'offset', 'len', 'cap', 'flags'] {
// Manaully checking, we already knows array contains above fields
- cast_struct.writeln('\t\t.$cname = ($field_styp*)((char*)x + __offsetof_ptr(x, $cctype, $cname)),')
+ cast_struct.writeln('\t\t.${cname} = (${field_styp}*)((char*)x + __offsetof_ptr(x, ${cctype}, ${cname})),')
} else {
// the field is embedded in another struct
- cast_struct.write_string('\t\t.$cname = ($field_styp*)((char*)x')
+ cast_struct.write_string('\t\t.${cname} = (${field_styp}*)((char*)x')
if st == ast.voidptr_type || st == ast.nil_type {
cast_struct.write_string('/*.... ast.voidptr_type */')
} else {
@@ -6018,7 +6018,7 @@ fn (mut g Gen) interface_table() string {
for embed_type in st_sym.struct_info().embeds {
embed_sym := g.table.sym(embed_type)
if _ := embed_sym.find_field(field.name) {
- cast_struct.write_string(' + __offsetof_ptr(x, $cctype, $embed_sym.embed_name()) + __offsetof_ptr(x, $embed_sym.cname, $cname)')
+ cast_struct.write_string(' + __offsetof_ptr(x, ${cctype}, ${embed_sym.embed_name()}) + __offsetof_ptr(x, ${embed_sym.cname}, ${cname})')
break
}
}
@@ -6031,27 +6031,27 @@ fn (mut g Gen) interface_table() string {
cast_struct_str := cast_struct.str()
cast_functions.writeln('
-// Casting functions for converting "$cctype" to interface "$interface_name"
-static inline $interface_name I_${cctype}_to_Interface_${interface_name}($cctype* x) {
- return $cast_struct_str;
+// Casting functions for converting "${cctype}" to interface "${interface_name}"
+static inline ${interface_name} I_${cctype}_to_Interface_${interface_name}(${cctype}* x) {
+ return ${cast_struct_str};
}')
- shared_fn_name := 'I___shared__${cctype}_to_shared_Interface___shared__$interface_name'
+ shared_fn_name := 'I___shared__${cctype}_to_shared_Interface___shared__${interface_name}'
// Avoid undefined types errors by only generating the converters that are referenced:
if g.has_been_referenced(shared_fn_name) {
mut cast_shared_struct := strings.new_builder(100)
- cast_shared_struct.writeln('(__shared__$interface_name) {')
+ cast_shared_struct.writeln('(__shared__${interface_name}) {')
cast_shared_struct.writeln('\t\t.mtx = {0},')
cast_shared_struct.writeln('\t\t.val = {')
- cast_shared_struct.writeln('\t\t\t._$cctype = &x->val,')
- cast_shared_struct.writeln('\t\t\t._typ = $interface_index_name,')
+ cast_shared_struct.writeln('\t\t\t._${cctype} = &x->val,')
+ cast_shared_struct.writeln('\t\t\t._typ = ${interface_index_name},')
cast_shared_struct.writeln('\t\t}')
cast_shared_struct.write_string('\t}')
cast_shared_struct_str := cast_shared_struct.str()
cast_functions.writeln('
-// Casting functions for converting "__shared__$cctype" to interface "__shared__$interface_name"
-static inline __shared__$interface_name ${shared_fn_name}(__shared__$cctype* x) {
- return $cast_shared_struct_str;
+// Casting functions for converting "__shared__${cctype}" to interface "__shared__${interface_name}"
+static inline __shared__${interface_name} ${shared_fn_name}(__shared__${cctype}* x) {
+ return ${cast_shared_struct_str};
}')
}
}
@@ -6113,12 +6113,12 @@ static inline __shared__$interface_name ${shared_fn_name}(__shared__$cctype* x)
}
}
styp := g.cc_type(method.params[0].typ, true)
- mut method_call := '${styp}_$name'
+ mut method_call := '${styp}_${name}'
if !method.params[0].typ.is_ptr() {
- method_call = '${cctype}_$name'
+ method_call = '${cctype}_${name}'
// inline void Cat_speak_Interface_Animal_method_wrapper(Cat c) { return Cat_speak(*c); }
iwpostfix := '_Interface_${interface_name}_method_wrapper'
- methods_wrapper.write_string('static inline ${g.typ(method.return_type)} ${cctype}_$name${iwpostfix}(')
+ methods_wrapper.write_string('static inline ${g.typ(method.return_type)} ${cctype}_${name}${iwpostfix}(')
//
params_start_pos := g.out.len
mut params := method.params.clone()
@@ -6145,21 +6145,21 @@ static inline __shared__$interface_name ${shared_fn_name}(__shared__$cctype* x)
}
if embed_types.len > 0 && method.name !in method_names {
embed_sym := g.table.sym(embed_types.last())
- method_name := '${embed_sym.cname}_$method.name'
+ method_name := '${embed_sym.cname}_${method.name}'
methods_wrapper.write_string('${method_name}(${fargs[0]}')
for idx_embed, embed in embed_types {
esym := g.table.sym(embed)
if idx_embed == 0 || embed_types[idx_embed - 1].is_any_kind_of_pointer() {
- methods_wrapper.write_string('->$esym.embed_name()')
+ methods_wrapper.write_string('->${esym.embed_name()}')
} else {
- methods_wrapper.write_string('.$esym.embed_name()')
+ methods_wrapper.write_string('.${esym.embed_name()}')
}
}
if fargs.len > 1 {
methods_wrapper.write_string(', ')
}
args := fargs[1..].join(', ')
- methods_wrapper.writeln('$args);')
+ methods_wrapper.writeln('${args});')
} else {
if parameter_name.starts_with('__shared__') {
methods_wrapper.writeln('${method_call}(${fargs.join(', ')}->val);')
@@ -6173,7 +6173,7 @@ static inline __shared__$interface_name ${shared_fn_name}(__shared__$cctype* x)
}
if g.pref.build_mode != .build_module && st != ast.voidptr_type
&& st != ast.nil_type {
- methods_struct.writeln('\t\t._method_${c_name(method.name)} = (void*) $method_call,')
+ methods_struct.writeln('\t\t._method_${c_name(method.name)} = (void*) ${method_call},')
}
}
@@ -6191,14 +6191,14 @@ static inline __shared__$interface_name ${shared_fn_name}(__shared__$cctype* x)
}
iin_idx := already_generated_mwrappers[interface_index_name] - iinidx_minimum_base
if g.pref.build_mode != .build_module {
- sb.writeln('$g.static_modifier const int $interface_index_name = $iin_idx;')
+ sb.writeln('${g.static_modifier} const int ${interface_index_name} = ${iin_idx};')
} else {
- sb.writeln('extern const int $interface_index_name;')
+ sb.writeln('extern const int ${interface_index_name};')
}
}
for vtyp, variants in inter_info.conversions {
vsym := g.table.sym(vtyp)
- conversion_functions.write_string('static inline bool I_${interface_name}_is_I_${vsym.cname}($interface_name x) {\n\treturn ')
+ conversion_functions.write_string('static inline bool I_${interface_name}_is_I_${vsym.cname}(${interface_name} x) {\n\treturn ')
for i, variant in variants {
variant_sym := g.table.sym(variant)
if i > 0 {
@@ -6208,10 +6208,10 @@ static inline __shared__$interface_name ${shared_fn_name}(__shared__$cctype* x)
}
conversion_functions.writeln(';\n}')
- conversion_functions.writeln('static inline $vsym.cname I_${interface_name}_as_I_${vsym.cname}($interface_name x) {')
+ conversion_functions.writeln('static inline ${vsym.cname} I_${interface_name}_as_I_${vsym.cname}(${interface_name} x) {')
for variant in variants {
variant_sym := g.table.sym(variant)
- conversion_functions.writeln('\tif (x._typ == _${interface_name}_${variant_sym.cname}_index) return I_${variant_sym.cname}_to_Interface_${vsym.cname}(x._$variant_sym.cname);')
+ conversion_functions.writeln('\tif (x._typ == _${interface_name}_${variant_sym.cname}_index) return I_${variant_sym.cname}_to_Interface_${vsym.cname}(x._${variant_sym.cname});')
}
pmessage := 'string__plus(string__plus(tos3("`as_cast`: cannot convert "), tos3(v_typeof_interface_${interface_name}(x._typ))), tos3(" to ${util.strip_main_name(vsym.name)}"))'
if g.pref.is_debug {
@@ -6224,10 +6224,10 @@ static inline __shared__$interface_name ${shared_fn_name}(__shared__$cctype* x)
conversion_functions.write_string(pmessage)
conversion_functions.writeln(');')
}
- conversion_functions.writeln('\treturn ($vsym.cname){0};')
+ conversion_functions.writeln('\treturn (${vsym.cname}){0};')
conversion_functions.writeln('}')
}
- sb.writeln('// ^^^ number of types for interface $interface_name: ${current_iinidx - iinidx_minimum_base}')
+ sb.writeln('// ^^^ number of types for interface ${interface_name}: ${current_iinidx - iinidx_minimum_base}')
if iname_table_length == 0 {
methods_struct.writeln('')
} else {
@@ -6263,14 +6263,14 @@ pub fn get_guarded_include_text(iname string, imessage string) string {
res := '
|#if defined(__has_include)
|
- |#if __has_include($iname)
- |#include $iname
+ |#if __has_include(${iname})
+ |#include ${iname}
|#else
- |#error VERROR_MESSAGE $imessage
+ |#error VERROR_MESSAGE ${imessage}
|#endif
|
|#else
- |#include $iname
+ |#include ${iname}
|#endif
'.strip_margin()
return res
@@ -6278,7 +6278,7 @@ pub fn get_guarded_include_text(iname string, imessage string) string {
fn (mut g Gen) trace(fbase string, message string) {
if g.file.path_base == fbase {
- println('> g.trace | ${fbase:-10s} | $message')
+ println('> g.trace | ${fbase:-10s} | ${message}')
}
}
diff --git a/vlib/v/gen/c/cmain.v b/vlib/v/gen/c/cmain.v
index 992bcd4059..f8bfc21bc3 100644
--- a/vlib/v/gen/c/cmain.v
+++ b/vlib/v/gen/c/cmain.v
@@ -45,7 +45,7 @@ fn (mut g Gen) gen_vlines_reset() {
g.vlines_path = util.vlines_escape_path(g.pref.out_name_c, g.pref.ccompiler)
g.writeln('')
g.writeln('\n// Reset the file/line numbers')
- g.writeln('\n#line $lines_so_far "$g.vlines_path"')
+ g.writeln('\n#line ${lines_so_far} "${g.vlines_path}"')
g.writeln('')
}
}
@@ -200,7 +200,7 @@ pub fn (mut g Gen) gen_failing_error_propagation_for_test_fn(or_block ast.OrExpr
// and the test is considered failed
paline, pafile, pamod, pafn := g.panic_debug_info(or_block.pos)
err_msg := 'IError_name_table[${cvar_name}.err._typ]._method_msg(${cvar_name}.err._object)'
- g.writeln('\tmain__TestRunner_name_table[test_runner._typ]._method_fn_error(test_runner._object, $paline, tos3("$pafile"), tos3("$pamod"), tos3("$pafn"), $err_msg );')
+ g.writeln('\tmain__TestRunner_name_table[test_runner._typ]._method_fn_error(test_runner._object, ${paline}, tos3("${pafile}"), tos3("${pamod}"), tos3("${pafn}"), ${err_msg} );')
g.writeln('\tlongjmp(g_jump_buffer, 1);')
}
@@ -210,7 +210,7 @@ pub fn (mut g Gen) gen_failing_return_error_for_test_fn(return_stmt ast.Return,
// and the test is considered failed
paline, pafile, pamod, pafn := g.panic_debug_info(return_stmt.pos)
err_msg := 'IError_name_table[${cvar_name}.err._typ]._method_msg(${cvar_name}.err._object)'
- g.writeln('\tmain__TestRunner_name_table[test_runner._typ]._method_fn_error(test_runner._object, $paline, tos3("$pafile"), tos3("$pamod"), tos3("$pafn"), $err_msg );')
+ g.writeln('\tmain__TestRunner_name_table[test_runner._typ]._method_fn_error(test_runner._object, ${paline}, tos3("${pafile}"), tos3("${pamod}"), tos3("${pafn}"), ${err_msg} );')
g.writeln('\tlongjmp(g_jump_buffer, 1);')
}
@@ -256,30 +256,30 @@ pub fn (mut g Gen) gen_c_main_for_tests() {
all_tfuncs = g.filter_only_matching_fn_names(all_tfuncs)
g.writeln('\tstring v_test_file = ${ctoslit(g.pref.path)};')
if g.pref.is_stats {
- g.writeln('\tmain__BenchedTests bt = main__start_testing($all_tfuncs.len, v_test_file);')
+ g.writeln('\tmain__BenchedTests bt = main__start_testing(${all_tfuncs.len}, v_test_file);')
}
g.writeln('')
g.writeln('\tstruct _main__TestRunner_interface_methods _vtrunner = main__TestRunner_name_table[test_runner._typ];')
g.writeln('\tvoid * _vtobj = test_runner._object;')
g.writeln('')
g.writeln('\tmain__VTestFileMetaInfo_free(test_runner.file_test_info);')
- g.writeln('\t*(test_runner.file_test_info) = main__vtest_new_filemetainfo(v_test_file, $all_tfuncs.len);')
- g.writeln('\t_vtrunner._method_start(_vtobj, $all_tfuncs.len);')
+ g.writeln('\t*(test_runner.file_test_info) = main__vtest_new_filemetainfo(v_test_file, ${all_tfuncs.len});')
+ g.writeln('\t_vtrunner._method_start(_vtobj, ${all_tfuncs.len});')
g.writeln('')
for tnumber, tname in all_tfuncs {
tcname := util.no_dots(tname)
testfn := g.table.fns[tname]
lnum := testfn.pos.line_nr + 1
g.writeln('\tmain__VTestFnMetaInfo_free(test_runner.fn_test_info);')
- g.writeln('\tstring tcname_$tnumber = _SLIT("$tcname");')
- g.writeln('\tstring tcmod_$tnumber = _SLIT("$testfn.mod");')
- g.writeln('\tstring tcfile_$tnumber = ${ctoslit(testfn.file)};')
- g.writeln('\t*(test_runner.fn_test_info) = main__vtest_new_metainfo(tcname_$tnumber, tcmod_$tnumber, tcfile_$tnumber, $lnum);')
+ g.writeln('\tstring tcname_${tnumber} = _SLIT("${tcname}");')
+ g.writeln('\tstring tcmod_${tnumber} = _SLIT("${testfn.mod}");')
+ g.writeln('\tstring tcfile_${tnumber} = ${ctoslit(testfn.file)};')
+ g.writeln('\t*(test_runner.fn_test_info) = main__vtest_new_metainfo(tcname_${tnumber}, tcmod_${tnumber}, tcfile_${tnumber}, ${lnum});')
g.writeln('\t_vtrunner._method_fn_start(_vtobj);')
g.writeln('\tif (!setjmp(g_jump_buffer)) {')
//
if g.pref.is_stats {
- g.writeln('\t\tmain__BenchedTests_testing_step_start(&bt, tcname_$tnumber);')
+ g.writeln('\t\tmain__BenchedTests_testing_step_start(&bt, tcname_${tnumber});')
}
g.writeln('\t\t${tcname}();')
g.writeln('\t\t_vtrunner._method_fn_pass(_vtobj);')
diff --git a/vlib/v/gen/c/comptime.v b/vlib/v/gen/c/comptime.v
index f6d874d947..5462b514d1 100644
--- a/vlib/v/gen/c/comptime.v
+++ b/vlib/v/gen/c/comptime.v
@@ -23,7 +23,7 @@ fn (mut g Gen) comptime_selector(node ast.ComptimeSelector) {
field_name := g.comptime_for_field_value.name
left_sym := g.table.sym(g.unwrap_generic(node.left_type))
_ := g.table.find_field_with_embeds(left_sym, field_name) or {
- g.error('`$node.left` has no field named `$field_name`', node.left.pos())
+ g.error('`${node.left}` has no field named `${field_name}`', node.left.pos())
}
g.write(c_name(field_name))
return
@@ -42,7 +42,7 @@ fn (mut g Gen) comptime_call(mut node ast.ComptimeCall) {
if node.method_name == 'env' {
// $env('ENV_VAR_NAME')
val := util.cescaped_path(os.getenv(node.args_var))
- g.write('_SLIT("$val")')
+ g.write('_SLIT("${val}")')
return
}
if node.is_vweb {
@@ -71,19 +71,19 @@ fn (mut g Gen) comptime_call(mut node ast.ComptimeCall) {
if is_html {
// return vweb html template
app_name := g.fn_decl.params[0].name
- g.writeln('vweb__Context_html(&$app_name->Context, _tmpl_res_$fn_name); strings__Builder_free(&sb_$fn_name); string_free(&_tmpl_res_$fn_name);')
+ g.writeln('vweb__Context_html(&${app_name}->Context, _tmpl_res_${fn_name}); strings__Builder_free(&sb_${fn_name}); string_free(&_tmpl_res_${fn_name});')
} else {
// return $tmpl string
g.write(cur_line)
if g.inside_return_tmpl {
g.write('return ')
}
- g.write('_tmpl_res_$fn_name')
+ g.write('_tmpl_res_${fn_name}')
}
return
}
sym := g.table.sym(g.unwrap_generic(node.left_type))
- g.trace_autofree('// \$method call. sym="$sym.name"')
+ g.trace_autofree('// \$method call. sym="${sym.name}"')
if node.method_name == 'method' {
// `app.$method()`
m := sym.find_method(g.comptime_for_method) or { return }
@@ -109,11 +109,11 @@ fn (mut g Gen) comptime_call(mut node ast.ComptimeCall) {
// check argument length and types
if m.params.len - 1 != node.args.len && !expand_strs {
if g.inside_call {
- g.error('expected ${m.params.len - 1} arguments to method ${sym.name}.$m.name, but got $node.args.len',
+ g.error('expected ${m.params.len - 1} arguments to method ${sym.name}.${m.name}, but got ${node.args.len}',
node.pos)
} else {
// do not generate anything if the argument lengths don't match
- g.writeln('/* skipping ${sym.name}.$m.name due to mismatched arguments list */')
+ g.writeln('/* skipping ${sym.name}.${m.name} due to mismatched arguments list */')
// g.writeln('println(_SLIT("skipping ${node.sym.name}.$m.name due to mismatched arguments list"));')
// eprintln('info: skipping ${node.sym.name}.$m.name due to mismatched arguments list\n' +
//'method.params: $m.params, args: $node.args\n\n')
@@ -155,9 +155,9 @@ fn (mut g Gen) comptime_call(mut node ast.ComptimeCall) {
if m.params[i].typ.is_int() || m.params[i].typ.idx() == ast.bool_type_idx {
// Gets the type name and cast the string to the type with the string_ function
type_name := g.table.type_symbols[int(m.params[i].typ)].str()
- g.write('string_${type_name}(((string*)${last_arg}.data) [$idx])')
+ g.write('string_${type_name}(((string*)${last_arg}.data) [${idx}])')
} else {
- g.write('((string*)${last_arg}.data) [$idx] ')
+ g.write('((string*)${last_arg}.data) [${idx}] ')
}
if i < m.params.len - 1 {
g.write(', ')
@@ -185,9 +185,9 @@ fn (mut g Gen) comptime_call(mut node ast.ComptimeCall) {
if j > 0 {
g.write(' else ')
}
- g.write('if (string__eq($node.method_name, _SLIT("$method.name"))) ')
+ g.write('if (string__eq(${node.method_name}, _SLIT("${method.name}"))) ')
}
- g.write('${util.no_dots(sym.name)}_${method.name}($amp ')
+ g.write('${util.no_dots(sym.name)}_${method.name}(${amp} ')
g.expr(node.left)
g.writeln(');')
j++
@@ -200,12 +200,12 @@ fn cgen_attrs(attrs []ast.Attr) []string {
// we currently don't quote 'arg' (otherwise we could just use `s := attr.str()`)
mut s := attr.name
if attr.arg.len > 0 {
- s += ': $attr.arg'
+ s += ': ${attr.arg}'
}
if attr.kind == .string {
s = escape_quotes(s)
}
- res << '_SLIT("$s")'
+ res << '_SLIT("${s}")'
}
return res
}
@@ -213,10 +213,10 @@ fn cgen_attrs(attrs []ast.Attr) []string {
fn (mut g Gen) comptime_at(node ast.AtExpr) {
if node.kind == .vmod_file {
val := cescape_nonascii(util.smart_quote(node.val, false))
- g.write('_SLIT("$val")')
+ g.write('_SLIT("${val}")')
} else {
val := node.val.replace('\\', '\\\\')
- g.write('_SLIT("$val")')
+ g.write('_SLIT("${val}")')
}
}
@@ -282,17 +282,17 @@ fn (mut g Gen) comptime_if(node ast.IfExpr) {
styp := g.typ(node.typ)
if len > 1 {
g.indent++
- g.writeln('$styp $tmp_var;')
+ g.writeln('${styp} ${tmp_var};')
g.writeln('{')
g.stmts(branch.stmts[..len - 1])
- g.write('\t$tmp_var = ')
+ g.write('\t${tmp_var} = ')
g.stmt(last)
g.writeln(';')
g.writeln('}')
g.indent--
} else {
g.indent++
- g.write('$styp $tmp_var = ')
+ g.write('${styp} ${tmp_var} = ')
g.stmt(last)
g.writeln(';')
g.indent--
@@ -315,7 +315,7 @@ fn (mut g Gen) comptime_if(node ast.IfExpr) {
g.defer_ifdef = ''
g.writeln('#endif')
if node.is_expr {
- g.write('$line $tmp_var')
+ g.write('${line} ${tmp_var}')
}
}
@@ -342,14 +342,14 @@ fn (mut g Gen) comptime_if_cond(cond ast.Expr, pkg_exist bool) bool {
verror(err.msg())
return false
}
- g.write('defined($ifdef)')
+ g.write('defined(${ifdef})')
return true
}
ast.InfixExpr {
match cond.op {
.and, .logical_or {
l := g.comptime_if_cond(cond.left, pkg_exist)
- g.write(' $cond.op ')
+ g.write(' ${cond.op} ')
r := g.comptime_if_cond(cond.right, pkg_exist)
return if cond.op == .and { l && r } else { l || r }
}
@@ -413,7 +413,7 @@ fn (mut g Gen) comptime_if_cond(cond ast.Expr, pkg_exist bool) bool {
if left.gkind_field == .typ {
exp_type = g.unwrap_generic(left.name_type)
} else {
- name = '${left.expr}.$left.field_name'
+ name = '${left.expr}.${left.field_name}'
exp_type = g.comptime_var_type_map[name]
}
} else if left is ast.TypeNode {
@@ -422,10 +422,10 @@ fn (mut g Gen) comptime_if_cond(cond ast.Expr, pkg_exist bool) bool {
}
if cond.op == .key_is {
- g.write('$exp_type.idx() == $got_type.idx()')
+ g.write('${exp_type.idx()} == ${got_type.idx()}')
return exp_type == got_type
} else {
- g.write('$exp_type.idx() != $got_type.idx()')
+ g.write('${exp_type.idx()} != ${got_type.idx()}')
return exp_type != got_type
}
}
@@ -441,11 +441,11 @@ fn (mut g Gen) comptime_if_cond(cond ast.Expr, pkg_exist bool) bool {
}
ast.Ident {
ifdef := g.comptime_if_to_ifdef(cond.name, false) or { 'true' } // handled in checker
- g.write('defined($ifdef)')
+ g.write('defined(${ifdef})')
return true
}
ast.ComptimeCall {
- g.write('$pkg_exist')
+ g.write('${pkg_exist}')
return true
}
else {
@@ -458,7 +458,7 @@ fn (mut g Gen) comptime_if_cond(cond ast.Expr, pkg_exist bool) bool {
fn (mut g Gen) comptime_for(node ast.ComptimeFor) {
sym := g.table.sym(g.unwrap_generic(node.typ))
- g.writeln('/* \$for $node.val_var in ${sym.name}($node.kind.str()) */ {')
+ g.writeln('/* \$for ${node.val_var} in ${sym.name}(${node.kind.str()}) */ {')
g.indent++
// vweb_result_type := ast.new_type(g.table.find_type_idx('vweb.Result'))
mut i := 0
@@ -468,7 +468,7 @@ fn (mut g Gen) comptime_for(node ast.ComptimeFor) {
methods_with_attrs := sym.methods.filter(it.attrs.len > 0) // methods with attrs second
methods << methods_with_attrs
if methods.len > 0 {
- g.writeln('FunctionData $node.val_var = {0};')
+ g.writeln('FunctionData ${node.val_var} = {0};')
}
typ_vweb_result := g.table.find_type_idx('vweb.Result')
for method in methods {
@@ -485,14 +485,14 @@ fn (mut g Gen) comptime_for(node ast.ComptimeFor) {
}
}
g.comptime_for_method = method.name
- g.writeln('/* method $i */ {')
- g.writeln('\t${node.val_var}.name = _SLIT("$method.name");')
+ g.writeln('/* method ${i} */ {')
+ g.writeln('\t${node.val_var}.name = _SLIT("${method.name}");')
if method.attrs.len == 0 {
g.writeln('\t${node.val_var}.attrs = __new_array_with_default(0, 0, sizeof(string), 0);')
} else {
attrs := cgen_attrs(method.attrs)
g.writeln(
- '\t${node.val_var}.attrs = new_array_from_c_array($attrs.len, $attrs.len, sizeof(string), _MOV((string[$attrs.len]){' +
+ '\t${node.val_var}.attrs = new_array_from_c_array(${attrs.len}, ${attrs.len}, sizeof(string), _MOV((string[${attrs.len}]){' +
attrs.join(', ') + '}));\n')
}
if method.params.len < 2 {
@@ -500,15 +500,15 @@ fn (mut g Gen) comptime_for(node ast.ComptimeFor) {
g.writeln('\t${node.val_var}.args = __new_array_with_default(0, 0, sizeof(MethodArgs), 0);')
} else {
len := method.params.len - 1
- g.write('\t${node.val_var}.args = new_array_from_c_array($len, $len, sizeof(MethodArgs), _MOV((MethodArgs[$len]){')
+ g.write('\t${node.val_var}.args = new_array_from_c_array(${len}, ${len}, sizeof(MethodArgs), _MOV((MethodArgs[${len}]){')
// Skip receiver arg
for j, arg in method.params[1..] {
typ := arg.typ.idx()
- g.write('{$typ.str(), _SLIT("$arg.name")}')
+ g.write('{${typ.str()}, _SLIT("${arg.name}")}')
if j < len - 1 {
g.write(', ')
}
- g.comptime_var_type_map['${node.val_var}.args[$j].typ'] = typ
+ g.comptime_var_type_map['${node.val_var}.args[${j}].typ'] = typ
}
g.writeln('}));\n')
}
@@ -525,14 +525,14 @@ fn (mut g Gen) comptime_for(node ast.ComptimeFor) {
sig += ')'
ret_type := g.table.sym(method.return_type).name
if ret_type != 'void' {
- sig += ' $ret_type'
+ sig += ' ${ret_type}'
}
styp := g.table.find_type_idx(sig)
// TODO: type aliases
ret_typ := method.return_type.idx()
- g.writeln('\t${node.val_var}.typ = $styp;')
- g.writeln('\t${node.val_var}.return_type = $ret_typ;')
+ g.writeln('\t${node.val_var}.typ = ${styp};')
+ g.writeln('\t${node.val_var}.return_type = ${ret_typ};')
//
g.comptime_var_type_map['${node.val_var}.return_type'] = ret_typ
g.comptime_var_type_map['${node.val_var}.typ'] = styp
@@ -555,29 +555,29 @@ fn (mut g Gen) comptime_for(node ast.ComptimeFor) {
if sym.kind == .struct_ {
sym_info := sym.info as ast.Struct
if sym_info.fields.len > 0 {
- g.writeln('\tFieldData $node.val_var = {0};')
+ g.writeln('\tFieldData ${node.val_var} = {0};')
}
g.inside_comptime_for_field = true
for field in sym_info.fields {
g.comptime_for_field_var = node.val_var
g.comptime_for_field_value = field
g.comptime_for_field_type = field.typ
- g.writeln('/* field $i */ {')
- g.writeln('\t${node.val_var}.name = _SLIT("$field.name");')
+ g.writeln('/* field ${i} */ {')
+ g.writeln('\t${node.val_var}.name = _SLIT("${field.name}");')
if field.attrs.len == 0 {
g.writeln('\t${node.val_var}.attrs = __new_array_with_default(0, 0, sizeof(string), 0);')
} else {
attrs := cgen_attrs(field.attrs)
g.writeln(
- '\t${node.val_var}.attrs = new_array_from_c_array($attrs.len, $attrs.len, sizeof(string), _MOV((string[$attrs.len]){' +
+ '\t${node.val_var}.attrs = new_array_from_c_array(${attrs.len}, ${attrs.len}, sizeof(string), _MOV((string[${attrs.len}]){' +
attrs.join(', ') + '}));\n')
}
// field_sym := g.table.sym(field.typ)
// g.writeln('\t${node.val_var}.typ = _SLIT("$field_sym.name");')
styp := field.typ
- g.writeln('\t${node.val_var}.typ = $styp.idx();')
- g.writeln('\t${node.val_var}.is_pub = $field.is_pub;')
- g.writeln('\t${node.val_var}.is_mut = $field.is_mut;')
+ g.writeln('\t${node.val_var}.typ = ${styp.idx()};')
+ g.writeln('\t${node.val_var}.is_pub = ${field.is_pub};')
+ g.writeln('\t${node.val_var}.is_mut = ${field.is_mut};')
g.writeln('\t${node.val_var}.is_shared = ${field.typ.has_flag(.shared_f)};')
g.comptime_var_type_map['${node.val_var}.typ'] = styp
g.stmts(node.stmts)
@@ -591,14 +591,14 @@ fn (mut g Gen) comptime_for(node ast.ComptimeFor) {
} else if node.kind == .attributes {
if sym.info is ast.Struct {
if sym.info.attrs.len > 0 {
- g.writeln('\tStructAttribute $node.val_var = {0};')
+ g.writeln('\tStructAttribute ${node.val_var} = {0};')
}
for attr in sym.info.attrs {
- g.writeln('/* attribute $i */ {')
- g.writeln('\t${node.val_var}.name = _SLIT("$attr.name");')
- g.writeln('\t${node.val_var}.has_arg = $attr.has_arg;')
- g.writeln('\t${node.val_var}.arg = _SLIT("$attr.arg");')
- g.writeln('\t${node.val_var}.kind = AttributeKind__$attr.kind;')
+ g.writeln('/* attribute ${i} */ {')
+ g.writeln('\t${node.val_var}.name = _SLIT("${attr.name}");')
+ g.writeln('\t${node.val_var}.has_arg = ${attr.has_arg};')
+ g.writeln('\t${node.val_var}.arg = _SLIT("${attr.arg}");')
+ g.writeln('\t${node.val_var}.kind = AttributeKind__${attr.kind};')
g.stmts(node.stmts)
g.writeln('}')
}
@@ -759,9 +759,9 @@ fn (mut g Gen) comptime_if_to_ifdef(name string, is_comptime_optional bool) ?str
else {
if is_comptime_optional
|| (g.pref.compile_defines_all.len > 0 && name in g.pref.compile_defines_all) {
- return 'CUSTOM_DEFINE_$name'
+ return 'CUSTOM_DEFINE_${name}'
}
- return error('bad os ifdef name "$name"') // should never happen, caught in the checker
+ return error('bad os ifdef name "${name}"') // should never happen, caught in the checker
}
}
return none
diff --git a/vlib/v/gen/c/coutput_test.v b/vlib/v/gen/c/coutput_test.v
index e3a6f47d2c..cf24ef0995 100644
--- a/vlib/v/gen/c/coutput_test.v
+++ b/vlib/v/gen/c/coutput_test.v
@@ -29,7 +29,7 @@ fn test_out_files() {
files := os.ls(testdata_folder) or { [] }
tests := files.filter(it.ends_with('.out'))
if tests.len == 0 {
- eprintln('no `.out` tests found in $testdata_folder')
+ eprintln('no `.out` tests found in ${testdata_folder}')
return
}
paths := vtest.filter_vtest_only(tests, basepath: testdata_folder)
@@ -39,10 +39,10 @@ fn test_out_files() {
pexe := os.join_path(output_path, '${basename}.exe')
//
file_options := get_file_options(path)
- alloptions := '-o ${os.quoted_path(pexe)} $file_options.vflags'
- print(mm('v $alloptions run $relpath') + ' == ${mm(out_relpath)} ')
+ alloptions := '-o ${os.quoted_path(pexe)} ${file_options.vflags}'
+ print(mm('v ${alloptions} run ${relpath}') + ' == ${mm(out_relpath)} ')
//
- compilation := os.execute('${os.quoted_path(vexe)} $alloptions ${os.quoted_path(path)}')
+ compilation := os.execute('${os.quoted_path(vexe)} ${alloptions} ${os.quoted_path(path)}')
ensure_compilation_succeeded(compilation)
res := os.execute(os.quoted_path(pexe))
if res.exit_code < 0 {
@@ -100,7 +100,7 @@ fn test_c_must_have_files() {
files := os.ls(testdata_folder) or { [] }
tests := files.filter(it.ends_with('.c.must_have'))
if tests.len == 0 {
- eprintln('no `.c.must_have` files found in $testdata_folder')
+ eprintln('no `.c.must_have` files found in ${testdata_folder}')
return
}
paths := vtest.filter_vtest_only(tests, basepath: testdata_folder)
@@ -109,11 +109,11 @@ fn test_c_must_have_files() {
for must_have_path in paths {
basename, path, relpath, must_have_relpath := target2paths(must_have_path, '.c.must_have')
file_options := get_file_options(path)
- alloptions := '-o - $file_options.vflags'
- description := mm('v $alloptions $relpath') +
+ alloptions := '-o - ${file_options.vflags}'
+ description := mm('v ${alloptions} ${relpath}') +
' matches all line patterns in ${mm(must_have_relpath)} '
print(description)
- cmd := '${os.quoted_path(vexe)} $alloptions ${os.quoted_path(path)}'
+ cmd := '${os.quoted_path(vexe)} ${alloptions} ${os.quoted_path(path)}'
compilation := os.execute(cmd)
ensure_compilation_succeeded(compilation)
expected_lines := os.read_lines(must_have_path) or { [] }
@@ -127,8 +127,8 @@ fn test_c_must_have_files() {
} else {
failed_patterns << eline
println(term.red('FAIL'))
- eprintln('$must_have_path:${idx_expected_line + 1}: expected match error:')
- eprintln('`$cmd` did NOT produce expected line:')
+ eprintln('${must_have_path}:${idx_expected_line + 1}: expected match error:')
+ eprintln('`${cmd}` did NOT produce expected line:')
eprintln(term.colorize(term.red, eline))
if description !in failed_descriptions {
failed_descriptions << description
@@ -154,7 +154,7 @@ fn test_c_must_have_files() {
if failed_descriptions.len > 0 {
eprintln('--------- failed commands: -------------------------------------------')
for fd in failed_descriptions {
- eprintln(' > $fd')
+ eprintln(' > ${fd}')
}
eprintln('----------------------------------------------------------------------')
}
@@ -193,7 +193,7 @@ fn ensure_compilation_succeeded(compilation os.Result) {
panic(compilation.output)
}
if compilation.exit_code != 0 {
- panic('compilation failed: $compilation.output')
+ panic('compilation failed: ${compilation.output}')
}
}
diff --git a/vlib/v/gen/c/ctempvars.v b/vlib/v/gen/c/ctempvars.v
index f0484ed45a..b19b8cac5d 100644
--- a/vlib/v/gen/c/ctempvars.v
+++ b/vlib/v/gen/c/ctempvars.v
@@ -19,7 +19,7 @@ fn (mut g Gen) new_ctemp_var_then_gen(expr ast.Expr, expr_type ast.Type) ast.CTe
fn (mut g Gen) gen_ctemp_var(tvar ast.CTempVar) {
styp := g.typ(tvar.typ)
- g.write('$styp $tvar.name = ')
+ g.write('${styp} ${tvar.name} = ')
g.expr(tvar.orig)
g.writeln(';')
}
diff --git a/vlib/v/gen/c/dumpexpr.v b/vlib/v/gen/c/dumpexpr.v
index 3ee76e5ba4..96044445d9 100644
--- a/vlib/v/gen/c/dumpexpr.v
+++ b/vlib/v/gen/c/dumpexpr.v
@@ -16,9 +16,9 @@ fn (mut g Gen) dump_expr(node ast.DumpExpr) {
if g.table.sym(node.expr_type).language == .c {
name = name[3..]
}
- dump_fn_name := '_v_dump_expr_$name' +
+ dump_fn_name := '_v_dump_expr_${name}' +
(if node.expr_type.is_ptr() { '_ptr'.repeat(node.expr_type.nr_muls()) } else { '' })
- g.write(' ${dump_fn_name}(${ctoslit(fpath)}, $line, $sexpr, ')
+ g.write(' ${dump_fn_name}(${ctoslit(fpath)}, ${line}, ${sexpr}, ')
if node.expr_type.has_flag(.shared_f) {
g.write('&')
g.expr(node.expr)
@@ -53,17 +53,17 @@ fn (mut g Gen) dump_expr_definitions() {
}
if dump_sym.kind == .function {
fninfo := dump_sym.info as ast.FnType
- str_dumparg_type = 'DumpFNType_$name'
+ str_dumparg_type = 'DumpFNType_${name}'
tdef_pos := g.out.len
g.write_fn_ptr_decl(&fninfo, str_dumparg_type)
str_tdef := g.out.after(tdef_pos)
g.go_back(str_tdef.len)
- dump_typedefs['typedef $str_tdef;'] = true
+ dump_typedefs['typedef ${str_tdef};'] = true
}
- dump_fn_name := '_v_dump_expr_$name' +
+ dump_fn_name := '_v_dump_expr_${name}' +
(if is_ptr { '_ptr'.repeat(typ.nr_muls()) } else { '' })
- dump_fn_defs.writeln('$str_dumparg_type ${dump_fn_name}(string fpath, int line, string sexpr, $str_dumparg_type dump_arg);')
- if g.writeln_fn_header('$str_dumparg_type ${dump_fn_name}(string fpath, int line, string sexpr, $str_dumparg_type dump_arg)', mut
+ dump_fn_defs.writeln('${str_dumparg_type} ${dump_fn_name}(string fpath, int line, string sexpr, ${str_dumparg_type} dump_arg);')
+ if g.writeln_fn_header('${str_dumparg_type} ${dump_fn_name}(string fpath, int line, string sexpr, ${str_dumparg_type} dump_arg)', mut
dump_fns)
{
continue
@@ -117,9 +117,9 @@ fn (mut g Gen) dump_expr_definitions() {
fn (mut g Gen) writeln_fn_header(s string, mut sb strings.Builder) bool {
if g.pref.build_mode == .build_module {
- sb.writeln('$s;')
+ sb.writeln('${s};')
return true
}
- sb.writeln('$s {')
+ sb.writeln('${s} {')
return false
}
diff --git a/vlib/v/gen/c/embed.v b/vlib/v/gen/c/embed.v
index d264367992..f0205257c7 100644
--- a/vlib/v/gen/c/embed.v
+++ b/vlib/v/gen/c/embed.v
@@ -24,11 +24,11 @@ fn (mut g Gen) handle_embedded_files_finish() {
// gen_embed_file_struct generates C code for `$embed_file('...')` calls.
fn (mut g Gen) gen_embed_file_init(mut node ast.ComptimeCall) {
$if trace_embed_file ? {
- eprintln('> gen_embed_file_init $node.embed_file.apath')
+ eprintln('> gen_embed_file_init ${node.embed_file.apath}')
}
if g.should_really_embed_file() {
file_bytes := os.read_bytes(node.embed_file.apath) or {
- panic('unable to read file: "$node.embed_file.rpath')
+ panic('unable to read file: "${node.embed_file.rpath}"')
}
if node.embed_file.compression_type == 'none' {
@@ -43,13 +43,13 @@ fn (mut g Gen) gen_embed_file_init(mut node ast.ComptimeCall) {
cache_path := os.join_path(cache_dir, cache_key)
vexe := pref.vexe_path()
- compress_cmd := '${os.quoted_path(vexe)} compress $node.embed_file.compression_type ${os.quoted_path(node.embed_file.apath)} ${os.quoted_path(cache_path)}'
+ compress_cmd := '${os.quoted_path(vexe)} compress ${node.embed_file.compression_type} ${os.quoted_path(node.embed_file.apath)} ${os.quoted_path(cache_path)}'
$if trace_embed_file ? {
- eprintln('> gen_embed_file_init, compress_cmd: $compress_cmd')
+ eprintln('> gen_embed_file_init, compress_cmd: ${compress_cmd}')
}
result := os.execute(compress_cmd)
if result.exit_code != 0 {
- eprintln('unable to compress file "$node.embed_file.rpath": $result.output')
+ eprintln('unable to compress file "${node.embed_file.rpath}": ${result.output}')
node.embed_file.bytes = file_bytes
} else {
compressed_bytes := os.read_bytes(cache_path) or {
@@ -77,7 +77,7 @@ fn (mut g Gen) gen_embed_file_init(mut node ast.ComptimeCall) {
g.write('_v_embed_file_metadata( ${ef_idx}U )')
g.file.embedded_files << node.embed_file
$if trace_embed_file ? {
- eprintln('> gen_embed_file_init => _v_embed_file_metadata(${ef_idx:-25}) | ${node.embed_file.apath:-50} | compression: $node.embed_file.compression_type | len: $node.embed_file.len')
+ eprintln('> gen_embed_file_init => _v_embed_file_metadata(${ef_idx:-25}) | ${node.embed_file.apath:-50} | compression: ${node.embed_file.compression_type} | len: ${node.embed_file.len}')
}
}
@@ -114,16 +114,16 @@ fn (mut g Gen) gen_embedded_metadata() {
g.embedded_data.writeln('\t\t\tres.free_compressed = 0;')
g.embedded_data.writeln('\t\t\tres.free_uncompressed = 0;')
if g.should_really_embed_file() {
- g.embedded_data.writeln('\t\t\tres.len = $emfile.len;')
+ g.embedded_data.writeln('\t\t\tres.len = ${emfile.len};')
} else {
file_size := os.file_size(emfile.apath)
if file_size > 5242880 {
eprintln('Warning: embedding of files >= ~5MB is currently not supported')
}
- g.embedded_data.writeln('\t\t\tres.len = $file_size;')
+ g.embedded_data.writeln('\t\t\tres.len = ${file_size};')
}
g.embedded_data.writeln('\t\t\tbreak;')
- g.embedded_data.writeln('\t\t} // case $ef_idx')
+ g.embedded_data.writeln('\t\t} // case ${ef_idx}')
}
g.embedded_data.writeln('\t\tdefault: _v_panic(_SLIT("unknown embed file"));')
g.embedded_data.writeln('\t} // switch')
@@ -140,13 +140,13 @@ fn (mut g Gen) gen_embedded_data() {
// like the `rcc` tool in Qt?
*/
for i, emfile in g.embedded_files {
- g.embedded_data.write_string('static const unsigned char _v_embed_blob_$i[$emfile.bytes.len] = {\n ')
+ g.embedded_data.write_string('static const unsigned char _v_embed_blob_${i}[${emfile.bytes.len}] = {\n ')
for j := 0; j < emfile.bytes.len; j++ {
b := emfile.bytes[j].hex()
if j < emfile.bytes.len - 1 {
- g.embedded_data.write_string('0x$b,')
+ g.embedded_data.write_string('0x${b},')
} else {
- g.embedded_data.write_string('0x$b')
+ g.embedded_data.write_string('0x${b}')
}
if 0 == ((j + 1) % 16) {
g.embedded_data.write_string('\n ')
@@ -157,7 +157,7 @@ fn (mut g Gen) gen_embedded_data() {
g.embedded_data.writeln('')
g.embedded_data.writeln('const v__embed_file__EmbedFileIndexEntry _v_embed_file_index[] = {')
for i, emfile in g.embedded_files {
- g.embedded_data.writeln('\t{$i, { .str=(byteptr)("${cestring(emfile.rpath)}"), .len=$emfile.rpath.len, .is_lit=1 }, { .str=(byteptr)("${cestring(emfile.compression_type)}"), .len=$emfile.compression_type.len, .is_lit=1 }, (byteptr)_v_embed_blob_$i},')
+ g.embedded_data.writeln('\t{${i}, { .str=(byteptr)("${cestring(emfile.rpath)}"), .len=${emfile.rpath.len}, .is_lit=1 }, { .str=(byteptr)("${cestring(emfile.compression_type)}"), .len=${emfile.compression_type.len}, .is_lit=1 }, (byteptr)_v_embed_blob_${i}},')
}
g.embedded_data.writeln('\t{-1, { .str=(byteptr)(""), .len=0, .is_lit=1 }, { .str=(byteptr)(""), .len=0, .is_lit=1 }, NULL}')
g.embedded_data.writeln('};')
diff --git a/vlib/v/gen/c/fn.v b/vlib/v/gen/c/fn.v
index 038920e0fe..e3b05602ae 100644
--- a/vlib/v/gen/c/fn.v
+++ b/vlib/v/gen/c/fn.v
@@ -13,16 +13,16 @@ fn (mut g Gen) is_used_by_main(node ast.FnDecl) bool {
fkey := node.fkey()
is_used_by_main = g.table.used_fns[fkey]
$if trace_skip_unused_fns ? {
- println('> is_used_by_main: $is_used_by_main | node.name: $node.name | fkey: $fkey | node.is_method: $node.is_method')
+ println('> is_used_by_main: ${is_used_by_main} | node.name: ${node.name} | fkey: ${fkey} | node.is_method: ${node.is_method}')
}
if !is_used_by_main {
$if trace_skip_unused_fns_in_c_code ? {
- g.writeln('// trace_skip_unused_fns_in_c_code, $node.name, fkey: $fkey')
+ g.writeln('// trace_skip_unused_fns_in_c_code, ${node.name}, fkey: ${fkey}')
}
}
} else {
$if trace_skip_unused_fns_in_c_code ? {
- g.writeln('// trace_skip_unused_fns_in_c_code, $node.name, fkey: $node.fkey()')
+ g.writeln('// trace_skip_unused_fns_in_c_code, ${node.name}, fkey: ${node.fkey()}')
}
}
return is_used_by_main
@@ -37,7 +37,7 @@ fn (mut g Gen) fn_decl(node ast.FnDecl) {
}
if node.ninstances == 0 && node.generic_names.len > 0 {
$if trace_generics ? {
- eprintln('skipping generic fn with no concrete instances: $node.mod $node.name')
+ eprintln('skipping generic fn with no concrete instances: ${node.mod} ${node.name}')
}
return
}
@@ -82,7 +82,7 @@ fn (mut g Gen) fn_decl(node ast.FnDecl) {
skip = false
}
if !skip && g.pref.is_verbose {
- println('build module `$g.module_built` fn `$node.name`')
+ println('build module `${g.module_built}` fn `${node.name}`')
}
}
if g.pref.use_cache {
@@ -178,13 +178,13 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
nkey := node.fkey()
generic_types_by_fn := g.table.fn_generic_types[nkey]
$if trace_post_process_generic_fns ? {
- eprintln('>> gen_fn_decl, nkey: $nkey | generic_types_by_fn: $generic_types_by_fn')
+ eprintln('>> gen_fn_decl, nkey: ${nkey} | generic_types_by_fn: ${generic_types_by_fn}')
}
for concrete_types in generic_types_by_fn {
if g.pref.is_verbose {
syms := concrete_types.map(g.table.sym(it))
the_type := syms.map(it.name).join(', ')
- println('gen fn `$node.name` for type `$the_type`')
+ println('gen fn `${node.name}` for type `${the_type}`')
}
g.cur_concrete_types = concrete_types
g.gen_fn_decl(node, skip)
@@ -219,7 +219,7 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
is_livemode := g.pref.is_livemain || g.pref.is_liveshared
is_live_wrap := is_livefn && is_livemode
if is_livefn && !is_livemode {
- eprintln('INFO: compile with `v -live $g.pref.path `, if you want to use the [live] function $node.name .')
+ eprintln('INFO: compile with `v -live ${g.pref.path} `, if you want to use the [live] function ${node.name} .')
}
mut name := g.c_fn_name(node) or { return }
@@ -232,9 +232,9 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
sym := g.table.sym(node.receiver.typ)
key = sym.name + '.' + node.name
}
- g.writeln('/* obf: $key */')
+ g.writeln('/* obf: ${key} */')
name = g.obf_table[key] or {
- panic('cgen: fn_decl: obf name "$key" not found, this should never happen')
+ panic('cgen: fn_decl: obf name "${key}" not found, this should never happen')
}
}
// Live functions are protected by a mutex, because otherwise they
@@ -249,7 +249,7 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
}
mut impl_fn_name := name
if is_live_wrap {
- impl_fn_name = 'impl_live_$name'
+ impl_fn_name = 'impl_live_${name}'
}
last_fn_c_name_save := g.last_fn_c_name
defer {
@@ -259,12 +259,12 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
//
if is_live_wrap {
if is_livemain {
- g.definitions.write_string('$type_name (* $impl_fn_name)(')
- g.write('$type_name no_impl_${name}(')
+ g.definitions.write_string('${type_name} (* ${impl_fn_name})(')
+ g.write('${type_name} no_impl_${name}(')
}
if is_liveshared {
- g.definitions.write_string('$type_name ${impl_fn_name}(')
- g.write('$type_name ${impl_fn_name}(')
+ g.definitions.write_string('${type_name} ${impl_fn_name}(')
+ g.write('${type_name} ${impl_fn_name}(')
}
} else {
if !(node.is_pub || g.pref.is_debug) {
@@ -286,7 +286,7 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
} else {
''
}
- fn_header := '$visibility_kw$type_name $fn_attrs${name}('
+ fn_header := '${visibility_kw}${type_name} ${fn_attrs}${name}('
g.definitions.write_string(fn_header)
g.write(fn_header)
}
@@ -309,7 +309,7 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
g.definitions.writeln(');')
g.writeln(') {')
if is_closure {
- g.writeln('$cur_closure_ctx* $c.closure_ctx = __CLOSURE_GET_DATA();')
+ g.writeln('${cur_closure_ctx}* ${c.closure_ctx} = __CLOSURE_GET_DATA();')
}
for i, is_promoted in heap_promoted {
if is_promoted {
@@ -334,7 +334,7 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
}
info := var.obj as ast.Var
if g.table.sym(info.typ).kind != .function {
- g.writeln('${g.typ(info.typ)}$deref ${c_name(var.name)};')
+ g.writeln('${g.typ(info.typ)}${deref} ${c_name(var.name)};')
}
}
}
@@ -349,20 +349,20 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
// an early exit, which will leave the mutex locked.
mut fn_args_list := []string{}
for ia, fa in fargs {
- fn_args_list << '${fargtypes[ia]} $fa'
+ fn_args_list << '${fargtypes[ia]} ${fa}'
}
mut live_fncall := '${impl_fn_name}(' + fargs.join(', ') + ');'
mut live_fnreturn := ''
if type_name != 'void' {
- live_fncall = '$type_name res = $live_fncall'
+ live_fncall = '${type_name} res = ${live_fncall}'
live_fnreturn = 'return res;'
}
- g.definitions.writeln('$type_name ${name}(' + fn_args_list.join(', ') + ');')
- g.hotcode_definitions.writeln('$type_name ${name}(' + fn_args_list.join(', ') + '){')
+ g.definitions.writeln('${type_name} ${name}(' + fn_args_list.join(', ') + ');')
+ g.hotcode_definitions.writeln('${type_name} ${name}(' + fn_args_list.join(', ') + '){')
g.hotcode_definitions.writeln(' pthread_mutex_lock(&live_fn_mutex);')
- g.hotcode_definitions.writeln(' $live_fncall')
+ g.hotcode_definitions.writeln(' ${live_fncall}')
g.hotcode_definitions.writeln(' pthread_mutex_unlock(&live_fn_mutex);')
- g.hotcode_definitions.writeln(' $live_fnreturn')
+ g.hotcode_definitions.writeln(' ${live_fnreturn}')
g.hotcode_definitions.writeln('}')
}
// Profiling mode? Start counting at the beginning of the function (save current time).
@@ -399,9 +399,9 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
default_expr := g.type_default(node.return_type)
// TODO: perf?
if default_expr == '{0}' {
- g.writeln('\treturn ($type_name)$default_expr;')
+ g.writeln('\treturn (${type_name})${default_expr};')
} else {
- g.writeln('\treturn $default_expr;')
+ g.writeln('\treturn ${default_expr};')
}
}
g.writeln('}')
@@ -411,10 +411,10 @@ fn (mut g Gen) gen_fn_decl(node &ast.FnDecl, skip bool) {
for attr in node.attrs {
if attr.name == 'export' {
weak := if node.attrs.any(it.name == 'weak') { 'VWEAK ' } else { '' }
- g.writeln('// export alias: $attr.arg -> $name')
- export_alias := '$weak$type_name $fn_attrs${attr.arg}($arg_str)'
- g.definitions.writeln('VV_EXPORTED_SYMBOL $export_alias; // exported fn $node.name')
- g.writeln('$export_alias {')
+ g.writeln('// export alias: ${attr.arg} -> ${name}')
+ export_alias := '${weak}${type_name} ${fn_attrs}${attr.arg}(${arg_str})'
+ g.definitions.writeln('VV_EXPORTED_SYMBOL ${export_alias}; // exported fn ${node.name}')
+ g.writeln('${export_alias} {')
g.write('\treturn ${name}(')
g.write(fargs.join(', '))
g.writeln(');')
@@ -479,7 +479,7 @@ fn (mut g Gen) gen_anon_fn(mut node ast.AnonFn) {
ctx_struct := closure_ctx(node.decl)
// it may be possible to optimize `memdup` out if the closure never leaves current scope
// TODO in case of an assignment, this should only call "__closure_set_data" and "__closure_set_function" (and free the former data)
- g.write('__closure_create($fn_name, ($ctx_struct*) memdup_uncollectable(&($ctx_struct){')
+ g.write('__closure_create(${fn_name}, (${ctx_struct}*) memdup_uncollectable(&(${ctx_struct}){')
g.indent++
for var in node.inherited_vars {
mut has_inherited := false
@@ -487,16 +487,16 @@ fn (mut g Gen) gen_anon_fn(mut node ast.AnonFn) {
if obj is ast.Var {
if obj.has_inherited {
has_inherited = true
- g.writeln('.$var.name = $c.closure_ctx->$var.name,')
+ g.writeln('.${var.name} = ${c.closure_ctx}->${var.name},')
}
}
}
if !has_inherited {
- g.writeln('.$var.name = $var.name,')
+ g.writeln('.${var.name} = ${var.name},')
}
}
g.indent--
- g.write('}, sizeof($ctx_struct)))')
+ g.write('}, sizeof(${ctx_struct})))')
g.empty_line = false
}
@@ -510,7 +510,7 @@ fn (mut g Gen) gen_anon_fn_decl(mut node ast.AnonFn) {
builder.writeln('/*F*/')
if node.inherited_vars.len > 0 {
ctx_struct := closure_ctx(node.decl)
- builder.writeln('$ctx_struct {')
+ builder.writeln('${ctx_struct} {')
for var in node.inherited_vars {
var_sym := g.table.sym(var.typ)
if var_sym.info is ast.FnType {
@@ -519,7 +519,7 @@ fn (mut g Gen) gen_anon_fn_decl(mut node ast.AnonFn) {
builder.writeln('\t' + sig + ';')
} else {
styp := g.typ(var.typ)
- builder.writeln('\t$styp $var.name;')
+ builder.writeln('\t${styp} ${var.name};')
}
}
builder.writeln('};\n')
@@ -534,7 +534,7 @@ fn (mut g Gen) gen_anon_fn_decl(mut node ast.AnonFn) {
}
fn (g &Gen) defer_flag_var(stmt &ast.DeferStmt) string {
- return '${g.last_fn_c_name}_defer_$stmt.idx_in_fn'
+ return '${g.last_fn_c_name}_defer_${stmt.idx_in_fn}'
}
fn (mut g Gen) write_defer_stmts_when_needed() {
@@ -574,8 +574,8 @@ fn (mut g Gen) fn_decl_params(params []ast.Param, scope &ast.Scope, is_variadic
if param_type_sym.kind == .function {
info := param_type_sym.info as ast.FnType
func := info.func
- g.write('${g.typ(func.return_type)} (*$caname)(')
- g.definitions.write_string('${g.typ(func.return_type)} (*$caname)(')
+ g.write('${g.typ(func.return_type)} (*${caname})(')
+ g.definitions.write_string('${g.typ(func.return_type)} (*${caname})(')
g.fn_decl_params(func.params, unsafe { nil }, func.is_variadic)
g.write(')')
g.definitions.write_string(')')
@@ -599,7 +599,7 @@ fn (mut g Gen) fn_decl_params(params []ast.Param, scope &ast.Scope, is_variadic
} else {
''
}
- s := '$const_prefix$param_type_name $var_name_prefix$caname'
+ s := '${const_prefix}${param_type_name} ${var_name_prefix}${caname}'
g.write(s)
g.definitions.write_string(s)
fparams << caname
@@ -621,13 +621,13 @@ fn (mut g Gen) fn_decl_params(params []ast.Param, scope &ast.Scope, is_variadic
fn (mut g Gen) get_anon_fn_type_name(mut node ast.AnonFn, var_name string) string {
mut builder := strings.new_builder(64)
return_styp := g.typ(node.decl.return_type)
- builder.write_string('$return_styp (*$var_name) (')
+ builder.write_string('${return_styp} (*${var_name}) (')
if node.decl.params.len == 0 {
builder.write_string('void)')
} else {
for i, param in node.decl.params {
param_styp := g.typ(param.typ)
- builder.write_string('$param_styp $param.name')
+ builder.write_string('${param_styp} ${param.name}')
if i != node.decl.params.len - 1 {
builder.write_string(', ')
}
@@ -645,7 +645,7 @@ fn (mut g Gen) call_expr(node ast.CallExpr) {
tmp_var := g.new_tmp_var()
fn_type := g.fn_var_signature(node.left.decl.return_type, node.left.decl.params.map(it.typ),
tmp_var)
- g.write('$fn_type = ')
+ g.write('${fn_type} = ')
g.expr(node.left)
g.writeln(';')
g.write(tmp_var)
@@ -688,7 +688,7 @@ fn (mut g Gen) call_expr(node ast.CallExpr) {
if gen_or && !is_gen_or_and_assign_rhs {
cur_line = g.go_before_stmt(0)
}
- g.write('$styp $tmp_opt = ')
+ g.write('${styp} ${tmp_opt} = ')
}
if node.is_method && !node.is_field {
if node.name == 'writeln' && g.pref.experimental && node.args.len > 0
@@ -706,19 +706,19 @@ fn (mut g Gen) call_expr(node ast.CallExpr) {
unwrapped_typ := node.return_type.clear_flag(.optional).clear_flag(.result)
unwrapped_styp := g.typ(unwrapped_typ)
if unwrapped_typ == ast.void_type {
- g.write('\n $cur_line')
+ g.write('\n ${cur_line}')
} else {
if !g.inside_const_opt_or_res {
- g.write('\n $cur_line (*($unwrapped_styp*)${tmp_opt}.data)')
+ g.write('\n ${cur_line} (*(${unwrapped_styp}*)${tmp_opt}.data)')
} else {
- g.write('\n $cur_line $tmp_opt')
+ g.write('\n ${cur_line} ${tmp_opt}')
}
}
} else if gen_keep_alive {
if node.return_type == ast.void_type {
- g.write('\n $cur_line')
+ g.write('\n ${cur_line}')
} else {
- g.write('\n $cur_line $tmp_opt')
+ g.write('\n ${cur_line} ${tmp_opt}')
}
}
if node.is_noreturn {
@@ -739,7 +739,7 @@ fn (mut g Gen) conversion_function_call(prefix string, postfix string, node ast.
g.write('${prefix}( (')
g.expr(node.left)
dot := if node.left_type.is_ptr() { '->' } else { '.' }
- g.write(')${dot}_typ )$postfix')
+ g.write(')${dot}_typ )${postfix}')
}
fn (mut g Gen) method_call(node ast.CallExpr) {
@@ -815,7 +815,7 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
if typ_sym.kind == .interface_ && (typ_sym.info as ast.Interface).defines_method(node.name) {
// Speaker_name_table[s._interface_idx].speak(s._object)
$if debug_interface_method_call ? {
- eprintln('>>> interface typ_sym.name: $typ_sym.name | receiver_type_name: $receiver_type_name | pos: $node.pos')
+ eprintln('>>> interface typ_sym.name: ${typ_sym.name} | receiver_type_name: ${receiver_type_name} | pos: ${node.pos}')
}
left_is_shared := left_type.has_flag(.shared_f)
@@ -923,7 +923,7 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
g.expr(node.left)
}
}
- g.write(', &($elem_type_str[]){')
+ g.write(', &(${elem_type_str}[]){')
g.expr(node.args[0].expr)
g.write('})')
return
@@ -952,24 +952,24 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
if left_sym.kind in [.sum_type, .interface_] {
if node.name == 'type_name' {
if left_sym.kind == .sum_type {
- g.conversion_function_call('charptr_vstring_literal( /* $left_sym.name */ v_typeof_sumtype_$typ_sym.cname',
+ g.conversion_function_call('charptr_vstring_literal( /* ${left_sym.name} */ v_typeof_sumtype_${typ_sym.cname}',
')', node)
return
}
if left_sym.kind == .interface_ {
- g.conversion_function_call('charptr_vstring_literal( /* $left_sym.name */ v_typeof_interface_$typ_sym.cname',
+ g.conversion_function_call('charptr_vstring_literal( /* ${left_sym.name} */ v_typeof_interface_${typ_sym.cname}',
')', node)
return
}
}
if node.name == 'type_idx' {
if left_sym.kind == .sum_type {
- g.conversion_function_call('/* $left_sym.name */ v_typeof_sumtype_idx_$typ_sym.cname',
+ g.conversion_function_call('/* ${left_sym.name} */ v_typeof_sumtype_idx_${typ_sym.cname}',
'', node)
return
}
if left_sym.kind == .interface_ {
- g.conversion_function_call('/* $left_sym.name */ v_typeof_interface_idx_$typ_sym.cname',
+ g.conversion_function_call('/* ${left_sym.name} */ v_typeof_interface_idx_${typ_sym.cname}',
'', node)
return
}
@@ -1040,10 +1040,10 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
if node.name in ['last', 'first', 'pop'] {
return_type_str := g.typ(node.return_type)
has_cast = true
- g.write('(*($return_type_str*)')
+ g.write('(*(${return_type_str}*)')
}
}
- mut name := util.no_dots('${receiver_type_name}_$node.name')
+ mut name := util.no_dots('${receiver_type_name}_${node.name}')
mut array_depth := -1
mut noscan := ''
if left_sym.kind == .array {
@@ -1060,7 +1060,7 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
}
} else if left_sym.kind == .chan {
if node.name in ['close', 'try_pop', 'try_push'] {
- name = 'sync__Channel_$node.name'
+ name = 'sync__Channel_${node.name}'
}
} else if final_left_sym.kind == .map {
if node.name == 'keys' {
@@ -1073,9 +1073,9 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
&& node.name != 'str' {
sym := g.table.sym(node.receiver_type)
key := sym.name + '.' + node.name
- g.write('/* obf method call: $key */')
+ g.write('/* obf method call: ${key} */')
name = g.obf_table[key] or {
- panic('cgen: obf name "$key" not found, this should never happen')
+ panic('cgen: obf name "${key}" not found, this should never happen')
}
}
// Check if expression is: arr[a..b].clone(), arr[a..].clone()
@@ -1116,7 +1116,7 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
if array_depth >= 0 {
name = name + '_to_depth'
}
- g.write('$name${noscan}(')
+ g.write('${name}${noscan}(')
} else {
g.write('${name}(')
}
@@ -1129,7 +1129,7 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
// TODO same logic in call_args()
if !is_range_slice {
if !node.left.is_lvalue() {
- g.write('ADDR($rec_cc_type, ')
+ g.write('ADDR(${rec_cc_type}, ')
has_cast = true
} else if !is_node_name_in_first_last_repeat && !(left_type.has_flag(.shared_f)
&& left_type == node.receiver_type) {
@@ -1147,7 +1147,7 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
// TODO
// g.write('&')
} else if diff > 0 {
- g.write('/*diff=$diff*/')
+ g.write('/*diff=${diff}*/')
g.write([]u8{len: diff, init: `*`}.bytestr())
}
}
@@ -1155,7 +1155,7 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
if g.is_autofree && node.free_receiver && !g.inside_lambda && !g.is_builtin_mod {
// The receiver expression needs to be freed, use the temp var.
fn_name := node.name.replace('.', '_')
- arg_name := '_arg_expr_${fn_name}_0_$node.pos.pos'
+ arg_name := '_arg_expr_${fn_name}_0_${node.pos.pos}'
g.write('/*af receiver arg*/' + arg_name)
} else {
if left_sym.kind == .array && node.left.is_auto_deref_var()
@@ -1209,7 +1209,7 @@ fn (mut g Gen) method_call(node ast.CallExpr) {
}
g.call_args(node)
if array_depth >= 0 {
- g.write(', $array_depth')
+ g.write(', ${array_depth}')
}
g.write(')')
}
@@ -1282,7 +1282,7 @@ fn (mut g Gen) fn_call(node ast.CallExpr) {
encode_name := js_enc_name(json_type_str)
g.empty_line = true
g.writeln('// json.encode')
- g.write('cJSON* $json_obj = ${encode_name}(')
+ g.write('cJSON* ${json_obj} = ${encode_name}(')
if node.args[0].typ.is_ptr() {
g.write('*')
}
@@ -1290,9 +1290,9 @@ fn (mut g Gen) fn_call(node ast.CallExpr) {
g.writeln(');')
tmp2 = g.new_tmp_var()
if is_json_encode {
- g.writeln('string $tmp2 = json__json_print($json_obj);')
+ g.writeln('string ${tmp2} = json__json_print(${json_obj});')
} else {
- g.writeln('string $tmp2 = json__json_print_pretty($json_obj);')
+ g.writeln('string ${tmp2} = json__json_print_pretty(${json_obj});')
}
} else {
ast_type := node.args[0].expr as ast.TypeNode
@@ -1302,19 +1302,19 @@ fn (mut g Gen) fn_call(node ast.CallExpr) {
g.gen_json_for_type(ast_type.typ)
g.empty_line = true
g.writeln('// json.decode')
- g.write('cJSON* $json_obj = json__json_parse(')
+ g.write('cJSON* ${json_obj} = json__json_parse(')
// Skip the first argument in json.decode which is a type
// its name was already used to generate the function call
g.is_js_call = true
g.call_args(node)
g.writeln(');')
tmp2 = g.new_tmp_var()
- g.writeln('${result_name}_$typ $tmp2 = ${fn_name}($json_obj);')
+ g.writeln('${result_name}_${typ} ${tmp2} = ${fn_name}(${json_obj});')
}
if !g.is_autofree {
- g.write('cJSON_Delete($json_obj); // del')
+ g.write('cJSON_Delete(${json_obj}); // del')
}
- g.write('\n$cur_line')
+ g.write('\n${cur_line}')
name = ''
json_obj = tmp2
}
@@ -1340,9 +1340,9 @@ fn (mut g Gen) fn_call(node ast.CallExpr) {
// Obfuscate only functions in the main module for now
if g.pref.obfuscate && g.cur_mod.name == 'main' && name.starts_with('main__') {
key := node.name
- g.write('/* obf call: $key */')
+ g.write('/* obf call: ${key} */')
name = g.obf_table[key] or {
- panic('cgen: obf name "$key" not found, this should never happen')
+ panic('cgen: obf name "${key}" not found, this should never happen')
}
}
if !is_selector_call {
@@ -1386,9 +1386,9 @@ fn (mut g Gen) fn_call(node ast.CallExpr) {
if g.is_autofree && !typ.has_flag(.optional) && !typ.has_flag(.result) {
// Create a temporary variable so that the value can be freed
tmp := g.new_tmp_var()
- g.write('string $tmp = ')
+ g.write('string ${tmp} = ')
g.gen_expr_to_string(expr, typ)
- g.writeln('; ${c_name(print_method)}($tmp); string_free(&$tmp);')
+ g.writeln('; ${c_name(print_method)}(${tmp}); string_free(&${tmp});')
} else {
g.write('${c_name(print_method)}(')
if expr is ast.ComptimeSelector {
@@ -1426,7 +1426,7 @@ fn (mut g Gen) fn_call(node ast.CallExpr) {
if !print_auto_str {
if g.pref.is_debug && node.name == 'panic' {
paline, pafile, pamod, pafn := g.panic_debug_info(node.pos)
- g.write('panic_debug($paline, tos3("$pafile"), tos3("$pamod"), tos3("$pafn"), ')
+ g.write('panic_debug(${paline}, tos3("${pafile}"), tos3("${pamod}"), tos3("${pafn}"), ')
g.call_args(node)
g.write(')')
} else {
@@ -1464,9 +1464,9 @@ fn (mut g Gen) fn_call(node ast.CallExpr) {
dot := if is_ptr { '->' } else { '.' }
if cast_sym.info is ast.Aggregate {
sym := g.table.sym(cast_sym.info.types[g.aggregate_type_idx])
- g.write('${dot}_$sym.cname')
+ g.write('${dot}_${sym.cname}')
} else {
- g.write('${dot}_$cast_sym.cname')
+ g.write('${dot}_${cast_sym.cname}')
}
g.write(')')
}
@@ -1558,9 +1558,9 @@ fn (mut g Gen) autofree_call_pregen(node ast.CallExpr) {
}
free_tmp_arg_vars = true
fn_name := node.name.replace('.', '_') // can't use name...
- t := '_arg_expr_${fn_name}_${i}_$node.pos.pos'
+ t := '_arg_expr_${fn_name}_${i}_${node.pos.pos}'
used := false // scope.known_var(t)
- mut s := '$t = '
+ mut s := '${t} = '
if used {
// This means this tmp var name was already used (the same function was called and
// `_arg_fnname_1` was already generated).
@@ -1573,7 +1573,7 @@ fn (mut g Gen) autofree_call_pregen(node ast.CallExpr) {
else {}
}
}
- s = '$t = '
+ s = '${t} = '
} else {
scope.register(ast.Var{
name: t
@@ -1581,7 +1581,7 @@ fn (mut g Gen) autofree_call_pregen(node ast.CallExpr) {
is_autofree_tmp: true
pos: node.pos
})
- s = 'string $t = '
+ s = 'string ${t} = '
}
s += g.expr_string(arg.expr)
s += ';// new af2 pre'
@@ -1728,7 +1728,7 @@ fn (mut g Gen) call_args(node ast.CallExpr) {
// Use these variables here.
fn_name := node.name.replace('.', '_')
// name := '_tt${g.tmp_count_af}_arg_expr_${fn_name}_$i'
- name := '_arg_expr_${fn_name}_${i + 1}_$node.pos.pos'
+ name := '_arg_expr_${fn_name}_${i + 1}_${node.pos.pos}'
g.write('/*af arg*/' + name)
}
} else {
@@ -1738,7 +1738,7 @@ fn (mut g Gen) call_args(node ast.CallExpr) {
if use_tmp_var_autofree {
// TODO copypasta, move to an inline fn
fn_name := node.name.replace('.', '_')
- name := '_arg_expr_${fn_name}_${i + 1}_$node.pos.pos'
+ name := '_arg_expr_${fn_name}_${i + 1}_${node.pos.pos}'
g.write('/*af arg2*/' + name)
} else {
g.expr(arg.expr)
@@ -1765,7 +1765,7 @@ fn (mut g Gen) call_args(node ast.CallExpr) {
arr_info.elem_type = utyp
}
} else {
- g.error('unable to find method $node.name', node.pos)
+ g.error('unable to find method ${node.name}', node.pos)
}
} else {
if fn_def := g.table.find_fn(node.name) {
@@ -1776,7 +1776,7 @@ fn (mut g Gen) call_args(node ast.CallExpr) {
arr_info.elem_type = utyp
}
} else {
- g.error('unable to find function $node.name', node.pos)
+ g.error('unable to find function ${node.name}', node.pos)
}
}
}
@@ -1801,7 +1801,7 @@ fn (mut g Gen) call_args(node ast.CallExpr) {
}
} else {
noscan := g.check_noscan(arr_info.elem_type)
- g.write('new_array_from_c_array${noscan}($variadic_count, $variadic_count, sizeof($elem_type), _MOV(($elem_type[$variadic_count]){')
+ g.write('new_array_from_c_array${noscan}(${variadic_count}, ${variadic_count}, sizeof(${elem_type}), _MOV((${elem_type}[${variadic_count}]){')
for j in arg_nr .. args.len {
g.ref_or_deref_arg(args[j], arr_info.elem_type, node.language)
if j < args.len - 1 {
@@ -1811,7 +1811,7 @@ fn (mut g Gen) call_args(node ast.CallExpr) {
g.write('}))')
}
} else {
- g.write('__new_array(0, 0, sizeof($elem_type))')
+ g.write('__new_array(0, 0, sizeof(${elem_type}))')
}
}
}
@@ -1843,7 +1843,7 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
if expr.left.inherited_vars.len > 0 {
fn_var := g.fn_var_signature(expr.left.decl.return_type, expr.left.decl.params.map(it.typ),
tmp_fn)
- g.write('\t$fn_var = ')
+ g.write('\t${fn_var} = ')
g.gen_anon_fn(mut expr.left)
g.writeln(';')
use_tmp_fn_var = true
@@ -1856,7 +1856,7 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
fn_sym := g.table.sym(expr.fn_var_type)
func := (fn_sym.info as ast.FnType).func
fn_var := g.fn_var_signature(func.return_type, func.params.map(it.typ), tmp_fn)
- g.write('\t$fn_var = ')
+ g.write('\t${fn_var} = ')
g.expr(expr.left)
g.writeln(';')
name = fn_sym.cname
@@ -1870,9 +1870,9 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
sym := g.table.sym(expr.receiver_type)
key = sym.name + '.' + expr.name
}
- g.write('/* obf go: $key */')
+ g.write('/* obf go: ${key} */')
name = g.obf_table[key] or {
- panic('cgen: obf name "$key" not found, this should never happen')
+ panic('cgen: obf name "${key}" not found, this should never happen')
}
}
g.empty_line = true
@@ -1880,24 +1880,24 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
wrapper_struct_name := 'thread_arg_' + name
wrapper_fn_name := name + '_thread_wrapper'
arg_tmp_var := 'arg_' + tmp
- g.writeln('$wrapper_struct_name *$arg_tmp_var = malloc(sizeof(thread_arg_$name));')
+ g.writeln('${wrapper_struct_name} *${arg_tmp_var} = malloc(sizeof(thread_arg_${name}));')
fn_name := if use_tmp_fn_var { tmp_fn } else { name }
if !(expr.is_method && g.table.sym(expr.receiver_type).kind == .interface_) {
- g.writeln('$arg_tmp_var->fn = $fn_name;')
+ g.writeln('${arg_tmp_var}->fn = ${fn_name};')
}
if expr.is_method {
- g.write('$arg_tmp_var->arg0 = ')
+ g.write('${arg_tmp_var}->arg0 = ')
g.expr(expr.left)
g.writeln(';')
}
for i, arg in expr.args {
- g.write('$arg_tmp_var->arg${i + 1} = ')
+ g.write('${arg_tmp_var}->arg${i + 1} = ')
g.expr(arg.expr)
g.writeln(';')
}
s_ret_typ := g.typ(node.call_expr.return_type)
if g.pref.os == .windows && node.call_expr.return_type != ast.void_type {
- g.writeln('$arg_tmp_var->ret_ptr = malloc(sizeof($s_ret_typ));')
+ g.writeln('${arg_tmp_var}->ret_ptr = malloc(sizeof(${s_ret_typ}));')
}
is_opt := node.call_expr.return_type.has_flag(.optional)
is_res := node.call_expr.return_type.has_flag(.result)
@@ -1913,43 +1913,43 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
} else {
opt := if is_opt { '${option_name}_' } else { '' }
res := if is_res { '${result_name}_' } else { '' }
- gohandle_name = '__v_thread_$opt$res${g.table.sym(g.unwrap_generic(node.call_expr.return_type)).cname}'
+ gohandle_name = '__v_thread_${opt}${res}${g.table.sym(g.unwrap_generic(node.call_expr.return_type)).cname}'
}
if g.pref.os == .windows {
simple_handle := if node.is_expr && node.call_expr.return_type != ast.void_type {
- 'thread_handle_$tmp'
+ 'thread_handle_${tmp}'
} else {
- 'thread_$tmp'
+ 'thread_${tmp}'
}
- g.writeln('HANDLE $simple_handle = CreateThread(0, 0, (LPTHREAD_START_ROUTINE)$wrapper_fn_name, $arg_tmp_var, 0, 0);')
- g.writeln('if (!$simple_handle) panic_lasterr(tos3("`go ${name}()`: "));')
+ g.writeln('HANDLE ${simple_handle} = CreateThread(0, 0, (LPTHREAD_START_ROUTINE)${wrapper_fn_name}, ${arg_tmp_var}, 0, 0);')
+ g.writeln('if (!${simple_handle}) panic_lasterr(tos3("`go ${name}()`: "));')
if node.is_expr && node.call_expr.return_type != ast.void_type {
- g.writeln('$gohandle_name thread_$tmp = {')
- g.writeln('\t.ret_ptr = $arg_tmp_var->ret_ptr,')
- g.writeln('\t.handle = thread_handle_$tmp')
+ g.writeln('${gohandle_name} thread_${tmp} = {')
+ g.writeln('\t.ret_ptr = ${arg_tmp_var}->ret_ptr,')
+ g.writeln('\t.handle = thread_handle_${tmp}')
g.writeln('};')
}
if !node.is_expr {
- g.writeln('CloseHandle(thread_$tmp);')
+ g.writeln('CloseHandle(thread_${tmp});')
}
} else {
- g.writeln('pthread_t thread_$tmp;')
+ g.writeln('pthread_t thread_${tmp};')
mut sthread_attributes := 'NULL'
if g.pref.os != .vinix {
g.writeln('pthread_attr_t thread_${tmp}_attributes;')
g.writeln('pthread_attr_init(&thread_${tmp}_attributes);')
- g.writeln('pthread_attr_setstacksize(&thread_${tmp}_attributes, $g.pref.thread_stack_size);')
+ g.writeln('pthread_attr_setstacksize(&thread_${tmp}_attributes, ${g.pref.thread_stack_size});')
sthread_attributes = '&thread_${tmp}_attributes'
}
- g.writeln('int ${tmp}_thr_res = pthread_create(&thread_$tmp, $sthread_attributes, (void*)$wrapper_fn_name, $arg_tmp_var);')
+ g.writeln('int ${tmp}_thr_res = pthread_create(&thread_${tmp}, ${sthread_attributes}, (void*)${wrapper_fn_name}, ${arg_tmp_var});')
g.writeln('if (${tmp}_thr_res) panic_error_number(tos3("`go ${name}()`: "), ${tmp}_thr_res);')
if !node.is_expr {
- g.writeln('pthread_detach(thread_$tmp);')
+ g.writeln('pthread_detach(thread_${tmp});')
}
}
g.writeln('// end go')
if node.is_expr {
- handle = 'thread_$tmp'
+ handle = 'thread_${tmp}'
// create wait handler for this return type if none exists
waiter_fn_name := gohandle_name + '_wait'
mut should_register := false
@@ -1960,10 +1960,10 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
}
}
if should_register {
- g.gowrappers.writeln('\n$s_ret_typ ${waiter_fn_name}($gohandle_name thread) {')
+ g.gowrappers.writeln('\n${s_ret_typ} ${waiter_fn_name}(${gohandle_name} thread) {')
mut c_ret_ptr_ptr := 'NULL'
if node.call_expr.return_type != ast.void_type {
- g.gowrappers.writeln('\t$s_ret_typ* ret_ptr;')
+ g.gowrappers.writeln('\t${s_ret_typ}* ret_ptr;')
c_ret_ptr_ptr = '&ret_ptr'
}
if g.pref.os == .windows {
@@ -1974,7 +1974,7 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
g.gowrappers.writeln('\tret_ptr = thread.ret_ptr;')
}
} else {
- g.gowrappers.writeln('\tint stat = pthread_join(thread, (void **)$c_ret_ptr_ptr);')
+ g.gowrappers.writeln('\tint stat = pthread_join(thread, (void **)${c_ret_ptr_ptr});')
}
g.gowrappers.writeln('\tif (stat != 0) { _v_panic(_SLIT("unable to join thread")); }')
if g.pref.os == .windows {
@@ -1985,7 +1985,7 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
}
}
if node.call_expr.return_type != ast.void_type {
- g.gowrappers.writeln('\t$s_ret_typ ret = *ret_ptr;')
+ g.gowrappers.writeln('\t${s_ret_typ} ret = *ret_ptr;')
g.gowrappers.writeln('\tfree(ret_ptr);')
g.gowrappers.writeln('\treturn ret;')
}
@@ -2001,7 +2001,7 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
}
}
if should_register {
- g.type_definitions.writeln('\ntypedef struct $wrapper_struct_name {')
+ g.type_definitions.writeln('\ntypedef struct ${wrapper_struct_name} {')
mut fn_var := ''
if node.call_expr.is_fn_var {
fn_sym := g.table.sym(node.call_expr.fn_var_type)
@@ -2035,10 +2035,10 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
}
}
}
- g.type_definitions.writeln('\t$fn_var;')
+ g.type_definitions.writeln('\t${fn_var};')
if expr.is_method {
styp := g.typ(expr.receiver_type)
- g.type_definitions.writeln('\t$styp arg0;')
+ g.type_definitions.writeln('\t${styp} arg0;')
}
need_return_ptr := g.pref.os == .windows && node.call_expr.return_type != ast.void_type
for i, arg in expr.args {
@@ -2049,21 +2049,21 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
g.type_definitions.writeln('\t' + sig + ';')
} else {
styp := g.typ(arg.typ)
- g.type_definitions.writeln('\t$styp arg${i + 1};')
+ g.type_definitions.writeln('\t${styp} arg${i + 1};')
}
}
if need_return_ptr {
g.type_definitions.writeln('\tvoid* ret_ptr;')
}
- g.type_definitions.writeln('} $wrapper_struct_name;')
+ g.type_definitions.writeln('} ${wrapper_struct_name};')
thread_ret_type := if g.pref.os == .windows { 'u32' } else { 'void*' }
- g.type_definitions.writeln('$g.static_modifier $thread_ret_type ${wrapper_fn_name}($wrapper_struct_name *arg);')
- g.gowrappers.writeln('$thread_ret_type ${wrapper_fn_name}($wrapper_struct_name *arg) {')
+ g.type_definitions.writeln('${g.static_modifier} ${thread_ret_type} ${wrapper_fn_name}(${wrapper_struct_name} *arg);')
+ g.gowrappers.writeln('${thread_ret_type} ${wrapper_fn_name}(${wrapper_struct_name} *arg) {')
if node.call_expr.return_type != ast.void_type {
if g.pref.os == .windows {
- g.gowrappers.write_string('\t*(($s_ret_typ*)(arg->ret_ptr)) = ')
+ g.gowrappers.write_string('\t*((${s_ret_typ}*)(arg->ret_ptr)) = ')
} else {
- g.gowrappers.writeln('\t$s_ret_typ* ret_ptr = malloc(sizeof($s_ret_typ));')
+ g.gowrappers.writeln('\t${s_ret_typ}* ret_ptr = malloc(sizeof(${s_ret_typ}));')
g.gowrappers.write_string('\t*ret_ptr = ')
}
} else {
@@ -2171,7 +2171,7 @@ fn (mut g Gen) keep_alive_call_pregen(node ast.CallExpr) int {
// evaluation order is preserved
expected_type := node.expected_arg_types[i]
typ := g.table.sym(expected_type).cname
- g.write('$typ __tmp_arg_${tmp_cnt_save + i} = ')
+ g.write('${typ} __tmp_arg_${tmp_cnt_save + i} = ')
g.ref_or_deref_arg(arg, expected_type, node.language)
g.writeln(';')
}
@@ -2402,5 +2402,5 @@ fn (mut g Gen) write_fn_attrs(attrs []ast.Attr) string {
}
fn call_convention_attribute(cconvention string, is_cc_msvc bool) string {
- return if is_cc_msvc { '__$cconvention ' } else { '__attribute__(($cconvention)) ' }
+ return if is_cc_msvc { '__${cconvention} ' } else { '__attribute__((${cconvention})) ' }
}
diff --git a/vlib/v/gen/c/for.v b/vlib/v/gen/c/for.v
index a74595aa4c..9bfe610b44 100644
--- a/vlib/v/gen/c/for.v
+++ b/vlib/v/gen/c/for.v
@@ -11,7 +11,7 @@ fn (mut g Gen) for_c_stmt(node ast.ForCStmt) {
g.is_vlines_enabled = false
g.inside_for_c_stmt = true
if node.label.len > 0 {
- g.writeln('$node.label:')
+ g.writeln('${node.label}:')
}
g.writeln('{')
g.indent++
@@ -51,7 +51,7 @@ fn (mut g Gen) for_c_stmt(node ast.ForCStmt) {
g.is_vlines_enabled = false
g.inside_for_c_stmt = true
if node.label.len > 0 {
- g.writeln('$node.label:')
+ g.writeln('${node.label}:')
}
g.write('for (')
if !node.has_init {
@@ -105,7 +105,7 @@ fn (mut g Gen) for_stmt(node ast.ForStmt) {
g.loop_depth++
g.is_vlines_enabled = false
if node.label.len > 0 {
- g.writeln('$node.label:')
+ g.writeln('${node.label}:')
}
g.writeln('for (;;) {')
if !node.is_inf {
@@ -149,17 +149,17 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
}
g.loop_depth++
if node.label.len > 0 {
- g.writeln('\t$node.label: {}')
+ g.writeln('\t${node.label}: {}')
}
if node.is_range {
// `for x in 1..10 {`
i := if node.val_var == '_' { g.new_tmp_var() } else { c_name(node.val_var) }
val_typ := ast.mktyp(node.val_type)
- g.write('for (${g.typ(val_typ)} $i = ')
+ g.write('for (${g.typ(val_typ)} ${i} = ')
g.expr(node.cond)
- g.write('; $i < ')
+ g.write('; ${i} < ')
g.expr(node.high)
- g.writeln('; ++$i) {')
+ g.writeln('; ++${i}) {')
} else if node.kind == .array {
// `for num in nums {`
// g.writeln('// FOR IN array')
@@ -171,7 +171,7 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
} else {
cond_var = g.new_tmp_var()
g.write(g.typ(node.cond_type))
- g.write(' $cond_var = ')
+ g.write(' ${cond_var} = ')
g.expr(node.cond)
g.writeln(';')
}
@@ -180,16 +180,16 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
share_accessor := if node.cond_type.share() == .shared_t { 'val.' } else { '' }
op_field := field_accessor + share_accessor
g.empty_line = true
- g.writeln('for (int $i = 0; $i < $cond_var${op_field}len; ++$i) {')
+ g.writeln('for (int ${i} = 0; ${i} < ${cond_var}${op_field}len; ++${i}) {')
if node.val_var != '_' {
if val_sym.kind == .function {
g.write('\t')
g.write_fn_ptr_decl(val_sym.info as ast.FnType, c_name(node.val_var))
- g.writeln(' = ((voidptr*)$cond_var${op_field}data)[$i];')
+ g.writeln(' = ((voidptr*)${cond_var}${op_field}data)[${i}];')
} else if val_sym.kind == .array_fixed && !node.val_is_mut {
- right := '(($styp*)$cond_var${op_field}data)[$i]'
- g.writeln('\t$styp ${c_name(node.val_var)};')
- g.writeln('\tmemcpy(*($styp*)${c_name(node.val_var)}, (byte*)$right, sizeof($styp));')
+ right := '((${styp}*)${cond_var}${op_field}data)[${i}]'
+ g.writeln('\t${styp} ${c_name(node.val_var)};')
+ g.writeln('\tmemcpy(*(${styp}*)${c_name(node.val_var)}, (byte*)${right}, sizeof(${styp}));')
} else {
// If val is mutable (pointer behind the scenes), we need to generate
// `int* val = ((int*)arr.data) + i;`
@@ -197,11 +197,11 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
// `int* val = ((int**)arr.data)[i];`
// right := if node.val_is_mut { styp } else { styp + '*' }
right := if node.val_is_mut || node.val_is_ref {
- '(($styp)$cond_var${op_field}data) + $i'
+ '((${styp})${cond_var}${op_field}data) + ${i}'
} else {
- '(($styp*)$cond_var${op_field}data)[$i]'
+ '((${styp}*)${cond_var}${op_field}data)[${i}]'
}
- g.writeln('\t$styp ${c_name(node.val_var)} = $right;')
+ g.writeln('\t${styp} ${c_name(node.val_var)} = ${right};')
}
}
} else if node.kind == .array_fixed {
@@ -211,16 +211,16 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
if cond_is_literal {
cond_var = g.new_tmp_var()
g.write(g.typ(node.cond_type))
- g.write(' $cond_var = ')
+ g.write(' ${cond_var} = ')
g.expr(node.cond)
g.writeln(';')
} else if cond_type_is_ptr {
cond_var = g.new_tmp_var()
cond_var_type := g.typ(node.cond_type).trim('*')
if !node.cond.is_lvalue() {
- g.write('$cond_var_type *$cond_var = (($cond_var_type)')
+ g.write('${cond_var_type} *${cond_var} = ((${cond_var_type})')
} else {
- g.write('$cond_var_type *$cond_var = (')
+ g.write('${cond_var_type} *${cond_var} = (')
}
g.expr(node.cond)
g.writeln(');')
@@ -230,7 +230,7 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
idx := if node.key_var in ['', '_'] { g.new_tmp_var() } else { node.key_var }
cond_sym := g.table.sym(node.cond_type)
info := cond_sym.info as ast.ArrayFixed
- g.writeln('for (int $idx = 0; $idx != $info.size; ++$idx) {')
+ g.writeln('for (int ${idx} = 0; ${idx} != ${info.size}; ++${idx}) {')
if node.val_var != '_' {
val_sym := g.table.sym(node.val_type)
is_fixed_array := val_sym.kind == .array_fixed && !node.val_is_mut
@@ -239,22 +239,22 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
g.write_fn_ptr_decl(val_sym.info as ast.FnType, c_name(node.val_var))
} else if is_fixed_array {
styp := g.typ(node.val_type)
- g.writeln('\t$styp ${c_name(node.val_var)};')
- g.writeln('\tmemcpy(*($styp*)${c_name(node.val_var)}, (byte*)$cond_var[$idx], sizeof($styp));')
+ g.writeln('\t${styp} ${c_name(node.val_var)};')
+ g.writeln('\tmemcpy(*(${styp}*)${c_name(node.val_var)}, (byte*)${cond_var}[${idx}], sizeof(${styp}));')
} else {
styp := g.typ(node.val_type)
- g.write('\t$styp ${c_name(node.val_var)}')
+ g.write('\t${styp} ${c_name(node.val_var)}')
}
if !is_fixed_array {
addr := if node.val_is_mut { '&' } else { '' }
if cond_type_is_ptr {
- g.writeln(' = ${addr}(*$cond_var)[$idx];')
+ g.writeln(' = ${addr}(*${cond_var})[${idx}];')
} else if cond_is_literal {
- g.writeln(' = $addr$cond_var[$idx];')
+ g.writeln(' = ${addr}${cond_var}[${idx}];')
} else {
- g.write(' = $addr')
+ g.write(' = ${addr}')
g.expr(node.cond)
- g.writeln('[$idx];')
+ g.writeln('[${idx}];')
}
}
}
@@ -267,7 +267,7 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
} else {
cond_var = g.new_tmp_var()
g.write(g.typ(node.cond_type))
- g.write(' $cond_var = ')
+ g.write(' ${cond_var} = ')
g.expr(node.cond)
g.writeln(';')
}
@@ -278,26 +278,26 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
idx := g.new_tmp_var()
map_len := g.new_tmp_var()
g.empty_line = true
- g.writeln('int $map_len = $cond_var${arw_or_pt}key_values.len;')
- g.writeln('for (int $idx = 0; $idx < $map_len; ++$idx ) {')
+ g.writeln('int ${map_len} = ${cond_var}${arw_or_pt}key_values.len;')
+ g.writeln('for (int ${idx} = 0; ${idx} < ${map_len}; ++${idx} ) {')
// TODO: don't have this check when the map has no deleted elements
g.indent++
diff := g.new_tmp_var()
- g.writeln('int $diff = $cond_var${arw_or_pt}key_values.len - $map_len;')
- g.writeln('$map_len = $cond_var${arw_or_pt}key_values.len;')
+ g.writeln('int ${diff} = ${cond_var}${arw_or_pt}key_values.len - ${map_len};')
+ g.writeln('${map_len} = ${cond_var}${arw_or_pt}key_values.len;')
// TODO: optimize this
- g.writeln('if ($diff < 0) {')
- g.writeln('\t$idx = -1;')
+ g.writeln('if (${diff} < 0) {')
+ g.writeln('\t${idx} = -1;')
g.writeln('\tcontinue;')
g.writeln('}')
- g.writeln('if (!DenseArray_has_index(&$cond_var${arw_or_pt}key_values, $idx)) {continue;}')
+ g.writeln('if (!DenseArray_has_index(&${cond_var}${arw_or_pt}key_values, ${idx})) {continue;}')
if node.key_var != '_' {
key_styp := g.typ(node.key_type)
key := c_name(node.key_var)
- g.writeln('$key_styp $key = /*key*/ *($key_styp*)DenseArray_key(&$cond_var${arw_or_pt}key_values, $idx);')
+ g.writeln('${key_styp} ${key} = /*key*/ *(${key_styp}*)DenseArray_key(&${cond_var}${arw_or_pt}key_values, ${idx});')
// TODO: analyze whether node.key_type has a .clone() method and call .clone() for all types:
if node.key_type == ast.string_type {
- g.writeln('$key = string_clone($key);')
+ g.writeln('${key} = string_clone(${key});')
}
}
if node.val_var != '_' {
@@ -305,23 +305,23 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
if val_sym.kind == .function {
g.write_fn_ptr_decl(val_sym.info as ast.FnType, c_name(node.val_var))
g.write(' = (*(voidptr*)')
- g.writeln('DenseArray_value(&$cond_var${arw_or_pt}key_values, $idx));')
+ g.writeln('DenseArray_value(&${cond_var}${arw_or_pt}key_values, ${idx}));')
} else if val_sym.kind == .array_fixed && !node.val_is_mut {
val_styp := g.typ(node.val_type)
- g.writeln('$val_styp ${c_name(node.val_var)};')
- g.writeln('memcpy(*($val_styp*)${c_name(node.val_var)}, (byte*)DenseArray_value(&$cond_var${arw_or_pt}key_values, $idx), sizeof($val_styp));')
+ g.writeln('${val_styp} ${c_name(node.val_var)};')
+ g.writeln('memcpy(*(${val_styp}*)${c_name(node.val_var)}, (byte*)DenseArray_value(&${cond_var}${arw_or_pt}key_values, ${idx}), sizeof(${val_styp}));')
} else {
val_styp := g.typ(node.val_type)
if node.val_type.is_ptr() {
if node.val_is_mut || node.val_is_ref {
- g.write('$val_styp ${c_name(node.val_var)} = &(*($val_styp)')
+ g.write('${val_styp} ${c_name(node.val_var)} = &(*(${val_styp})')
} else {
- g.write('$val_styp ${c_name(node.val_var)} = (*($val_styp*)')
+ g.write('${val_styp} ${c_name(node.val_var)} = (*(${val_styp}*)')
}
} else {
- g.write('$val_styp ${c_name(node.val_var)} = (*($val_styp*)')
+ g.write('${val_styp} ${c_name(node.val_var)} = (*(${val_styp}*)')
}
- g.writeln('DenseArray_value(&$cond_var${arw_or_pt}key_values, $idx));')
+ g.writeln('DenseArray_value(&${cond_var}${arw_or_pt}key_values, ${idx}));')
}
}
g.indent--
@@ -333,13 +333,13 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
}
field_accessor := if node.cond_type.is_ptr() { '->' } else { '.' }
i := if node.key_var in ['', '_'] { g.new_tmp_var() } else { node.key_var }
- g.write('for (int $i = 0; $i < ')
+ g.write('for (int ${i} = 0; ${i} < ')
g.expr(cond)
- g.writeln('${field_accessor}len; ++$i) {')
+ g.writeln('${field_accessor}len; ++${i}) {')
if node.val_var != '_' {
g.write('\tu8 ${c_name(node.val_var)} = ')
g.expr(cond)
- g.writeln('${field_accessor}str[$i];')
+ g.writeln('${field_accessor}str[${i}];')
}
} else if node.kind == .struct_ {
cond_type_sym := g.table.sym(node.cond_type)
@@ -349,13 +349,13 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
}
ret_typ := next_fn.return_type
t_expr := g.new_tmp_var()
- g.write('${g.typ(node.cond_type)} $t_expr = ')
+ g.write('${g.typ(node.cond_type)} ${t_expr} = ')
g.expr(node.cond)
g.writeln(';')
if node.key_var in ['', '_'] {
g.writeln('while (1) {')
} else {
- g.writeln('for (size_t $node.key_var = 0;; ++$node.key_var) {')
+ g.writeln('for (size_t ${node.key_var} = 0;; ++${node.key_var}) {')
}
t_var := g.new_tmp_var()
receiver_typ := g.unwrap_generic(next_fn.params[0].typ)
@@ -367,22 +367,22 @@ fn (mut g Gen) for_in_stmt(node_ ast.ForInStmt) {
fn_name = g.generic_fn_name(receiver_sym.info.concrete_types, fn_name)
}
}
- g.write('\t${g.typ(ret_typ)} $t_var = ${fn_name}(')
+ g.write('\t${g.typ(ret_typ)} ${t_var} = ${fn_name}(')
if !node.cond_type.is_ptr() && receiver_typ.is_ptr() {
g.write('&')
}
- g.writeln('$t_expr);')
+ g.writeln('${t_expr});')
g.writeln('\tif (${t_var}.state != 0) break;')
val := if node.val_var in ['', '_'] { g.new_tmp_var() } else { node.val_var }
val_styp := g.typ(node.val_type)
if node.val_is_mut {
- g.writeln('\t$val_styp $val = ($val_styp)${t_var}.data;')
+ g.writeln('\t${val_styp} ${val} = (${val_styp})${t_var}.data;')
} else {
- g.writeln('\t$val_styp $val = *($val_styp*)${t_var}.data;')
+ g.writeln('\t${val_styp} ${val} = *(${val_styp}*)${t_var}.data;')
}
} else {
typ_str := g.table.type_to_str(node.cond_type)
- g.error('for in: unhandled symbol `$node.cond` of type `$typ_str`', node.pos)
+ g.error('for in: unhandled symbol `${node.cond}` of type `${typ_str}`', node.pos)
}
g.stmts(node.stmts)
if node.label.len > 0 {
diff --git a/vlib/v/gen/c/if.v b/vlib/v/gen/c/if.v
index 738272577f..d45f965145 100644
--- a/vlib/v/gen/c/if.v
+++ b/vlib/v/gen/c/if.v
@@ -157,9 +157,9 @@ fn (mut g Gen) if_expr(node ast.IfExpr) {
styp := g.typ(node.typ)
cur_line = g.go_before_stmt(0)
g.empty_line = true
- g.writeln('$styp $tmp; /* if prepend */')
+ g.writeln('${styp} ${tmp}; /* if prepend */')
if g.infix_left_var_name.len > 0 {
- g.writeln('if ($g.infix_left_var_name) {')
+ g.writeln('if (${g.infix_left_var_name}) {')
g.indent++
}
} else if node.is_expr || g.inside_ternary != 0 {
@@ -202,7 +202,7 @@ fn (mut g Gen) if_expr(node ast.IfExpr) {
if cond.expr !is ast.IndexExpr && cond.expr !is ast.PrefixExpr {
var_name := g.new_tmp_var()
guard_vars[i] = var_name
- g.writeln('${g.typ(cond.expr_type)} $var_name;')
+ g.writeln('${g.typ(cond.expr_type)} ${var_name};')
} else {
guard_vars[i] = ''
}
@@ -234,7 +234,7 @@ fn (mut g Gen) if_expr(node ast.IfExpr) {
g.writeln('if (!${var_name}.is_error) {')
}
} else {
- g.write('if ($var_name = ')
+ g.write('if (${var_name} = ')
g.expr(branch.cond.expr)
if branch.cond.expr_type.has_flag(.optional) {
g.writeln(', ${var_name}.state == 0) {')
@@ -250,7 +250,7 @@ fn (mut g Gen) if_expr(node ast.IfExpr) {
} else {
branch.cond.vars[0].name
}
- g.write('\t$base_type $cond_var_name = ')
+ g.write('\t${base_type} ${cond_var_name} = ')
g.expr(branch.cond.expr)
g.writeln(';')
} else {
@@ -264,9 +264,9 @@ fn (mut g Gen) if_expr(node ast.IfExpr) {
if branch.cond.vars.len == 1 {
left_var_name := c_name(branch.cond.vars[0].name)
if is_auto_heap {
- g.writeln('\t$base_type* $left_var_name = HEAP($base_type, *($base_type*)${var_name}.data);')
+ g.writeln('\t${base_type}* ${left_var_name} = HEAP(${base_type}, *(${base_type}*)${var_name}.data);')
} else {
- g.writeln('\t$base_type $left_var_name = *($base_type*)${var_name}.data;')
+ g.writeln('\t${base_type} ${left_var_name} = *(${base_type}*)${var_name}.data;')
}
} else if branch.cond.vars.len > 1 {
sym := g.table.sym(branch.cond.expr_type)
@@ -276,9 +276,9 @@ fn (mut g Gen) if_expr(node ast.IfExpr) {
var_typ := g.typ(sym.info.types[vi])
left_var_name := c_name(var.name)
if is_auto_heap {
- g.writeln('\t$var_typ* $left_var_name = (HEAP($base_type, *($base_type*)${var_name}.data).arg$vi);')
+ g.writeln('\t${var_typ}* ${left_var_name} = (HEAP(${base_type}, *(${base_type}*)${var_name}.data).arg${vi});')
} else {
- g.writeln('\t$var_typ $left_var_name = (*($base_type*)${var_name}.data).arg$vi;')
+ g.writeln('\t${var_typ} ${left_var_name} = (*(${base_type}*)${var_name}.data).arg${vi};')
}
}
}
@@ -331,6 +331,6 @@ fn (mut g Gen) if_expr(node ast.IfExpr) {
g.writeln('}')
}
g.empty_line = false
- g.write('$cur_line $tmp')
+ g.write('${cur_line} ${tmp}')
}
}
diff --git a/vlib/v/gen/c/index.v b/vlib/v/gen/c/index.v
index 9ced644ece..cf10d77601 100644
--- a/vlib/v/gen/c/index.v
+++ b/vlib/v/gen/c/index.v
@@ -24,7 +24,7 @@ fn (mut g Gen) index_expr(node ast.IndexExpr) {
cur_line := g.go_before_stmt(0)
g.out.write_string(util.tabs(g.indent))
opt_elem_type := g.typ(ast.u8_type.set_flag(.optional))
- g.write('$opt_elem_type $tmp_opt = string_at_with_check(')
+ g.write('${opt_elem_type} ${tmp_opt} = string_at_with_check(')
g.expr(node.left)
g.write(', ')
g.expr(node.index)
@@ -32,7 +32,7 @@ fn (mut g Gen) index_expr(node ast.IndexExpr) {
if !node.is_option {
g.or_block(tmp_opt, node.or_expr, ast.u8_type)
}
- g.write('\n$cur_line*(byte*)&${tmp_opt}.data')
+ g.write('\n${cur_line}*(byte*)&${tmp_opt}.data')
} else {
is_direct_array_access := g.is_direct_array_access || node.is_direct
if is_direct_array_access {
@@ -73,7 +73,7 @@ fn (mut g Gen) range_expr(node ast.IndexExpr, range ast.RangeExpr) {
cur_line = g.go_before_stmt(0)
g.out.write_string(util.tabs(g.indent))
opt_elem_type := g.typ(ast.string_type.set_flag(.optional))
- g.write('$opt_elem_type $tmp_opt = string_substr_with_check(')
+ g.write('${opt_elem_type} ${tmp_opt} = string_substr_with_check(')
} else {
g.write('string_substr(')
}
@@ -86,9 +86,9 @@ fn (mut g Gen) range_expr(node ast.IndexExpr, range ast.RangeExpr) {
if !range.has_high {
tmp_left = g.new_tmp_var()
tmp_type := g.typ(node.left_type)
- g.insert_before_stmt('${util.tabs(g.indent)}$tmp_type $tmp_left;')
+ g.insert_before_stmt('${util.tabs(g.indent)}${tmp_type} ${tmp_left};')
// (tmp = expr, array_slice(...))
- g.write('($tmp_left = ')
+ g.write('(${tmp_left} = ')
g.expr(node.left)
g.write(', ')
}
@@ -116,7 +116,7 @@ fn (mut g Gen) range_expr(node ast.IndexExpr, range ast.RangeExpr) {
}
g.write('new_array_from_c_array${noscan}(')
ctype := g.typ(info.elem_type)
- g.write('$info.size, $info.size, sizeof($ctype), ')
+ g.write('${info.size}, ${info.size}, sizeof(${ctype}), ')
if node.left_type.is_ptr() {
g.write('*')
}
@@ -136,10 +136,10 @@ fn (mut g Gen) range_expr(node ast.IndexExpr, range ast.RangeExpr) {
g.expr(range.high)
} else if sym.kind == .array_fixed {
info := sym.info as ast.ArrayFixed
- g.write('$info.size')
+ g.write('${info.size}')
} else if sym.kind == .array {
if node.left_type.is_ptr() {
- g.write('$tmp_left->')
+ g.write('${tmp_left}->')
} else {
g.write('${tmp_left}.')
}
@@ -156,7 +156,7 @@ fn (mut g Gen) range_expr(node ast.IndexExpr, range ast.RangeExpr) {
g.or_block(tmp_opt, node.or_expr, ast.string_type)
}
- g.write('\n$cur_line*(string*)&${tmp_opt}.data')
+ g.write('\n${cur_line}*(string*)&${tmp_opt}.data')
}
}
@@ -177,9 +177,9 @@ fn (mut g Gen) index_of_array(node ast.IndexExpr, sym ast.TypeSymbol) {
is_direct_array_access := g.is_direct_array_access || node.is_direct
is_op_assign := g.assign_op != .assign && info.elem_type != ast.string_type
if is_direct_array_access {
- g.write('(($elem_type_str*)')
+ g.write('((${elem_type_str}*)')
} else if is_op_assign {
- g.write('(*($elem_type_str*)array_get(')
+ g.write('(*(${elem_type_str}*)array_get(')
if left_is_ptr && !node.left_type.has_flag(.shared_f) {
g.write('*')
}
@@ -223,7 +223,7 @@ fn (mut g Gen) index_of_array(node ast.IndexExpr, sym ast.TypeSymbol) {
}
*/
if need_wrapper {
- g.write(', &($elem_type_str[]) { ')
+ g.write(', &(${elem_type_str}[]) { ')
} else {
g.write(', &')
}
@@ -249,7 +249,7 @@ fn (mut g Gen) index_of_array(node ast.IndexExpr, sym ast.TypeSymbol) {
tmp_opt := if gen_or { g.new_tmp_var() } else { '' }
tmp_opt_ptr := if gen_or { g.new_tmp_var() } else { '' }
if gen_or {
- g.write('$elem_type_str* $tmp_opt_ptr = ($elem_type_str*)(array_get_with_check(')
+ g.write('${elem_type_str}* ${tmp_opt_ptr} = (${elem_type_str}*)(array_get_with_check(')
if left_is_ptr && !node.left_type.has_flag(.shared_f) {
g.write('*')
}
@@ -261,15 +261,15 @@ fn (mut g Gen) index_of_array(node ast.IndexExpr, sym ast.TypeSymbol) {
if elem_sym.info is ast.FnType {
g.write('((')
g.write_fn_ptr_decl(&elem_sym.info, '')
- g.write(')(*($elem_type_str*)array_get(')
+ g.write(')(*(${elem_type_str}*)array_get(')
}
if left_is_ptr && !node.left_type.has_flag(.shared_f) {
g.write('*')
}
} else if is_direct_array_access {
- g.write('(($elem_type_str*)')
+ g.write('((${elem_type_str}*)')
} else {
- g.write('(*($elem_type_str*)array_get(')
+ g.write('(*(${elem_type_str}*)array_get(')
if left_is_ptr && !node.left_type.has_flag(.shared_f) {
g.write('*')
}
@@ -308,16 +308,16 @@ fn (mut g Gen) index_of_array(node ast.IndexExpr, sym ast.TypeSymbol) {
if gen_or {
g.writeln(';')
opt_elem_type := g.typ(elem_type.set_flag(.optional))
- g.writeln('$opt_elem_type $tmp_opt = {0};')
- g.writeln('if ($tmp_opt_ptr) {')
- g.writeln('\t*(($elem_type_str*)&${tmp_opt}.data) = *(($elem_type_str*)$tmp_opt_ptr);')
+ g.writeln('${opt_elem_type} ${tmp_opt} = {0};')
+ g.writeln('if (${tmp_opt_ptr}) {')
+ g.writeln('\t*((${elem_type_str}*)&${tmp_opt}.data) = *((${elem_type_str}*)${tmp_opt_ptr});')
g.writeln('} else {')
g.writeln('\t${tmp_opt}.state = 2; ${tmp_opt}.err = _v_error(_SLIT("array index out of range"));')
g.writeln('}')
if !node.is_option {
g.or_block(tmp_opt, node.or_expr, elem_type)
}
- g.write('\n$cur_line*($elem_type_str*)${tmp_opt}.data')
+ g.write('\n${cur_line}*(${elem_type_str}*)${tmp_opt}.data')
}
}
}
@@ -333,7 +333,7 @@ fn (mut g Gen) index_of_fixed_array(node ast.IndexExpr, sym ast.TypeSymbol) {
line := g.go_before_stmt(0).trim_space()
styp := g.typ(node.left_type)
g.empty_line = true
- g.write('$styp $tmp = ')
+ g.write('${styp} ${tmp} = ')
g.expr(node.left)
g.writeln(';')
g.write(line)
@@ -357,7 +357,7 @@ fn (mut g Gen) index_of_fixed_array(node ast.IndexExpr, sym ast.TypeSymbol) {
// bounds check
g.write('v_fixed_index(')
g.expr(node.index)
- g.write(', $info.size)')
+ g.write(', ${info.size})')
}
g.write(']')
if is_fn_index_call {
@@ -388,9 +388,9 @@ fn (mut g Gen) index_of_map(node ast.IndexExpr, sym ast.TypeSymbol) {
g.write('map_set(')
} else {
if node.is_setter {
- g.write('(*(($elem_type_str*)map_get_and_set((map*)')
+ g.write('(*((${elem_type_str}*)map_get_and_set((map*)')
} else {
- g.write('(*(($elem_type_str*)map_get((map*)')
+ g.write('(*((${elem_type_str}*)map_get((map*)')
}
}
if !left_is_ptr || node.left_type.has_flag(.shared_f) {
@@ -406,7 +406,7 @@ fn (mut g Gen) index_of_map(node ast.IndexExpr, sym ast.TypeSymbol) {
if node.left_type.has_flag(.shared_f) {
g.write('->val')
}
- g.write(', &($key_type_str[]){')
+ g.write(', &(${key_type_str}[]){')
old_is_arraymap_set := g.is_arraymap_set
old_is_assign_lhs := g.is_assign_lhs
g.is_arraymap_set = false
@@ -416,18 +416,18 @@ fn (mut g Gen) index_of_map(node ast.IndexExpr, sym ast.TypeSymbol) {
g.is_assign_lhs = old_is_assign_lhs
g.write('}')
g.arraymap_set_pos = g.out.len
- g.write(', &($elem_type_str[]) { ')
+ g.write(', &(${elem_type_str}[]) { ')
if g.assign_op != .assign && info.value_type != ast.string_type {
zero := g.type_default(info.value_type)
- g.write('$zero })))')
+ g.write('${zero} })))')
}
} else if g.inside_map_postfix || g.inside_map_infix || g.inside_map_index
|| (g.is_assign_lhs && !g.is_arraymap_set && get_and_set_types) {
zero := g.type_default(info.value_type)
if node.is_setter {
- g.write('(*($elem_type_str*)map_get_and_set((map*)')
+ g.write('(*(${elem_type_str}*)map_get_and_set((map*)')
} else {
- g.write('(*($elem_type_str*)map_get((map*)')
+ g.write('(*(${elem_type_str}*)map_get((map*)')
}
if !left_is_ptr || node.left_type.has_flag(.shared_f) {
g.write('&')
@@ -436,9 +436,9 @@ fn (mut g Gen) index_of_map(node ast.IndexExpr, sym ast.TypeSymbol) {
if node.left_type.has_flag(.shared_f) {
g.write('->val')
}
- g.write(', &($key_type_str[]){')
+ g.write(', &(${key_type_str}[]){')
g.expr(node.index)
- g.write('}, &($elem_type_str[]){ $zero }))')
+ g.write('}, &(${elem_type_str}[]){ ${zero} }))')
} else {
zero := g.type_default(info.value_type)
is_gen_or_and_assign_rhs := gen_or && !g.discard_or_result
@@ -452,7 +452,7 @@ fn (mut g Gen) index_of_map(node ast.IndexExpr, sym ast.TypeSymbol) {
tmp_opt := if gen_or { g.new_tmp_var() } else { '' }
tmp_opt_ptr := if gen_or { g.new_tmp_var() } else { '' }
if gen_or {
- g.write('$elem_type_str* $tmp_opt_ptr = ($elem_type_str*)(map_get_check(')
+ g.write('${elem_type_str}* ${tmp_opt_ptr} = (${elem_type_str}*)(map_get_check(')
} else {
if g.is_fn_index_call {
if elem_sym.info is ast.FnType {
@@ -461,7 +461,7 @@ fn (mut g Gen) index_of_map(node ast.IndexExpr, sym ast.TypeSymbol) {
g.write(')(*(voidptr*)map_get(')
}
} else {
- g.write('(*($elem_type_str*)map_get(')
+ g.write('(*(${elem_type_str}*)map_get(')
}
}
if !left_is_ptr || node.left_type.has_flag(.shared_f) {
@@ -478,29 +478,29 @@ fn (mut g Gen) index_of_map(node ast.IndexExpr, sym ast.TypeSymbol) {
g.write('.val')
}
}
- g.write('), &($key_type_str[]){')
+ g.write('), &(${key_type_str}[]){')
g.expr(node.index)
g.write('}')
if gen_or {
g.write('))')
} else if g.is_fn_index_call {
- g.write(', &(voidptr[]){ $zero })))')
+ g.write(', &(voidptr[]){ ${zero} })))')
} else {
- g.write(', &($elem_type_str[]){ $zero }))')
+ g.write(', &(${elem_type_str}[]){ ${zero} }))')
}
if gen_or {
g.writeln(';')
opt_elem_type := g.typ(elem_type.set_flag(.optional))
- g.writeln('$opt_elem_type $tmp_opt = {0};')
- g.writeln('if ($tmp_opt_ptr) {')
- g.writeln('\t*(($elem_type_str*)&${tmp_opt}.data) = *(($elem_type_str*)$tmp_opt_ptr);')
+ g.writeln('${opt_elem_type} ${tmp_opt} = {0};')
+ g.writeln('if (${tmp_opt_ptr}) {')
+ g.writeln('\t*((${elem_type_str}*)&${tmp_opt}.data) = *((${elem_type_str}*)${tmp_opt_ptr});')
g.writeln('} else {')
g.writeln('\t${tmp_opt}.state = 2; ${tmp_opt}.err = _v_error(_SLIT("array index out of range"));')
g.writeln('}')
if !node.is_option {
g.or_block(tmp_opt, node.or_expr, elem_type)
}
- g.write('\n${cur_line}(*($elem_type_str*)${tmp_opt}.data)')
+ g.write('\n${cur_line}(*(${elem_type_str}*)${tmp_opt}.data)')
}
}
}
diff --git a/vlib/v/gen/c/infix.v b/vlib/v/gen/c/infix.v
index 4fe8e30812..29bddb2b03 100644
--- a/vlib/v/gen/c/infix.v
+++ b/vlib/v/gen/c/infix.v
@@ -9,7 +9,7 @@ import v.util
fn (mut g Gen) infix_expr(node ast.InfixExpr) {
if node.auto_locked != '' {
- g.writeln('sync__RwMutex_lock(&$node.auto_locked->mtx);')
+ g.writeln('sync__RwMutex_lock(&${node.auto_locked}->mtx);')
}
match node.op {
.arrow {
@@ -50,7 +50,7 @@ fn (mut g Gen) infix_expr(node ast.InfixExpr) {
}
if node.auto_locked != '' {
g.writeln(';')
- g.write('sync__RwMutex_unlock(&$node.auto_locked->mtx)')
+ g.write('sync__RwMutex_unlock(&${node.auto_locked}->mtx)')
}
}
@@ -64,7 +64,7 @@ fn (mut g Gen) infix_expr_arrow_op(node ast.InfixExpr) {
if gen_or {
elem_styp := g.typ(elem_type)
g.register_chan_push_optional_fn(elem_styp, styp)
- g.write('${option_name}_void $tmp_opt = __Option_${styp}_pushval(')
+ g.write('${option_name}_void ${tmp_opt} = __Option_${styp}_pushval(')
} else {
g.write('__${styp}_pushval(')
}
@@ -101,7 +101,7 @@ fn (mut g Gen) infix_expr_eq_op(node ast.InfixExpr) {
g.expr(node.left)
g.write(')')
arrow := if left.typ.is_ptr() { '->' } else { '.' }
- g.write('${arrow}len $node.op 0')
+ g.write('${arrow}len ${node.op} 0')
} else if has_defined_eq_operator {
if node.op == .ne {
g.write('!')
@@ -177,7 +177,7 @@ fn (mut g Gen) infix_expr_eq_op(node ast.InfixExpr) {
if node.left is ast.ArrayInit {
if !node.left.has_it {
s := g.typ(left.unaliased)
- g.write('($s)')
+ g.write('(${s})')
}
}
g.expr(node.left)
@@ -185,7 +185,7 @@ fn (mut g Gen) infix_expr_eq_op(node ast.InfixExpr) {
if node.right is ast.ArrayInit {
if !node.right.has_it {
s := g.typ(right.unaliased)
- g.write('($s)')
+ g.write('(${s})')
}
}
g.expr(node.right)
@@ -454,7 +454,7 @@ fn (mut g Gen) infix_expr_in_op(node ast.InfixExpr) {
g.write('_IN_MAP(')
if !left.typ.is_ptr() {
styp := g.typ(node.left_type)
- g.write('ADDR($styp, ')
+ g.write('ADDR(${styp}, ')
g.expr(node.left)
g.write(')')
} else {
@@ -589,7 +589,7 @@ fn (mut g Gen) infix_expr_is_op(node ast.InfixExpr) {
cmp_op := if node.op == .key_is { '==' } else { '!=' }
g.write('(')
if is_aggregate {
- g.write('$node.left')
+ g.write('${node.left}')
} else {
g.expr(node.left)
}
@@ -600,7 +600,7 @@ fn (mut g Gen) infix_expr_is_op(node ast.InfixExpr) {
g.write('.')
}
if left_sym.kind == .interface_ {
- g.write('_typ $cmp_op ')
+ g.write('_typ ${cmp_op} ')
// `_Animal_Dog_index`
sub_type := match node.right {
ast.TypeNode {
@@ -617,10 +617,10 @@ fn (mut g Gen) infix_expr_is_op(node ast.InfixExpr) {
g.write('_${left_sym.cname}_${sub_sym.cname}_index')
return
} else if left_sym.kind == .sum_type {
- g.write('_typ $cmp_op ')
+ g.write('_typ ${cmp_op} ')
}
if node.right is ast.None {
- g.write('$ast.none_type.idx() /* none */')
+ g.write('${ast.none_type.idx()} /* none */')
} else {
g.expr(node.right)
}
@@ -729,7 +729,7 @@ fn (mut g Gen) infix_expr_left_shift_op(node ast.InfixExpr) {
}
g.expr_with_cast(node.right, node.right_type, left.unaliased.clear_flag(.shared_f))
styp := g.typ(expected_push_many_atype)
- g.write('), $tmp_var, $styp)')
+ g.write('), ${tmp_var}, ${styp})')
} else {
// push a single element
elem_type_str := g.typ(array_info.elem_type)
@@ -748,9 +748,9 @@ fn (mut g Gen) infix_expr_left_shift_op(node ast.InfixExpr) {
g.write(', _MOV((voidptr[]){ ')
} else if elem_is_array_var {
addr := if elem_sym.kind == .array_fixed { '' } else { '&' }
- g.write(', $addr')
+ g.write(', ${addr}')
} else {
- g.write(', _MOV(($elem_type_str[]){ ')
+ g.write(', _MOV((${elem_type_str}[]){ ')
}
// if g.autofree
needs_clone := !g.is_builtin_mod && array_info.elem_type.idx() == ast.string_type_idx
@@ -818,12 +818,12 @@ fn (mut g Gen) infix_expr_and_or_op(node ast.InfixExpr) {
tmp := g.new_tmp_var()
cur_line := g.go_before_stmt(0).trim_space()
g.empty_line = true
- g.write('bool $tmp = (')
+ g.write('bool ${tmp} = (')
g.expr(node.left)
g.writeln(');')
g.set_current_pos_as_last_stmt_pos()
- g.write('$cur_line $tmp $node.op.str() ')
- g.infix_left_var_name = if node.op == .and { tmp } else { '!$tmp' }
+ g.write('${cur_line} ${tmp} ${node.op.str()} ')
+ g.infix_left_var_name = if node.op == .and { tmp } else { '!${tmp}' }
g.expr(node.right)
g.infix_left_var_name = ''
g.inside_ternary = prev_inside_ternary
@@ -836,15 +836,15 @@ fn (mut g Gen) infix_expr_and_or_op(node ast.InfixExpr) {
cur_line := g.go_before_stmt(0).trim_space()
g.empty_line = true
if g.infix_left_var_name.len > 0 {
- g.write('bool $tmp = (($g.infix_left_var_name) $node.op.str() ')
+ g.write('bool ${tmp} = ((${g.infix_left_var_name}) ${node.op.str()} ')
} else {
- g.write('bool $tmp = (')
+ g.write('bool ${tmp} = (')
}
g.expr(node.left)
g.writeln(');')
g.set_current_pos_as_last_stmt_pos()
- g.write('$cur_line $tmp $node.op.str() ')
- g.infix_left_var_name = if node.op == .and { tmp } else { '!$tmp' }
+ g.write('${cur_line} ${tmp} ${node.op.str()} ')
+ g.infix_left_var_name = if node.op == .and { tmp } else { '!${tmp}' }
g.expr(node.right)
g.infix_left_var_name = ''
return
@@ -862,7 +862,7 @@ fn (mut g Gen) gen_plain_infix_expr(node ast.InfixExpr) {
g.write('*')
}
g.expr(node.left)
- g.write(' $node.op.str() ')
+ g.write(' ${node.op.str()} ')
g.expr_with_cast(node.right, node.right_type, node.left_type)
}
@@ -877,7 +877,7 @@ fn (mut g Gen) op_arg(expr ast.Expr, expected ast.Type, got ast.Type) {
g.write('&')
} else {
styp := g.typ(got.set_nr_muls(0))
- g.write('ADDR($styp, ')
+ g.write('ADDR(${styp}, ')
needs_closing = true
}
}
diff --git a/vlib/v/gen/c/json.v b/vlib/v/gen/c/json.v
index f0eb8ab4c4..b6fc0ddb2a 100644
--- a/vlib/v/gen/c/json.v
+++ b/vlib/v/gen/c/json.v
@@ -49,9 +49,9 @@ fn (mut g Gen) gen_jsons() {
// cJSON_Parse(str) call is added by the compiler
// Codegen decoder
dec_fn_name := js_dec_name(styp)
- dec_fn_dec := '${result_name}_$styp ${dec_fn_name}(cJSON* root)'
+ dec_fn_dec := '${result_name}_${styp} ${dec_fn_name}(cJSON* root)'
- mut init_styp := '$styp res'
+ mut init_styp := '${styp} res'
if sym.kind == .struct_ {
init_styp += ' = '
init_styp += g.expr_string(ast.Expr(ast.StructInit{
@@ -61,8 +61,8 @@ fn (mut g Gen) gen_jsons() {
}
dec.writeln('
-$dec_fn_dec {
- $init_styp;
+${dec_fn_dec} {
+ ${init_styp};
if (!root) {
const char *error_ptr = cJSON_GetErrorPtr();
if (error_ptr != NULL) {
@@ -89,18 +89,18 @@ $dec_fn_dec {
int maxchars = vstrlen_char(prevline_ptr);
vmemcpy(buf, prevline_ptr, (maxchars < maxcontext_chars ? maxchars : maxcontext_chars));
}
- return (${result_name}_$styp){.is_error = true,.err = _v_error(tos2(buf)),.data = {0}};
+ return (${result_name}_${styp}){.is_error = true,.err = _v_error(tos2(buf)),.data = {0}};
}
}
')
- g.json_forward_decls.writeln('$dec_fn_dec;')
+ g.json_forward_decls.writeln('${dec_fn_dec};')
// Codegen encoder
// encode_TYPE funcs receive an object to encode
enc_fn_name := js_enc_name(styp)
- enc_fn_dec := 'cJSON* ${enc_fn_name}($styp val)'
- g.json_forward_decls.writeln('$enc_fn_dec;\n')
+ enc_fn_dec := 'cJSON* ${enc_fn_name}(${styp} val)'
+ g.json_forward_decls.writeln('${enc_fn_dec};\n')
enc.writeln('
-$enc_fn_dec {
+${enc_fn_dec} {
\tcJSON *o;')
if sym.kind == .array || sym.kind == .array_fixed {
array_size := if sym.kind == .array_fixed {
@@ -133,28 +133,28 @@ $enc_fn_dec {
if psym.info is ast.Struct {
g.gen_struct_enc_dec(psym.info, styp, mut enc, mut dec)
} else if psym.kind == .sum_type {
- verror('json: $sym.name aliased sumtypes does not work at the moment')
+ verror('json: ${sym.name} aliased sumtypes does not work at the moment')
} else {
- verror('json: $sym.name is not struct')
+ verror('json: ${sym.name} is not struct')
}
} else if sym.kind == .sum_type {
enc.writeln('\to = cJSON_CreateObject();')
// Sumtypes. Range through variants of sumtype
if sym.info !is ast.SumType {
- verror('json: $sym.name is not a sumtype')
+ verror('json: ${sym.name} is not a sumtype')
}
g.gen_sumtype_enc_dec(sym, mut enc, mut dec)
} else {
enc.writeln('\to = cJSON_CreateObject();')
// Structs. Range through fields
if sym.info !is ast.Struct {
- verror('json: $sym.name is not struct')
+ verror('json: ${sym.name} is not struct')
}
g.gen_struct_enc_dec(sym.info, styp, mut enc, mut dec)
}
// cJSON_delete
- dec.writeln('\t${result_name}_$styp ret;')
- dec.writeln('\t_result_ok(&res, ($result_name*)&ret, sizeof(res));')
+ dec.writeln('\t${result_name}_${styp} ret;')
+ dec.writeln('\t_result_ok(&res, (${result_name}*)&ret, sizeof(res));')
dec.writeln('\treturn ret;\n}')
enc.writeln('\treturn o;\n}')
g.gowrappers.writeln(dec.str())
@@ -172,9 +172,9 @@ fn (mut g Gen) gen_sumtype_enc_dec(sym ast.TypeSymbol, mut enc strings.Builder,
$if !json_no_inline_sumtypes ? {
type_tmp := g.new_tmp_var()
dec.writeln('\tif (cJSON_IsObject(root)) {')
- dec.writeln('\t\tcJSON* $type_tmp = js_get(root, "_type");')
- dec.writeln('\t\tif ($type_tmp != 0) {')
- dec.writeln('\t\t\tchar* $type_var = cJSON_GetStringValue($type_tmp);')
+ dec.writeln('\t\tcJSON* ${type_tmp} = js_get(root, "_type");')
+ dec.writeln('\t\tif (${type_tmp} != 0) {')
+ dec.writeln('\t\t\tchar* ${type_var} = cJSON_GetStringValue(${type_tmp});')
// dec.writeln('\t\t\tcJSON_DeleteItemFromObjectCaseSensitive(root, "_type");')
}
@@ -195,29 +195,29 @@ fn (mut g Gen) gen_sumtype_enc_dec(sym ast.TypeSymbol, mut enc strings.Builder,
// Helpers for decoding
g.get_sumtype_casting_fn(variant, typ)
- g.definitions.writeln('static inline $sym.cname ${variant_typ}_to_sumtype_${sym.cname}($variant_typ* x);')
+ g.definitions.writeln('static inline ${sym.cname} ${variant_typ}_to_sumtype_${sym.cname}(${variant_typ}* x);')
// ENCODING
- enc.writeln('\tif (val._typ == $variant.idx()) {')
+ enc.writeln('\tif (val._typ == ${variant.idx()}) {')
$if json_no_inline_sumtypes ? {
if variant_sym.kind == .enum_ {
- enc.writeln('\t\tcJSON_AddItemToObject(o, "$unmangled_variant_name", ${js_enc_name('u64')}(*val._$variant_typ));')
+ enc.writeln('\t\tcJSON_AddItemToObject(o, "${unmangled_variant_name}", ${js_enc_name('u64')}(*val._${variant_typ}));')
} else if variant_sym.name == 'time.Time' {
- enc.writeln('\t\tcJSON_AddItemToObject(o, "$unmangled_variant_name", ${js_enc_name('i64')}(val._$variant_typ->_v_unix));')
+ enc.writeln('\t\tcJSON_AddItemToObject(o, "${unmangled_variant_name}", ${js_enc_name('i64')}(val._${variant_typ}->_v_unix));')
} else {
- enc.writeln('\t\tcJSON_AddItemToObject(o, "$unmangled_variant_name", ${js_enc_name(variant_typ)}(*val._$variant_typ));')
+ enc.writeln('\t\tcJSON_AddItemToObject(o, "${unmangled_variant_name}", ${js_enc_name(variant_typ)}(*val._${variant_typ}));')
}
} $else {
if is_js_prim(variant_typ) {
- enc.writeln('\t\to = ${js_enc_name(variant_typ)}(*val._$variant_typ);')
+ enc.writeln('\t\to = ${js_enc_name(variant_typ)}(*val._${variant_typ});')
} else if variant_sym.kind == .enum_ {
- enc.writeln('\t\to = ${js_enc_name('u64')}(*val._$variant_typ);')
+ enc.writeln('\t\to = ${js_enc_name('u64')}(*val._${variant_typ});')
} else if variant_sym.name == 'time.Time' {
- enc.writeln('\t\tcJSON_AddItemToObject(o, "_type", cJSON_CreateString("$unmangled_variant_name"));')
- enc.writeln('\t\tcJSON_AddItemToObject(o, "value", ${js_enc_name('i64')}(val._$variant_typ->_v_unix));')
+ enc.writeln('\t\tcJSON_AddItemToObject(o, "_type", cJSON_CreateString("${unmangled_variant_name}"));')
+ enc.writeln('\t\tcJSON_AddItemToObject(o, "value", ${js_enc_name('i64')}(val._${variant_typ}->_v_unix));')
} else {
- enc.writeln('\t\to = ${js_enc_name(variant_typ)}(*val._$variant_typ);')
- enc.writeln('\t\tcJSON_AddItemToObject(o, "_type", cJSON_CreateString("$unmangled_variant_name"));')
+ enc.writeln('\t\to = ${js_enc_name(variant_typ)}(*val._${variant_typ});')
+ enc.writeln('\t\tcJSON_AddItemToObject(o, "_type", cJSON_CreateString("${unmangled_variant_name}"));')
}
}
enc.writeln('\t}')
@@ -225,37 +225,37 @@ fn (mut g Gen) gen_sumtype_enc_dec(sym ast.TypeSymbol, mut enc strings.Builder,
// DECODING
tmp := g.new_tmp_var()
$if json_no_inline_sumtypes ? {
- dec.writeln('\tif (strcmp("$unmangled_variant_name", root->child->string) == 0) {')
+ dec.writeln('\tif (strcmp("${unmangled_variant_name}", root->child->string) == 0) {')
if is_js_prim(variant_typ) {
gen_js_get(variant_typ, tmp, unmangled_variant_name, mut dec, true)
- dec.writeln('\t\t$variant_typ value = ${js_dec_name(variant_typ)}(jsonroot_$tmp);')
+ dec.writeln('\t\t${variant_typ} value = ${js_dec_name(variant_typ)}(jsonroot_${tmp});')
} else if variant_sym.kind == .enum_ {
gen_js_get(variant_typ, tmp, unmangled_variant_name, mut dec, true)
- dec.writeln('\t\t$variant_typ value = ${js_dec_name('u64')}(jsonroot_$tmp);')
+ dec.writeln('\t\t${variant_typ} value = ${js_dec_name('u64')}(jsonroot_${tmp});')
} else if variant_sym.name == 'time.Time' {
gen_js_get(variant_typ, tmp, unmangled_variant_name, mut dec, true)
- dec.writeln('\t\t$variant_typ value = time__unix(${js_dec_name('i64')}(jsonroot_$tmp));')
+ dec.writeln('\t\t${variant_typ} value = time__unix(${js_dec_name('i64')}(jsonroot_${tmp}));')
} else {
gen_js_get_opt(js_dec_name(variant_typ), variant_typ, sym.cname, tmp,
unmangled_variant_name, mut dec, true)
- dec.writeln('\t\t$variant_typ value = *($variant_typ*)(${tmp}.data);')
+ dec.writeln('\t\t${variant_typ} value = *(${variant_typ}*)(${tmp}.data);')
}
dec.writeln('\t\tres = ${variant_typ}_to_sumtype_${sym.cname}(&value);')
dec.writeln('\t}')
} $else {
if variant_sym.name == 'time.Time' {
- dec.writeln('\t\t\tif (strcmp("Time", $type_var) == 0) {')
+ dec.writeln('\t\t\tif (strcmp("Time", ${type_var}) == 0) {')
gen_js_get(sym.cname, tmp, 'value', mut dec, true)
- dec.writeln('\t\t\t\t$variant_typ $tmp = time__unix(${js_dec_name('i64')}(jsonroot_$tmp));')
- dec.writeln('\t\t\t\tres = ${variant_typ}_to_sumtype_${sym.cname}(&$tmp);')
+ dec.writeln('\t\t\t\t${variant_typ} ${tmp} = time__unix(${js_dec_name('i64')}(jsonroot_${tmp}));')
+ dec.writeln('\t\t\t\tres = ${variant_typ}_to_sumtype_${sym.cname}(&${tmp});')
dec.writeln('\t\t\t}')
} else if !is_js_prim(variant_typ) && variant_sym.kind != .enum_ {
- dec.writeln('\t\t\tif (strcmp("$unmangled_variant_name", $type_var) == 0) {')
- dec.writeln('\t\t\t\t${result_name}_$variant_typ $tmp = ${js_dec_name(variant_typ)}(root);')
+ dec.writeln('\t\t\tif (strcmp("${unmangled_variant_name}", ${type_var}) == 0) {')
+ dec.writeln('\t\t\t\t${result_name}_${variant_typ} ${tmp} = ${js_dec_name(variant_typ)}(root);')
dec.writeln('\t\t\t\tif (${tmp}.is_error) {')
- dec.writeln('\t\t\t\t\treturn (${result_name}_$sym.cname){ .is_error = true, .err = ${tmp}.err, .data = {0} };')
+ dec.writeln('\t\t\t\t\treturn (${result_name}_${sym.cname}){ .is_error = true, .err = ${tmp}.err, .data = {0} };')
dec.writeln('\t\t\t\t}')
- dec.writeln('\t\t\t\tres = ${variant_typ}_to_sumtype_${sym.cname}(($variant_typ*)${tmp}.data);')
+ dec.writeln('\t\t\t\tres = ${variant_typ}_to_sumtype_${sym.cname}((${variant_typ}*)${tmp}.data);')
dec.writeln('\t\t\t}')
}
}
@@ -275,7 +275,7 @@ fn (mut g Gen) gen_sumtype_enc_dec(sym ast.TypeSymbol, mut enc strings.Builder,
if 'bool' in variant_types {
var_t := 'bool'
dec.writeln('\t\tif (cJSON_IsBool(root)) {')
- dec.writeln('\t\t\t$var_t value = ${js_dec_name(var_t)}(root);')
+ dec.writeln('\t\t\t${var_t} value = ${js_dec_name(var_t)}(root);')
dec.writeln('\t\t\tres = ${var_t}_to_sumtype_${sym.cname}(&value);')
dec.writeln('\t\t}')
}
@@ -285,12 +285,12 @@ fn (mut g Gen) gen_sumtype_enc_dec(sym ast.TypeSymbol, mut enc strings.Builder,
if number_is_met {
var_num := var_t.replace('__', '.')
last_num := last_number_type.replace('__', '.')
- verror('json: can not decode `$sym.name` sumtype, too many numeric types (conflict of `$last_num` and `$var_num`), you can try to use alias for `$var_num` or compile v with `json_no_inline_sumtypes` flag')
+ verror('json: can not decode `${sym.name}` sumtype, too many numeric types (conflict of `${last_num}` and `${var_num}`), you can try to use alias for `${var_num}` or compile v with `json_no_inline_sumtypes` flag')
}
number_is_met = true
last_number_type = var_t
dec.writeln('\t\tif (cJSON_IsNumber(root)) {')
- dec.writeln('\t\t\t$var_t value = ${js_dec_name('u64')}(root);')
+ dec.writeln('\t\t\t${var_t} value = ${js_dec_name('u64')}(root);')
dec.writeln('\t\t\tres = ${var_t}_to_sumtype_${sym.cname}(&value);')
dec.writeln('\t\t}')
}
@@ -298,11 +298,11 @@ fn (mut g Gen) gen_sumtype_enc_dec(sym ast.TypeSymbol, mut enc strings.Builder,
if var_t in ['string', 'rune'] {
if string_is_met {
var_num := var_t.replace('__', '.')
- verror('json: can not decode `$sym.name` sumtype, too many string types (conflict of `string` and `rune`), you can try to use alias for `$var_num` or compile v with `json_no_inline_sumtypes` flag')
+ verror('json: can not decode `${sym.name}` sumtype, too many string types (conflict of `string` and `rune`), you can try to use alias for `${var_num}` or compile v with `json_no_inline_sumtypes` flag')
}
string_is_met = true
dec.writeln('\t\tif (cJSON_IsString(root)) {')
- dec.writeln('\t\t\t$var_t value = ${js_dec_name(var_t)}(root);')
+ dec.writeln('\t\t\t${var_t} value = ${js_dec_name(var_t)}(root);')
dec.writeln('\t\t\tres = ${var_t}_to_sumtype_${sym.cname}(&value);')
dec.writeln('\t\t}')
}
@@ -316,12 +316,12 @@ fn (mut g Gen) gen_sumtype_enc_dec(sym ast.TypeSymbol, mut enc strings.Builder,
} else {
'cJSON_IsNumber(root->child)'
}
- dec.writeln('\t\tif (cJSON_IsArray(root) && $judge_elem_typ) {')
- dec.writeln('\t\t\t${result_name}_$var_t $tmp = ${js_dec_name(var_t)}(root);')
+ dec.writeln('\t\tif (cJSON_IsArray(root) && ${judge_elem_typ}) {')
+ dec.writeln('\t\t\t${result_name}_${var_t} ${tmp} = ${js_dec_name(var_t)}(root);')
dec.writeln('\t\t\tif (${tmp}.is_error) {')
- dec.writeln('\t\t\t\treturn (${result_name}_$sym.cname){ .is_error = true, .err = ${tmp}.err, .data = {0} };')
+ dec.writeln('\t\t\t\treturn (${result_name}_${sym.cname}){ .is_error = true, .err = ${tmp}.err, .data = {0} };')
dec.writeln('\t\t\t}')
- dec.writeln('\t\t\tres = ${var_t}_to_sumtype_${sym.cname}(($var_t*)${tmp}.data);')
+ dec.writeln('\t\t\tres = ${var_t}_to_sumtype_${sym.cname}((${var_t}*)${tmp}.data);')
dec.writeln('\t\t}')
}
@@ -330,12 +330,12 @@ fn (mut g Gen) gen_sumtype_enc_dec(sym ast.TypeSymbol, mut enc strings.Builder,
if number_is_met {
var_num := var_t.replace('__', '.')
last_num := last_number_type.replace('__', '.')
- verror('json: can not decode `$sym.name` sumtype, too many numeric types (conflict of `$last_num` and `$var_num`), you can try to use alias for `$var_num` or compile v with `json_no_inline_sumtypes` flag')
+ verror('json: can not decode `${sym.name}` sumtype, too many numeric types (conflict of `${last_num}` and `${var_num}`), you can try to use alias for `${var_num}` or compile v with `json_no_inline_sumtypes` flag')
}
number_is_met = true
last_number_type = var_t
dec.writeln('\t\tif (cJSON_IsNumber(root)) {')
- dec.writeln('\t\t\t$var_t value = ${js_dec_name(var_t)}(root);')
+ dec.writeln('\t\t\t${var_t} value = ${js_dec_name(var_t)}(root);')
dec.writeln('\t\t\tres = ${var_t}_to_sumtype_${sym.cname}(&value);')
dec.writeln('\t\t}')
}
@@ -383,7 +383,7 @@ fn (mut g Gen) gen_struct_enc_dec(type_info ast.TypeInfo, styp string, mut enc s
// First generate decoding
if is_raw {
dec.writeln('\tres.${c_name(field.name)} = tos5(cJSON_PrintUnformatted(' +
- 'js_get(root, "$name")));')
+ 'js_get(root, "${name}")));')
} else {
// Now generate decoders for all field types in this struct
// need to do it here so that these functions are generated first
@@ -392,8 +392,8 @@ fn (mut g Gen) gen_struct_enc_dec(type_info ast.TypeInfo, styp string, mut enc s
if is_js_prim(field_type) {
tmp := g.new_tmp_var()
gen_js_get(styp, tmp, name, mut dec, is_required)
- dec.writeln('\tif (jsonroot_$tmp) {')
- dec.writeln('\t\tres.${c_name(field.name)} = ${dec_name}(jsonroot_$tmp);')
+ dec.writeln('\tif (jsonroot_${tmp}) {')
+ dec.writeln('\t\tres.${c_name(field.name)} = ${dec_name}(jsonroot_${tmp});')
if field.has_default_expr {
dec.writeln('\t} else {')
dec.writeln('\t\tres.${c_name(field.name)} = ${g.expr_string(field.default_expr)};')
@@ -402,8 +402,8 @@ fn (mut g Gen) gen_struct_enc_dec(type_info ast.TypeInfo, styp string, mut enc s
} else if field_sym.kind == .enum_ {
tmp := g.new_tmp_var()
gen_js_get(styp, tmp, name, mut dec, is_required)
- dec.writeln('\tif (jsonroot_$tmp) {')
- dec.writeln('\t\tres.${c_name(field.name)} = json__decode_u64(jsonroot_$tmp);')
+ dec.writeln('\tif (jsonroot_${tmp}) {')
+ dec.writeln('\t\tres.${c_name(field.name)} = json__decode_u64(jsonroot_${tmp});')
if field.has_default_expr {
dec.writeln('\t} else {')
dec.writeln('\t\tres.${c_name(field.name)} = ${g.expr_string(field.default_expr)};')
@@ -414,8 +414,8 @@ fn (mut g Gen) gen_struct_enc_dec(type_info ast.TypeInfo, styp string, mut enc s
// it has to be decoded from a unix timestamp number
tmp := g.new_tmp_var()
gen_js_get(styp, tmp, name, mut dec, is_required)
- dec.writeln('\tif (jsonroot_$tmp) {')
- dec.writeln('\t\tres.${c_name(field.name)} = time__unix(json__decode_u64(jsonroot_$tmp));')
+ dec.writeln('\tif (jsonroot_${tmp}) {')
+ dec.writeln('\t\tres.${c_name(field.name)} = time__unix(json__decode_u64(jsonroot_${tmp}));')
if field.has_default_expr {
dec.writeln('\t} else {')
dec.writeln('\t\tres.${c_name(field.name)} = ${g.expr_string(field.default_expr)};')
@@ -428,8 +428,8 @@ fn (mut g Gen) gen_struct_enc_dec(type_info ast.TypeInfo, styp string, mut enc s
if is_js_prim(parent_type) {
tmp := g.new_tmp_var()
gen_js_get(styp, tmp, name, mut dec, is_required)
- dec.writeln('\tif (jsonroot_$tmp) {')
- dec.writeln('\t\tres.${c_name(field.name)} = $parent_dec_name (jsonroot_$tmp);')
+ dec.writeln('\tif (jsonroot_${tmp}) {')
+ dec.writeln('\t\tres.${c_name(field.name)} = ${parent_dec_name} (jsonroot_${tmp});')
if field.has_default_expr {
dec.writeln('\t} else {')
dec.writeln('\t\tres.${c_name(field.name)} = ${g.expr_string(field.default_expr)};')
@@ -439,8 +439,8 @@ fn (mut g Gen) gen_struct_enc_dec(type_info ast.TypeInfo, styp string, mut enc s
g.gen_json_for_type(alias.parent_type)
tmp := g.new_tmp_var()
gen_js_get_opt(dec_name, field_type, styp, tmp, name, mut dec, is_required)
- dec.writeln('\tif (jsonroot_$tmp) {')
- dec.writeln('\t\tres.${c_name(field.name)} = *($field_type*) ${tmp}.data;')
+ dec.writeln('\tif (jsonroot_${tmp}) {')
+ dec.writeln('\t\tres.${c_name(field.name)} = *(${field_type}*) ${tmp}.data;')
if field.has_default_expr {
dec.writeln('\t} else {')
dec.writeln('\t\tres.${c_name(field.name)} = ${g.expr_string(field.default_expr)};')
@@ -450,11 +450,11 @@ fn (mut g Gen) gen_struct_enc_dec(type_info ast.TypeInfo, styp string, mut enc s
} else {
tmp := g.new_tmp_var()
gen_js_get_opt(dec_name, field_type, styp, tmp, name, mut dec, is_required)
- dec.writeln('\tif (jsonroot_$tmp) {')
+ dec.writeln('\tif (jsonroot_${tmp}) {')
if field_sym.kind == .array_fixed {
- dec.writeln('\t\tvmemcpy(res.${c_name(field.name)},*($field_type*)${tmp}.data,sizeof($field_type));')
+ dec.writeln('\t\tvmemcpy(res.${c_name(field.name)},*(${field_type}*)${tmp}.data,sizeof(${field_type}));')
} else {
- dec.writeln('\t\tres.${c_name(field.name)} = *($field_type*) ${tmp}.data;')
+ dec.writeln('\t\tres.${c_name(field.name)} = *(${field_type}*) ${tmp}.data;')
}
if field.has_default_expr {
dec.writeln('\t} else {')
@@ -475,19 +475,19 @@ fn (mut g Gen) gen_struct_enc_dec(type_info ast.TypeInfo, styp string, mut enc s
}
}
if field_sym.kind == .enum_ {
- enc.writeln('\tcJSON_AddItemToObject(o, "$name", json__encode_u64(val.${c_name(field.name)}));\n')
+ enc.writeln('\tcJSON_AddItemToObject(o, "${name}", json__encode_u64(val.${c_name(field.name)}));\n')
} else {
if field_sym.name == 'time.Time' {
// time struct requires special treatment
// it has to be encoded as a unix timestamp number
- enc.writeln('\tcJSON_AddItemToObject(o, "$name", json__encode_u64(val.${c_name(field.name)}._v_unix));')
+ enc.writeln('\tcJSON_AddItemToObject(o, "${name}", json__encode_u64(val.${c_name(field.name)}._v_unix));')
} else {
if !field.typ.is_real_pointer() {
- enc.writeln('\tcJSON_AddItemToObject(o, "$name", ${enc_name}(val.${c_name(field.name)}));\n')
+ enc.writeln('\tcJSON_AddItemToObject(o, "${name}", ${enc_name}(val.${c_name(field.name)}));\n')
} else {
sptr_value := 'val.${c_name(field.name)}'
- enc.writeln('\tif ($sptr_value != 0) {')
- enc.writeln('\t\tcJSON_AddItemToObject(o, "$name", ${enc_name}(*$sptr_value));')
+ enc.writeln('\tif (${sptr_value} != 0) {')
+ enc.writeln('\t\tcJSON_AddItemToObject(o, "${name}", ${enc_name}(*${sptr_value}));')
enc.writeln('\t}\n')
}
}
@@ -496,10 +496,10 @@ fn (mut g Gen) gen_struct_enc_dec(type_info ast.TypeInfo, styp string, mut enc s
}
fn gen_js_get(styp string, tmp string, name string, mut dec strings.Builder, is_required bool) {
- dec.writeln('\tcJSON *jsonroot_$tmp = js_get(root, "$name");')
+ dec.writeln('\tcJSON *jsonroot_${tmp} = js_get(root, "${name}");')
if is_required {
- dec.writeln('\tif (jsonroot_$tmp == 0) {')
- dec.writeln('\t\treturn (${result_name}_$styp){ .is_error = true, .err = _v_error(_SLIT("expected field \'$name\' is missing")), .data = {0} };')
+ dec.writeln('\tif (jsonroot_${tmp} == 0) {')
+ dec.writeln('\t\treturn (${result_name}_${styp}){ .is_error = true, .err = _v_error(_SLIT("expected field \'${name}\' is missing")), .data = {0} };')
dec.writeln('\t}')
}
}
@@ -507,24 +507,24 @@ fn gen_js_get(styp string, tmp string, name string, mut dec strings.Builder, is_
fn gen_js_get_opt(dec_name string, field_type string, styp string, tmp string, name string, mut dec strings.Builder, is_required bool) {
gen_js_get(styp, tmp, name, mut dec, is_required)
value_field_type := field_type.trim_right('*')
- dec.writeln('\t${result_name}_$value_field_type $tmp;')
- dec.writeln('\tif (jsonroot_$tmp) {')
- dec.writeln('\t\t$tmp = ${dec_name}(jsonroot_$tmp);')
+ dec.writeln('\t${result_name}_${value_field_type} ${tmp};')
+ dec.writeln('\tif (jsonroot_${tmp}) {')
+ dec.writeln('\t\t${tmp} = ${dec_name}(jsonroot_${tmp});')
dec.writeln('\t\tif (${tmp}.is_error) {')
- dec.writeln('\t\t\treturn (${result_name}_$styp){ .is_error = true, .err = ${tmp}.err, .data = {0} };')
+ dec.writeln('\t\t\treturn (${result_name}_${styp}){ .is_error = true, .err = ${tmp}.err, .data = {0} };')
dec.writeln('\t\t}')
dec.writeln('\t}')
}
fn js_enc_name(typ string) string {
suffix := if typ.ends_with('*') { typ.trim_right('*') } else { typ }
- name := 'json__encode_$suffix'
+ name := 'json__encode_${suffix}'
return util.no_dots(name)
}
fn js_dec_name(typ string) string {
suffix := if typ.ends_with('*') { typ.trim_right('*') } else { typ }
- name := 'json__decode_$suffix'
+ name := 'json__decode_${suffix}'
return util.no_dots(name)
}
@@ -540,9 +540,9 @@ fn (mut g Gen) decode_array(value_type ast.Type, fixed_array_size int) string {
fixed_array_str, fixed_array_size_str, res_str, array_free_str := if fixed_array_size > -1 {
// fixed array
- 'fixed_', '_$fixed_array_size', '', ''
+ 'fixed_', '_${fixed_array_size}', '', ''
} else {
- '', '', 'res = __new_array${noscan}(0, 0, sizeof($styp));', 'array_free(&res);'
+ '', '', 'res = __new_array${noscan}(0, 0, sizeof(${styp}));', 'array_free(&res);'
}
fixed_array_idx, array_element_assign, fixed_array_idx_increment := if fixed_array_size > -1 {
@@ -554,30 +554,30 @@ fn (mut g Gen) decode_array(value_type ast.Type, fixed_array_size int) string {
mut s := ''
if is_js_prim(styp) {
- s = '$styp val = ${fn_name}((cJSON *)jsval); '
+ s = '${styp} val = ${fn_name}((cJSON *)jsval); '
} else {
s = '
- ${result_name}_$styp val2 = $fn_name ((cJSON *)jsval);
+ ${result_name}_${styp} val2 = ${fn_name} ((cJSON *)jsval);
if(val2.is_error) {
- $array_free_str
- return *(${result_name}_Array_$fixed_array_str$styp$fixed_array_size_str*)&val2;
+ ${array_free_str}
+ return *(${result_name}_Array_${fixed_array_str}${styp}${fixed_array_size_str}*)&val2;
}
- $styp val = *($styp*)val2.data;
+ ${styp} val = *(${styp}*)val2.data;
'
}
return '
if(root && !cJSON_IsArray(root) && !cJSON_IsNull(root)) {
- return (${result_name}_Array_$fixed_array_str$styp$fixed_array_size_str){.is_error = true, .err = _v_error(string__plus(_SLIT("Json element is not an array: "), tos2((byteptr)cJSON_PrintUnformatted(root)))), .data = {0}};
+ return (${result_name}_Array_${fixed_array_str}${styp}${fixed_array_size_str}){.is_error = true, .err = _v_error(string__plus(_SLIT("Json element is not an array: "), tos2((byteptr)cJSON_PrintUnformatted(root)))), .data = {0}};
}
- $res_str
+ ${res_str}
const cJSON *jsval = NULL;
- $fixed_array_idx
+ ${fixed_array_idx}
cJSON_ArrayForEach(jsval, root)
{
- $s
- $array_element_assign
- $fixed_array_idx_increment
+ ${s}
+ ${array_element_assign}
+ ${fixed_array_idx_increment}
}
'
}
@@ -588,15 +588,15 @@ fn (mut g Gen) encode_array(value_type ast.Type, fixed_array_size int) string {
data_str, size_str := if fixed_array_size > -1 {
// fixed array
- '', '$fixed_array_size'
+ '', '${fixed_array_size}'
} else {
'.data', 'val.len'
}
return '
o = cJSON_CreateArray();
- for (int i = 0; i < $size_str; i++){
- cJSON_AddItemToArray(o, $fn_name ( (($styp*)val$data_str)[i] ));
+ for (int i = 0; i < ${size_str}; i++){
+ cJSON_AddItemToArray(o, ${fn_name} ( ((${styp}*)val${data_str})[i] ));
}
'
}
@@ -609,26 +609,26 @@ fn (mut g Gen) decode_map(key_type ast.Type, value_type ast.Type) string {
fn_name_v := js_dec_name(styp_v)
mut s := ''
if is_js_prim(styp_v) {
- s = '$styp_v val = $fn_name_v (js_get(root, jsval->string));'
+ s = '${styp_v} val = ${fn_name_v} (js_get(root, jsval->string));'
} else {
s = '
- ${result_name}_$styp_v val2 = $fn_name_v (js_get(root, jsval->string));
+ ${result_name}_${styp_v} val2 = ${fn_name_v} (js_get(root, jsval->string));
if(val2.is_error) {
map_free(&res);
- return *(${result_name}_Map_${styp}_$styp_v*)&val2;
+ return *(${result_name}_Map_${styp}_${styp_v}*)&val2;
}
- $styp_v val = *($styp_v*)val2.data;
+ ${styp_v} val = *(${styp_v}*)val2.data;
'
}
return '
if(!cJSON_IsObject(root) && !cJSON_IsNull(root)) {
- return (${result_name}_Map_${styp}_$styp_v){ .is_error = true, .err = _v_error(string__plus(_SLIT("Json element is not an object: "), tos2((byteptr)cJSON_PrintUnformatted(root)))), .data = {0}};
+ return (${result_name}_Map_${styp}_${styp_v}){ .is_error = true, .err = _v_error(string__plus(_SLIT("Json element is not an object: "), tos2((byteptr)cJSON_PrintUnformatted(root)))), .data = {0}};
}
- res = new_map(sizeof($styp), sizeof($styp_v), $hash_fn, $key_eq_fn, $clone_fn, $free_fn);
+ res = new_map(sizeof(${styp}), sizeof(${styp_v}), ${hash_fn}, ${key_eq_fn}, ${clone_fn}, ${free_fn});
cJSON *jsval = NULL;
cJSON_ArrayForEach(jsval, root)
{
- $s
+ ${s}
string key = tos2((byteptr)jsval->string);
map_set(&res, &key, &val);
}
@@ -643,18 +643,18 @@ fn (mut g Gen) encode_map(key_type ast.Type, value_type ast.Type) string {
keys_tmp := g.new_tmp_var()
mut key := 'string key = '
if key_type.is_string() {
- key += '(($styp*)${keys_tmp}.data)[i];'
+ key += '((${styp}*)${keys_tmp}.data)[i];'
} else {
// key += '${styp}_str((($styp*)${keys_tmp}.data)[i]);'
verror('json: encode only maps with string keys')
}
return '
o = cJSON_CreateObject();
- Array_$styp $keys_tmp = map_keys(&val);
+ Array_${styp} ${keys_tmp} = map_keys(&val);
for (int i = 0; i < ${keys_tmp}.len; ++i) {
- $key
- cJSON_AddItemToObject(o, (char*) key.str, $fn_name_v ( *($styp_v*) map_get(&val, &key, &($styp_v[]) { $zero } ) ) );
+ ${key}
+ cJSON_AddItemToObject(o, (char*) key.str, ${fn_name_v} ( *(${styp_v}*) map_get(&val, &key, &(${styp_v}[]) { ${zero} } ) ) );
}
- array_free(&$keys_tmp);
+ array_free(&${keys_tmp});
'
}
diff --git a/vlib/v/gen/c/live.v b/vlib/v/gen/c/live.v
index 2bba24a107..c93de15a5e 100644
--- a/vlib/v/gen/c/live.v
+++ b/vlib/v/gen/c/live.v
@@ -41,12 +41,12 @@ fn (mut g Gen) generate_hotcode_reloader_code() {
mut load_code := []string{}
if g.pref.os != .windows {
for so_fn in g.hotcode_fn_names {
- load_code << 'impl_live_$so_fn = dlsym(live_lib, "impl_live_$so_fn");'
+ load_code << 'impl_live_${so_fn} = dlsym(live_lib, "impl_live_${so_fn}");'
}
phd = c.posix_hotcode_definitions_1
} else {
for so_fn in g.hotcode_fn_names {
- load_code << 'impl_live_$so_fn = (void *)GetProcAddress(live_lib, "impl_live_$so_fn"); '
+ load_code << 'impl_live_${so_fn} = (void *)GetProcAddress(live_lib, "impl_live_${so_fn}"); '
}
phd = c.windows_hotcode_definitions_1
}
@@ -77,22 +77,22 @@ fn (mut g Gen) generate_hotcode_reloading_main_caller() {
g.writeln('\t{')
g.writeln('\t\t// initialization of live function pointers')
for fname in g.hotcode_fn_names {
- g.writeln('\t\timpl_live_$fname = 0;')
+ g.writeln('\t\timpl_live_${fname} = 0;')
}
vexe := util.cescaped_path(pref.vexe_path())
file := util.cescaped_path(g.pref.path)
- ccompiler := '-cc $g.pref.ccompiler'
+ ccompiler := '-cc ${g.pref.ccompiler}'
so_debug_flag := if g.pref.is_debug { '-cg' } else { '' }
- vopts := '$ccompiler $so_debug_flag -sharedlive -shared'
+ vopts := '${ccompiler} ${so_debug_flag} -sharedlive -shared'
//
g.writeln('\t\t// start background reloading thread')
if g.pref.os == .windows {
g.writeln('\t\tlive_fn_mutex = CreateMutexA(0, 0, 0);')
}
g.writeln('\t\tv__live__LiveReloadInfo* live_info = v__live__executable__new_live_reload_info(')
- g.writeln('\t\t\t\t\t tos2("$file"),')
- g.writeln('\t\t\t\t\t tos2("$vexe"),')
- g.writeln('\t\t\t\t\t tos2("$vopts"),')
+ g.writeln('\t\t\t\t\t tos2("${file}"),')
+ g.writeln('\t\t\t\t\t tos2("${vexe}"),')
+ g.writeln('\t\t\t\t\t tos2("${vopts}"),')
g.writeln('\t\t\t\t\t &live_fn_mutex,')
g.writeln('\t\t\t\t\t v_bind_live_symbols')
g.writeln('\t\t);')
@@ -104,7 +104,7 @@ fn (mut g Gen) generate_hotcode_reloading_main_caller() {
for f, _ in already_added {
fpath := os.real_path(f)
g.writeln('\t\tv__live__executable__add_live_monitored_file(live_info, ${ctoslit(fpath)}); // source V file with [live] ${
- idx + 1}/$already_added.len')
+ idx + 1}/${already_added.len}')
idx++
}
g.writeln('')
diff --git a/vlib/v/gen/c/match.v b/vlib/v/gen/c/match.v
index eade857280..5f96bf27e8 100644
--- a/vlib/v/gen/c/match.v
+++ b/vlib/v/gen/c/match.v
@@ -77,7 +77,7 @@ fn (mut g Gen) match_expr(node ast.MatchExpr) {
''
}
cond_var = g.new_tmp_var()
- g.write('${g.typ(node.cond_type)} $cond_var = ')
+ g.write('${g.typ(node.cond_type)} ${cond_var} = ')
g.expr(node.cond)
g.writeln(';')
g.set_current_pos_as_last_stmt_pos()
@@ -87,7 +87,7 @@ fn (mut g Gen) match_expr(node ast.MatchExpr) {
g.empty_line = true
cur_line = g.go_before_stmt(0).trim_left(' \t')
tmp_var = g.new_tmp_var()
- g.writeln('${g.typ(node.return_type)} $tmp_var = ${g.type_default(node.return_type)};')
+ g.writeln('${g.typ(node.return_type)} ${tmp_var} = ${g.type_default(node.return_type)};')
}
if is_expr && !need_tmp_var {
@@ -129,7 +129,7 @@ fn (mut g Gen) match_expr(node ast.MatchExpr) {
g.set_current_pos_as_last_stmt_pos()
g.write(cur_line)
if need_tmp_var {
- g.write('$tmp_var')
+ g.write('${tmp_var}')
}
if is_expr && !need_tmp_var {
g.write(')')
@@ -179,7 +179,7 @@ fn (mut g Gen) match_expr_sumtype(node ast.MatchExpr, is_expr bool, cond_var str
if cond_sym.kind == .sum_type {
g.write('${dot_or_ptr}_typ == ')
if cur_expr is ast.None {
- g.write('$ast.none_type.idx() /* none */')
+ g.write('${ast.none_type.idx()} /* none */')
} else {
g.expr(cur_expr)
}
@@ -232,14 +232,14 @@ fn (mut g Gen) match_expr_switch(node ast.MatchExpr, is_expr bool, cond_var stri
mut default_generated := false
g.empty_line = true
- g.writeln('switch ($cond_var) {')
+ g.writeln('switch (${cond_var}) {')
g.indent++
for branch in node.branches {
if branch.is_else {
if cond_fsym.info is ast.Enum {
for val in (cond_fsym.info as ast.Enum).vals {
if val !in covered_enum {
- g.writeln('case $cname$val:')
+ g.writeln('case ${cname}${val}:')
}
}
}
@@ -263,15 +263,15 @@ fn (mut g Gen) match_expr_switch(node ast.MatchExpr, is_expr bool, cond_var stri
}
g.write('(')
if !skip_low {
- g.write('$cond_var >= ')
+ g.write('${cond_var} >= ')
g.expr(expr.low)
g.write(' && ')
}
- g.write('$cond_var <= ')
+ g.write('${cond_var} <= ')
g.expr(expr.high)
g.write(')')
} else {
- g.write('$cond_var == (')
+ g.write('${cond_var} == (')
g.expr(expr)
g.write(')')
}
@@ -335,15 +335,15 @@ fn (mut g Gen) match_expr_switch(node ast.MatchExpr, is_expr bool, cond_var stri
}
g.write('(')
if !skip_low {
- g.write('$cond_var >= ')
+ g.write('${cond_var} >= ')
g.expr(expr.low)
g.write(' && ')
}
- g.write('$cond_var <= ')
+ g.write('${cond_var} <= ')
g.expr(expr.high)
g.write(')')
} else {
- g.write('$cond_var == (')
+ g.write('${cond_var} == (')
g.expr(expr)
g.write(')')
}
@@ -404,30 +404,30 @@ fn (mut g Gen) match_expr_classic(node ast.MatchExpr, is_expr bool, cond_var str
match type_sym.kind {
.array {
ptr_typ := g.equality_fn(node.cond_type)
- g.write('${ptr_typ}_arr_eq($cond_var, ')
+ g.write('${ptr_typ}_arr_eq(${cond_var}, ')
g.expr(expr)
g.write(')')
}
.array_fixed {
ptr_typ := g.equality_fn(node.cond_type)
- g.write('${ptr_typ}_arr_eq($cond_var, ')
+ g.write('${ptr_typ}_arr_eq(${cond_var}, ')
g.expr(expr)
g.write(')')
}
.map {
ptr_typ := g.equality_fn(node.cond_type)
- g.write('${ptr_typ}_map_eq($cond_var, ')
+ g.write('${ptr_typ}_map_eq(${cond_var}, ')
g.expr(expr)
g.write(')')
}
.string {
- g.write('string__eq($cond_var, ')
+ g.write('string__eq(${cond_var}, ')
g.expr(expr)
g.write(')')
}
.struct_ {
ptr_typ := g.equality_fn(node.cond_type)
- g.write('${ptr_typ}_struct_eq($cond_var, ')
+ g.write('${ptr_typ}_struct_eq(${cond_var}, ')
g.expr(expr)
g.write(')')
}
@@ -442,15 +442,15 @@ fn (mut g Gen) match_expr_classic(node ast.MatchExpr, is_expr bool, cond_var str
}
g.write('(')
if !skip_low {
- g.write('$cond_var >= ')
+ g.write('${cond_var} >= ')
g.expr(expr.low)
g.write(' && ')
}
- g.write('$cond_var <= ')
+ g.write('${cond_var} <= ')
g.expr(expr.high)
g.write(')')
} else {
- g.write('$cond_var == (')
+ g.write('${cond_var} == (')
g.expr(expr)
g.write(')')
}
diff --git a/vlib/v/gen/c/profile.v b/vlib/v/gen/c/profile.v
index b7d89bb54f..0aeeb1510d 100644
--- a/vlib/v/gen/c/profile.v
+++ b/vlib/v/gen/c/profile.v
@@ -19,26 +19,26 @@ fn (mut g Gen) profile_fn(fn_decl ast.FnDecl) {
g.defer_profile_code = ''
} else {
measure_fn_name := if g.pref.os == .macos { 'time__vpc_now_darwin' } else { 'time__vpc_now' }
- fn_profile_counter_name := 'vpc_$cfn_name'
+ fn_profile_counter_name := 'vpc_${cfn_name}'
fn_profile_counter_name_calls := '${fn_profile_counter_name}_calls'
g.writeln('')
should_restore_v__profile_enabled := g.pref.profile_fns.len > 0
&& cfn_name in g.pref.profile_fns
if should_restore_v__profile_enabled {
$if trace_profile_fns ? {
- eprintln('> profile_fn | $g.pref.profile_fns | $cfn_name')
+ eprintln('> profile_fn | ${g.pref.profile_fns} | ${cfn_name}')
}
g.writeln('\tbool _prev_v__profile_enabled = v__profile_enabled;')
g.writeln('\tv__profile_enabled = true;')
}
g.writeln('\tdouble _PROF_FN_START = ${measure_fn_name}();')
- g.writeln('\tif(v__profile_enabled) { $fn_profile_counter_name_calls++; } // $fn_name')
+ g.writeln('\tif(v__profile_enabled) { ${fn_profile_counter_name_calls}++; } // ${fn_name}')
g.writeln('')
- g.defer_profile_code = '\tif(v__profile_enabled) { $fn_profile_counter_name += ${measure_fn_name}() - _PROF_FN_START; }'
+ g.defer_profile_code = '\tif(v__profile_enabled) { ${fn_profile_counter_name} += ${measure_fn_name}() - _PROF_FN_START; }'
if should_restore_v__profile_enabled {
g.defer_profile_code += '\n\t\tv__profile_enabled = _prev_v__profile_enabled;'
}
- g.pcs_declarations.writeln('double $fn_profile_counter_name = 0.0; u64 $fn_profile_counter_name_calls = 0;')
+ g.pcs_declarations.writeln('double ${fn_profile_counter_name} = 0.0; u64 ${fn_profile_counter_name_calls} = 0;')
g.pcs << ProfileCounterMeta{
fn_name: cfn_name
vpc_name: fn_profile_counter_name
@@ -52,13 +52,13 @@ pub fn (mut g Gen) gen_vprint_profile_stats() {
fstring := '"%14llu %14.3fms %14.0fns %s \\n"'
if g.pref.profile_file == '-' {
for pc_meta in g.pcs {
- g.pcs_declarations.writeln('\tif ($pc_meta.vpc_calls) printf($fstring, $pc_meta.vpc_calls, $pc_meta.vpc_name/1000000.0, $pc_meta.vpc_name/$pc_meta.vpc_calls, "$pc_meta.fn_name" );')
+ g.pcs_declarations.writeln('\tif (${pc_meta.vpc_calls}) printf(${fstring}, ${pc_meta.vpc_calls}, ${pc_meta.vpc_name}/1000000.0, ${pc_meta.vpc_name}/${pc_meta.vpc_calls}, "${pc_meta.fn_name}" );')
}
} else {
g.pcs_declarations.writeln('\tFILE * fp;')
- g.pcs_declarations.writeln('\tfp = fopen ("$g.pref.profile_file", "w+");')
+ g.pcs_declarations.writeln('\tfp = fopen ("${g.pref.profile_file}", "w+");')
for pc_meta in g.pcs {
- g.pcs_declarations.writeln('\tif ($pc_meta.vpc_calls) fprintf(fp, $fstring, $pc_meta.vpc_calls, $pc_meta.vpc_name/1000000.0, $pc_meta.vpc_name/$pc_meta.vpc_calls, "$pc_meta.fn_name" );')
+ g.pcs_declarations.writeln('\tif (${pc_meta.vpc_calls}) fprintf(fp, ${fstring}, ${pc_meta.vpc_calls}, ${pc_meta.vpc_name}/1000000.0, ${pc_meta.vpc_name}/${pc_meta.vpc_calls}, "${pc_meta.fn_name}" );')
}
g.pcs_declarations.writeln('\tfclose(fp);')
}
@@ -66,8 +66,8 @@ pub fn (mut g Gen) gen_vprint_profile_stats() {
g.pcs_declarations.writeln('')
g.pcs_declarations.writeln('void vreset_profile_stats(){')
for pc_meta in g.pcs {
- g.pcs_declarations.writeln('\t$pc_meta.vpc_calls = 0;')
- g.pcs_declarations.writeln('\t$pc_meta.vpc_name = 0.0;')
+ g.pcs_declarations.writeln('\t${pc_meta.vpc_calls} = 0;')
+ g.pcs_declarations.writeln('\t${pc_meta.vpc_name} = 0.0;')
}
g.pcs_declarations.writeln('}')
g.pcs_declarations.writeln('')
diff --git a/vlib/v/gen/c/sql.v b/vlib/v/gen/c/sql.v
index 97b4298250..e07e3679fc 100644
--- a/vlib/v/gen/c/sql.v
+++ b/vlib/v/gen/c/sql.v
@@ -22,7 +22,7 @@ fn (mut g Gen) sql_stmt(node ast.SqlStmt) {
conn := g.new_tmp_var()
g.writeln('')
g.writeln('// orm')
- g.write('orm__Connection $conn = (orm__Connection){._')
+ g.write('orm__Connection ${conn} = (orm__Connection){._')
mut fn_prefix := ''
typ := g.parse_db_type(node.db_expr)
match typ {
@@ -36,10 +36,10 @@ fn (mut g Gen) sql_stmt(node ast.SqlStmt) {
fn_prefix = 'pg__DB'
}
else {
- verror('This database type `$typ` is not implemented yet in orm') // TODO add better error
+ verror('This database type `${typ}` is not implemented yet in orm') // TODO add better error
}
}
- g.write('$fn_prefix = &')
+ g.write('${fn_prefix} = &')
g.expr(node.db_expr)
g.writeln(', ._typ = _orm__Connection_${fn_prefix}_index};')
for line in node.lines {
@@ -75,22 +75,22 @@ fn (mut g Gen) sql_stmt_line(nd ast.SqlStmtLine, expr string, or_expr ast.OrExpr
unsafe { fields.free() }
}
if node.kind == .create {
- g.write('${result_name}_void $res = orm__Connection_name_table[${expr}._typ]._method_')
+ g.write('${result_name}_void ${res} = orm__Connection_name_table[${expr}._typ]._method_')
g.sql_create_table(node, expr, table_name)
subs = true
} else if node.kind == .drop {
- g.write('${result_name}_void $res = orm__Connection_name_table[${expr}._typ]._method_')
- g.writeln('drop(${expr}._object, _SLIT("$table_name"));')
+ g.write('${result_name}_void ${res} = orm__Connection_name_table[${expr}._typ]._method_')
+ g.writeln('drop(${expr}._object, _SLIT("${table_name}"));')
subs = true
} else if node.kind == .insert {
arr := g.new_tmp_var()
- g.writeln('Array_orm__Primitive $arr = __new_array_with_default_noscan(0, 0, sizeof(orm__Primitive), 0);')
+ g.writeln('Array_orm__Primitive ${arr} = __new_array_with_default_noscan(0, 0, sizeof(orm__Primitive), 0);')
g.sql_insert(node, expr, table_name, arr, res, '', false, '', or_expr)
} else if node.kind == .update {
- g.write('${result_name}_void $res = orm__Connection_name_table[${expr}._typ]._method_')
+ g.write('${result_name}_void ${res} = orm__Connection_name_table[${expr}._typ]._method_')
g.sql_update(node, expr, table_name)
} else if node.kind == .delete {
- g.write('${result_name}_void $res = orm__Connection_name_table[${expr}._typ]._method_')
+ g.write('${result_name}_void ${res} = orm__Connection_name_table[${expr}._typ]._method_')
g.sql_delete(node, expr, table_name)
}
if or_expr.kind == .block {
@@ -104,29 +104,29 @@ fn (mut g Gen) sql_stmt_line(nd ast.SqlStmtLine, expr string, or_expr ast.OrExpr
}
fn (mut g Gen) sql_create_table(node ast.SqlStmtLine, expr string, table_name string) {
- g.write('create(${expr}._object, _SLIT("$table_name"), new_array_from_c_array($node.fields.len, $node.fields.len, sizeof(orm__TableField),')
+ g.write('create(${expr}._object, _SLIT("${table_name}"), new_array_from_c_array(${node.fields.len}, ${node.fields.len}, sizeof(orm__TableField),')
if node.fields.len > 0 {
- g.write(' _MOV((orm__TableField[$node.fields.len]){')
+ g.write(' _MOV((orm__TableField[${node.fields.len}]){')
for field in node.fields {
sym := g.table.sym(field.typ)
g.write('(orm__TableField){')
- g.write('.name = _SLIT("$field.name"),')
+ g.write('.name = _SLIT("${field.name}"),')
mut typ := int(field.typ)
if sym.name == 'time.Time' {
typ = -2
}
- g.write('.typ = $typ,')
+ g.write('.typ = ${typ},')
g.write('.is_arr = ${sym.kind == .array}, ')
g.write('.is_time = ${g.table.get_type_name(field.typ) == 'time__Time'},')
- g.write('.default_val = (string){.str = (byteptr) "$field.default_val", .is_lit = 1},')
- g.write('.attrs = new_array_from_c_array($field.attrs.len, $field.attrs.len, sizeof(StructAttribute),')
+ g.write('.default_val = (string){.str = (byteptr) "${field.default_val}", .is_lit = 1},')
+ g.write('.attrs = new_array_from_c_array(${field.attrs.len}, ${field.attrs.len}, sizeof(StructAttribute),')
if field.attrs.len > 0 {
- g.write(' _MOV((StructAttribute[$field.attrs.len]){')
+ g.write(' _MOV((StructAttribute[${field.attrs.len}]){')
for attr in field.attrs {
g.write('(StructAttribute){')
- g.write('.name = _SLIT("$attr.name"),')
- g.write('.has_arg = $attr.has_arg,')
- g.write('.arg = _SLIT("$attr.arg"),')
+ g.write('.name = _SLIT("${attr.name}"),')
+ g.write('.has_arg = ${attr.has_arg},')
+ g.write('.arg = _SLIT("${attr.arg}"),')
g.write('.kind = ${int(attr.kind)},')
g.write('},')
}
@@ -161,7 +161,7 @@ fn (mut g Gen) sql_insert(node ast.SqlStmtLine, expr string, table_name string,
if attr.kind == .string {
f_key = attr.arg
} else {
- verror("fkey attribute need be string. Try [fkey: '$attr.arg'] instead of [fkey: $attr.arg]")
+ verror("fkey attribute need be string. Try [fkey: '${attr.arg}'] instead of [fkey: ${attr.arg}]")
}
}
}
@@ -183,15 +183,15 @@ fn (mut g Gen) sql_insert(node ast.SqlStmtLine, expr string, table_name string,
for sub in subs {
g.sql_stmt_line(sub, expr, or_expr)
- g.writeln('array_push(&$last_ids_arr, _MOV((orm__Primitive[]){orm__Connection_name_table[${expr}._typ]._method_last_id(${expr}._object)}));')
+ g.writeln('array_push(&${last_ids_arr}, _MOV((orm__Primitive[]){orm__Connection_name_table[${expr}._typ]._method_last_id(${expr}._object)}));')
}
- g.write('${result_name}_void $res = orm__Connection_name_table[${expr}._typ]._method_')
- g.write('insert(${expr}._object, _SLIT("$table_name"), (orm__QueryData){')
+ g.write('${result_name}_void ${res} = orm__Connection_name_table[${expr}._typ]._method_')
+ g.write('insert(${expr}._object, _SLIT("${table_name}"), (orm__QueryData){')
- g.write('.fields = new_array_from_c_array($fields.len, $fields.len, sizeof(string),')
+ g.write('.fields = new_array_from_c_array(${fields.len}, ${fields.len}, sizeof(string),')
if fields.len > 0 {
- g.write('_MOV((string[$fields.len]){')
+ g.write('_MOV((string[${fields.len}]){')
for f in fields {
g.write('_SLIT("${g.get_field_name(f)}"),')
}
@@ -201,26 +201,26 @@ fn (mut g Gen) sql_insert(node ast.SqlStmtLine, expr string, table_name string,
}
g.write('),')
- g.write('.data = new_array_from_c_array($fields.len, $fields.len, sizeof(orm__Primitive),')
+ g.write('.data = new_array_from_c_array(${fields.len}, ${fields.len}, sizeof(orm__Primitive),')
if fields.len > 0 {
- g.write(' _MOV((orm__Primitive[$fields.len]){')
+ g.write(' _MOV((orm__Primitive[${fields.len}]){')
mut structs := 0
for f in fields {
if f.name == fkey {
- g.write('$pid, ')
+ g.write('${pid}, ')
continue
}
mut sym := g.table.sym(f.typ)
mut typ := sym.cname
if sym.kind == .struct_ && typ != 'time__Time' {
- g.write('(*(orm__Primitive*) array_get($last_ids_arr, $structs)),')
+ g.write('(*(orm__Primitive*) array_get(${last_ids_arr}, ${structs})),')
structs++
continue
}
if typ == 'time__Time' {
typ = 'time'
}
- g.write('orm__${typ}_to_primitive(${node.object_var_name}.$f.name),')
+ g.write('orm__${typ}_to_primitive(${node.object_var_name}.${f.name}),')
}
g.write('})')
} else {
@@ -234,15 +234,15 @@ fn (mut g Gen) sql_insert(node ast.SqlStmtLine, expr string, table_name string,
if arrs.len > 0 {
mut id_name := g.new_tmp_var()
- g.writeln('orm__Primitive $id_name = orm__Connection_name_table[${expr}._typ]._method_last_id(${expr}._object);')
+ g.writeln('orm__Primitive ${id_name} = orm__Connection_name_table[${expr}._typ]._method_last_id(${expr}._object);')
for i, mut arr in arrs {
idx := g.new_tmp_var()
- g.writeln('for (int $idx = 0; $idx < ${arr.object_var_name}.${field_names[i]}.len; $idx++) {')
+ g.writeln('for (int ${idx} = 0; ${idx} < ${arr.object_var_name}.${field_names[i]}.len; ${idx}++) {')
last_ids := g.new_tmp_var()
res_ := g.new_tmp_var()
tmp_var := g.new_tmp_var()
ctyp := g.typ(arr.table_expr.typ)
- g.writeln('$ctyp $tmp_var = (*($ctyp*)array_get(${arr.object_var_name}.${field_names[i]}, $idx));')
+ g.writeln('${ctyp} ${tmp_var} = (*(${ctyp}*)array_get(${arr.object_var_name}.${field_names[i]}, ${idx}));')
arr.object_var_name = tmp_var
mut fff := []ast.StructField{}
for f in arr.fields {
@@ -273,25 +273,25 @@ fn (mut g Gen) sql_update(node ast.SqlStmtLine, expr string, table_name string)
// println(table_name)
// println(expr)
// println(node)
- g.write('update(${expr}._object, _SLIT("$table_name"), (orm__QueryData){')
+ g.write('update(${expr}._object, _SLIT("${table_name}"), (orm__QueryData){')
g.write('.kinds = __new_array_with_default_noscan(0, 0, sizeof(orm__OperationKind), 0),')
g.write('.is_and = __new_array_with_default_noscan(0, 0, sizeof(bool), 0),')
g.write('.types = __new_array_with_default_noscan(0, 0, sizeof(int), 0),')
g.write('.parentheses = __new_array_with_default_noscan(0, 0, sizeof(Array_int), 0),')
if node.updated_columns.len > 0 {
- g.write('.fields = new_array_from_c_array($node.updated_columns.len, $node.updated_columns.len, sizeof(string),')
- g.write(' _MOV((string[$node.updated_columns.len]){')
+ g.write('.fields = new_array_from_c_array(${node.updated_columns.len}, ${node.updated_columns.len}, sizeof(string),')
+ g.write(' _MOV((string[${node.updated_columns.len}]){')
for field in node.updated_columns {
- g.write('_SLIT("$field"),')
+ g.write('_SLIT("${field}"),')
}
g.write('})')
} else {
- g.write('.fields = __new_array_with_default_noscan($node.updated_columns.len, $node.updated_columns.len, sizeof(string), 0')
+ g.write('.fields = __new_array_with_default_noscan(${node.updated_columns.len}, ${node.updated_columns.len}, sizeof(string), 0')
}
g.write('),')
- g.write('.data = new_array_from_c_array($node.update_exprs.len, $node.update_exprs.len, sizeof(orm__Primitive),')
+ g.write('.data = new_array_from_c_array(${node.update_exprs.len}, ${node.update_exprs.len}, sizeof(orm__Primitive),')
if node.update_exprs.len > 0 {
- g.write(' _MOV((orm__Primitive[$node.update_exprs.len]){')
+ g.write(' _MOV((orm__Primitive[${node.update_exprs.len}]){')
for e in node.update_exprs {
g.sql_expr_to_orm_primitive(e)
}
@@ -303,7 +303,7 @@ fn (mut g Gen) sql_update(node ast.SqlStmtLine, expr string, table_name string)
}
fn (mut g Gen) sql_delete(node ast.SqlStmtLine, expr string, table_name string) {
- g.write('_v_delete(${expr}._object, _SLIT("$table_name"),')
+ g.write('_v_delete(${expr}._object, _SLIT("${table_name}"),')
g.sql_gen_where_data(node.where_expr)
g.writeln(');')
}
@@ -353,7 +353,7 @@ fn (mut g Gen) sql_write_orm_primitive(t ast.Type, expr ast.Expr) {
g.write('orm__${typ}_to_primitive(')
if expr is ast.InfixExpr {
g.write('(orm__InfixType){')
- g.write('.name = _SLIT("$expr.left"),')
+ g.write('.name = _SLIT("${expr.left}"),')
mut kind := match expr.op {
.plus {
'orm__MathOperationKind__add'
@@ -371,7 +371,7 @@ fn (mut g Gen) sql_write_orm_primitive(t ast.Type, expr ast.Expr) {
''
}
}
- g.write('.operator = $kind,')
+ g.write('.operator = ${kind},')
g.write('.right = ')
g.sql_expr_to_orm_primitive(expr.right)
g.write('}')
@@ -467,20 +467,20 @@ fn (mut g Gen) sql_gen_where_data(where_expr ast.Expr) {
is_and)
g.write('.types = __new_array_with_default_noscan(0, 0, sizeof(int), 0),')
if fields.len > 0 {
- g.write('.fields = new_array_from_c_array($fields.len, $fields.len, sizeof(string),')
- g.write(' _MOV((string[$fields.len]){')
+ g.write('.fields = new_array_from_c_array(${fields.len}, ${fields.len}, sizeof(string),')
+ g.write(' _MOV((string[${fields.len}]){')
for field in fields {
- g.write('_SLIT("$field"),')
+ g.write('_SLIT("${field}"),')
}
g.write('})')
} else {
- g.write('.fields = __new_array_with_default_noscan($fields.len, $fields.len, sizeof(string), 0')
+ g.write('.fields = __new_array_with_default_noscan(${fields.len}, ${fields.len}, sizeof(string), 0')
}
g.write('),')
- g.write('.data = new_array_from_c_array($data.len, $data.len, sizeof(orm__Primitive),')
+ g.write('.data = new_array_from_c_array(${data.len}, ${data.len}, sizeof(orm__Primitive),')
if data.len > 0 {
- g.write(' _MOV((orm__Primitive[$data.len]){')
+ g.write(' _MOV((orm__Primitive[${data.len}]){')
for e in data {
g.sql_expr_to_orm_primitive(e)
}
@@ -490,12 +490,12 @@ fn (mut g Gen) sql_gen_where_data(where_expr ast.Expr) {
g.write('.parentheses = ')
if parentheses.len > 0 {
- g.write('new_array_from_c_array($parentheses.len, $parentheses.len, sizeof(Array_int), _MOV((Array_int[$parentheses.len]){')
+ g.write('new_array_from_c_array(${parentheses.len}, ${parentheses.len}, sizeof(Array_int), _MOV((Array_int[${parentheses.len}]){')
for par in parentheses {
if par.len > 0 {
- g.write('new_array_from_c_array($par.len, $par.len, sizeof(int), _MOV((int[$par.len]){')
+ g.write('new_array_from_c_array(${par.len}, ${par.len}, sizeof(int), _MOV((int[${par.len}]){')
for val in par {
- g.write('$val,')
+ g.write('${val},')
}
g.write('})),')
} else {
@@ -509,26 +509,26 @@ fn (mut g Gen) sql_gen_where_data(where_expr ast.Expr) {
g.write(',')
if kinds.len > 0 {
- g.write('.kinds = new_array_from_c_array($kinds.len, $kinds.len, sizeof(orm__OperationKind),')
- g.write(' _MOV((orm__OperationKind[$kinds.len]){')
+ g.write('.kinds = new_array_from_c_array(${kinds.len}, ${kinds.len}, sizeof(orm__OperationKind),')
+ g.write(' _MOV((orm__OperationKind[${kinds.len}]){')
for k in kinds {
- g.write('$k,')
+ g.write('${k},')
}
g.write('})')
} else {
- g.write('.kinds = __new_array_with_default_noscan($kinds.len, $kinds.len, sizeof(orm__OperationKind), 0')
+ g.write('.kinds = __new_array_with_default_noscan(${kinds.len}, ${kinds.len}, sizeof(orm__OperationKind), 0')
}
g.write('),')
if is_and.len > 0 {
- g.write('.is_and = new_array_from_c_array($is_and.len, $is_and.len, sizeof(bool),')
- g.write(' _MOV((bool[$is_and.len]){')
+ g.write('.is_and = new_array_from_c_array(${is_and.len}, ${is_and.len}, sizeof(bool),')
+ g.write(' _MOV((bool[${is_and.len}]){')
for b in is_and {
- g.write('$b, ')
+ g.write('${b}, ')
}
g.write('})')
} else {
- g.write('.is_and = __new_array_with_default_noscan($is_and.len, $is_and.len, sizeof(bool), 0')
+ g.write('.is_and = __new_array_with_default_noscan(${is_and.len}, ${is_and.len}, sizeof(bool), 0')
}
g.write('),}')
}
@@ -538,7 +538,7 @@ fn (mut g Gen) sql_select_expr(node ast.SqlExpr) {
conn := g.new_tmp_var()
g.writeln('')
g.writeln('// orm')
- g.write('orm__Connection $conn = (orm__Connection){._')
+ g.write('orm__Connection ${conn} = (orm__Connection){._')
mut fn_prefix := ''
typ := g.parse_db_type(node.db_expr)
match typ {
@@ -552,11 +552,11 @@ fn (mut g Gen) sql_select_expr(node ast.SqlExpr) {
fn_prefix = 'pg__DB'
}
else {
- verror('This database type `$typ` is not implemented yet in orm') // TODO add better error
+ verror('This database type `${typ}` is not implemented yet in orm') // TODO add better error
}
}
- g.write('$fn_prefix = &')
+ g.write('${fn_prefix} = &')
g.expr(node.db_expr)
g.writeln(', ._typ = _orm__Connection_${fn_prefix}_index};')
g.sql_select(node, conn, left, node.or_expr)
@@ -583,12 +583,12 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
res := g.new_tmp_var()
table_name := g.get_table_name(node.table_expr)
g.sql_table_name = g.table.sym(node.table_expr.typ).name
- g.write('${result_name}_Array_Array_orm__Primitive _o$res = orm__Connection_name_table[${expr}._typ]._method_select(${expr}._object, ')
+ g.write('${result_name}_Array_Array_orm__Primitive _o${res} = orm__Connection_name_table[${expr}._typ]._method_select(${expr}._object, ')
g.write('(orm__SelectConfig){')
- g.write('.table = _SLIT("$table_name"),')
- g.write('.is_count = $node.is_count,')
- g.write('.has_where = $node.has_where,')
- g.write('.has_order = $node.has_order,')
+ g.write('.table = _SLIT("${table_name}"),')
+ g.write('.is_count = ${node.is_count},')
+ g.write('.has_where = ${node.has_where},')
+ g.write('.has_order = ${node.has_order},')
if node.has_order {
g.write('.order = _SLIT("')
g.expr(node.order_expr)
@@ -599,16 +599,16 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
g.write('.order_type = orm__OrderType__asc,')
}
}
- g.write('.has_limit = $node.has_limit,')
- g.write('.has_offset = $node.has_offset,')
+ g.write('.has_limit = ${node.has_limit},')
+ g.write('.has_offset = ${node.has_offset},')
if prim != '' {
- g.write('.primary = _SLIT("$prim"),')
+ g.write('.primary = _SLIT("${prim}"),')
}
select_fields := fields.filter(g.table.sym(it.typ).kind != .array)
- g.write('.fields = new_array_from_c_array($select_fields.len, $select_fields.len, sizeof(string),')
+ g.write('.fields = new_array_from_c_array(${select_fields.len}, ${select_fields.len}, sizeof(string),')
mut types := []int{}
if select_fields.len > 0 {
- g.write(' _MOV((string[$select_fields.len]){')
+ g.write(' _MOV((string[${select_fields.len}]){')
for field in select_fields {
g.write('_SLIT("${g.get_field_name(field)}"),')
sym := g.table.sym(field.typ)
@@ -627,11 +627,11 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
g.write('NULL')
}
g.write('),')
- g.write('.types = new_array_from_c_array($types.len, $types.len, sizeof(int),')
+ g.write('.types = new_array_from_c_array(${types.len}, ${types.len}, sizeof(int),')
if types.len > 0 {
- g.write(' _MOV((int[$types.len]){')
+ g.write(' _MOV((int[${types.len}]){')
for typ in types {
- g.write('$typ,')
+ g.write('${typ},')
}
g.write('})')
} else {
@@ -652,14 +652,14 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
g.write('.is_and = __new_array_with_default_noscan(0, 0, sizeof(bool), 0),')
g.write('.parentheses = __new_array_with_default_noscan(0, 0, sizeof(Array_int), 0),')
if exprs.len > 0 {
- g.write('.data = new_array_from_c_array($exprs.len, $exprs.len, sizeof(orm__Primitive),')
- g.write(' _MOV((orm__Primitive[$exprs.len]){')
+ g.write('.data = new_array_from_c_array(${exprs.len}, ${exprs.len}, sizeof(orm__Primitive),')
+ g.write(' _MOV((orm__Primitive[${exprs.len}]){')
for e in exprs {
g.sql_expr_to_orm_primitive(e)
}
g.write('})')
} else {
- g.write('.data = __new_array_with_default_noscan($exprs.len, $exprs.len, sizeof(orm__Primitive), 0')
+ g.write('.data = __new_array_with_default_noscan(${exprs.len}, ${exprs.len}, sizeof(orm__Primitive), 0')
}
g.write(')},')
@@ -677,7 +677,7 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
g.writeln(');')
mut tmp_left := g.new_tmp_var()
- g.writeln('${g.typ(node.typ.set_flag(.result))} $tmp_left;')
+ g.writeln('${g.typ(node.typ.set_flag(.result))} ${tmp_left};')
if node.or_expr.kind == .block {
g.writeln('${tmp_left}.is_error = _o${res}.is_error;')
@@ -687,10 +687,10 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
g.indent++
}
- g.writeln('Array_Array_orm__Primitive $res = (*(Array_Array_orm__Primitive*)_o${res}.data);')
+ g.writeln('Array_Array_orm__Primitive ${res} = (*(Array_Array_orm__Primitive*)_o${res}.data);')
if node.is_count {
- g.writeln('*(${g.typ(node.typ)}*) ${tmp_left}.data = *((*(orm__Primitive*) array_get((*(Array_orm__Primitive*)array_get($res, 0)), 0))._int);')
+ g.writeln('*(${g.typ(node.typ)}*) ${tmp_left}.data = *((*(orm__Primitive*) array_get((*(Array_orm__Primitive*)array_get(${res}, 0)), 0))._int);')
if node.or_expr.kind == .block {
g.indent--
g.writeln('}')
@@ -699,15 +699,15 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
tmp := g.new_tmp_var()
styp := g.typ(node.typ)
idx := g.new_tmp_var()
- g.writeln('int $idx = 0;')
+ g.writeln('int ${idx} = 0;')
mut typ_str := ''
if node.is_array {
info := g.table.sym(node.typ).array_info()
typ_str = g.typ(info.elem_type)
- g.writeln('$styp ${tmp}_array = __new_array(0, ${res}.len, sizeof($typ_str));')
- g.writeln('for (; $idx < ${res}.len; $idx++) {')
+ g.writeln('${styp} ${tmp}_array = __new_array(0, ${res}.len, sizeof(${typ_str}));')
+ g.writeln('for (; ${idx} < ${res}.len; ${idx}++) {')
g.indent++
- g.write('$typ_str $tmp = ($typ_str) {')
+ g.write('${typ_str} ${tmp} = (${typ_str}) {')
inf := g.table.sym(info.elem_type).struct_info()
for i, field in inf.fields {
g.zero_struct_field(field)
@@ -717,7 +717,7 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
}
g.writeln('};')
} else {
- g.write('$styp $tmp = ($styp){')
+ g.write('${styp} ${tmp} = (${styp}){')
info := g.table.sym(node.typ).struct_info()
for i, field in info.fields {
g.zero_struct_field(field)
@@ -731,7 +731,7 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
g.writeln('if (${res}.len > 0) {')
g.indent++
for i, field in fields {
- sel := '(*(orm__Primitive*) array_get((*(Array_orm__Primitive*) array_get($res, $idx)), $i))'
+ sel := '(*(orm__Primitive*) array_get((*(Array_orm__Primitive*) array_get(${res}, ${idx})), ${i}))'
sym := g.table.sym(field.typ)
if sym.kind == .struct_ && sym.name != 'time.Time' {
mut sub := node.sub_structs[int(field.typ)]
@@ -748,7 +748,7 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
where_expr.right = ident
sub.where_expr = where_expr
- g.sql_select(sub, expr, '${tmp}.$field.name = ', or_expr)
+ g.sql_select(sub, expr, '${tmp}.${field.name} = ', or_expr)
} else if sym.kind == .array {
mut fkey := ''
for attr in field.attrs {
@@ -756,7 +756,7 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
if attr.kind == .string {
fkey = attr.arg
} else {
- verror("fkey attribute need be string. Try [fkey: '$attr.arg'] instead of [fkey: $attr.arg]")
+ verror("fkey attribute need be string. Try [fkey: '${attr.arg}'] instead of [fkey: ${attr.arg}]")
}
}
}
@@ -800,22 +800,22 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
where_expr: where_expr
}
- g.sql_select(arr, expr, '${tmp}.$field.name = ', or_expr)
+ g.sql_select(arr, expr, '${tmp}.${field.name} = ', or_expr)
} else {
mut typ := sym.cname
- g.writeln('${tmp}.$field.name = *(${sel}._$typ);')
+ g.writeln('${tmp}.${field.name} = *(${sel}._${typ});')
}
}
g.indent--
g.writeln('}')
if node.is_array {
- g.writeln('array_push(&${tmp}_array, _MOV(($typ_str[]){ $tmp }));')
+ g.writeln('array_push(&${tmp}_array, _MOV((${typ_str}[]){ ${tmp} }));')
g.indent--
g.writeln('}')
}
- g.write('*(${g.typ(node.typ)}*) ${tmp_left}.data = $tmp')
+ g.write('*(${g.typ(node.typ)}*) ${tmp_left}.data = ${tmp}')
if node.is_array {
g.write('_array')
}
@@ -825,7 +825,7 @@ fn (mut g Gen) sql_select(node ast.SqlExpr, expr string, left string, or_expr as
g.writeln('}')
}
}
- g.write('$left *(${g.typ(node.typ)}*) ${tmp_left}.data')
+ g.write('${left} *(${g.typ(node.typ)}*) ${tmp_left}.data')
if !g.inside_call {
g.writeln(';')
}
diff --git a/vlib/v/gen/c/str.v b/vlib/v/gen/c/str.v
index e301b29879..bc646e1917 100644
--- a/vlib/v/gen/c/str.v
+++ b/vlib/v/gen/c/str.v
@@ -8,9 +8,9 @@ import v.util
fn (mut g Gen) string_literal(node ast.StringLiteral) {
escaped_val := cescape_nonascii(util.smart_quote(node.val, node.is_raw))
if node.language == .c {
- g.write('"$escaped_val"')
+ g.write('"${escaped_val}"')
} else {
- g.write('_SLIT("$escaped_val")')
+ g.write('_SLIT("${escaped_val}")')
}
}
@@ -102,7 +102,7 @@ fn (mut g Gen) gen_expr_to_string(expr ast.Expr, etype ast.Type) {
is_var_mut := expr.is_auto_deref_var()
str_fn_name := g.get_str_fn(typ)
if is_ptr && !is_var_mut {
- g.write('str_intp(1, _MOV((StrIntpData[]){{_SLIT("&"), $si_s_code ,{.d_s = isnil(')
+ g.write('str_intp(1, _MOV((StrIntpData[]){{_SLIT("&"), ${si_s_code} ,{.d_s = isnil(')
g.expr(expr)
g.write(') ? _SLIT("nil") : ')
}
@@ -116,7 +116,7 @@ fn (mut g Gen) gen_expr_to_string(expr ast.Expr, etype ast.Type) {
if expr.is_fixed {
s := g.typ(expr.typ)
if !expr.has_it {
- g.write('($s)')
+ g.write('(${s})')
}
}
}
diff --git a/vlib/v/gen/c/str_intp.v b/vlib/v/gen/c/str_intp.v
index d88eddb949..b8c3f967de 100644
--- a/vlib/v/gen/c/str_intp.v
+++ b/vlib/v/gen/c/str_intp.v
@@ -24,7 +24,7 @@ fn (mut g Gen) str_format(node ast.StringInterLiteral, i int) (u64, string) {
mut remove_tail_zeros := false
fspec := node.fmts[i]
mut fmt_type := StrIntpType{}
- g.write('/*$fspec $sym*/')
+ g.write('/*${fspec} ${sym}*/')
// upper cases
if (fspec - `A`) <= (`Z` - `A`) {
upper_case = true
@@ -209,13 +209,13 @@ fn (mut g Gen) string_inter_literal(node ast.StringInterLiteral) {
}
}
}
- g.write(' str_intp($node.vals.len, ')
+ g.write(' str_intp(${node.vals.len}, ')
g.write('_MOV((StrIntpData[]){')
for i, val in node.vals {
escaped_val := util.smart_quote(val, false)
if escaped_val.len > 0 {
- g.write('{_SLIT("$escaped_val"), ')
+ g.write('{_SLIT("${escaped_val}"), ')
} else {
g.write('{_SLIT0, ')
}
@@ -227,7 +227,7 @@ fn (mut g Gen) string_inter_literal(node ast.StringInterLiteral) {
}
ft_u64, ft_str := g.str_format(node, i)
- g.write('0x$ft_u64.hex(), {.d_$ft_str = ')
+ g.write('0x${ft_u64.hex()}, {.d_${ft_str} = ')
// for pointers we need a void* cast
if unsafe { ft_str.str[0] } == `p` {
diff --git a/vlib/v/gen/c/struct.v b/vlib/v/gen/c/struct.v
index 505183e061..31065b24b5 100644
--- a/vlib/v/gen/c/struct.v
+++ b/vlib/v/gen/c/struct.v
@@ -16,7 +16,7 @@ fn (mut g Gen) struct_init(node ast.StructInit) {
s := g.go_before_stmt(0)
styp := g.typ(node.update_expr_type)
g.empty_line = true
- g.write('$styp $tmp_update_var = ')
+ g.write('${styp} ${tmp_update_var} = ')
g.expr(node.update_expr)
g.writeln(';')
g.empty_line = false
@@ -49,25 +49,25 @@ fn (mut g Gen) struct_init(node ast.StructInit) {
}
}
if is_anon {
- g.writeln('($styp){')
+ g.writeln('(${styp}){')
} else if g.is_shared && !g.inside_opt_data && !g.is_arraymap_set {
mut shared_typ := node.typ.set_flag(.shared_f)
shared_styp = g.typ(shared_typ)
- g.writeln('($shared_styp*)__dup${shared_styp}(&($shared_styp){.mtx = {0}, .val =($styp){')
+ g.writeln('(${shared_styp}*)__dup${shared_styp}(&(${shared_styp}){.mtx = {0}, .val =(${styp}){')
} else if is_amp || g.inside_cast_in_heap > 0 {
- g.write('($styp*)memdup(&($styp){')
+ g.write('(${styp}*)memdup(&(${styp}){')
} else if node.typ.is_ptr() {
basetyp := g.typ(node.typ.set_nr_muls(0))
if is_multiline {
- g.writeln('&($basetyp){')
+ g.writeln('&(${basetyp}){')
} else {
- g.write('&($basetyp){')
+ g.write('&(${basetyp}){')
}
} else {
if is_multiline {
- g.writeln('($styp){')
+ g.writeln('(${styp}){')
} else {
- g.write('($styp){')
+ g.write('(${styp}){')
}
}
mut inited_fields := map[string]int{}
@@ -132,7 +132,7 @@ fn (mut g Gen) struct_init(node ast.StructInit) {
inside_cast_in_heap := g.inside_cast_in_heap
g.inside_cast_in_heap = 0 // prevent use of pointers in child structs
- g.write('.$embed_name = ')
+ g.write('.${embed_name} = ')
g.struct_init(default_init)
g.inside_cast_in_heap = inside_cast_in_heap // restore value for further struct inits
@@ -182,7 +182,7 @@ fn (mut g Gen) struct_init(node ast.StructInit) {
}
field_name := c_name(field.name)
if field.typ.has_flag(.optional) || field.typ.has_flag(.result) {
- g.write('.$field_name = {EMPTY_STRUCT_INITIALIZATION},')
+ g.write('.${field_name} = {EMPTY_STRUCT_INITIALIZATION},')
initialized = true
continue
}
@@ -190,7 +190,7 @@ fn (mut g Gen) struct_init(node ast.StructInit) {
continue
}
if node.has_update_expr {
- g.write('.$field_name = ')
+ g.write('.${field_name} = ')
if is_update_tmp_var {
g.write(tmp_update_var)
} else {
@@ -250,9 +250,9 @@ fn (mut g Gen) struct_init(node ast.StructInit) {
g.write('}')
if g.is_shared && !g.inside_opt_data && !g.is_arraymap_set {
- g.write('}, sizeof($shared_styp))')
+ g.write('}, sizeof(${shared_styp}))')
} else if is_amp || g.inside_cast_in_heap > 0 {
- g.write(', sizeof($styp))')
+ g.write(', sizeof(${styp}))')
}
}
@@ -265,7 +265,7 @@ fn (mut g Gen) zero_struct_field(field ast.StructField) bool {
}
}
field_name := if sym.language == .v { c_name(field.name) } else { field.name }
- g.write('.$field_name = ')
+ g.write('.${field_name} = ')
if field.has_default_expr {
if sym.kind in [.sum_type, .interface_] {
g.expr_with_cast(field.default_expr, field.default_expr_typ, field.typ)
@@ -298,7 +298,7 @@ fn (mut g Gen) struct_decl(s ast.Struct, name string, is_anon bool) {
return
}
if name.contains('_T_') {
- g.typedefs.writeln('typedef struct $name $name;')
+ g.typedefs.writeln('typedef struct ${name} ${name};')
}
// TODO avoid buffer manip
start_pos := g.type_definitions.len
@@ -309,7 +309,7 @@ fn (mut g Gen) struct_decl(s ast.Struct, name string, is_anon bool) {
for attr in s.attrs {
match attr.name {
'_pack' {
- pre_pragma += '#pragma pack(push, $attr.arg)\n'
+ pre_pragma += '#pragma pack(push, ${attr.arg})\n'
post_pragma += '#pragma pack(pop)'
}
'packed' {
@@ -324,12 +324,12 @@ fn (mut g Gen) struct_decl(s ast.Struct, name string, is_anon bool) {
g.type_definitions.writeln(pre_pragma)
if is_anon {
- g.type_definitions.write_string('\t$name ')
+ g.type_definitions.write_string('\t${name} ')
return
} else if s.is_union {
- g.type_definitions.writeln('union $name {')
+ g.type_definitions.writeln('union ${name} {')
} else {
- g.type_definitions.writeln('struct $name {')
+ g.type_definitions.writeln('struct ${name} {')
}
if s.fields.len > 0 || s.embeds.len > 0 {
for field in s.fields {
@@ -348,7 +348,7 @@ fn (mut g Gen) struct_decl(s ast.Struct, name string, is_anon bool) {
g.done_optionals << base
last_text := g.type_definitions.after(start_pos).clone()
g.type_definitions.go_back_to(start_pos)
- g.typedefs.writeln('typedef struct $styp $styp;')
+ g.typedefs.writeln('typedef struct ${styp} ${styp};')
g.type_definitions.writeln('${g.optional_type_text(styp, base)};')
g.type_definitions.write_string(last_text)
}
@@ -371,7 +371,7 @@ fn (mut g Gen) struct_decl(s ast.Struct, name string, is_anon bool) {
bits_needed++
l >>= 1
}
- size_suffix = ' : $bits_needed'
+ size_suffix = ' : ${bits_needed}'
}
}
}
@@ -384,11 +384,11 @@ fn (mut g Gen) struct_decl(s ast.Struct, name string, is_anon bool) {
// Recursively generate code for this anon struct (this is the field's type)
g.struct_decl(field_sym.info, field_sym.cname, true)
// Now the field's name
- g.type_definitions.writeln(' $field_name$size_suffix;')
+ g.type_definitions.writeln(' ${field_name}${size_suffix};')
}
}
if !field_is_anon {
- g.type_definitions.writeln('\t$volatile_prefix$type_name $field_name$size_suffix;')
+ g.type_definitions.writeln('\t${volatile_prefix}${type_name} ${field_name}${size_suffix};')
}
}
} else {
@@ -401,7 +401,7 @@ fn (mut g Gen) struct_decl(s ast.Struct, name string, is_anon bool) {
} else {
''
}
- g.type_definitions.write_string('}$ti_attrs')
+ g.type_definitions.write_string('}${ti_attrs}')
if !is_anon {
g.type_definitions.write_string(';')
}
@@ -411,7 +411,7 @@ fn (mut g Gen) struct_decl(s ast.Struct, name string, is_anon bool) {
fn (mut g Gen) struct_init_field(sfield ast.StructInitField, language ast.Language) {
field_name := if language == .v { c_name(sfield.name) } else { sfield.name }
- g.write('.$field_name = ')
+ g.write('.${field_name} = ')
field_type_sym := g.table.sym(sfield.typ)
mut cloned := false
if g.is_autofree && !sfield.typ.is_ptr() && field_type_sym.kind in [.array, .string] {
@@ -429,7 +429,7 @@ fn (mut g Gen) struct_init_field(sfield ast.StructInitField, language ast.Langua
g.write('{')
for i in 0 .. fixed_array_info.size {
g.expr(sfield.expr)
- g.write('[$i]')
+ g.write('[${i}]')
if i != fixed_array_info.size - 1 {
g.write(', ')
}
diff --git a/vlib/v/gen/c/text_manipulation.v b/vlib/v/gen/c/text_manipulation.v
index 2394423544..cf41b4ee7d 100644
--- a/vlib/v/gen/c/text_manipulation.v
+++ b/vlib/v/gen/c/text_manipulation.v
@@ -8,9 +8,9 @@ import v.util
fn (mut g Gen) write(s string) {
$if trace_gen ? {
if g.file == unsafe { nil } {
- eprintln('gen file: | last_fn_c_name: ${g.last_fn_c_name:-45} | write: $s')
+ eprintln('gen file: | last_fn_c_name: ${g.last_fn_c_name:-45} | write: ${s}')
} else {
- eprintln('gen file: ${g.file.path:-30} | last_fn_c_name: ${g.last_fn_c_name:-45} | write: $s')
+ eprintln('gen file: ${g.file.path:-30} | last_fn_c_name: ${g.last_fn_c_name:-45} | write: ${s}')
}
}
if g.indent > 0 && g.empty_line {
@@ -25,9 +25,9 @@ fn (mut g Gen) write(s string) {
fn (mut g Gen) writeln(s string) {
$if trace_gen ? {
if g.file == unsafe { nil } {
- eprintln('gen file: | last_fn_c_name: ${g.last_fn_c_name:-45} | writeln: $s')
+ eprintln('gen file: | last_fn_c_name: ${g.last_fn_c_name:-45} | writeln: ${s}')
} else {
- eprintln('gen file: ${g.file.path:-30} | last_fn_c_name: ${g.last_fn_c_name:-45} | writeln: $s')
+ eprintln('gen file: ${g.file.path:-30} | last_fn_c_name: ${g.last_fn_c_name:-45} | writeln: ${s}')
}
}
if g.indent > 0 && g.empty_line {
diff --git a/vlib/v/gen/c/utils.v b/vlib/v/gen/c/utils.v
index d61d2e213d..bac579a438 100644
--- a/vlib/v/gen/c/utils.v
+++ b/vlib/v/gen/c/utils.v
@@ -76,7 +76,7 @@ fn (mut g Gen) unwrap(typ ast.Type) Type {
// generate function variable definition, e.g. `void (*var_name) (int, string)`
fn (mut g Gen) fn_var_signature(return_type ast.Type, arg_types []ast.Type, var_name string) string {
ret_styp := g.typ(return_type)
- mut sig := '$ret_styp (*${c_name(var_name)}) ('
+ mut sig := '${ret_styp} (*${c_name(var_name)}) ('
for j, arg_typ in arg_types {
arg_sym := g.table.sym(arg_typ)
if arg_sym.info is ast.FnType {
@@ -98,11 +98,11 @@ fn (mut g Gen) fn_var_signature(return_type ast.Type, arg_types []ast.Type, var_
// escape quotes for string
fn escape_quotes(val string) string {
bs := '\\'
- unescaped_val := val.replace('$bs$bs', '\x01').replace_each([
- "$bs'",
+ unescaped_val := val.replace('${bs}${bs}', '\x01').replace_each([
+ "${bs}'",
"'",
- '$bs"',
+ '${bs}"',
'"',
])
- return unescaped_val.replace_each(['\x01', '$bs$bs', "'", "$bs'", '"', '$bs"'])
+ return unescaped_val.replace_each(['\x01', '${bs}${bs}', "'", "${bs}'", '"', '${bs}"'])
}
diff --git a/vlib/v/gen/golang/attrs.v b/vlib/v/gen/golang/attrs.v
index ec7e2af663..d544ee2cd8 100644
--- a/vlib/v/gen/golang/attrs.v
+++ b/vlib/v/gen/golang/attrs.v
@@ -17,7 +17,7 @@ pub fn (mut f Gen) attrs(attrs []ast.Attr) {
f.single_line_attrs(sorted_attrs[i..])
break
}
- f.writeln('[$attr]')
+ f.writeln('[${attr}]')
}
}
@@ -40,7 +40,7 @@ pub fn (mut f Gen) single_line_attrs(attrs []ast.Attr, options AttrsOptions) {
if i > 0 {
f.write('; ')
}
- f.write('$attr')
+ f.write('${attr}')
}
f.write(']')
if !options.inline {
@@ -57,7 +57,7 @@ fn inline_attrs_len(attrs []ast.Attr) int {
if i > 0 {
n += 2 // '; '.len
}
- n += '$attr'.len
+ n += '${attr}'.len
}
n++ // ']'.len
return n
diff --git a/vlib/v/gen/golang/golang.v b/vlib/v/gen/golang/golang.v
index a098cde1c2..a36c4e6621 100644
--- a/vlib/v/gen/golang/golang.v
+++ b/vlib/v/gen/golang/golang.v
@@ -64,7 +64,7 @@ pub fn gen(files []&ast.File, table &ast.Table, out_file string, pref &pref.Pref
g.stmts(file.stmts)
}
os.write_file(out_file, g.out.str()) or { panic(err) }
- os.system('go fmt "$out_file"')
+ os.system('go fmt "${out_file}"')
return g.nlines, 0 // g.buf.len
}
@@ -72,11 +72,11 @@ pub fn (mut f Gen) process_file_imports(file &ast.File) {
for imp in file.imports {
f.mod2alias[imp.mod] = imp.alias
for sym in imp.syms {
- f.mod2alias['${imp.mod}.$sym.name'] = sym.name
- f.mod2alias['${imp.mod.all_after_last('.')}.$sym.name'] = sym.name
+ f.mod2alias['${imp.mod}.${sym.name}'] = sym.name
+ f.mod2alias['${imp.mod.all_after_last('.')}.${sym.name}'] = sym.name
f.mod2alias[sym.name] = sym.name
- f.mod2syms['${imp.mod}.$sym.name'] = sym.name
- f.mod2syms['${imp.mod.all_after_last('.')}.$sym.name'] = sym.name
+ f.mod2syms['${imp.mod}.${sym.name}'] = sym.name
+ f.mod2syms['${imp.mod.all_after_last('.')}.${sym.name}'] = sym.name
f.mod2syms[sym.name] = sym.name
f.import_syms_used[sym.name] = false
}
@@ -209,7 +209,7 @@ pub fn (mut f Gen) short_module(name string) string {
mut res := '${f.short_module(generic_levels[0])}'
for i in 1 .. generic_levels.len {
genshorts := generic_levels[i].split(', ').map(f.short_module(it)).join(', ')
- res += '<$genshorts'
+ res += '<${genshorts}'
}
res += '>'
return res
@@ -231,9 +231,9 @@ pub fn (mut f Gen) short_module(name string) string {
}
}
if aname == '' {
- return '$tprefix$symname'
+ return '${tprefix}${symname}'
}
- return '$tprefix${aname}.$symname'
+ return '${tprefix}${aname}.${symname}'
}
//=== Import-related methods ===//
@@ -304,7 +304,7 @@ pub fn (mut f Gen) imports(imports []ast.Import) {
pub fn (f Gen) imp_stmt_str(imp ast.Import) string {
mod := if imp.mod.len == 0 { imp.alias } else { imp.mod }
is_diff := imp.alias != mod && !mod.ends_with('.' + imp.alias)
- mut imp_alias_suffix := if is_diff { ' as $imp.alias' } else { '' }
+ mut imp_alias_suffix := if is_diff { ' as ${imp.alias}' } else { '' }
mut syms := imp.syms.map(it.name).filter(f.import_syms_used[it])
syms.sort()
if syms.len > 0 {
@@ -314,7 +314,7 @@ pub fn (f Gen) imp_stmt_str(imp ast.Import) string {
' {\n\t' + syms.join(',\n\t') + ',\n}'
}
}
- return '$mod$imp_alias_suffix'
+ return '${mod}${imp_alias_suffix}'
}
//=== Node helpers ===//
@@ -550,7 +550,7 @@ pub fn (mut f Gen) expr(node_ ast.Expr) {
f.concat_expr(node)
}
ast.CTempVar {
- eprintln('ast.CTempVar of $node.orig.str() should be generated/used only in cgen')
+ eprintln('ast.CTempVar of ${node.orig.str()} should be generated/used only in cgen')
}
ast.DumpExpr {
f.dump_expr(node)
@@ -728,7 +728,7 @@ pub fn (mut f Gen) assign_stmt(node ast.AssignStmt) {
}
}
f.is_assign = true
- f.write(' $node.op.str() ')
+ f.write(' ${node.op.str()} ')
for i, val in node.right {
f.expr(val)
if i < node.right.len - 1 {
@@ -759,7 +759,7 @@ pub fn (mut f Gen) branch_stmt(node ast.BranchStmt) {
pub fn (mut f Gen) comptime_for(node ast.ComptimeFor) {
typ := f.no_cur_mod(f.table.type_to_str_using_aliases(node.typ, f.mod2alias))
- f.write('\$for $node.val_var in ${typ}.$node.kind.str() {')
+ f.write('\$for ${node.val_var} in ${typ}.${node.kind.str()} {')
f.mark_types_import_as_used(node.typ)
if node.stmts.len > 0 || node.pos.line_nr < node.pos.last_line {
f.writeln('')
@@ -822,7 +822,7 @@ pub fn (mut f Gen) const_decl(node ast.ConstDecl) {
f.writeln('')
}
name := field.name.after('.')
- f.write('$name ')
+ f.write('${name} ')
f.write(strings.repeat(` `, align_infos[align_idx].max - field.name.len))
f.write('= ')
f.expr(field.expr)
@@ -874,12 +874,12 @@ pub fn (mut f Gen) enum_decl(node ast.EnumDecl) {
}
name := node.name.after('.')
if node.fields.len == 0 && node.pos.line_nr == node.pos.last_line {
- f.writeln('enum $name {}\n')
+ f.writeln('enum ${name} {}\n')
return
}
- f.writeln('enum $name {')
+ f.writeln('enum ${name} {')
for field in node.fields {
- f.write('\t$field.name')
+ f.write('\t${field.name}')
if field.has_expr {
f.write(' = ')
f.expr(field.expr)
@@ -927,7 +927,7 @@ fn (mut f Gen) fn_body(node ast.FnDecl) {
pub fn (mut f Gen) for_c_stmt(node ast.ForCStmt) {
if node.label.len > 0 {
- f.write('$node.label: ')
+ f.write('${node.label}: ')
}
f.write('for ')
if node.has_init {
@@ -950,7 +950,7 @@ pub fn (mut f Gen) for_c_stmt(node ast.ForCStmt) {
pub fn (mut f Gen) for_in_stmt(node ast.ForInStmt) {
if node.label.len > 0 {
- f.write('$node.label: ')
+ f.write('${node.label}: ')
}
f.write('for ')
if node.key_var != '' {
@@ -981,7 +981,7 @@ pub fn (mut f Gen) for_in_stmt(node ast.ForInStmt) {
pub fn (mut f Gen) for_stmt(node ast.ForStmt) {
if node.label.len > 0 {
- f.write('$node.label: ')
+ f.write('${node.label}: ')
}
f.write('for ')
f.expr(node.cond)
@@ -1018,7 +1018,7 @@ pub fn (mut f Gen) global_decl(node ast.GlobalDecl) {
}
}
for field in node.fields {
- f.write('$field.name ')
+ f.write('${field.name} ')
f.write(strings.repeat(` `, max - field.name.len))
if field.has_expr {
f.write('= ')
@@ -1045,11 +1045,11 @@ pub fn (mut f Gen) go_expr(node ast.GoExpr) {
}
pub fn (mut f Gen) goto_label(node ast.GotoLabel) {
- f.writeln('$node.name:')
+ f.writeln('${node.name}:')
}
pub fn (mut f Gen) goto_stmt(node ast.GotoStmt) {
- f.writeln('goto $node.name')
+ f.writeln('goto ${node.name}')
}
pub fn (mut f Gen) hash_stmt(node ast.HashStmt) {
@@ -1071,7 +1071,7 @@ pub fn (mut f Gen) interface_decl(node ast.InterfaceDecl) {
f.writeln('')
}
for embed in node.embeds {
- f.write('\t$embed.name')
+ f.write('\t${embed.name}')
f.writeln('')
}
immut_fields := if node.mut_pos < 0 { node.fields } else { node.fields[..node.mut_pos] }
@@ -1109,7 +1109,7 @@ pub fn (mut f Gen) interface_decl(node ast.InterfaceDecl) {
pub fn (mut f Gen) interface_field(field ast.StructField) {
mut ft := f.no_cur_mod(f.table.type_to_str_using_aliases(field.typ, f.mod2alias))
// end_pos := field.pos.pos + field.pos.len
- f.write('\t$field.name $ft')
+ f.write('\t${field.name} ${ft}')
f.writeln('')
f.mark_types_import_as_used(field.typ)
}
@@ -1130,7 +1130,7 @@ pub fn (mut f Gen) module_stmt(mod ast.Module) {
return
}
f.attrs(mod.attrs)
- f.writeln('package $mod.short_name\n')
+ f.writeln('package ${mod.short_name}\n')
if f.import_pos == 0 {
f.import_pos = f.out.len
}
@@ -1173,12 +1173,12 @@ pub fn (mut f Gen) sql_stmt_line(node ast.SqlStmtLine) {
f.write('\t')
match node.kind {
.insert {
- f.writeln('insert $node.object_var_name into $table_name')
+ f.writeln('insert ${node.object_var_name} into ${table_name}')
}
.update {
- f.write('update $table_name set ')
+ f.write('update ${table_name} set ')
for i, col in node.updated_columns {
- f.write('$col = ')
+ f.write('${col} = ')
f.expr(node.update_exprs[i])
if i < node.updated_columns.len - 1 {
f.write(', ')
@@ -1192,15 +1192,15 @@ pub fn (mut f Gen) sql_stmt_line(node ast.SqlStmtLine) {
f.writeln('')
}
.delete {
- f.write('delete from $table_name where ')
+ f.write('delete from ${table_name} where ')
f.expr(node.where_expr)
f.writeln('')
}
.create {
- f.writeln('create table $table_name')
+ f.writeln('create table ${table_name}')
}
.drop {
- f.writeln('drop table $table_name')
+ f.writeln('drop table ${table_name}')
}
}
}
@@ -1219,7 +1219,7 @@ pub fn (mut f Gen) alias_type_decl(node ast.AliasTypeDecl) {
f.write('pub ')
}
ptype := f.table.type_to_str_using_aliases(node.parent_type, f.mod2alias)
- f.write('type $node.name = $ptype')
+ f.write('type ${node.name} = ${ptype}')
f.mark_types_import_as_used(node.parent_type)
}
@@ -1233,7 +1233,7 @@ pub fn (mut f Gen) fn_type_decl(node ast.FnTypeDecl) {
fn_typ_info := typ_sym.info as ast.FnType
fn_info := fn_typ_info.func
fn_name := f.no_cur_mod(node.name)
- f.write('type $fn_name = fn (')
+ f.write('type ${fn_name} = fn (')
for i, arg in fn_info.params {
if arg.is_mut {
f.write(arg.typ.share().str() + ' ')
@@ -1267,7 +1267,7 @@ pub fn (mut f Gen) fn_type_decl(node ast.FnTypeDecl) {
f.mark_types_import_as_used(fn_info.return_type)
ret_str := f.no_cur_mod(f.table.type_to_str_using_aliases(fn_info.return_type,
f.mod2alias))
- f.write(' $ret_str')
+ f.write(' ${ret_str}')
} else if fn_info.return_type.has_flag(.optional) {
f.write(' ?')
} else if fn_info.return_type.has_flag(.result) {
@@ -1283,7 +1283,7 @@ pub fn (mut f Gen) sum_type_decl(node ast.SumTypeDecl) {
if node.is_pub {
f.write('pub ')
}
- f.write('type $node.name')
+ f.write('type ${node.name}')
f.write_generic_types(node.generic_types)
f.write(' = ')
@@ -1339,7 +1339,7 @@ pub fn (mut f Gen) array_init(node ast.ArrayInit) {
}
// `[1,2,3]`
sym := f.table.sym(node.typ)
- f.write('$sym.name{')
+ f.write('${sym.name}{')
// mut inc_indent := false
mut last_line_nr := node.pos.line_nr // to have the same newlines between array elements
if node.pre_cmnts.len > 0 {
@@ -1375,15 +1375,15 @@ pub fn (mut f Gen) as_cast(node ast.AsCast) {
f.mark_types_import_as_used(node.typ)
type_str := f.table.type_to_str_using_aliases(node.typ, f.mod2alias)
f.expr(node.expr)
- f.write(' as $type_str')
+ f.write(' as ${type_str}')
}
pub fn (mut f Gen) assoc(node ast.Assoc) {
f.writeln('{')
f.indent++
- f.writeln('...$node.var_name')
+ f.writeln('...${node.var_name}')
for i, field in node.fields {
- f.write('$field: ')
+ f.write('${field}: ')
f.expr(node.exprs[i])
f.writeln('')
}
@@ -1421,7 +1421,7 @@ pub fn (mut f Gen) call_expr(node ast.CallExpr) {
} else {
mut name := f.short_module(node.name)
f.mark_import_as_used(name)
- f.write('$name')
+ f.write('${name}')
}
}
if node.mod == '' && node.name == '' {
@@ -1525,19 +1525,19 @@ pub fn (mut f Gen) comptime_call(node ast.ComptimeCall) {
if node.method_name == 'html' {
f.write('\$vweb.html()')
} else {
- f.write("\$tmpl('$node.args_var')")
+ f.write("\$tmpl('${node.args_var}')")
}
} else {
if node.is_embed {
if node.embed_file.compression_type == 'none' {
- f.write("\$embed_file('$node.embed_file.rpath')")
+ f.write("\$embed_file('${node.embed_file.rpath}')")
} else {
- f.write("\$embed_file('$node.embed_file.rpath', .$node.embed_file.compression_type)")
+ f.write("\$embed_file('${node.embed_file.rpath}', .${node.embed_file.compression_type})")
}
} else if node.is_env {
- f.write("\$env('$node.args_var')")
+ f.write("\$env('${node.args_var}')")
} else if node.is_pkgconfig {
- f.write("\$pkgconfig('$node.args_var')")
+ f.write("\$pkgconfig('${node.args_var}')")
} else {
inner_args := if node.args_var != '' {
node.args_var
@@ -1545,17 +1545,17 @@ pub fn (mut f Gen) comptime_call(node ast.ComptimeCall) {
node.args.map(it.str()).join(', ')
}
method_expr := if node.has_parens {
- '(${node.method_name}($inner_args))'
+ '(${node.method_name}(${inner_args}))'
} else {
- '${node.method_name}($inner_args)'
+ '${node.method_name}(${inner_args})'
}
- f.write('${node.left}.$$method_expr')
+ f.write('${node.left}.$${method_expr}')
}
}
}
pub fn (mut f Gen) comptime_selector(node ast.ComptimeSelector) {
- f.write('${node.left}.\$($node.field_expr)')
+ f.write('${node.left}.\$(${node.field_expr})')
}
pub fn (mut f Gen) concat_expr(node ast.ConcatExpr) {
@@ -1720,7 +1720,7 @@ pub fn (mut f Gen) infix_expr(node ast.InfixExpr) {
} else if is_array_push {
f.write(' = ')
} else {
- f.write(' $node.op.str() ')
+ f.write(' ${node.op.str()} ')
}
if is_one_val_array_init {
// `var in [val]` => `var == val`
@@ -1770,20 +1770,20 @@ fn split_up_infix(infix_str string, ignore_paren bool, is_cond_infix bool) ([]st
for p in parts {
if is_cond_infix && p in ['&&', '||'] {
if inside_paren {
- conditions[ind] += '$p '
+ conditions[ind] += '${p} '
} else {
pen := if p == '||' { or_pen } else { 5 }
penalties << pen
- conditions << '$p '
+ conditions << '${p} '
ind++
}
} else if !is_cond_infix && p == '+' {
penalties << 5
- conditions[ind] += '$p '
+ conditions[ind] += '${p} '
conditions << ''
ind++
} else {
- conditions[ind] += '$p '
+ conditions[ind] += '${p} '
if ignore_paren {
continue
}
@@ -1993,7 +1993,7 @@ pub fn (mut f Gen) match_expr(node ast.MatchExpr) {
}
pub fn (mut f Gen) offset_of(node ast.OffsetOf) {
- f.write('__offsetof(${f.table.type_to_str_using_aliases(node.struct_type, f.mod2alias)}, $node.field)')
+ f.write('__offsetof(${f.table.type_to_str_using_aliases(node.struct_type, f.mod2alias)}, ${node.field})')
f.mark_types_import_as_used(node.struct_type)
}
@@ -2052,7 +2052,7 @@ pub fn (mut f Gen) postfix_expr(node ast.PostfixExpr) {
if node.op == .question {
f.write(' ?')
} else {
- f.write('$node.op')
+ f.write('${node.op}')
}
if node.is_c2v_prefix {
f.write('$')
@@ -2166,7 +2166,7 @@ pub fn (mut f Gen) sql_expr(node ast.SqlExpr) {
}
}
}
- f.write('from $table_name')
+ f.write('from ${table_name}')
if node.has_where {
f.write(' where ')
f.expr(node.where_expr)
@@ -2198,25 +2198,25 @@ pub fn (mut f Gen) char_literal(node ast.CharLiteral) {
if node.val.len == 1 {
clit := node.val[0]
if clit < 32 || clit > 127 || clit == 92 || clit == 96 {
- f.write('`\\x$clit.hex()`')
+ f.write('`\\x${clit.hex()}`')
return
}
}
- f.write('`$node.val`')
+ f.write('`${node.val}`')
}
pub fn (mut f Gen) string_literal(node ast.StringLiteral) {
if node.is_raw {
- f.write('`$node.val`')
+ f.write('`${node.val}`')
} else {
- unescaped_val := node.val.replace('$golang.bs$golang.bs', '\x01').replace_each([
- "$golang.bs'",
+ unescaped_val := node.val.replace('${golang.bs}${golang.bs}', '\x01').replace_each([
+ "${golang.bs}'",
"'",
- '$golang.bs"',
+ '${golang.bs}"',
'"',
])
- s := unescaped_val.replace_each(['\x01', '$golang.bs$golang.bs', '"', '$golang.bs"'])
- f.write('"$s"')
+ s := unescaped_val.replace_each(['\x01', '${golang.bs}${golang.bs}', '"', '${golang.bs}"'])
+ f.write('"${s}"')
}
}
@@ -2227,7 +2227,7 @@ pub fn (mut f Gen) string_inter_literal(node ast.StringInterLiteral) {
// work too different for the various exprs that are interpolated
f.write(quote)
for i, val in node.vals {
- f.write(val.replace("$golang.bs'", "'"))
+ f.write(val.replace("${golang.bs}'", "'"))
if i >= node.exprs.len {
break
}
diff --git a/vlib/v/gen/golang/struct.v b/vlib/v/gen/golang/struct.v
index 24427e01dd..03d78a50e2 100644
--- a/vlib/v/gen/golang/struct.v
+++ b/vlib/v/gen/golang/struct.v
@@ -43,7 +43,7 @@ pub fn (mut f Gen) struct_decl(node ast.StructDecl) {
for embed in node.embeds {
f.mark_types_import_as_used(embed.typ)
styp := f.table.type_to_str_using_aliases(embed.typ, f.mod2alias)
- f.writeln('\t$styp')
+ f.writeln('\t${styp}')
}
// mut field_align_i := 0
// mut comment_align_i := 0
@@ -74,7 +74,7 @@ pub fn (mut f Gen) struct_decl(node ast.StructDecl) {
}
// end_pos := field.pos.pos + field.pos.len
volatile_prefix := if field.is_volatile { 'volatile ' } else { '' }
- f.write('\t$volatile_prefix$field.name ')
+ f.write('\t${volatile_prefix}${field.name} ')
// before_len := f.line_len
// mut field_align := field_aligns[field_align_i]
// if field_align.line_nr < field.pos.line_nr {
@@ -126,15 +126,15 @@ pub fn (mut f Gen) struct_init(node ast.StructInit) {
if node.fields.len == 0 && !node.has_update_expr {
// `Foo{}` on one line if there are no fields or comments
if node.pre_comments.len == 0 {
- f.write('$name{}')
+ f.write('${name}{}')
} else {
- f.writeln('$name{')
+ f.writeln('${name}{')
f.write('}')
}
f.mark_import_as_used(name)
} else if node.no_keys {
// `Foo{1,2,3}` (short syntax )
- f.write('$name{')
+ f.write('${name}{')
f.mark_import_as_used(name)
if node.has_update_expr {
f.write('...')
@@ -157,7 +157,7 @@ pub fn (mut f Gen) struct_init(node ast.StructInit) {
single_line_fields = false
}
if !use_short_args {
- f.write('$name{')
+ f.write('${name}{')
f.mark_import_as_used(name)
if single_line_fields {
f.write(' ')
@@ -189,7 +189,7 @@ pub fn (mut f Gen) struct_init(node ast.StructInit) {
}
}
for i, field in node.fields {
- f.write('$field.name: ')
+ f.write('${field.name}: ')
f.expr(field.expr)
if single_line_fields {
if i < node.fields.len - 1 {
diff --git a/vlib/v/gen/golang/tests/golang_test.v b/vlib/v/gen/golang/tests/golang_test.v
index dfda3b7c13..cec7bd5561 100644
--- a/vlib/v/gen/golang/tests/golang_test.v
+++ b/vlib/v/gen/golang/tests/golang_test.v
@@ -36,8 +36,8 @@ fn test_golang() {
full_test_path := os.real_path(os.join_path(dir, test))
test_file_name := os.file_name(test)
relative_test_path := full_test_path.replace(vroot + '/', '')
- work_test_path := '$wrkdir/$test_file_name'
- go_out_test_path := '$wrkdir/${test_file_name}.go'
+ work_test_path := '${wrkdir}/${test_file_name}'
+ go_out_test_path := '${wrkdir}/${test_file_name}.go'
cmd := '${os.quoted_path(vexe)} -o ${os.quoted_path(go_out_test_path)} -b go ${os.quoted_path(full_test_path)}'
if is_verbose {
println(cmd)
@@ -48,28 +48,28 @@ fn test_golang() {
eprintln(bench.step_message_fail(cmd))
continue
}
- tmperrfile := '$dir/${test}.tmperr'
+ tmperrfile := '${dir}/${test}.tmperr'
go_basename := $if windows { 'go.exe' } $else { 'go' }
- res := os.execute('$go_basename run ${os.quoted_path(go_out_test_path)} 2> ${os.quoted_path(tmperrfile)}')
+ res := os.execute('${go_basename} run ${os.quoted_path(go_out_test_path)} 2> ${os.quoted_path(tmperrfile)}')
if res.exit_code != 0 {
bench.fail()
- eprintln(bench.step_message_fail('$full_test_path failed to run'))
+ eprintln(bench.step_message_fail('${full_test_path} failed to run'))
eprintln(res.output)
continue
}
- mut expected := os.read_file('$dir/${test}.out') or { panic(err) }
- errfile := '$dir/${test}.err'
+ mut expected := os.read_file('${dir}/${test}.out') or { panic(err) }
+ errfile := '${dir}/${test}.err'
if os.exists(errfile) {
- mut err_expected := os.read_file('$dir/${test}.err') or { panic(err) }
+ mut err_expected := os.read_file('${dir}/${test}.err') or { panic(err) }
err_expected = err_expected.trim_right('\r\n').replace('\r\n', '\n')
errstr := os.read_file(tmperrfile) or { panic(err) }
mut err_found := errstr.trim_right('\r\n').replace('\r\n', '\n')
if err_expected != err_found {
println(term.red('FAIL'))
println('============')
- println('stderr expected: "$err_expected" len=$err_expected.len')
+ println('stderr expected: "${err_expected}" len=${err_expected.len}')
println('============')
- println('stderr found:"$err_found" len=$err_found.len')
+ println('stderr found:"${err_found}" len=${err_found.len}')
println('============\n')
bench.fail()
continue
@@ -82,9 +82,9 @@ fn test_golang() {
if expected != found {
println(term.red('FAIL'))
println('============')
- println('expected: "$expected" len=$expected.len')
+ println('expected: "${expected}" len=${expected.len}')
println('============')
- println('found:"$found" len=$found.len')
+ println('found:"${found}" len=${found.len}')
println('============\n')
bench.fail()
continue
diff --git a/vlib/v/gen/js/array.v b/vlib/v/gen/js/array.v
index d365357706..b40e7feee4 100644
--- a/vlib/v/gen/js/array.v
+++ b/vlib/v/gen/js/array.v
@@ -269,13 +269,13 @@ fn (mut g JsGen) gen_array_sort(node ast.CallExpr) {
g.definitions.writeln('function ${compare_fn}(a,b) {')
c_condition := if comparison_type.sym.has_method('<') {
- '${g.typ(comparison_type.typ)}__lt($left_expr, $right_expr)'
+ '${g.typ(comparison_type.typ)}__lt(${left_expr}, ${right_expr})'
} else if comparison_type.unaliased_sym.has_method('<') {
- '${g.typ(comparison_type.unaliased)}__lt($left_expr, $right_expr)'
+ '${g.typ(comparison_type.unaliased)}__lt(${left_expr}, ${right_expr})'
} else {
'${left_expr}.valueOf() < ${right_expr}.valueOf()'
}
- g.definitions.writeln('\tif ($c_condition) return -1;')
+ g.definitions.writeln('\tif (${c_condition}) return -1;')
g.definitions.writeln('\telse return 1;')
g.definitions.writeln('}\n')
@@ -287,5 +287,5 @@ fn (mut g JsGen) gen_array_sort_call(node ast.CallExpr, compare_fn string) {
g.write('v_sort(')
g.expr(node.left)
g.gen_deref_ptr(node.left_type)
- g.write(',$compare_fn)')
+ g.write(',${compare_fn})')
}
diff --git a/vlib/v/gen/js/auto_eq_methods.v b/vlib/v/gen/js/auto_eq_methods.v
index 967117f44b..6a9e5d3814 100644
--- a/vlib/v/gen/js/auto_eq_methods.v
+++ b/vlib/v/gen/js/auto_eq_methods.v
@@ -89,27 +89,27 @@ fn (mut g JsGen) gen_struct_equality_fn(left_type ast.Type) string {
fn_builder.write_string('a.${field_name}.str == b.${field_name}.str')
} else if field_type.sym.kind == .sum_type && !field.typ.is_ptr() {
eq_fn := g.gen_sumtype_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_sumtype_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_sumtype_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .struct_ && !field.typ.is_ptr() {
eq_fn := g.gen_struct_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_struct_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_struct_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .array && !field.typ.is_ptr() {
eq_fn := g.gen_array_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_arr_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_arr_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .array_fixed && !field.typ.is_ptr() {
eq_fn := g.gen_fixed_array_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_arr_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_arr_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .map && !field.typ.is_ptr() {
eq_fn := g.gen_map_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_map_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_map_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .alias && !field.typ.is_ptr() {
eq_fn := g.gen_alias_equality_fn(field.typ)
- fn_builder.write_string('${eq_fn}_alias_eq(a.$field_name, b.$field_name)')
+ fn_builder.write_string('${eq_fn}_alias_eq(a.${field_name}, b.${field_name})')
} else if field_type.sym.kind == .function {
- fn_builder.write_string('a.$field_name == b.$field_name')
+ fn_builder.write_string('a.${field_name} == b.${field_name}')
} else {
// fallback to vEq for JS types or primitives.
- fn_builder.write_string('vEq(a.$field_name,b.$field_name)')
+ fn_builder.write_string('vEq(a.${field_name},b.${field_name})')
}
}
} else {
@@ -231,7 +231,7 @@ fn (mut g JsGen) gen_fixed_array_equality_fn(left_type ast.Type) string {
g.definitions.writeln(fn_builder.str())
}
fn_builder.writeln('function ${ptr_styp}_arr_eq(a,b) {')
- fn_builder.writeln('\tfor (let i = 0; i < $size; ++i) {')
+ fn_builder.writeln('\tfor (let i = 0; i < ${size}; ++i) {')
// compare every pair of elements of the two fixed arrays
if elem.sym.kind == .string {
fn_builder.writeln('\t\tif (a.arr.get(new int(i)).str != b.arr.get(new int(i)).str) {')
diff --git a/vlib/v/gen/js/auto_str_methods.v b/vlib/v/gen/js/auto_str_methods.v
index ef551c96d0..e7ccdfc51d 100644
--- a/vlib/v/gen/js/auto_str_methods.v
+++ b/vlib/v/gen/js/auto_str_methods.v
@@ -101,7 +101,7 @@ fn (mut g JsGen) final_gen_str(typ StrType) {
g.gen_str_for_thread(sym.info, styp, str_fn_name)
}
else {
- verror("could not generate string method $str_fn_name for type '$styp'")
+ verror("could not generate string method ${str_fn_name} for type '${styp}'")
}
}
}
@@ -202,14 +202,14 @@ fn (mut g JsGen) gen_str_default(sym ast.TypeSymbol, styp string, str_fn_name st
convertor = 'bool'
typename_ = 'bool'
} else {
- panic("could not generate string method for type '$styp'")
+ panic("could not generate string method for type '${styp}'")
}
g.definitions.writeln('function ${str_fn_name}(it) {')
if convertor == 'bool' {
g.definitions.writeln('\tlet tmp1 = string__plus(new string("${styp}("), it.valueOf()? new string("true") : new string("false"));')
} else {
- g.definitions.writeln('\tlet tmp1 = string__plus(new string("${styp}("), new string(${typename_}_str(($convertor)it).str));')
+ g.definitions.writeln('\tlet tmp1 = string__plus(new string("${styp}("), new string(${typename_}_str((${convertor})it).str));')
}
g.definitions.writeln('\tstring tmp2 = string__plus(tmp1, new string(")"));')
g.definitions.writeln('\treturn tmp2;')
@@ -237,7 +237,7 @@ fn (mut g JsGen) gen_str_for_option(typ ast.Type, styp string, str_fn_name strin
g.definitions.writeln('\t} else {')
tmp_str := str_intp_sub('error: %%', 'IError_str(it.err)')
- g.definitions.writeln('\t\tres = $tmp_str;')
+ g.definitions.writeln('\t\tres = ${tmp_str};')
g.definitions.writeln('\t}')
g.definitions.writeln('\treturn ${str_intp_sub('Option(%%)', 'res')};')
@@ -259,7 +259,7 @@ fn (mut g JsGen) gen_str_for_alias(info ast.Alias, styp string, str_fn_name stri
fn (mut g JsGen) gen_str_for_multi_return(info ast.MultiReturn, styp string, str_fn_name string) {
mut fn_builder := strings.new_builder(512)
fn_builder.writeln('function ${str_fn_name}(a) {')
- fn_builder.writeln('\tlet sb = strings__new_builder($info.types.len * 10);')
+ fn_builder.writeln('\tlet sb = strings__new_builder(${info.types.len} * 10);')
fn_builder.writeln('\tstrings__Builder_write_string(sb, new string("("));')
for i, typ in info.types {
sym := g.table.sym(typ)
@@ -268,24 +268,24 @@ fn (mut g JsGen) gen_str_for_multi_return(info ast.MultiReturn, styp string, str
arg_str_fn_name := g.get_str_fn(typ)
if should_use_indent_func(sym.kind) && !sym_has_str_method {
- fn_builder.writeln('\tstrings__Builder_write_string(sb, ${arg_str_fn_name}(a[$i]));')
+ fn_builder.writeln('\tstrings__Builder_write_string(sb, ${arg_str_fn_name}(a[${i}]));')
} else if sym.kind in [.f32, .f64] {
if sym.kind == .f32 {
- tmp_val := str_intp_g32('a[$i]')
- fn_builder.writeln('\tstrings__Builder_write_string(sb, $tmp_val);')
+ tmp_val := str_intp_g32('a[${i}]')
+ fn_builder.writeln('\tstrings__Builder_write_string(sb, ${tmp_val});')
} else {
- tmp_val := str_intp_g64('a[$i]')
- fn_builder.writeln('\tstrings__Builder_write_string(sb, $tmp_val);')
+ tmp_val := str_intp_g64('a[${i}]')
+ fn_builder.writeln('\tstrings__Builder_write_string(sb, ${tmp_val});')
}
} else if sym.kind == .string {
- tmp_str := str_intp_sq('a[$i]')
- fn_builder.writeln('\tstrings__Builder_write_string(sb, $tmp_str);')
+ tmp_str := str_intp_sq('a[${i}]')
+ fn_builder.writeln('\tstrings__Builder_write_string(sb, ${tmp_str});')
} else if sym.kind == .function {
fn_builder.writeln('\tstrings__Builder_write_string(sb, ${arg_str_fn_name}());')
} else {
deref, deref_label := deref_kind(str_method_expects_ptr, is_arg_ptr, typ)
- fn_builder.writeln('\t\tstrings__Builder_write_string(sb, new string("$deref_label"));')
- fn_builder.writeln('\tstrings__Builder_write_string(sb, ${arg_str_fn_name}( a[$i] $deref ));')
+ fn_builder.writeln('\t\tstrings__Builder_write_string(sb, new string("${deref_label}"));')
+ fn_builder.writeln('\tstrings__Builder_write_string(sb, ${arg_str_fn_name}( a[${i}] ${deref} ));')
}
if i != info.types.len - 1 {
fn_builder.writeln('\tstrings__Builder_write_string(sb, new string(", "));')
@@ -305,10 +305,10 @@ fn (mut g JsGen) gen_str_for_enum(info ast.Enum, styp string, str_fn_name string
// Enums tagged with `[flag]` are special in that they can be a combination of enum values
if info.is_flag {
clean_name := util.strip_main_name(styp.replace('__', '.'))
- g.definitions.writeln('\tlet ret = new string("$clean_name{");')
+ g.definitions.writeln('\tlet ret = new string("${clean_name}{");')
g.definitions.writeln('\tlet first = 1;')
for i, val in info.vals {
- g.definitions.writeln('\tif (it & (1 << $i)) {if (!first) {ret = string__plus(ret, new string(" | "));} ret = string__plus(ret, new string(".$val")); first = 0;}')
+ g.definitions.writeln('\tif (it & (1 << ${i})) {if (!first) {ret = string__plus(ret, new string(" | "));} ret = string__plus(ret, new string(".${val}")); first = 0;}')
}
g.definitions.writeln('\tret = string__plus(ret, new string("}"));')
g.definitions.writeln('\treturn ret;')
@@ -322,7 +322,7 @@ fn (mut g JsGen) gen_str_for_enum(info ast.Enum, styp string, str_fn_name string
} else if info.is_multi_allowed {
seen << val
}
- g.definitions.writeln('\t\tcase ${s}.$val: return new string("$val");')
+ g.definitions.writeln('\t\tcase ${s}.${val}: return new string("${val}");')
}
g.definitions.writeln('\t\tdefault: return new string("unknown enum value");')
g.definitions.writeln('\t}')
@@ -352,7 +352,7 @@ fn (mut g JsGen) gen_str_for_interface(info ast.Interface, styp string, str_fn_n
mut func_name := g.get_str_fn(typ)
sym_has_str_method, str_method_expects_ptr, _ := subtype.str_method_info()
if should_use_indent_func(subtype.kind) && !sym_has_str_method {
- func_name = 'indent_$func_name'
+ func_name = 'indent_${func_name}'
}
deref := if sym_has_str_method && str_method_expects_ptr { ' ' } else { '.valueOf()' }
// str_intp
@@ -361,13 +361,13 @@ fn (mut g JsGen) gen_str_for_interface(info ast.Interface, styp string, str_fn_n
fn_builder.write_string('\tif (x.val instanceof string)')
fn_builder.write_string(' return "new string(${clean_interface_v_type_name}(" + x.val.str + ")");')
} else {
- mut val := '${func_name}(x $deref'
+ mut val := '${func_name}(x ${deref}'
if should_use_indent_func(subtype.kind) && !sym_has_str_method {
val += ', indent_count'
}
val += ')'
- fn_builder.write_string('\tif (x.val instanceof $subtype.cname)')
+ fn_builder.write_string('\tif (x.val instanceof ${subtype.cname})')
fn_builder.write_string(' return new string("${clean_interface_v_type_name}(" + ${val}.str + ")");\n')
}
}
@@ -391,9 +391,9 @@ fn (mut g JsGen) gen_str_for_union_sum_type(info ast.SumType, styp string, str_f
if typ.is_ptr() { '.valueOf()' } else { ' ' }
}
if should_use_indent_func(sym.kind) && !sym_has_str_method {
- func_name = 'indent_$func_name'
+ func_name = 'indent_${func_name}'
}
- fn_builder.writeln('if (x instanceof $typ_str) { return ${func_name}(x$deref); }')
+ fn_builder.writeln('if (x instanceof ${typ_str}) { return ${func_name}(x${deref}); }')
}
fn_builder.writeln('builtin__panic(new string("unknown sum type value"));\n}')
g.definitions.writeln(fn_builder.str())
@@ -418,9 +418,9 @@ fn (mut g JsGen) fn_decl_str(info ast.FnType) string {
} else if info.func.return_type != ast.void_type {
x := util.strip_main_name(g.table.get_type_name(g.unwrap_generic(info.func.return_type)))
if info.func.return_type.has_flag(.optional) {
- fn_str += ' ?$x'
+ fn_str += ' ?${x}'
} else {
- fn_str += ' $x'
+ fn_str += ' ${x}'
}
}
return fn_str
@@ -433,13 +433,13 @@ fn (mut g JsGen) gen_str_for_fn_type(info ast.FnType, styp string, str_fn_name s
fn (mut g JsGen) gen_str_for_chan(info ast.Chan, styp string, str_fn_name string) {
elem_type_name := util.strip_main_name(g.table.get_type_name(g.unwrap_generic(info.elem_type)))
- g.definitions.writeln('function ${str_fn_name}(x) { return sync__Channel_auto_str(x, new string("$elem_type_name")); }')
+ g.definitions.writeln('function ${str_fn_name}(x) { return sync__Channel_auto_str(x, new string("${elem_type_name}")); }')
}
fn (mut g JsGen) gen_str_for_thread(info ast.Thread, styp string, str_fn_name string) {
ret_type_name := util.strip_main_name(g.table.get_type_name(info.return_type))
- g.definitions.writeln('function ${str_fn_name}(_) { return new string("thread($ret_type_name)");}')
+ g.definitions.writeln('function ${str_fn_name}(_) { return new string("thread(${ret_type_name})");}')
}
[inline]
@@ -502,8 +502,8 @@ fn (mut g JsGen) gen_str_for_array(info ast.Array, styp string, str_fn_name stri
// Note: we need to take account of whether the user has defined
// `fn (x T) str() {` or `fn (x &T) str() {`, and convert accordingly
deref, deref_label := deref_kind(str_method_expects_ptr, is_elem_ptr, typ)
- g.definitions.writeln('\t\tstrings__Builder_write_string(sb, new string("$deref_label"));')
- g.definitions.writeln('\t\tlet x = ${elem_str_fn_name}( $deref it);')
+ g.definitions.writeln('\t\tstrings__Builder_write_string(sb, new string("${deref_label}"));')
+ g.definitions.writeln('\t\tlet x = ${elem_str_fn_name}( ${deref} it);')
}
}
g.definitions.writeln('\t\tstrings__Builder_write_string(sb, x);')
@@ -532,9 +532,9 @@ fn (mut g JsGen) gen_str_for_array_fixed(info ast.ArrayFixed, styp string, str_f
g.definitions.writeln('function ${str_fn_name}(a) { return indent_${str_fn_name}(a, 0);}')
g.definitions.writeln('function indent_${str_fn_name}(a, indent_count) {')
- g.definitions.writeln('\tlet sb = strings__new_builder($info.size * 10);')
+ g.definitions.writeln('\tlet sb = strings__new_builder(${info.size} * 10);')
g.definitions.writeln('\tstrings__Builder_write_string(sb, new string("["));')
- g.definitions.writeln('\tfor (let i = 0; i < $info.size; ++i) {')
+ g.definitions.writeln('\tfor (let i = 0; i < ${info.size}; ++i) {')
if sym.kind == .function {
g.definitions.writeln('\t\tstring x = ${elem_str_fn_name}();')
g.definitions.writeln('\t\tstrings__Builder_write_string(sb, x);')
@@ -542,11 +542,11 @@ fn (mut g JsGen) gen_str_for_array_fixed(info ast.ArrayFixed, styp string, str_f
deref, deref_label := deref_kind(str_method_expects_ptr, is_elem_ptr, typ)
if should_use_indent_func(sym.kind) && !sym_has_str_method {
if is_elem_ptr {
- g.definitions.writeln('\t\tstrings__Builder_write_string(sb, new string("$deref_label"));')
+ g.definitions.writeln('\t\tstrings__Builder_write_string(sb, new string("${deref_label}"));')
g.definitions.writeln('\t\tif ( 0 == a.arr.get(new int(i)) ) {')
g.definitions.writeln('\t\t\tstrings__Builder_write_string(sb, new string("0"));')
g.definitions.writeln('\t\t}else{')
- g.definitions.writeln('\t\t\tstrings__Builder_write_string(sb, ${elem_str_fn_name}(a.arr.get(new int(i)) $deref) );')
+ g.definitions.writeln('\t\t\tstrings__Builder_write_string(sb, ${elem_str_fn_name}(a.arr.get(new int(i)) ${deref}) );')
g.definitions.writeln('\t\t}')
} else {
g.definitions.writeln('\t\tstrings__Builder_write_string(sb, ${elem_str_fn_name}(a.arr.get(new int(i))) );')
@@ -559,7 +559,7 @@ fn (mut g JsGen) gen_str_for_array_fixed(info ast.ArrayFixed, styp string, str_f
g.definitions.writeln('\t\tlet x = new string("\`" + String.fromCharCode(a.arr.get(new int(i)).val) + "\`");')
g.definitions.writeln('\t\tstrings__Builder_write_string(sb,x);')
} else {
- g.definitions.writeln('\t\tstrings__Builder_write_string(sb, ${elem_str_fn_name}(a.arr.get(new int(i)) $deref));')
+ g.definitions.writeln('\t\tstrings__Builder_write_string(sb, ${elem_str_fn_name}(a.arr.get(new int(i)) ${deref}));')
}
}
g.definitions.writeln('\t\tif (i < ${info.size - 1}) {')
@@ -705,12 +705,12 @@ fn (mut g JsGen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name st
clean_struct_v_type_name = util.strip_main_name(clean_struct_v_type_name)
// generate ident / indent length = 4 spaces
if info.fields.len == 0 {
- fn_builder.writeln('\treturn new string("$clean_struct_v_type_name{}");')
+ fn_builder.writeln('\treturn new string("${clean_struct_v_type_name}{}");')
fn_builder.writeln('}')
return
}
- fn_builder.writeln('\tlet res = /*struct name*/new string("$clean_struct_v_type_name{\\n")')
+ fn_builder.writeln('\tlet res = /*struct name*/new string("${clean_struct_v_type_name}{\\n")')
for i, field in info.fields {
mut ptr_amp := if field.typ.is_ptr() { '&' } else { '' }
@@ -731,9 +731,9 @@ fn (mut g JsGen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name st
sym := g.table.sym(g.unwrap_generic(field.typ))
// first fields doesn't need \n
if i == 0 {
- fn_builder.write_string('res.str += " $field.name: $ptr_amp$prefix" + ')
+ fn_builder.write_string('res.str += " ${field.name}: ${ptr_amp}${prefix}" + ')
} else {
- fn_builder.write_string('res.str += "\\n $field.name: $ptr_amp$prefix" + ')
+ fn_builder.write_string('res.str += "\\n ${field.name}: ${ptr_amp}${prefix}" + ')
}
// custom methods management
@@ -747,7 +747,7 @@ fn (mut g JsGen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name st
mut func := struct_auto_str_func(mut g, sym, field.typ, field_styp_fn_name, field.name)
if field.typ in ast.cptr_types {
- func = '(voidptr) it.$field.name'
+ func = '(voidptr) it.${field.name}'
} else if field.typ.is_ptr() {
// reference types can be "nil"
fn_builder.write_string('isnil(it.${g.js_name(field.name)})')
@@ -763,7 +763,7 @@ fn (mut g JsGen) gen_str_for_struct(info ast.Struct, styp string, str_fn_name st
} else {
// manage C charptr
if field.typ in ast.charptr_types {
- fn_builder.write_string('tos4((byteptr)$func)')
+ fn_builder.write_string('tos4((byteptr)${func})')
} else {
if field.typ.is_ptr() && sym.kind == .struct_ {
fn_builder.write_string('(indent_count > 25)? new string("") : ')
@@ -787,12 +787,12 @@ fn struct_auto_str_func(mut g JsGen, sym &ast.TypeSymbol, field_type ast.Type, f
} else if should_use_indent_func(sym.kind) {
mut obj := 'it.${g.js_name(field_name)}'
if field_type.is_ptr() && !expects_ptr {
- obj = '*$obj'
+ obj = '*${obj}'
}
if has_custom_str {
- return '${fn_name}($obj)'
+ return '${fn_name}(${obj})'
}
- return 'indent_${fn_name}($obj, indent_count + 1)'
+ return 'indent_${fn_name}(${obj}, indent_count + 1)'
} else if sym.kind in [.array, .array_fixed, .map, .sum_type] {
if has_custom_str {
return '${fn_name}(it.${g.js_name(field_name)})'
@@ -812,18 +812,18 @@ fn struct_auto_str_func(mut g JsGen, sym &ast.TypeSymbol, field_type ast.Type, f
// ptr int can be "nil", so this needs to be casted to a string
if sym.kind == .f32 {
return 'str_intp(1, _MOV((StrIntpData[]){
- {_SLIT0, $si_g32_code, {.d_f32 = *$method_str }}
+ {_SLIT0, ${si_g32_code}, {.d_f32 = *${method_str} }}
}))'
} else if sym.kind == .f64 {
return 'str_intp(1, _MOV((StrIntpData[]){
- {_SLIT0, $si_g64_code, {.d_f64 = *$method_str }}
+ {_SLIT0, ${si_g64_code}, {.d_f64 = *${method_str} }}
}))'
} else if sym.kind == .u64 {
fmt_type := StrIntpType.si_u64
- return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, ${u32(fmt_type) | 0xfe00}, {.d_u64 = *$method_str }}}))'
+ return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, ${u32(fmt_type) | 0xfe00}, {.d_u64 = *${method_str} }}}))'
}
fmt_type := StrIntpType.si_i32
- return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, ${u32(fmt_type) | 0xfe00}, {.d_i32 = *$method_str }}}))'
+ return 'str_intp(1, _MOV((StrIntpData[]){{_SLIT0, ${u32(fmt_type) | 0xfe00}, {.d_i32 = *${method_str} }}}))'
}
return method_str
}
diff --git a/vlib/v/gen/js/builtin_types.v b/vlib/v/gen/js/builtin_types.v
index 1de85d1c1a..bcdc7211af 100644
--- a/vlib/v/gen/js/builtin_types.v
+++ b/vlib/v/gen/js/builtin_types.v
@@ -17,7 +17,7 @@ fn (mut g JsGen) to_js_typ_def_val(s string) string {
fn (mut g JsGen) copy_val(t ast.Type, tmp string) string {
fun := g.get_copy_fn(t)
temp := g.new_tmp_var()
- g.writeln('let $temp = ${fun}($tmp);')
+ g.writeln('let ${temp} = ${fun}(${tmp});')
return temp
}
@@ -27,13 +27,13 @@ fn (mut g JsGen) to_js_typ_val(t ast.Type) string {
mut prefix := 'new '
match sym.kind {
.i8, .i16, .int, .i64, .u8, .u16, .u32, .u64, .f32, .f64, .int_literal, .float_literal {
- styp = '$prefix${g.sym_to_js_typ(sym)}(0)'
+ styp = '${prefix}${g.sym_to_js_typ(sym)}(0)'
}
.bool {
- styp = '$prefix${g.sym_to_js_typ(sym)}(false)'
+ styp = '${prefix}${g.sym_to_js_typ(sym)}(false)'
}
.string {
- styp = '$prefix${g.sym_to_js_typ(sym)}("")'
+ styp = '${prefix}${g.sym_to_js_typ(sym)}("")'
}
.map {
styp = 'new map(new Map())'
@@ -191,7 +191,7 @@ pub fn (mut g JsGen) doc_typ(t ast.Type) string {
info := sym.info as ast.Map
key := g.typ(info.key_type)
val := g.typ(info.value_type)
- styp = 'Map<$key, $val>'
+ styp = 'Map<${key}, ${val}>'
}
.any {
styp = 'any'
@@ -206,7 +206,7 @@ pub fn (mut g JsGen) doc_typ(t ast.Type) string {
info := sym.info as ast.MultiReturn
types := info.types.map(g.typ(it))
joined := types.join(', ')
- styp = '[$joined]'
+ styp = '[${joined}]'
}
.sum_type {
// TODO: Implement sumtypes
@@ -215,7 +215,7 @@ pub fn (mut g JsGen) doc_typ(t ast.Type) string {
.alias {
fsym := g.table.final_sym(t)
name := g.js_name(fsym.name)
- styp += '$name'
+ styp += '${name}'
}
.enum_ {
// Note: We could declare them as TypeScript enums but TS doesn't like
@@ -257,7 +257,7 @@ pub fn (mut g JsGen) doc_typ(t ast.Type) string {
fn (mut g JsGen) fn_typ(args []ast.Param, return_type ast.Type) string {
mut res := '('
for i, arg in args {
- res += '$arg.name: ${g.typ(arg.typ)}'
+ res += '${arg.name}: ${g.typ(arg.typ)}'
if i < args.len - 1 {
res += ', '
}
@@ -276,7 +276,7 @@ fn (mut g JsGen) struct_typ(s string) string {
if i == 0 {
styp = v
} else {
- styp += '["$v"]'
+ styp += '["${v}"]'
}
}
if ns in ['', g.ns.name] {
@@ -299,26 +299,26 @@ struct BuiltinPrototypeConfig {
}
fn (mut g JsGen) gen_builtin_prototype(c BuiltinPrototypeConfig) {
- g.writeln('function ${c.typ_name}($c.val_name) { if ($c.val_name === undefined) { $c.val_name = $c.default_value; }$c.constructor }')
+ g.writeln('function ${c.typ_name}(${c.val_name}) { if (${c.val_name} === undefined) { ${c.val_name} = ${c.default_value}; }${c.constructor} }')
g.writeln('${c.typ_name}.prototype = {')
g.inc_indent()
- g.writeln('$c.val_name: $c.default_value,')
+ g.writeln('${c.val_name}: ${c.default_value},')
if c.extras.len > 0 {
- g.writeln('$c.extras,')
+ g.writeln('${c.extras},')
}
if g.pref.output_es5 {
- g.writeln('valueOf: (function() { return $c.value_of }).bind(this),')
- g.writeln('toString: (function() { return $c.to_string }).bind(this),')
- g.writeln('\$toJS: (function() { return $c.to_jsval }).bind(this), ')
+ g.writeln('valueOf: (function() { return ${c.value_of} }).bind(this),')
+ g.writeln('toString: (function() { return ${c.to_string} }).bind(this),')
+ g.writeln('\$toJS: (function() { return ${c.to_jsval} }).bind(this), ')
if c.has_strfn {
g.writeln('str: (function() { return new string(this.toString())).bind(this) }')
}
// g.writeln('eq: (function(other) { return $c.eq }).bind(this),')
} else {
- g.writeln('valueOf() { return $c.value_of },')
- g.writeln('toString() { return $c.to_string },')
- g.writeln('\$toJS() { return $c.to_jsval }, ')
+ g.writeln('valueOf() { return ${c.value_of} },')
+ g.writeln('toString() { return ${c.to_string} },')
+ g.writeln('\$toJS() { return ${c.to_jsval} }, ')
if c.has_strfn {
g.writeln('str() { return new string(this.toString()) }')
}
@@ -326,7 +326,7 @@ fn (mut g JsGen) gen_builtin_prototype(c BuiltinPrototypeConfig) {
}
g.dec_indent()
g.writeln('};\n')
- g.writeln('function ${c.typ_name}__eq(self,other) { return $c.eq; } ')
+ g.writeln('function ${c.typ_name}__eq(self,other) { return ${c.eq}; } ')
}
// generate builtin type definitions, used for casting and methods.
diff --git a/vlib/v/gen/js/comptime.v b/vlib/v/gen/js/comptime.v
index 97ac659169..40ed9bd296 100644
--- a/vlib/v/gen/js/comptime.v
+++ b/vlib/v/gen/js/comptime.v
@@ -25,21 +25,21 @@ fn (mut g JsGen) comptime_if(node ast.IfExpr) {
}
if node.is_expr {
- print('$branch.stmts')
+ print('${branch.stmts}')
len := branch.stmts.len
if len > 0 {
last := branch.stmts.last() as ast.ExprStmt
if len > 1 {
tmp := g.new_tmp_var()
g.inc_indent()
- g.writeln('let $tmp;')
+ g.writeln('let ${tmp};')
g.writeln('{')
g.stmts(branch.stmts[..len - 1])
- g.write('\t$tmp = ')
+ g.write('\t${tmp} = ')
g.stmt(last)
g.writeln('}')
g.dec_indent()
- g.writeln('$tmp;')
+ g.writeln('${tmp};')
} else {
g.stmt(last)
}
@@ -77,14 +77,14 @@ fn (mut g JsGen) comptime_if_cond(cond ast.Expr, pkg_exist bool) bool {
verror(err.msg())
return false
}
- g.write('$ifdef')
+ g.write('${ifdef}')
return true
}
ast.InfixExpr {
match cond.op {
.and, .logical_or {
l := g.comptime_if_cond(cond.left, pkg_exist)
- g.write(' $cond.op ')
+ g.write(' ${cond.op} ')
r := g.comptime_if_cond(cond.right, pkg_exist)
return if cond.op == .and { l && r } else { l || r }
}
@@ -126,7 +126,7 @@ fn (mut g JsGen) comptime_if_cond(cond ast.Expr, pkg_exist bool) bool {
//}
}
} else if left is ast.SelectorExpr {
- name = '${left.expr}.$left.field_name'
+ name = '${left.expr}.${left.field_name}'
exp_type = g.comptime_var_type_map[name]
} else if left is ast.TypeNode {
// this is only allowed for generics currently, otherwise blocked by checker
@@ -134,10 +134,10 @@ fn (mut g JsGen) comptime_if_cond(cond ast.Expr, pkg_exist bool) bool {
}
if cond.op == .key_is {
- g.write('$exp_type == $got_type')
+ g.write('${exp_type} == ${got_type}')
return exp_type == got_type
} else {
- g.write('$exp_type != $got_type')
+ g.write('${exp_type} != ${got_type}')
return exp_type != got_type
}
}
@@ -153,11 +153,11 @@ fn (mut g JsGen) comptime_if_cond(cond ast.Expr, pkg_exist bool) bool {
}
ast.Ident {
ifdef := g.comptime_if_to_ifdef(cond.name, false) or { 'true' } // handled in checker
- g.write('$ifdef')
+ g.write('${ifdef}')
return true
}
ast.ComptimeCall {
- g.write('$pkg_exist')
+ g.write('${pkg_exist}')
return true
}
else {
@@ -308,9 +308,9 @@ fn (mut g JsGen) comptime_if_to_ifdef(name string, is_comptime_optional bool) !s
else {
if is_comptime_optional
|| (g.pref.compile_defines_all.len > 0 && name in g.pref.compile_defines_all) {
- return 'checkDefine("CUSTOM_DEFINE_$name")'
+ return 'checkDefine("CUSTOM_DEFINE_${name}")'
}
- return error('bad os ifdef name "$name"') // should never happen, caught in the checker
+ return error('bad os ifdef name "${name}"') // should never happen, caught in the checker
}
}
return error('none')
diff --git a/vlib/v/gen/js/deep_copy.v b/vlib/v/gen/js/deep_copy.v
index 9e20095820..7d78012e46 100644
--- a/vlib/v/gen/js/deep_copy.v
+++ b/vlib/v/gen/js/deep_copy.v
@@ -60,16 +60,16 @@ fn (mut g JsGen) gen_copy_for_multi_return(info ast.MultiReturn, styp string, co
if sym.kind in [.f32, .f64] {
if sym.kind == .f32 {
- fn_builder.writeln('\tarr.push(new f32(a[$i].val));')
+ fn_builder.writeln('\tarr.push(new f32(a[${i}].val));')
} else {
- fn_builder.writeln('\tarr.push(new f64(a[$i].val));')
+ fn_builder.writeln('\tarr.push(new f64(a[${i}].val));')
}
} else if sym.kind == .string {
- fn_builder.writeln('\tarr.push(new string(a[$i].str +""));')
+ fn_builder.writeln('\tarr.push(new string(a[${i}].str +""));')
} else if sym.kind == .function {
- fn_builder.writeln('\tarr.push(a[$i]);')
+ fn_builder.writeln('\tarr.push(a[${i}]);')
} else {
- fn_builder.writeln('\tarr.push(${arg_copy_fn_name}(a[$i]));')
+ fn_builder.writeln('\tarr.push(${arg_copy_fn_name}(a[${i}]));')
}
}
fn_builder.writeln('\treturn arr;')
@@ -87,7 +87,7 @@ fn (mut g JsGen) gen_copy_for_union_sum_type(info ast.SumType, styp string, copy
for typ in info.variants {
typ_str := g.typ(typ)
mut func_name := g.get_copy_fn(typ)
- fn_builder.writeln('if (x instanceof $typ_str) { return ${func_name}(x); }')
+ fn_builder.writeln('if (x instanceof ${typ_str}) { return ${func_name}(x); }')
}
fn_builder.writeln('builtin__panic(new string("unknown sum type value"));\n}')
g.definitions.writeln(fn_builder.str())
@@ -134,19 +134,19 @@ fn (mut g JsGen) gen_copy_for_interface(info ast.Interface, styp string, copy_fn
}
fn (mut g JsGen) gen_copy_for_fn_type(info ast.FnType, styp string, copy_fn_name string) {
- g.definitions.writeln('function $copy_fn_name (x) { return x; }')
+ g.definitions.writeln('function ${copy_fn_name} (x) { return x; }')
}
fn (mut g JsGen) gen_copy_for_array(info ast.Array, styp string, copy_fn_name string) {
- g.definitions.writeln('function $copy_fn_name (x) { return x; }')
+ g.definitions.writeln('function ${copy_fn_name} (x) { return x; }')
}
fn (mut g JsGen) gen_copy_for_array_fixed(info ast.ArrayFixed, styp string, copy_fn_name string) {
- g.definitions.writeln('function $copy_fn_name (x) { return x; }')
+ g.definitions.writeln('function ${copy_fn_name} (x) { return x; }')
}
fn (mut g JsGen) gen_copy_for_map(info ast.Map, styp string, copy_fn_name string) {
- g.definitions.writeln('function $copy_fn_name (x) { return x; }')
+ g.definitions.writeln('function ${copy_fn_name} (x) { return x; }')
}
fn (mut g JsGen) gen_copy_for_struct(info ast.Struct, styp string, copy_fn_name string) {
@@ -238,7 +238,7 @@ fn (mut g JsGen) final_gen_copy(typ StrType) {
g.gen_copy_for_interface(sym.info, styp, copy_fn_name)
}
else {
- verror("could not generate string method $copy_fn_name for type '$styp'")
+ verror("could not generate string method ${copy_fn_name} for type '${styp}'")
}
}
}
diff --git a/vlib/v/gen/js/fn.v b/vlib/v/gen/js/fn.v
index a7346a8e73..f8fff9eb76 100644
--- a/vlib/v/gen/js/fn.v
+++ b/vlib/v/gen/js/fn.v
@@ -56,7 +56,7 @@ fn (mut g JsGen) js_mname(name_ string) string {
if !is_js {
for i, p in parts {
if p in js_reserved {
- parts[i] = 'v_$p'
+ parts[i] = 'v_${p}'
}
}
}
@@ -119,9 +119,9 @@ fn (mut g JsGen) js_call(node ast.CallExpr) {
.propagate_option {
panicstr := '`optional not set (\${err + ""})`'
if g.file.mod.name == 'main' && g.fn_decl.name == 'main.main' {
- g.writeln('return builtin__panic($panicstr)')
+ g.writeln('return builtin__panic(${panicstr})')
} else {
- g.writeln('throw new Option({ state: new u8(2), err: error(new string($panicstr)) });')
+ g.writeln('throw new Option({ state: new u8(2), err: error(new string(${panicstr})) });')
}
}
else {}
@@ -179,9 +179,9 @@ fn (mut g JsGen) js_method_call(node ast.CallExpr) {
.propagate_option {
panicstr := '`optional not set (\${err + ""})`'
if g.file.mod.name == 'main' && g.fn_decl.name == 'main.main' {
- g.writeln('return builtin__panic($panicstr)')
+ g.writeln('return builtin__panic(${panicstr})')
} else {
- g.writeln('throw new option({ state: new u8(2), err: error(new string($panicstr)) });')
+ g.writeln('throw new option({ state: new u8(2), err: error(new string(${panicstr})) });')
}
}
else {}
@@ -330,7 +330,7 @@ fn (mut g JsGen) method_call(node ast.CallExpr) {
g.expr(it.left)
g.write('.promise')
} else {
- mut name := util.no_dots('${receiver_type_name}_$node.name')
+ mut name := util.no_dots('${receiver_type_name}_${node.name}')
name = g.generic_fn_name(node.concrete_types, name)
g.write('${name}(')
@@ -370,7 +370,7 @@ fn (mut g JsGen) method_call(node ast.CallExpr) {
.propagate_option {
panicstr := '`optional not set (\${err.valueOf().msg})`'
if g.file.mod.name == 'main' && g.fn_decl.name == 'main.main' {
- g.writeln('return builtin__panic($panicstr)')
+ g.writeln('return builtin__panic(${panicstr})')
} else {
g.writeln('js_throw(err)')
}
@@ -408,7 +408,7 @@ fn (mut g JsGen) gen_call_expr(it ast.CallExpr) {
is_print := name in ['print', 'println', 'eprint', 'eprintln', 'panic']
if name in js.builtin_functions {
- name = 'builtin__$name'
+ name = 'builtin__${name}'
}
print_method := name
ret_sym := g.table.sym(it.return_type)
@@ -429,7 +429,7 @@ fn (mut g JsGen) gen_call_expr(it ast.CallExpr) {
mut typ := node.args[0].typ
expr := node.args[0].expr
- g.write('$print_method (')
+ g.write('${print_method} (')
g.gen_expr_to_string(expr, typ)
g.write(')')
return
@@ -471,7 +471,7 @@ fn (mut g JsGen) gen_call_expr(it ast.CallExpr) {
.propagate_option {
panicstr := '`optional not set (\${err.valueOf().msg})`'
if g.file.mod.name == 'main' && g.fn_decl.name == 'main.main' {
- g.writeln('return builtin__panic($panicstr)')
+ g.writeln('return builtin__panic(${panicstr})')
} else {
g.writeln('js_throw(err)')
}
@@ -516,16 +516,16 @@ fn (mut g JsGen) is_used_by_main(node ast.FnDecl) bool {
fkey := node.fkey()
is_used_by_main = g.table.used_fns[fkey]
$if trace_skip_unused_fns ? {
- println('> is_used_by_main: $is_used_by_main | node.name: $node.name | fkey: $fkey | node.is_method: $node.is_method')
+ println('> is_used_by_main: ${is_used_by_main} | node.name: ${node.name} | fkey: ${fkey} | node.is_method: ${node.is_method}')
}
if !is_used_by_main {
$if trace_skip_unused_fns_in_js_code ? {
- g.writeln('// trace_skip_unused_fns_in_js_code, $node.name, fkey: $fkey')
+ g.writeln('// trace_skip_unused_fns_in_js_code, ${node.name}, fkey: ${fkey}')
}
}
} else {
$if trace_skip_unused_fns_in_js_code ? {
- g.writeln('// trace_skip_unused_fns_in_js_code, $node.name, fkey: $node.fkey()')
+ g.writeln('// trace_skip_unused_fns_in_js_code, ${node.name}, fkey: ${node.fkey()}')
}
}
return is_used_by_main
@@ -594,7 +594,7 @@ fn (mut g JsGen) gen_method_decl(it ast.FnDecl, typ FnGenType) {
if g.pref.is_verbose {
syms := concrete_types.map(g.table.sym(it))
the_type := syms.map(it.name).join(', ')
- println('gen fn `$node.name` for type `$the_type`')
+ println('gen fn `${node.name}` for type `${the_type}`')
}
g.cur_concrete_types = concrete_types
g.gen_method_decl(node, typ)
@@ -625,13 +625,13 @@ fn (mut g JsGen) gen_method_decl(it ast.FnDecl, typ FnGenType) {
name = g.generic_fn_name(g.cur_concrete_types, name)
if name in js.builtin_functions {
- name = 'builtin__$name'
+ name = 'builtin__${name}'
}
if it.is_pub && !it.is_method {
g.push_pub_var(name)
}
if it.language == .js && it.is_method {
- g.writeln('${g.typ(it.receiver.typ)}.prototype.$it.name = ')
+ g.writeln('${g.typ(it.receiver.typ)}.prototype.${it.name} = ')
}
mut has_go := fn_has_go(it) || it.has_await
@@ -683,12 +683,12 @@ fn (mut g JsGen) gen_method_decl(it ast.FnDecl, typ FnGenType) {
is_varg := i == args.len - 1 && it.is_variadic
arg_name := g.js_name(arg.name)
if is_varg {
- g.writeln('$arg_name = new array(new array_buffer({arr: $arg_name,len: new int(${arg_name}.length),index_start: new int(0)}));')
+ g.writeln('${arg_name} = new array(new array_buffer({arr: ${arg_name},len: new int(${arg_name}.length),index_start: new int(0)}));')
} else {
asym := g.table.sym(arg.typ)
if asym.kind != .interface_ && asym.language != .js {
if arg.typ.is_ptr() || arg.is_mut {
- g.writeln('$arg_name = new \$ref($arg_name)')
+ g.writeln('${arg_name} = new \$ref(${arg_name})')
}
}
}
@@ -712,19 +712,19 @@ fn (mut g JsGen) gen_method_decl(it ast.FnDecl, typ FnGenType) {
for attr in it.attrs {
match attr.name {
'export' {
- g.writeln('globalThis.$attr.arg = ${g.js_name(it.name)};')
+ g.writeln('globalThis.${attr.arg} = ${g.js_name(it.name)};')
}
'wasm_export' {
mut x := g.wasm_export[attr.arg] or { []string{} }
g.write('function \$wasm${g.js_name(it.name)}(')
g.fn_args(args, it.is_variadic)
g.writeln(') {')
- g.write('\treturn $name (')
+ g.write('\treturn ${name} (')
for i, arg in args {
is_varg := i == args.len - 1 && it.is_variadic
arg_name := g.js_name(arg.name)
if is_varg {
- g.write('...$arg_name')
+ g.write('...${arg_name}')
} else {
g.gen_cast_tmp(arg_name, arg.typ)
}
@@ -754,7 +754,7 @@ fn (mut g JsGen) fn_args(args []ast.Param, is_variadic bool) {
name := g.js_name(arg.name)
is_varg := i == args.len - 1 && is_variadic
if is_varg {
- g.write('...$name')
+ g.write('...${name}')
} else {
g.write(name)
}
@@ -799,7 +799,7 @@ fn (mut g JsGen) gen_anon_fn(mut fun ast.AnonFn) {
name = g.generic_fn_name(g.table.cur_concrete_types, name)
if name in js.builtin_functions {
- name = 'builtin__$name'
+ name = 'builtin__${name}'
}
if it.is_pub && !it.is_method {
g.push_pub_var(name)
@@ -818,19 +818,19 @@ fn (mut g JsGen) gen_anon_fn(mut fun ast.AnonFn) {
is_varg := i == args.len - 1 && it.is_variadic
arg_name := g.js_name(arg.name)
if is_varg {
- g.writeln('$arg_name = new array(new array_buffer({arr: $arg_name,len: new int(${arg_name}.length),index_start: new int(0)}));')
+ g.writeln('${arg_name} = new array(new array_buffer({arr: ${arg_name},len: new int(${arg_name}.length),index_start: new int(0)}));')
} else {
asym := g.table.sym(arg.typ)
if arg.typ.is_ptr() || (arg.is_mut && asym.kind != .interface_ && asym.language != .js) {
- g.writeln('$arg_name = new \$ref($arg_name)')
+ g.writeln('${arg_name} = new \$ref(${arg_name})')
}
}
}
for inherited in fun.inherited_vars {
if !inherited.is_mut {
- g.writeln('let $inherited.name = ${inherited2copy[inherited.name]};')
+ g.writeln('let ${inherited.name} = ${inherited2copy[inherited.name]};')
}
}
g.stmts(it.stmts)
diff --git a/vlib/v/gen/js/infix.v b/vlib/v/gen/js/infix.v
index df8af67d78..2e4e3b3a3f 100644
--- a/vlib/v/gen/js/infix.v
+++ b/vlib/v/gen/js/infix.v
@@ -17,7 +17,7 @@ fn (mut g JsGen) gen_plain_infix_expr(node ast.InfixExpr) {
g.expr(node.left)
g.gen_deref_ptr(node.left_type)
g.write('.valueOf())')
- g.write(' $node.op.str() ')
+ g.write(' ${node.op.str()} ')
g.write('BigInt(')
g.expr(node.right)
g.gen_deref_ptr(node.left_type)
@@ -26,7 +26,7 @@ fn (mut g JsGen) gen_plain_infix_expr(node ast.InfixExpr) {
g.expr(node.left)
g.gen_deref_ptr(node.left_type)
g.write('.valueOf()')
- g.write(' $node.op.str() ')
+ g.write(' ${node.op.str()} ')
g.expr(node.right)
g.gen_deref_ptr(node.left_type)
g.write('.valueOf()')
@@ -199,7 +199,7 @@ fn (mut g JsGen) infix_expr_eq_op(node ast.InfixExpr) {
} else {
g.expr(node.left)
g.gen_deref_ptr(node.left_type)
- g.write('.valueOf() $node.op.str() ')
+ g.write('.valueOf() ${node.op.str()} ')
g.expr(node.right)
g.gen_deref_ptr(node.right_type)
g.write('.valueOf()')
@@ -239,7 +239,7 @@ fn (mut g JsGen) infix_expr_cmp_op(node ast.InfixExpr) {
} else {
g.expr(node.left)
g.gen_deref_ptr(node.left_type)
- g.write('.valueOf() $node.op.str() ')
+ g.write('.valueOf() ${node.op.str()} ')
g.expr(node.right)
g.gen_deref_ptr(node.right_type)
g.write('.valueOf()')
diff --git a/vlib/v/gen/js/js.v b/vlib/v/gen/js/js.v
index 946e1203c2..16f0dfa18b 100644
--- a/vlib/v/gen/js/js.v
+++ b/vlib/v/gen/js/js.v
@@ -139,7 +139,7 @@ pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) string {
g.file = file
g.enter_namespace(g.file.mod.name)
if g.enable_doc {
- g.writeln('/** @namespace $file.mod.name */')
+ g.writeln('/** @namespace ${file.mod.name} */')
}
g.is_test = g.pref.is_test
// store imports
@@ -188,7 +188,7 @@ pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) string {
} else {
g.js_name(iface_name) + '_' + method.name
}
- g.write('${g.js_name(sym.name)}.prototype.$method.name = function(')
+ g.write('${g.js_name(sym.name)}.prototype.${method.name} = function(')
for i, param in method.params {
if i == 0 {
continue
@@ -219,9 +219,9 @@ pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) string {
}
for mod_name in g.table.modules {
- g.writeln('// Initializations for module $mod_name')
+ g.writeln('// Initializations for module ${mod_name}')
for global, expr in g.init_global[mod_name] {
- g.write('$global = ')
+ g.write('${global} = ')
g.expr(expr)
g.writeln(';')
}
@@ -254,7 +254,7 @@ pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) string {
out += 'const loadRoutine = async () => {\n'
for mod, functions in g.wasm_import {
if g.pref.backend == .js_browser {
- out += '\nawait fetch("$mod").then(respone => respone.arrayBuffer()).then(bytes => '
+ out += '\nawait fetch("${mod}").then(respone => respone.arrayBuffer()).then(bytes => '
out += 'WebAssembly.instantiate(bytes,'
exports := g.wasm_export[mod]
out += '{ imports: { \n'
@@ -355,13 +355,13 @@ pub fn (mut g JsGen) gen_js_main_for_tests() {
g.writeln('')
g.writeln('globalThis.VTEST=1')
if g.pref.is_stats {
- g.writeln('let bt = main__start_testing(new int($all_tfuncs.len), new string("$g.pref.path"))')
+ g.writeln('let bt = main__start_testing(new int(${all_tfuncs.len}), new string("${g.pref.path}"))')
}
for tname in all_tfuncs {
tcname := g.js_name(tname)
if g.pref.is_stats {
- g.writeln('main__BenchedTests_testing_step_start(bt,new string("$tcname"))')
+ g.writeln('main__BenchedTests_testing_step_start(bt,new string("${tcname}"))')
}
g.writeln('try { let res = ${tcname}(); if (res instanceof Promise) { await res; } } catch (_e) {} ')
@@ -490,14 +490,14 @@ pub fn (mut g JsGen) init() {
}
pub fn (g JsGen) hashes() string {
- mut res := '// V_COMMIT_HASH $version.vhash()\n'
+ mut res := '// V_COMMIT_HASH ${version.vhash()}\n'
res += '// V_CURRENT_COMMIT_HASH ${version.githash(g.pref.building_v)}\n'
return res
}
[noreturn]
fn verror(msg string) {
- eprintln('jsgen error: $msg')
+ eprintln('jsgen error: ${msg}')
exit(1)
}
@@ -541,7 +541,7 @@ pub fn (mut g JsGen) writeln(s string) {
[inline]
pub fn (mut g JsGen) new_tmp_var() string {
g.tmp_count++
- return '_tmp$g.tmp_count'
+ return '_tmp${g.tmp_count}'
}
// 'mod1.mod2.fn' => 'mod1.mod2'
@@ -572,7 +572,7 @@ fn (mut g JsGen) js_name(name_ string) string {
}
name = name_.replace('.', '__')
if name in js.js_reserved {
- return '_v_$name'
+ return '_v_${name}'
}
return name
}
@@ -594,7 +594,7 @@ fn (mut g JsGen) write_v_source_line_info(pos token.Pos) {
}
}
if g.pref.is_vlines && g.is_vlines_enabled {
- g.write(' /* ${pos.line_nr + 1} $g.out.len */ ')
+ g.write(' /* ${pos.line_nr + 1} ${g.out.len} */ ')
}
}
@@ -603,31 +603,31 @@ fn (mut g JsGen) gen_global_decl(node ast.GlobalDecl) {
for field in node.fields {
if field.has_expr {
tmp_var := g.new_tmp_var()
- g.write('const $tmp_var = ')
+ g.write('const ${tmp_var} = ')
g.expr(field.expr)
g.writeln(';')
- g.writeln('Object.defineProperty(\$global,"$field.name", {
+ g.writeln('Object.defineProperty(\$global,"${field.name}", {
configurable: false,
- $mod ,
+ ${mod} ,
writable: true,
- value: $tmp_var
+ value: ${tmp_var}
}
); // global')
} else {
// TODO(playXE): Initialize with default value of type
if field.typ.is_ptr() {
- g.writeln('Object.defineProperty(\$global,"$field.name", {
+ g.writeln('Object.defineProperty(\$global,"${field.name}", {
configurable: false,
- $mod ,
+ ${mod} ,
writable: true,
value: new \$ref({})
}
); // global')
} else {
- g.writeln('Object.defineProperty(\$global,"$field.name", {
+ g.writeln('Object.defineProperty(\$global,"${field.name}", {
configurable: false,
- $mod ,
+ ${mod} ,
writable: true,
value: {}
}
@@ -638,7 +638,7 @@ fn (mut g JsGen) gen_global_decl(node ast.GlobalDecl) {
}
fn (mut g JsGen) gen_alias_type_decl(node ast.AliasTypeDecl) {
- name := if g.ns.name == 'builtin' { node.name } else { '${g.js_name(g.ns.name)}__$node.name' }
+ name := if g.ns.name == 'builtin' { node.name } else { '${g.js_name(g.ns.name)}__${node.name}' }
g.writeln('function ${name}(val) { return val; }')
}
@@ -879,7 +879,7 @@ fn (mut g JsGen) expr(node_ ast.Expr) {
// TODO
}
ast.AtExpr {
- g.write('"$node.val"')
+ g.write('"${node.val}"')
}
ast.BoolLiteral {
g.write('new bool(')
@@ -901,9 +901,9 @@ fn (mut g JsGen) expr(node_ ast.Expr) {
}
ast.CharLiteral {
if node.val.len_utf8() < node.val.len {
- g.write("new rune('$node.val'.charCodeAt())")
+ g.write("new rune('${node.val}'.charCodeAt())")
} else {
- g.write("new u8('$node.val')")
+ g.write("new u8('${node.val}')")
}
}
ast.Comment {}
@@ -917,15 +917,15 @@ fn (mut g JsGen) expr(node_ ast.Expr) {
// TODO
}
ast.CTempVar {
- g.write('$node.name')
+ g.write('${node.name}')
}
ast.DumpExpr {
- g.write('/* ast.DumpExpr: $node.expr */')
+ g.write('/* ast.DumpExpr: ${node.expr} */')
}
ast.EnumVal {
sym := g.table.sym(node.typ)
styp := g.js_name(sym.name)
- g.write('${styp}.$node.val')
+ g.write('${styp}.${node.val}')
}
ast.FloatLiteral {
g.gen_float_literal_expr(node)
@@ -987,7 +987,7 @@ fn (mut g JsGen) expr(node_ ast.Expr) {
// } else {
g.expr(node.expr)
if node.op in [.inc, .dec] {
- g.write('.val $node.op')
+ g.write('.val ${node.op}')
} else {
g.write(node.op.str())
}
@@ -1117,7 +1117,7 @@ fn (mut g JsGen) new_ctemp_var_then_gen(expr ast.Expr, expr_type ast.Type) ast.C
}
fn (mut g JsGen) gen_ctemp_var(tvar ast.CTempVar) {
- g.write('let $tvar.name = ')
+ g.write('let ${tvar.name} = ')
g.expr(tvar.orig)
g.writeln(';')
}
@@ -1127,22 +1127,22 @@ fn (mut g JsGen) gen_assert_metainfo(node ast.AssertStmt) string {
fn_name := if g.fn_decl == unsafe { nil } || g.fn_decl.is_anon { 'anon' } else { g.fn_decl.name }
line_nr := node.pos.line_nr
src := node.expr.str()
- metaname := 'v_assert_meta_info_$g.new_tmp_var()'
- g.writeln('let $metaname = {}')
- g.writeln('${metaname}.fpath = new string("$mod_path");')
- g.writeln('${metaname}.line_nr = new int("$line_nr")')
- g.writeln('${metaname}.fn_name = new string("$fn_name")')
+ metaname := 'v_assert_meta_info_${g.new_tmp_var()}'
+ g.writeln('let ${metaname} = {}')
+ g.writeln('${metaname}.fpath = new string("${mod_path}");')
+ g.writeln('${metaname}.line_nr = new int("${line_nr}")')
+ g.writeln('${metaname}.fn_name = new string("${fn_name}")')
metasrc := src
- g.writeln('${metaname}.src = "$metasrc"')
+ g.writeln('${metaname}.src = "${metasrc}"')
match node.expr {
ast.InfixExpr {
expr_op_str := node.expr.op.str()
expr_left_str := node.expr.left.str()
expr_right_str := node.expr.right.str()
- g.writeln('\t${metaname}.op = new string("$expr_op_str");')
- g.writeln('\t${metaname}.llabel = new string("$expr_left_str");')
- g.writeln('\t${metaname}.rlabel = new string("$expr_right_str");')
+ g.writeln('\t${metaname}.op = new string("${expr_op_str}");')
+ g.writeln('\t${metaname}.llabel = new string("${expr_left_str}");')
+ g.writeln('\t${metaname}.rlabel = new string("${expr_right_str}");')
g.write('\t${metaname}.lvalue = ')
g.gen_assert_single_expr(node.expr.left, node.expr.left_type)
g.writeln(';')
@@ -1163,21 +1163,21 @@ fn (mut g JsGen) gen_assert_single_expr(expr ast.Expr, typ ast.Type) {
unknown_value := '*unknown value*'
match expr {
ast.CastExpr, ast.IfExpr, ast.IndexExpr, ast.MatchExpr {
- g.write('new string("$unknown_value")')
+ g.write('new string("${unknown_value}")')
}
ast.PrefixExpr {
if expr.right is ast.CastExpr {
// TODO: remove this check;
// vlib/builtin/map_test.v (a map of &int, set to &int(0)) fails
// without special casing ast.CastExpr here
- g.write('new string("$unknown_value")')
+ g.write('new string("${unknown_value}")')
} else {
g.gen_expr_to_string(expr, typ)
}
}
ast.TypeNode {
sym := g.table.sym(g.unwrap_generic(typ))
- g.write('new string("$sym.name"')
+ g.write('new string("${sym.name}"')
}
else {
mut should_clone := true
@@ -1233,11 +1233,11 @@ fn (mut g JsGen) gen_assert_stmt(mut node ast.AssertStmt) {
if g.is_test {
metaname_ok := g.gen_assert_metainfo(node)
g.writeln(' g_test_oks++;')
- g.writeln(' main__cb_assertion_ok($metaname_ok);')
+ g.writeln(' main__cb_assertion_ok(${metaname_ok});')
g.writeln('} else {')
metaname_fail := g.gen_assert_metainfo(node)
g.writeln(' g_test_fails++;')
- g.writeln(' main__cb_assertion_failed($metaname_fail);')
+ g.writeln(' main__cb_assertion_failed(${metaname_fail});')
g.writeln(' builtin__exit(1);')
g.writeln('}')
return
@@ -1245,7 +1245,7 @@ fn (mut g JsGen) gen_assert_stmt(mut node ast.AssertStmt) {
g.writeln('} else {')
g.inc_indent()
fname := if g.fn_decl == unsafe { nil } || g.fn_decl.is_anon { 'anon' } else { g.fn_decl.name }
- g.writeln('builtin__eprintln(new string("$mod_path:${node.pos.line_nr + 1}: FAIL: fn ${fname}(): assert $s_assertion"));')
+ g.writeln('builtin__eprintln(new string("${mod_path}:${node.pos.line_nr + 1}: FAIL: fn ${fname}(): assert ${s_assertion}"));')
g.writeln('builtin__exit(1);')
g.dec_indent()
g.writeln('}')
@@ -1286,7 +1286,7 @@ fn (mut g JsGen) gen_assign_stmt(stmt ast.AssignStmt, semicolon bool) {
if left.kind == .blank_ident || left.name in ['', '_'] {
tmp_var := g.new_tmp_var()
// TODO: Can the tmp_var declaration be omitted?
- g.write('const $tmp_var = ')
+ g.write('const ${tmp_var} = ')
g.expr(val)
g.writeln(';')
continue
@@ -1396,7 +1396,7 @@ fn (mut g JsGen) gen_assign_stmt(stmt ast.AssignStmt, semicolon bool) {
g.write(' | ')
}
else {
- panic('unexpected op $op')
+ panic('unexpected op ${op}')
}
}
} else if is_assign && !array_set {
@@ -1449,13 +1449,13 @@ fn (mut g JsGen) gen_assign_stmt(stmt ast.AssignStmt, semicolon bool) {
g.write(' | ')
}
else {
- panic('unexpected op $op')
+ panic('unexpected op ${op}')
}
}
} else {
if op == .assign && array_set {
} else {
- g.write(' $op ')
+ g.write(' ${op} ')
}
}
// TODO: Multiple types??
@@ -1503,7 +1503,7 @@ fn (mut g JsGen) gen_assign_stmt(stmt ast.AssignStmt, semicolon bool) {
fn (mut g JsGen) gen_attrs(attrs []ast.Attr) {
for attr in attrs {
- g.writeln('/* [$attr.name] */')
+ g.writeln('/* [${attr.name}] */')
}
}
@@ -1526,7 +1526,7 @@ fn (mut g JsGen) gen_branch_stmt(it ast.BranchStmt) {
g.writeln('throw new ContinueException();')
}
else {
- verror('unexpected branch stmt: $it.kind')
+ verror('unexpected branch stmt: ${it.kind}')
}
}
return
@@ -1572,11 +1572,11 @@ fn (mut g JsGen) gen_enum_decl(it ast.EnumDecl) {
g.inc_indent()
mut i := 0
for field in it.fields {
- g.write('$field.name: ')
+ g.write('${field.name}: ')
if field.has_expr && field.expr is ast.IntegerLiteral {
i = field.expr.val.int()
}
- g.writeln('$i,')
+ g.writeln('${i},')
i++
}
g.dec_indent()
@@ -1607,7 +1607,7 @@ fn (mut g JsGen) cc_type(typ ast.Type, is_prefix_struct bool) string {
mut sgtyps := '_T'
for gt in sym.info.generic_types {
gts := g.table.sym(g.unwrap_generic(gt))
- sgtyps += '_$gts.cname'
+ sgtyps += '_${gts.cname}'
}
styp += sgtyps
}
@@ -1660,11 +1660,11 @@ fn (mut g JsGen) gen_for_in_stmt(it ast.ForInStmt) {
i = g.new_tmp_var()
}
g.inside_loop = true
- g.write('for (let $i = ')
+ g.write('for (let ${i} = ')
g.expr(it.cond)
- g.write('; $i < ')
+ g.write('; ${i} < ')
g.expr(it.high)
- g.writeln('; $i = new int($i + 1)) {')
+ g.writeln('; ${i} = new int(${i} + 1)) {')
g.inside_loop = false
g.inc_indent()
g.writeln('try { ')
@@ -1682,18 +1682,18 @@ fn (mut g JsGen) gen_for_in_stmt(it ast.ForInStmt) {
val := if it.val_var in ['', '_'] { '_' } else { it.val_var }
// styp := g.typ(it.val_type)
if it.key_var.len > 0 {
- g.write('for (const [$it.key_var, $val] of ')
+ g.write('for (const [${it.key_var}, ${val}] of ')
if it.kind == .string {
g.write('Array.from(')
g.expr(it.cond)
if it.cond_type.is_ptr() {
g.write('.valueOf()')
}
- g.write('.str.split(\'\').entries(), ([$it.key_var, $val]) => [$it.key_var, ')
+ g.write('.str.split(\'\').entries(), ([${it.key_var}, ${val}]) => [${it.key_var}, ')
g.write('new ')
- g.write('u8($val)])')
+ g.write('u8(${val})])')
} else {
g.expr(it.cond)
if it.cond_type.is_ptr() {
@@ -1702,7 +1702,7 @@ fn (mut g JsGen) gen_for_in_stmt(it ast.ForInStmt) {
g.write('.entries()')
}
} else {
- g.write('for (const $val of ')
+ g.write('for (const ${val} of ')
g.expr(it.cond)
if it.cond_type.is_ptr() {
g.write('.valueOf()')
@@ -1741,18 +1741,18 @@ fn (mut g JsGen) gen_for_in_stmt(it ast.ForInStmt) {
tmp2 := g.new_tmp_var()
if g.pref.output_es5 {
tmp3 := g.new_tmp_var()
- g.write('let $tmp2 = ')
+ g.write('let ${tmp2} = ')
g.expr(it.cond)
if it.cond_type.is_ptr() {
g.write('.valueOf()')
}
g.writeln(';')
- g.write('for (var $tmp3 = 0; $tmp3 < Object.keys(${tmp2}.map).length; $tmp3++) ')
+ g.write('for (var ${tmp3} = 0; ${tmp3} < Object.keys(${tmp2}.map).length; ${tmp3}++) ')
g.write('{')
- g.writeln('\tlet $tmp = Object.keys(${tmp2}.map)')
- g.writeln('\tlet $key = $tmp[$tmp3];')
- g.writeln('\tlet $val = ${tmp2}.map[$tmp[$tmp3]];')
+ g.writeln('\tlet ${tmp} = Object.keys(${tmp2}.map)')
+ g.writeln('\tlet ${key} = ${tmp}[${tmp3}];')
+ g.writeln('\tlet ${val} = ${tmp2}.map[${tmp}[${tmp3}]];')
g.inc_indent()
g.writeln('try { ')
g.stmts(it.stmts)
@@ -1763,17 +1763,17 @@ fn (mut g JsGen) gen_for_in_stmt(it ast.ForInStmt) {
g.dec_indent()
g.writeln('}')
} else {
- g.write('let $tmp = ')
+ g.write('let ${tmp} = ')
g.expr(it.cond)
if it.cond_type.is_ptr() {
g.write('.valueOf()')
}
g.writeln(';')
- g.writeln('for (var $tmp2 in ${tmp}.map) {')
+ g.writeln('for (var ${tmp2} in ${tmp}.map) {')
g.inc_indent()
- g.writeln('let $val = ${tmp}.map[$tmp2];')
- g.writeln('let $key = $tmp2;')
+ g.writeln('let ${val} = ${tmp}.map[${tmp2}];')
+ g.writeln('let ${key} = ${tmp2};')
g.writeln('try { ')
g.inc_indent()
@@ -1840,7 +1840,7 @@ fn (mut g JsGen) gen_interface_decl(it ast.InterfaceDecl) {
// This is a hack to make the interface's type accessible outside its namespace
// TODO: interfaces are always `pub`?
name := g.js_name(it.name)
- g.push_pub_var('/** @type $name */\n\t\t$name')
+ g.push_pub_var('/** @type ${name} */\n\t\t${name}')
g.writeln('function ${g.js_name(it.name)} (arg) { return new \$ref(arg); }')
}
@@ -1878,8 +1878,8 @@ fn (mut g JsGen) gen_return_stmt(it ast.Return) {
if optional_none || is_regular_option || node.types[0] == ast.error_type_idx {
if !isnil(g.fn_decl) && g.fn_decl.is_test {
test_error_var := g.new_tmp_var()
- g.writeln('let $test_error_var = "TODO";')
- g.writeln('return $test_error_var;')
+ g.writeln('let ${test_error_var} = "TODO";')
+ g.writeln('return ${test_error_var};')
return
}
if !g.inside_or {
@@ -1897,7 +1897,7 @@ fn (mut g JsGen) gen_return_stmt(it ast.Return) {
}
if fn_return_is_optional {
tmp := g.new_tmp_var()
- g.write('const $tmp = new ')
+ g.write('const ${tmp} = new ')
g.writeln('${js.option_name}({});')
g.write('${tmp}.state = new u8(0);')
@@ -1909,9 +1909,9 @@ fn (mut g JsGen) gen_return_stmt(it ast.Return) {
}
g.writeln('')
if g.inside_or {
- g.write('throw new ReturnException($tmp);')
+ g.write('throw new ReturnException(${tmp});')
} else {
- g.write('return $tmp;')
+ g.write('return ${tmp};')
}
return
}
@@ -1937,7 +1937,7 @@ fn (mut g JsGen) gen_hash_stmt(it ast.HashStmt) {
fn (mut g JsGen) gen_sumtype_decl(it ast.SumTypeDecl) {
name := g.js_name(it.name)
- g.push_pub_var('/** @type $name */\n\t\t$name')
+ g.push_pub_var('/** @type ${name} */\n\t\t${name}')
g.writeln('function ${g.js_name(it.name)} (arg) { return arg; }')
}
@@ -1954,9 +1954,9 @@ fn (mut g JsGen) gen_struct_decl(node ast.StructDecl) {
g.doc.gen_fac_fn(node.fields)
if g.pref.output_es5 {
obj := g.new_tmp_var()
- g.writeln('function ${js_name}($obj) {')
+ g.writeln('function ${js_name}(${obj}) {')
g.inc_indent()
- g.writeln('if ($obj === undefined) { obj = {}; }')
+ g.writeln('if (${obj} === undefined) { obj = {}; }')
for field in node.fields {
mut keep := true
for attr in field.attrs {
@@ -1965,8 +1965,8 @@ fn (mut g JsGen) gen_struct_decl(node ast.StructDecl) {
}
}
if keep {
- g.writeln('if (${obj}.$field.name === undefined) {')
- g.write('${obj}.$field.name = ')
+ g.writeln('if (${obj}.${field.name} === undefined) {')
+ g.write('${obj}.${field.name} = ')
if field.has_default_expr {
g.expr(field.default_expr)
} else {
@@ -1974,14 +1974,14 @@ fn (mut g JsGen) gen_struct_decl(node ast.StructDecl) {
}
g.writeln('\n}')
}
- g.writeln('var $field.name = ${obj}.$field.name;')
+ g.writeln('var ${field.name} = ${obj}.${field.name};')
}
g.dec_indent()
} else {
g.write('function ${js_name}({ ')
for i, field in node.fields {
- g.write('$field.name')
+ g.write('${field.name}')
mut keep := true
for attr in field.attrs {
if attr.name == 'noinit' {
@@ -2005,7 +2005,7 @@ fn (mut g JsGen) gen_struct_decl(node ast.StructDecl) {
}
g.inc_indent()
for field in node.fields {
- g.writeln('this.$field.name = $field.name')
+ g.writeln('this.${field.name} = ${field.name}')
}
g.dec_indent()
g.writeln('};')
@@ -2036,7 +2036,7 @@ fn (mut g JsGen) gen_struct_decl(node ast.StructDecl) {
g.writeln('toString() {')
}
g.inc_indent()
- g.write('return `$js_name {')
+ g.write('return `${js_name} {')
for i, field in node.fields {
if i == 0 {
g.write(' ')
@@ -2044,8 +2044,8 @@ fn (mut g JsGen) gen_struct_decl(node ast.StructDecl) {
g.write(', ')
}
match g.typ(field.typ).split('.').last() {
- 'string' { g.write('$field.name: "\${this["$field.name"].toString()}"') }
- else { g.write('$field.name: \${this["$field.name"].toString()} ') }
+ 'string' { g.write('${field.name}: "\${this["${field.name}"].toString()}"') }
+ else { g.write('${field.name}: \${this["${field.name}"].toString()} ') }
}
}
g.writeln('}`')
@@ -2066,7 +2066,7 @@ fn (mut g JsGen) gen_struct_decl(node ast.StructDecl) {
}
}
if keep {
- g.write('$field.name: ${g.to_js_typ_val(field.typ)}')
+ g.write('${field.name}: ${g.to_js_typ_val(field.typ)}')
g.writeln(',')
}
}
@@ -2097,7 +2097,7 @@ fn (mut g JsGen) gen_array_init_expr(it ast.ArrayInit) {
t1 := g.new_tmp_var()
g.writeln('(function(length) {')
g.inc_indent()
- g.writeln('const $t1 = [];')
+ g.writeln('const ${t1} = [];')
g.write('for (let it = 0; it < length')
g.writeln('; it++) {')
g.inc_indent()
@@ -2112,7 +2112,7 @@ fn (mut g JsGen) gen_array_init_expr(it ast.ArrayInit) {
g.writeln(');')
g.dec_indent()
g.writeln('};')
- g.writeln('return $t1;')
+ g.writeln('return ${t1};')
g.dec_indent()
g.write('})(')
g.expr(it.len_expr)
@@ -2128,10 +2128,10 @@ fn (mut g JsGen) gen_array_init_expr(it ast.ArrayInit) {
t2 := g.new_tmp_var()
g.writeln('(function() {')
g.inc_indent()
- g.writeln('const $t1 = [];')
- g.write('for (let $t2 = 0; $t2 < ')
+ g.writeln('const ${t1} = [];')
+ g.write('for (let ${t2} = 0; ${t2} < ')
g.expr(it.exprs[0])
- g.writeln('; $t2++) {')
+ g.writeln('; ${t2}++) {')
g.inc_indent()
g.write('${t1}.push(')
if it.has_default {
@@ -2144,7 +2144,7 @@ fn (mut g JsGen) gen_array_init_expr(it ast.ArrayInit) {
g.writeln(');')
g.dec_indent()
g.writeln('};')
- g.writeln('return $t1;')
+ g.writeln('return ${t1};')
g.dec_indent()
g.write('})(), len: new int(')
g.expr(it.exprs[0])
@@ -2159,7 +2159,7 @@ fn (mut g JsGen) gen_array_init_expr(it ast.ArrayInit) {
} else {
g.gen_array_init_values(it.exprs)
}
- g.write(', len: new int($c), cap: new int($c)')
+ g.write(', len: new int(${c}), cap: new int(${c})')
}
g.dec_indent()
g.write('}))')
@@ -2436,7 +2436,7 @@ fn (mut g JsGen) match_expr(node ast.MatchExpr) {
} else {
s := g.new_tmp_var()
cond_var = CondString{s}
- g.write('let $s = ')
+ g.write('let ${s} = ')
g.expr(node.cond)
g.writeln(';')
}
@@ -2444,7 +2444,7 @@ fn (mut g JsGen) match_expr(node ast.MatchExpr) {
g.empty_line = true
cur_line = g.out.cut_to(g.stmt_start_pos).trim_left(' \t')
tmp_var = g.new_tmp_var()
- g.writeln('let $tmp_var = undefined;')
+ g.writeln('let ${tmp_var} = undefined;')
}
if is_expr && !need_tmp_var {
g.write('(')
@@ -2460,7 +2460,7 @@ fn (mut g JsGen) match_expr(node ast.MatchExpr) {
}
g.write(cur_line)
if need_tmp_var {
- g.write('$tmp_var')
+ g.write('${tmp_var}')
}
if is_expr && !need_tmp_var {
g.write(')')
@@ -2486,11 +2486,11 @@ fn (mut g JsGen) stmts_with_tmp_var(stmts []ast.Stmt, tmp_var string) {
} else {
g.write('opt_ok(')
g.stmt(stmt)
- g.writeln(', $tmp_var);')
+ g.writeln(', ${tmp_var});')
}
}
} else {
- g.write('$tmp_var = ')
+ g.write('${tmp_var} = ')
g.stmt(stmt)
g.writeln('')
}
@@ -2578,7 +2578,7 @@ fn (mut g JsGen) match_expr_sumtype(node ast.MatchExpr, is_expr bool, cond_var M
tsym := g.table.sym(typ)
if tsym.language == .js && (tsym.name == 'Number'
|| tsym.name == 'Boolean' || tsym.name == 'String') {
- g.write(' === $tsym.name.to_lower()')
+ g.write(' === ${tsym.name.to_lower()}')
} else {
g.write(' instanceof ')
g.expr(branch.exprs[sumtype_index])
@@ -2774,7 +2774,7 @@ fn (mut g JsGen) gen_if_expr(node ast.IfExpr) {
g.inside_if_optional = true
}
- g.writeln('let $tmp; /* if prepend */')
+ g.writeln('let ${tmp}; /* if prepend */')
} else if node.is_expr || g.inside_ternary {
g.write('(')
prev := g.inside_ternary
@@ -2811,7 +2811,7 @@ fn (mut g JsGen) gen_if_expr(node ast.IfExpr) {
if cond.expr !is ast.IndexExpr && cond.expr !is ast.PrefixExpr {
var_name := g.new_tmp_var()
guard_vars[i] = var_name
- g.writeln('let $var_name;')
+ g.writeln('let ${var_name};')
} else {
guard_vars[i] = ''
}
@@ -2842,7 +2842,7 @@ fn (mut g JsGen) gen_if_expr(node ast.IfExpr) {
g.tmp_count--
g.writeln('if (${var_name}.state == 0) {')
} else {
- g.write('if ($var_name = ')
+ g.write('if (${var_name} = ')
g.expr(branch.cond.expr)
g.writeln(', ${var_name}.state == 0) {')
}
@@ -2853,11 +2853,11 @@ fn (mut g JsGen) gen_if_expr(node ast.IfExpr) {
} else {
branch.cond.vars[0].name
}
- g.write('\tlet $cond_var_name = ')
+ g.write('\tlet ${cond_var_name} = ')
g.expr(branch.cond.expr)
g.writeln(';')
} else {
- g.writeln('\tlet $branch.cond.vars[0].name = ${var_name}.data;')
+ g.writeln('\tlet ${branch.cond.vars[0].name} = ${var_name}.data;')
}
}
}
@@ -2880,7 +2880,7 @@ fn (mut g JsGen) gen_if_expr(node ast.IfExpr) {
g.writeln('}')
}
if needs_tmp_var {
- g.write('$tmp')
+ g.write('${tmp}')
}
if node.typ.has_flag(.optional) {
g.inside_if_optional = false
@@ -3013,7 +3013,7 @@ fn (mut g JsGen) gen_infix_expr(it ast.InfixExpr) {
g.expr(it.left)
g.gen_deref_ptr(it.left_type)
g.write(').\$toJS())')
- g.write(' $it.op ')
+ g.write(' ${it.op} ')
g.write('BigInt((')
g.expr(it.right)
g.gen_deref_ptr(it.right_type)
@@ -3147,7 +3147,7 @@ fn (mut g JsGen) gen_infix_expr(it ast.InfixExpr) {
''
}
}
- g.write('.$name (')
+ g.write('.${name} (')
g.expr(it.right)
g.gen_deref_ptr(it.right_type)
g.write(')')
@@ -3172,7 +3172,7 @@ fn (mut g JsGen) gen_infix_expr(it ast.InfixExpr) {
g.gen_deref_ptr(it.left_type)
// g.write('.val')
- g.write(' $it.op ')
+ g.write(' ${it.op} ')
g.expr(it.right)
g.gen_deref_ptr(it.right_type)
@@ -3285,7 +3285,7 @@ fn (mut g JsGen) type_name(raw_type ast.Type) {
} else {
s = g.table.type_to_str(g.unwrap_generic(typ))
}
- g.write('new string("$s")')
+ g.write('new string("${s}")')
}
fn (mut g JsGen) gen_selector_expr(it ast.SelectorExpr) {
@@ -3314,7 +3314,7 @@ fn (mut g JsGen) gen_selector_expr(it ast.SelectorExpr) {
g.write(')')
return
}
- panic('unknown generic field $it.pos')
+ panic('unknown generic field ${it.pos}')
}
}
}
@@ -3327,7 +3327,7 @@ fn (mut g JsGen) gen_selector_expr(it ast.SelectorExpr) {
ltyp = ltyp.deref()
}
}
- g.write('.$it.field_name')
+ g.write('.${it.field_name}')
}
fn (mut g JsGen) gen_string_inter_literal(it ast.StringInterLiteral) {
@@ -3376,7 +3376,7 @@ fn (mut g JsGen) gen_string_literal(it ast.StringLiteral) {
if it.is_raw {
g.writeln('(function() { let s = String(); ')
for x in text {
- g.writeln('s += String.fromCharCode($x);')
+ g.writeln('s += String.fromCharCode(${x});')
}
g.writeln('return s; })()')
} else {
@@ -3385,7 +3385,7 @@ fn (mut g JsGen) gen_string_literal(it ast.StringLiteral) {
if ch == `\n` {
g.write('\\n')
} else {
- g.write('$ch.ascii_str()')
+ g.write('${ch.ascii_str()}')
}
}
g.write('"')
@@ -3415,7 +3415,7 @@ fn (mut g JsGen) gen_struct_init(it ast.StructInit) {
g.writeln('let tmp = new ${g.js_name(name)}()')
for field in it.fields {
- g.write('tmp.$field.name = ')
+ g.write('tmp.${field.name} = ')
g.expr(field.expr)
g.writeln(';')
}
@@ -3427,7 +3427,7 @@ fn (mut g JsGen) gen_struct_init(it ast.StructInit) {
g.inc_indent()
for i, field in it.fields {
if field.name.len != 0 {
- g.write('$field.name: ')
+ g.write('${field.name}: ')
}
g.expr(field.expr)
if i < it.fields.len - 1 {
@@ -3442,18 +3442,18 @@ fn (mut g JsGen) gen_struct_init(it ast.StructInit) {
g.writeln('(function() {')
g.inc_indent()
tmp := g.new_tmp_var()
- g.writeln('let $tmp = new ${g.js_name(name)}({});')
+ g.writeln('let ${tmp} = new ${g.js_name(name)}({});')
for field in it.fields {
if field.name.len != 0 {
- g.write('${tmp}.$field.name = ')
+ g.write('${tmp}.${field.name} = ')
g.expr(field.expr)
}
g.write(';')
g.writeln('')
}
- g.writeln('return $tmp;')
+ g.writeln('return ${tmp};')
g.dec_indent()
g.writeln('})()')
}
@@ -3466,7 +3466,7 @@ fn (mut g JsGen) gen_typeof_expr(it ast.TypeOf) {
} else if sym.kind == .array_fixed {
fixed_info := sym.info as ast.ArrayFixed
typ_name := g.table.get_type_name(fixed_info.elem_type)
- g.write('"[$fixed_info.size]$typ_name"')
+ g.write('"[${fixed_info.size}]${typ_name}"')
} else if sym.kind == .function {
info := sym.info as ast.FnType
fn_info := info.func
@@ -3481,9 +3481,9 @@ fn (mut g JsGen) gen_typeof_expr(it ast.TypeOf) {
if fn_info.return_type != ast.void_type {
repr += ' ${g.table.get_type_name(fn_info.return_type)}'
}
- g.write('"$repr"')
+ g.write('"${repr}"')
} else {
- g.write('"$sym.name"')
+ g.write('"${sym.name}"')
}
}
@@ -3493,7 +3493,7 @@ fn (mut g JsGen) gen_cast_tmp(tmp string, typ_ ast.Type) {
if !g.pref.output_es5 && (tsym.kind == .i64 || tsym.kind == .u64) {
g.write('new ')
- g.write('$tsym.kind.str()')
+ g.write('${tsym.kind.str()}')
g.write('(BigInt(')
g.write(tmp)
g.write('n))')
@@ -3577,7 +3577,7 @@ fn (mut g JsGen) gen_type_cast_expr(it ast.CastExpr) {
&& (tsym.kind == .i64 || tsym.kind == .u64) {
g.write('new ')
- g.write('$tsym.kind.str()')
+ g.write('${tsym.kind.str()}')
g.write('(BigInt(')
g.write(it.expr.val)
g.write('n))')
@@ -3637,13 +3637,13 @@ fn (mut g JsGen) gen_integer_literal_expr(it ast.IntegerLiteral) {
if g.cast_stack.last() in ast.integer_type_idxs {
g.write('new ')
- g.write('int($it.val)')
+ g.write('int(${it.val})')
return
}
}
g.write('new ')
- g.write('${g.typ(typ)}($it.val)')
+ g.write('${g.typ(typ)}(${it.val})')
}
fn (mut g JsGen) gen_float_literal_expr(it ast.FloatLiteral) {
@@ -3672,7 +3672,7 @@ fn (mut g JsGen) gen_float_literal_expr(it ast.FloatLiteral) {
// Skip cast if type is the same as the parrent caster
if g.cast_stack.len > 0 {
if g.cast_stack.last() in ast.float_type_idxs {
- g.write('new f32($it.val)')
+ g.write('new f32(${it.val})')
return
} else if g.cast_stack.last() in ast.integer_type_idxs {
g.write(int(it.val.f64()).str())
@@ -3681,7 +3681,7 @@ fn (mut g JsGen) gen_float_literal_expr(it ast.FloatLiteral) {
}
g.write('new ')
- g.write('${g.typ(typ)}($it.val)')
+ g.write('${g.typ(typ)}(${it.val})')
}
fn (mut g JsGen) unwrap_generic(typ ast.Type) ast.Type {
diff --git a/vlib/v/gen/js/jsdoc.v b/vlib/v/gen/js/jsdoc.v
index ba3ea05f4b..a7781e11dd 100644
--- a/vlib/v/gen/js/jsdoc.v
+++ b/vlib/v/gen/js/jsdoc.v
@@ -28,17 +28,17 @@ fn (mut d JsDoc) writeln(s string) {
}
fn (mut d JsDoc) gen_typ(typ string) {
- d.writeln('/** @type {$typ} */')
+ d.writeln('/** @type {${typ}} */')
}
fn (mut d JsDoc) gen_const(typ string) {
- d.writeln('/** @constant {$typ} */')
+ d.writeln('/** @constant {${typ}} */')
}
fn (mut d JsDoc) gen_enum() {
// Enum values can only be ints for now
typ := 'number'
- d.writeln('/** @enum {$typ} */')
+ d.writeln('/** @enum {${typ}} */')
}
fn (mut d JsDoc) gen_fac_fn(fields []ast.StructField) {
@@ -48,7 +48,7 @@ fn (mut d JsDoc) gen_fac_fn(fields []ast.StructField) {
for i, field in fields {
// Marked as optional: structs have default default values,
// so all struct members don't have to be initialized.
- d.write('$field.name?: ${d.gen.typ(field.typ)}')
+ d.write('${field.name}?: ${d.gen.typ(field.typ)}')
if i < fields.len - 1 {
d.write(', ')
}
@@ -72,25 +72,25 @@ fn (mut d JsDoc) gen_fn(it ast.FnDecl) {
is_varg := i == it.params.len - 1 && it.is_variadic
name := d.gen.js_name(arg.name)
if is_varg {
- d.writeln(' * @param {...$arg_type_name} $name')
+ d.writeln(' * @param {...${arg_type_name}} ${name}')
} else {
- d.writeln(' * @param {$arg_type_name} $name')
+ d.writeln(' * @param {${arg_type_name}} ${name}')
}
}
- d.writeln(' * @returns {$type_name}')
+ d.writeln(' * @returns {${type_name}}')
d.writeln('*/')
}
fn (mut d JsDoc) gen_interface(it ast.InterfaceDecl) {
name := d.gen.js_name(it.name)
d.writeln('/**')
- d.writeln(' * @interface $name')
- d.writeln(' * @typedef $name')
+ d.writeln(' * @interface ${name}')
+ d.writeln(' * @typedef ${name}')
for method in it.methods {
// Skip receiver
typ := d.gen.fn_typ(method.params[1..], method.return_type)
method_name := d.gen.js_name(method.name)
- d.writeln(' * @property {$typ} $method_name')
+ d.writeln(' * @property {${typ}} ${method_name}')
}
d.writeln(' */\n')
}
diff --git a/vlib/v/gen/js/jsgen_test.v b/vlib/v/gen/js/jsgen_test.v
index 15ec1d9cf4..d3b2b13ac4 100644
--- a/vlib/v/gen/js/jsgen_test.v
+++ b/vlib/v/gen/js/jsgen_test.v
@@ -22,7 +22,7 @@ fn test_example_compilation() {
files := find_test_files()
for file in files {
path := os.join_path(test_dir, file)
- println('Testing $file')
+ println('Testing ${file}')
mut v_options_file := v_options
mut node_options_file := node_options
should_create_source_map := file.ends_with('_sourcemap.v')
@@ -34,14 +34,14 @@ fn test_example_compilation() {
node_options_file += ' --enable-source-maps' // activate souremap generation
}
jsfile := os.join_path_single(output_dir, '${file}.js')
- v_code := os.system('${os.quoted_path(vexe)} $v_options_file -o ${os.quoted_path(jsfile)} ${os.quoted_path(path)}')
+ v_code := os.system('${os.quoted_path(vexe)} ${v_options_file} -o ${os.quoted_path(jsfile)} ${os.quoted_path(path)}')
if v_code != 0 {
assert false
}
// Compilation failed
assert v_code == 0
if !there_is_node_available {
- println(' ... skipping running $file, there is no NodeJS present')
+ println(' ... skipping running ${file}, there is no NodeJS present')
continue
}
js_code := os.system('node ${os.quoted_path(jsfile)}')
@@ -56,7 +56,7 @@ fn test_example_compilation() {
assert grep_code_sourcemap_found == 0
println('file has a source map embeded')
} else {
- println(' ... skipping testing for sourcemap $file, there is no grep present')
+ println(' ... skipping testing for sourcemap ${file}, there is no grep present')
}
}
}
diff --git a/vlib/v/gen/js/program_test.v b/vlib/v/gen/js/program_test.v
index be7bad3f90..9660d7d614 100644
--- a/vlib/v/gen/js/program_test.v
+++ b/vlib/v/gen/js/program_test.v
@@ -16,7 +16,7 @@ fn find_diff_cmd() string {
[noreturn]
fn exit_because(msg string) {
- eprintln('$msg, tests will not run')
+ eprintln('${msg}, tests will not run')
exit(0)
}
@@ -30,9 +30,9 @@ fn test_node_exists() {
}
version := res.output.trim_left('v').int()
if version < 10 {
- exit_because('node should be at least version 10, but is currently version: $version')
+ exit_because('node should be at least version 10, but is currently version: ${version}')
}
- println('Using node version: $version')
+ println('Using node version: ${version}')
}
fn test_running_programs_compiled_with_the_js_backend() {
@@ -70,7 +70,7 @@ fn check_path(dir string, tests []string) !int {
if expected != found {
println(term.red('FAIL'))
println('============')
- println('expected $program_out content:')
+ println('expected ${program_out} content:')
println(expected)
println('============')
println('found:')
diff --git a/vlib/v/gen/js/tests/array.v b/vlib/v/gen/js/tests/array.v
index b8f1f2da50..ef63016ef0 100644
--- a/vlib/v/gen/js/tests/array.v
+++ b/vlib/v/gen/js/tests/array.v
@@ -1,5 +1,5 @@
fn map_cb(s string) string {
- return 'CB: $s'
+ return 'CB: ${s}'
}
fn filter_cb(n int) bool {
@@ -90,11 +90,11 @@ fn main() {
println('\n\n')
// map
- a := arr1.map('VAL: $it')
+ a := arr1.map('VAL: ${it}')
b := arr1.map(map_cb)
c := arr1.map(map_cb(it))
d := arr1.map(fn (a string) string {
- return 'ANON: $a'
+ return 'ANON: ${a}'
})
// I don't know when this would ever be used,
// but it's what the C backend does ¯\_(ツ)_/¯
@@ -129,8 +129,8 @@ fn main() {
f4 := [u64(0xffffffffffffffff), 0xdeadface]!
println('
-$f1
-$f2
-$f3
-$f4')
+${f1}
+${f2}
+${f3}
+${f4}')
}
diff --git a/vlib/v/gen/js/tests/hello/hello.v b/vlib/v/gen/js/tests/hello/hello.v
index 9b75511526..356cf399cb 100644
--- a/vlib/v/gen/js/tests/hello/hello.v
+++ b/vlib/v/gen/js/tests/hello/hello.v
@@ -29,5 +29,5 @@ pub fn debugger() string {
}
pub fn excited() string {
- return '$hello1.nested() $debugger()!'
+ return '${hello1.nested()} ${debugger()}!'
}
diff --git a/vlib/v/gen/js/tests/interface.v b/vlib/v/gen/js/tests/interface.v
index a43cadfdda..798f94a0de 100644
--- a/vlib/v/gen/js/tests/interface.v
+++ b/vlib/v/gen/js/tests/interface.v
@@ -14,11 +14,11 @@ interface Animal {
}
fn (d Dog) say(s string) {
- println('Dog $d.name: "$s"')
+ println('Dog ${d.name}: "${s}"')
}
fn (c Cat) say(s string) {
- println('Cat $c.name: "$s"')
+ println('Cat ${c.name}: "${s}"')
}
fn (d Dog) greet() int {
diff --git a/vlib/v/gen/js/tests/interp.v b/vlib/v/gen/js/tests/interp.v
index 54f911cf54..9faed80748 100644
--- a/vlib/v/gen/js/tests/interp.v
+++ b/vlib/v/gen/js/tests/interp.v
@@ -1,30 +1,30 @@
fn test_fn(s1 string, s2 string) {
print(if s1 == s2 { 'true' } else { 'false' })
print('\t=> ')
- println('"$s1", "$s2"')
+ println('"${s1}", "${s2}"')
}
fn simple_string_interpolation() {
a := 'Hello'
b := 'World'
- res := '$a $b'
+ res := '${a} ${b}'
test_fn(res, 'Hello World')
}
fn mixed_string_interpolation() {
num := 7
str := 'abc'
- s1 := 'number=$num'
+ s1 := 'number=${num}'
test_fn(s1, 'number=7')
- s2 := 'string=$str'
+ s2 := 'string=${str}'
test_fn(s2, 'string=abc')
- s3 := 'a: $num | b: $str'
+ s3 := 'a: ${num} | b: ${str}'
test_fn(s3, 'a: 7 | b: abc')
}
fn formatted_string_interpolation() {
x := 'abc'
- axb := 'a:$x:b'
+ axb := 'a:${x}:b'
test_fn(axb, 'a:abc:b')
x_10 := 'a:${x:10s}:b'
x10_ := 'a:${x:-10s}:b'
@@ -57,31 +57,31 @@ fn excape_dollar_in_string() {
fn implicit_str() {
i := 42
- test_fn('int $i', 'int 42')
- test_fn('$i', '42')
- check := '$i' == '42'
+ test_fn('int ${i}', 'int 42')
+ test_fn('${i}', '42')
+ check := '${i}' == '42'
// println(check)
- text := '$i' + '42'
+ text := '${i}' + '42'
test_fn(text, '4242')
}
fn string_interpolation_percent_escaping() {
test := 'hello'
hello := 'world'
- x := '%.*s$hello$test |${hello:-30s}|'
+ x := '%.*s${hello}${test} |${hello:-30s}|'
test_fn(x, '%.*sworldhello |world |')
}
fn string_interpolation_string_prefix() {
// `r`, `c` and `js` are also used as a string prefix.
r := 'r'
- rr := '$r$r'
+ rr := '${r}${r}'
test_fn(rr, 'rr')
c := 'c'
- cc := '$c$c'
+ cc := '${c}${c}'
test_fn(cc, 'cc')
js := 'js'
- jsjs := '$js$js'
+ jsjs := '${js}${js}'
test_fn(jsjs, 'jsjs')
}
@@ -90,7 +90,7 @@ fn interpolation_string_prefix_expr() {
c := 2
js := 1
test_fn('>${3 + r}<', '>4<')
- test_fn('${r == js} $js', 'true 1')
+ test_fn('${r == js} ${js}', 'true 1')
test_fn('>${js + c} ${js + r == c}<', '>3 true<')
}
@@ -131,7 +131,7 @@ fn utf8_string_interpolation() {
a := 'à-côté'
st := 'Sträßle'
m := '10€'
- test_fn('$a $st $m', 'à-côté Sträßle 10€')
+ test_fn('${a} ${st} ${m}', 'à-côté Sträßle 10€')
zz := '>${a:10}< >${st:-8}< >${m:5}<-'
zz_expected := '> à-côté< >Sträßle < > 10€<-'
// println(' zz: $zz')
@@ -139,7 +139,7 @@ fn utf8_string_interpolation() {
test_fn(zz, zz_expected)
// e := '\u20AC' // Eurosign doesn' work with MSVC and tcc
e := '€'
- test_fn('100.00 $e', '100.00 €')
+ test_fn('100.00 ${e}', '100.00 €')
m2 := 'Москва́' // cyrillic а́: combination of U+0430 and U+0301, UTF-8: d0 b0 cc 81
d := 'Antonín Dvořák' // latin á: U+00E1, UTF-8: c3 a1
test_fn(':${m2:7}:${d:-15}:', ': Москва́:Antonín Dvořák :')
@@ -153,12 +153,12 @@ struct Sss {
}
fn (s Sss) str() string {
- return '[$s.v1, ${s.v2:.3f}]'
+ return '[${s.v1}, ${s.v2:.3f}]'
}
fn string_interpolation_str_evaluation() {
mut x := Sss{17, 13.455893}
- test_fn('$x', '[17, 13.456]')
+ test_fn('${x}', '[17, 13.456]')
}
/*
diff --git a/vlib/v/gen/js/tests/js.v b/vlib/v/gen/js/tests/js.v
index 4d4f8fb000..d729956156 100644
--- a/vlib/v/gen/js/tests/js.v
+++ b/vlib/v/gen/js/tests/js.v
@@ -36,11 +36,11 @@ fn main() {
}
ren := int(JS.eval('3'.str))
if ren != 0 {
- println('ren=$ren')
+ println('ren=${ren}')
}
res := string(JS.eval('"3"'.str))
if res != '' {
- println('res=$res')
+ println('res=${res}')
}
mut a := 1
a *= 2
@@ -63,9 +63,9 @@ fn main() {
_ = hl.Ccc.a
debugger := 'JS keywords'
// TODO: Implement interpolation
- await := '$super: $debugger'
+ await := '${super}: ${debugger}'
mut finally := 'implemented'
- println('$await $finally')
+ println('${await} ${finally}')
dun := i_am_a_const * 20 + 2
dunn := hl.hello // External constant
_ = hl1.nested()
@@ -92,7 +92,7 @@ fn main() {
}
spawn async(0, 'hello')
fn_in_var := fn (number int) {
- println('number: $number')
+ println('number: ${number}')
}
hl.debugger()
anon_consumer(hl.excited(), fn (message string) {
diff --git a/vlib/v/gen/js/tests/optional.v b/vlib/v/gen/js/tests/optional.v
index 5a6f7720f2..f2dc999ab4 100644
--- a/vlib/v/gen/js/tests/optional.v
+++ b/vlib/v/gen/js/tests/optional.v
@@ -1,7 +1,7 @@
module main
fn main() {
- try_propagation() or { println('captured: $err') }
+ try_propagation() or { println('captured: ${err}') }
}
fn try_propagation() ? {
@@ -11,7 +11,7 @@ fn try_propagation() ? {
fn try_numbers() ? {
for x in 1 .. 10 {
y := error_if_even(x) or { x + 1 }
- println('$x rounded to $y')
+ println('${x} rounded to ${y}')
error_if_prime(y)?
}
}
@@ -26,7 +26,7 @@ fn error_if_even(num int) ?int {
fn error_if_prime(num int) ?int {
for i in 2 .. num {
if num % i == 0 {
- return error('$num is prime')
+ return error('${num} is prime')
}
}
return num
diff --git a/vlib/v/gen/js/tests/simple_sourcemap.v b/vlib/v/gen/js/tests/simple_sourcemap.v
index 0dd8a64216..1b1125f11b 100644
--- a/vlib/v/gen/js/tests/simple_sourcemap.v
+++ b/vlib/v/gen/js/tests/simple_sourcemap.v
@@ -12,7 +12,7 @@ fn main() {
println('source map is working')
}
} else {
- println('skiping test! node version >=12.12.0 required. Current Version is $node_version')
+ println('skipping test! node version >=12.12.0 required. Current Version is ${node_version}')
}
}
diff --git a/vlib/v/gen/js/tests/testdata/match.v b/vlib/v/gen/js/tests/testdata/match.v
index 00d3abd920..a43419fcc5 100644
--- a/vlib/v/gen/js/tests/testdata/match.v
+++ b/vlib/v/gen/js/tests/testdata/match.v
@@ -14,10 +14,10 @@ type Vec = Vec2d | Vec3d
fn match_vec(v Vec) {
match v {
Vec2d {
- println('Vec2d($v.x,$v.y)')
+ println('Vec2d(${v.x},${v.y})')
}
Vec3d {
- println('Vec2d($v.x,$v.y,$v.z)')
+ println('Vec2d(${v.x},${v.y},${v.z})')
}
}
}
diff --git a/vlib/v/gen/native/amd64.v b/vlib/v/gen/native/amd64.v
index b4f78924ba..d92634f0e7 100644
--- a/vlib/v/gen/native/amd64.v
+++ b/vlib/v/gen/native/amd64.v
@@ -70,9 +70,9 @@ fn (mut g Gen) dec(reg Register) {
.rsi { g.write8(0xce) }
.rdi { g.write8(0xcf) }
.r12 { g.write8(0xc4) }
- else { panic('unhandled inc $reg') }
+ else { panic('unhandled inc ${reg}') }
}
- g.println('dec $reg')
+ g.println('dec ${reg}')
}
[inline]
@@ -84,7 +84,7 @@ fn (mut g Gen) inc(reg Register) {
g.write8(0x48)
g.write8(0xff)
g.write8(0xc0 + int(reg))
- g.println('inc $reg')
+ g.println('inc ${reg}')
}
fn (mut g Gen) neg(reg Register) {
@@ -92,9 +92,9 @@ fn (mut g Gen) neg(reg Register) {
g.write8(0xf7)
match reg {
.rax { g.write8(0xd8) }
- else { panic('unhandled neg $reg') }
+ else { panic('unhandled neg ${reg}') }
}
- g.println('neg $reg')
+ g.println('neg ${reg}')
}
fn (mut g Gen) cmp(reg Register, size Size, val i64) {
@@ -134,7 +134,7 @@ fn (mut g Gen) cmp(reg Register, size Size, val i64) {
panic('unhandled cmp')
}
}
- g.println('cmp $reg, $val')
+ g.println('cmp ${reg}, ${val}')
}
// `cmp rax, rbx`
@@ -149,7 +149,7 @@ fn (mut g Gen) cmp_reg(reg Register, reg2 Register) {
g.write([u8(0x48), 0x39, 0xd8])
}
else {
- g.n_error('Cannot compare $reg and $reg2')
+ g.n_error('Cannot compare ${reg} and ${reg2}')
}
}
}
@@ -159,7 +159,7 @@ fn (mut g Gen) cmp_reg(reg Register, reg2 Register) {
g.write([u8(0x48), 0x39, 0xc2])
}
else {
- g.n_error('Cannot compare $reg and $reg2')
+ g.n_error('Cannot compare ${reg} and ${reg2}')
}
}
}
@@ -169,7 +169,7 @@ fn (mut g Gen) cmp_reg(reg Register, reg2 Register) {
g.write([u8(0x48), 0x39, 0xc3])
}
else {
- g.n_error('Cannot compare $reg and $reg2')
+ g.n_error('Cannot compare ${reg} and ${reg2}')
}
}
}
@@ -179,15 +179,15 @@ fn (mut g Gen) cmp_reg(reg Register, reg2 Register) {
g.write([u8(0x48), 0x39, 0xf7])
}
else {
- g.n_error('Cannot compare $reg and $reg2')
+ g.n_error('Cannot compare ${reg} and ${reg2}')
}
}
}
else {
- g.n_error('Cannot compare $reg and $reg2')
+ g.n_error('Cannot compare ${reg} and ${reg2}')
}
}
- g.println('cmp $reg, $reg2')
+ g.println('cmp ${reg}, ${reg2}')
}
// cmp $reg, 0
@@ -203,12 +203,12 @@ fn (mut g Gen) cmp_zero(reg Register) {
g.write8(0xf8)
}
else {
- g.n_error('unhandled cmp $reg, 0')
+ g.n_error('unhandled cmp ${reg}, 0')
}
}
g.write8(0x00)
- g.println('cmp $reg, 0')
+ g.println('cmp ${reg}, 0')
}
fn (mut g Gen) cmp_var_reg(var Var, reg Register, config VarConfig) {
@@ -240,7 +240,7 @@ fn (mut g Gen) cmp_var_reg(var Var, reg Register, config VarConfig) {
} else {
g.write8((0xff - offset + 1) % 0x100)
}
- g.println('cmp var `$var.name`, $reg')
+ g.println('cmp var `${var.name}`, ${reg}')
}
GlobalVar {
// TODO
@@ -277,7 +277,7 @@ fn (mut g Gen) cmp_var(var Var, val int, config VarConfig) {
g.write8((0xff - offset + 1) % 0x100)
}
g.write32(val)
- g.println('cmp var `$var.name` $val')
+ g.println('cmp var `${var.name}` ${val}')
}
GlobalVar {
// TODO
@@ -315,7 +315,7 @@ fn (mut g Gen) dec_var(var Var, config VarConfig) {
g.write8((0xff - offset + 1) % 0x100)
}
g.write32(1)
- g.println('dec_var `$var.name`')
+ g.println('dec_var `${var.name}`')
}
GlobalVar {
// TODO
@@ -353,7 +353,7 @@ fn (mut g Gen) inc_var(var Var, config VarConfig) {
g.write8((0xff - offset + 1) % 0x100)
}
g.write32(1)
- g.println('inc_var `$var.name`')
+ g.println('inc_var `${var.name}`')
}
GlobalVar {
// TODO
@@ -376,7 +376,7 @@ fn (mut g Gen) cjmp(op JumpOp) int {
g.write16(u16(op))
pos := g.pos()
g.write32(placeholder)
- g.println('$op')
+ g.println('${op}')
return int(pos)
}
@@ -408,7 +408,7 @@ enum SetOp {
fn (mut g Gen) cset(op SetOp) {
g.write16(u16(op))
g.write8(0xc0)
- g.println('set$op al')
+ g.println('set${op} al')
}
fn abs(a i64) i64 {
@@ -455,11 +455,11 @@ fn (mut g Gen) mov32(reg Register, val int) {
g.write8(0xb9)
}
else {
- panic('unhandled mov32 $reg')
+ panic('unhandled mov32 ${reg}')
}
}
g.write32(val)
- g.println('mov32 $reg, $val')
+ g.println('mov32 ${reg}, ${val}')
}
fn (mut g Gen) mov64(reg Register, val i64) {
@@ -482,7 +482,7 @@ fn (mut g Gen) mov64(reg Register, val i64) {
g.write8(0xc7)
g.write8(0xc2)
g.write32(int(val))
- g.println('mov32 $reg, $val')
+ g.println('mov32 ${reg}, ${val}')
return
}
.rbx {
@@ -502,18 +502,18 @@ fn (mut g Gen) mov64(reg Register, val i64) {
g.write8(0xbf)
}
else {
- eprintln('unhandled mov64 $reg')
+ eprintln('unhandled mov64 ${reg}')
}
}
g.write64(val)
- g.println('mov64 $reg, $val')
+ g.println('mov64 ${reg}, ${val}')
}
fn (mut g Gen) movabs(reg Register, val i64) {
g.write8(0x48 + int(reg) / 8)
g.write8(0xb8 + int(reg) % 8)
g.write64(val)
- g.println('movabs $reg, $val')
+ g.println('movabs ${reg}, ${val}')
}
fn (mut g Gen) mov_deref(reg Register, regptr Register, typ ast.Type) {
@@ -543,7 +543,7 @@ fn (mut g Gen) mov_deref(reg Register, regptr Register, typ ast.Type) {
})
}
g.write8(int(reg) % 8 * 8 + int(regptr) % 8)
- g.println('mov $reg, [$regptr]')
+ g.println('mov ${reg}, [${regptr}]')
}
fn (mut g Gen) mov_store(regptr Register, reg Register, size Size) {
@@ -555,7 +555,7 @@ fn (mut g Gen) mov_store(regptr Register, reg Register, size Size) {
}
g.write8(if size == ._8 { 0x88 } else { 0x89 })
g.write8(int(reg) % 8 * 8 + int(regptr) % 8)
- g.println('mov [$regptr], $reg')
+ g.println('mov [${regptr}], ${reg}')
}
fn (mut g Gen) mov_reg_to_var(var Var, reg Register, config VarConfig) {
@@ -637,14 +637,14 @@ fn (mut g Gen) mov_reg_to_var(var Var, reg Register, config VarConfig) {
.rsi { g.write8(0x75 + far_var_offset) }
.rdx { g.write8(0x55 + far_var_offset) }
.rcx, .r9 { g.write8(0x4d + far_var_offset) }
- else { g.n_error('mov_from_reg $reg') }
+ else { g.n_error('mov_from_reg ${reg}') }
}
if is_far_var {
g.write32(int((0xffffffff - i64(offset) + 1) % 0x100000000))
} else {
g.write8((0xff - offset + 1) % 0x100)
}
- g.println('mov $size_str PTR [rbp-$offset.hex2()],$reg')
+ g.println('mov ${size_str} PTR [rbp-${offset.hex2()}],${reg}')
}
GlobalVar {
// TODO
@@ -683,7 +683,7 @@ fn (mut g Gen) mov_int_to_var(var Var, integer int, config VarConfig) {
g.write8((0xff - offset + 1) % 0x100)
}
g.write8(u8(integer))
- g.println('mov BYTE PTR[rbp-$offset.hex2()], $integer')
+ g.println('mov BYTE PTR[rbp-${offset.hex2()}], ${integer}')
}
ast.i16_type_idx, ast.u16_type_idx {
g.write16(0xc766)
@@ -694,7 +694,7 @@ fn (mut g Gen) mov_int_to_var(var Var, integer int, config VarConfig) {
g.write8((0xff - offset + 1) % 0x100)
}
g.write16(u16(integer))
- g.println('mov WORD PTR[rbp-$offset.hex2()], $integer')
+ g.println('mov WORD PTR[rbp-${offset.hex2()}], ${integer}')
}
ast.int_type_idx, ast.u32_type_idx, ast.rune_type_idx {
g.write8(0xc7)
@@ -705,7 +705,7 @@ fn (mut g Gen) mov_int_to_var(var Var, integer int, config VarConfig) {
g.write8((0xff - offset + 1) % 0x100)
}
g.write32(integer)
- g.println('mov DWORD PTR[rbp-$offset.hex2()], $integer')
+ g.println('mov DWORD PTR[rbp-${offset.hex2()}], ${integer}')
}
ast.i64_type_idx, ast.u64_type_idx, ast.isize_type_idx, ast.usize_type_idx,
ast.int_literal_type_idx {
@@ -718,10 +718,10 @@ fn (mut g Gen) mov_int_to_var(var Var, integer int, config VarConfig) {
g.write8((0xff - offset + 1) % 0x100)
}
g.write32(integer)
- g.println('mov QWORD PTR[rbp-$offset.hex2()], $integer')
+ g.println('mov QWORD PTR[rbp-${offset.hex2()}], ${integer}')
}
else {
- g.n_error('unhandled mov int type: $typ')
+ g.n_error('unhandled mov int type: ${typ}')
}
}
}
@@ -748,14 +748,14 @@ fn (mut g Gen) lea_var_to_reg(reg Register, var_offset int) {
.rdx { g.write8(0x55 + far_var_offset) }
.rbx { g.write8(0x5d + far_var_offset) }
.rcx { g.write8(0x4d + far_var_offset) }
- else { g.n_error('lea_var_to_reg $reg') }
+ else { g.n_error('lea_var_to_reg ${reg}') }
}
if is_far_var {
g.write32(int((0xffffffff - i64(var_offset) + 1) % 0x100000000))
} else {
g.write8((0xff - var_offset + 1) % 0x100)
}
- g.println('lea $reg, [rbp-$var_offset.hex2()]')
+ g.println('lea ${reg}, [rbp-${var_offset.hex2()}]')
}
fn (mut g Gen) mov_var_to_reg(reg Register, var Var, config VarConfig) {
@@ -826,14 +826,14 @@ fn (mut g Gen) mov_var_to_reg(reg Register, var Var, config VarConfig) {
.rdx { g.write8(0x55 + far_var_offset) }
.rbx { g.write8(0x5d + far_var_offset) }
.rcx { g.write8(0x4d + far_var_offset) }
- else { g.n_error('mov_var_to_reg $reg') }
+ else { g.n_error('mov_var_to_reg ${reg}') }
}
if is_far_var {
g.write32(int((0xffffffff - i64(offset) + 1) % 0x100000000))
} else {
g.write8((0xff - offset + 1) % 0x100)
}
- g.println('$instruction $reg, $size_str PTR [rbp-$offset.hex2()]')
+ g.println('${instruction} ${reg}, ${size_str} PTR [rbp-${offset.hex2()}]')
}
GlobalVar {
// TODO
@@ -866,7 +866,7 @@ fn (mut g Gen) mov_extend_reg(a Register, b Register, typ ast.Type) {
}
g.write8(0xc0 + int(a) % 8 * 8 + int(b) % 8)
instruction := if is_signed { 's' } else { 'z' }
- g.println('mov${instruction}x $a, $b')
+ g.println('mov${instruction}x ${a}, ${b}')
}
}
@@ -890,7 +890,7 @@ fn (mut g Gen) call(addr int) i64 {
g.write8(0xe8)
g.write32(int(rel))
- g.println('call $addr')
+ g.println('call ${addr}')
return c_addr
}
@@ -904,7 +904,7 @@ fn (mut g Gen) extern_call(addr int) {
g.println('call *@GOTPCREL(%rip)')
}
else {
- g.n_error('extern calls are not implemented for $g.pref.os')
+ g.n_error('extern calls are not implemented for ${g.pref.os}')
}
}
}
@@ -942,7 +942,7 @@ pub fn (mut g Gen) push(reg Register) {
if mut g.code_gen is Amd64 {
g.code_gen.is_16bit_aligned = !g.code_gen.is_16bit_aligned
}
- g.println('push $reg')
+ g.println('push ${reg}')
}
pub fn (mut g Gen) pop(reg Register) {
@@ -953,7 +953,7 @@ pub fn (mut g Gen) pop(reg Register) {
if mut g.code_gen is Amd64 {
g.code_gen.is_16bit_aligned = !g.code_gen.is_16bit_aligned
}
- g.println('pop $reg')
+ g.println('pop ${reg}')
}
pub fn (mut g Gen) sub8(reg Register, val int) {
@@ -961,7 +961,7 @@ pub fn (mut g Gen) sub8(reg Register, val int) {
g.write8(0x83)
g.write8(0xe8 + int(reg)) // TODO rax is different?
g.write8(val)
- g.println('sub8 $reg,$val.hex2()')
+ g.println('sub8 ${reg},${val.hex2()}')
}
pub fn (mut g Gen) sub(reg Register, val int) {
@@ -973,7 +973,7 @@ pub fn (mut g Gen) sub(reg Register, val int) {
g.write8(0xe8 + int(reg))
}
g.write32(val)
- g.println('sub $reg,$val.hex2()')
+ g.println('sub ${reg},${val.hex2()}')
}
pub fn (mut g Gen) add(reg Register, val int) {
@@ -985,7 +985,7 @@ pub fn (mut g Gen) add(reg Register, val int) {
g.write8(0xc0 + int(reg))
}
g.write32(val)
- g.println('add $reg,$val.hex2()')
+ g.println('add ${reg},${val.hex2()}')
}
pub fn (mut g Gen) add8(reg Register, val int) {
@@ -993,7 +993,7 @@ pub fn (mut g Gen) add8(reg Register, val int) {
g.write8(0x83)
g.write8(0xc0 + int(reg))
g.write8(val)
- g.println('add8 $reg,$val.hex2()')
+ g.println('add8 ${reg},${val.hex2()}')
}
[deprecated: 'use add_reg']
@@ -1005,7 +1005,7 @@ fn (mut g Gen) add8_var(reg Register, var_offset int) {
else { g.n_error('add8_var') }
}
g.write8(0xff - var_offset + 1)
- g.println('add8 $reg,DWORD PTR[rbp-$var_offset.hex2()]')
+ g.println('add8 ${reg},DWORD PTR[rbp-${var_offset.hex2()}]')
}
[deprecated: 'use sub_reg']
@@ -1017,7 +1017,7 @@ fn (mut g Gen) sub8_var(reg Register, var_offset int) {
else { g.n_error('sub8_var') }
}
g.write8(0xff - var_offset + 1)
- g.println('sub8 $reg,DWORD PTR[rbp-$var_offset.hex2()]')
+ g.println('sub8 ${reg},DWORD PTR[rbp-${var_offset.hex2()}]')
}
[deprecated: 'use div_reg']
@@ -1042,7 +1042,7 @@ fn (mut g Gen) mul8_var(reg Register, var_offset int) {
else { g.n_error('mul8_var') }
}
g.write8(0xff - var_offset + 1)
- g.println('mul8 $reg,DWORD PTR[rbp-$var_offset.hex2()]')
+ g.println('mul8 ${reg},DWORD PTR[rbp-${var_offset.hex2()}]')
}
fn (mut g Gen) bitand_reg(a Register, b Register) {
@@ -1050,7 +1050,7 @@ fn (mut g Gen) bitand_reg(a Register, b Register) {
if int(b) >= int(Register.r8) { 4 } else { 0 })
g.write8(0x21)
g.write8(0xc0 + int(a) % 8 + int(b) % 8 * 8)
- g.println('and $a, $b')
+ g.println('and ${a}, ${b}')
}
fn (mut g Gen) bitor_reg(a Register, b Register) {
@@ -1058,7 +1058,7 @@ fn (mut g Gen) bitor_reg(a Register, b Register) {
if int(b) >= int(Register.r8) { 4 } else { 0 })
g.write8(0x09)
g.write8(0xc0 + int(a) % 8 + int(b) % 8 * 8)
- g.println('or $a, $b')
+ g.println('or ${a}, ${b}')
}
fn (mut g Gen) bitxor_reg(a Register, b Register) {
@@ -1066,14 +1066,14 @@ fn (mut g Gen) bitxor_reg(a Register, b Register) {
if int(b) >= int(Register.r8) { 4 } else { 0 })
g.write8(0x31)
g.write8(0xc0 + int(a) % 8 + int(b) % 8 * 8)
- g.println('xor $a, $b')
+ g.println('xor ${a}, ${b}')
}
fn (mut g Gen) bitnot_reg(a Register) {
g.write8(0x48 + if int(a) >= int(Register.r8) { 1 } else { 0 })
g.write8(0xf7)
g.write8(0xd0 + int(a) % 8)
- g.println('not $a')
+ g.println('not ${a}')
}
fn (mut g Gen) shl_reg(a Register, b Register) {
@@ -1083,7 +1083,7 @@ fn (mut g Gen) shl_reg(a Register, b Register) {
g.write8(if int(a) >= int(Register.r8) { 0x49 } else { 0x48 })
g.write8(0xd3)
g.write8(0xe0 + int(a) % 8)
- g.println('shl $a, $b')
+ g.println('shl ${a}, ${b}')
}
fn (mut g Gen) sar_reg(a Register, b Register) {
@@ -1093,7 +1093,7 @@ fn (mut g Gen) sar_reg(a Register, b Register) {
g.write8(if int(a) > 7 { 0x49 } else { 0x48 })
g.write8(0xd3)
g.write8(0xf8 + int(a) % 8)
- g.println('sar $a, $b')
+ g.println('sar ${a}, ${b}')
}
fn (mut g Gen) shr_reg(a Register, b Register) {
@@ -1103,7 +1103,7 @@ fn (mut g Gen) shr_reg(a Register, b Register) {
g.write8(if int(a) > 7 { 0x49 } else { 0x48 })
g.write8(0xd3)
g.write8(0xe8 + int(a) % 8)
- g.println('shr $a, $b')
+ g.println('shr ${a}, ${b}')
}
fn (mut g Gen) leave() {
@@ -1112,7 +1112,7 @@ fn (mut g Gen) leave() {
// save return value
g.push(.rax)
for defer_stmt in g.defer_stmts.reverse() {
- name := '_defer$defer_stmt.idx_in_fn'
+ name := '_defer${defer_stmt.idx_in_fn}'
defer_var := g.get_var_offset(name)
g.mov_var_to_reg(.rax, LocalVar{defer_var, ast.i64_type_idx, name})
g.cmp_zero(.rax)
@@ -1208,7 +1208,7 @@ pub fn (mut g Gen) test_reg(r Register) {
if int(r) >= int(Register.r8) { 4 } else { 0 })
g.write8(0x85)
g.write8(0xc0 + int(r) % 8 + int(r) % 8 * 8)
- g.println('test $r, $r')
+ g.println('test ${r}, ${r}')
}
// return length in .rax of string pointed by given register
@@ -1220,7 +1220,7 @@ pub fn (mut g Gen) inline_strlen(r Register) {
g.xor(.rcx, -1)
g.dec(.rcx)
g.mov_reg(.rax, .rcx)
- g.println('strlen rax, $r')
+ g.println('strlen rax, ${r}')
}
// TODO: strlen of string at runtime
@@ -1328,7 +1328,7 @@ pub fn (mut g Gen) gen_amd64_exit(expr ast.Expr) {
match expr {
ast.CallExpr {
right := expr.return_type
- g.n_error('native exit builtin: Unsupported call $right')
+ g.n_error('native exit builtin: Unsupported call ${right}')
}
ast.Ident {
g.mov_var_to_reg(.edi, expr as ast.Ident)
@@ -1393,7 +1393,7 @@ fn (mut g Gen) learel(reg Register, val int) {
}
}
g.write32(val)
- g.println('lea $reg, rip + $val')
+ g.println('lea ${reg}, rip + ${val}')
}
fn (mut g Gen) lea(reg Register, val int) {
@@ -1401,7 +1401,7 @@ fn (mut g Gen) lea(reg Register, val int) {
g.write8(0x8d)
g.write8(0x15)
g.write32(val)
- g.println('lea $reg, $val')
+ g.println('lea ${reg}, ${val}')
}
fn (mut g Gen) mov(reg Register, val int) {
@@ -1412,7 +1412,7 @@ fn (mut g Gen) mov(reg Register, val int) {
g.write8(0xc7)
g.write8(0xc0)
g.write32(-1)
- g.println('mov $reg, $val')
+ g.println('mov ${reg}, ${val}')
}
.rcx {
if val == -1 {
@@ -1424,13 +1424,13 @@ fn (mut g Gen) mov(reg Register, val int) {
g.write8(0xff)
g.write8(0xff) // mov rcx 0xffff5
}
- g.println('mov $reg, $val')
+ g.println('mov ${reg}, ${val}')
}
else {
- g.n_error('unhandled mov $reg, -1')
+ g.n_error('unhandled mov ${reg}, -1')
}
}
- g.println('mov $reg, $val')
+ g.println('mov ${reg}, ${val}')
return
}
if val == 0 {
@@ -1469,10 +1469,10 @@ fn (mut g Gen) mov(reg Register, val int) {
g.write8(0xe4)
}
else {
- g.n_error('unhandled mov $reg, $reg')
+ g.n_error('unhandled mov ${reg}, ${reg}')
}
}
- g.println('xor $reg, $reg')
+ g.println('xor ${reg}, ${reg}')
} else {
match reg {
.eax, .rax {
@@ -1508,11 +1508,11 @@ fn (mut g Gen) mov(reg Register, val int) {
g.write8(0xbb)
}
else {
- g.n_error('unhandled mov $reg')
+ g.n_error('unhandled mov ${reg}')
}
}
g.write32(val)
- g.println('mov $reg, $val')
+ g.println('mov ${reg}, ${val}')
}
}
@@ -1532,10 +1532,10 @@ fn (mut g Gen) mul_reg(a Register, b Register) {
g.write8(0xeb)
}
else {
- panic('unhandled div $a')
+ panic('unhandled div ${a}')
}
}
- g.println('mul $a')
+ g.println('mul ${a}')
}
fn (mut g Gen) imul_reg(r Register) {
@@ -1544,10 +1544,10 @@ fn (mut g Gen) imul_reg(r Register) {
g.write8(0x48)
g.write8(0xf7)
g.write8(0xee)
- g.println('imul $r')
+ g.println('imul ${r}')
}
else {
- panic('unhandled imul $r')
+ panic('unhandled imul ${r}')
}
}
}
@@ -1569,10 +1569,10 @@ fn (mut g Gen) div_reg(a Register, b Register) {
g.write8(0xfb) // idiv ebx
}
else {
- panic('unhandled div $a')
+ panic('unhandled div ${a}')
}
}
- g.println('div $a')
+ g.println('div ${a}')
}
fn (mut g Gen) mod_reg(a Register, b Register) {
@@ -1587,9 +1587,9 @@ fn (mut g Gen) sub_reg(a Register, b Register) {
g.write8(0x29)
g.write8(0xc0 + int(a) % 8 + int(b) % 8 * 8)
} else {
- g.n_error('unhandled sub $a, $b')
+ g.n_error('unhandled sub ${a}, ${b}')
}
- g.println('sub $a, $b')
+ g.println('sub ${a}, ${b}')
}
fn (mut g Gen) add_reg(a Register, b Register) {
@@ -1599,9 +1599,9 @@ fn (mut g Gen) add_reg(a Register, b Register) {
g.write8(0x01)
g.write8(0xc0 + int(a) % 8 + int(b) % 8 * 8)
} else {
- g.n_error('unhandled add $a, $b')
+ g.n_error('unhandled add ${a}, ${b}')
}
- g.println('add $a, $b')
+ g.println('add ${a}, ${b}')
}
fn (mut g Gen) mov_reg(a Register, b Register) {
@@ -1611,9 +1611,9 @@ fn (mut g Gen) mov_reg(a Register, b Register) {
g.write8(0x89)
g.write8(0xc0 + int(a) % 8 + int(b) % 8 * 8)
} else {
- g.n_error('unhandled mov_reg combination for $a $b')
+ g.n_error('unhandled mov_reg combination for ${a} ${b}')
}
- g.println('mov $a, $b')
+ g.println('mov ${a}, ${b}')
}
fn (mut g Gen) sar8(r Register, val u8) {
@@ -1628,21 +1628,21 @@ fn (mut g Gen) sar8(r Register, val u8) {
g.write8(0xfa)
}
else {
- panic('unhandled sar $r, $val')
+ panic('unhandled sar ${r}, ${val}')
}
}
g.write8(val)
- g.println('sar $r, $val')
+ g.println('sar ${r}, ${val}')
}
pub fn (mut g Gen) call_fn_amd64(node ast.CallExpr) {
name := node.name
mut n := name
if !n.contains('.') {
- n = 'main.$n'
+ n = 'main.${n}'
}
if node.is_method {
- n = '${g.table.get_type_name(node.receiver_type)}.$node.name'
+ n = '${g.table.get_type_name(node.receiver_type)}.${node.name}'
}
addr := g.fn_addr[n]
@@ -1848,7 +1848,7 @@ pub fn (mut g Gen) call_fn_amd64(node ast.CallExpr) {
fn (mut g Gen) call_builtin_amd64(name string) i64 {
call_addr := g.call(0)
- g.println('call builtin `$name`')
+ g.println('call builtin `${name}`')
return call_addr
}
@@ -1856,7 +1856,7 @@ fn (mut g Gen) patch_calls() {
for c in g.callpatches {
addr := g.fn_addr[c.name]
if addr == 0 {
- g.n_error('fn addr of `$c.name` = 0')
+ g.n_error('fn addr of `${c.name}` = 0')
return
}
last := g.buf.len
@@ -1934,7 +1934,7 @@ fn (mut g Gen) assign_right_expr(node ast.AssignStmt, i int, right ast.Expr, nam
else {
tn := node.left[i].type_name()
dump(node.left_types)
- g.n_error('unhandled assign type: $tn')
+ g.n_error('unhandled assign type: ${tn}')
}
}
}
@@ -2157,7 +2157,7 @@ fn (mut g Gen) assign_right_expr(node ast.AssignStmt, i int, right ast.Expr, nam
g.init_struct(ident, right)
}
else {
- g.n_error('Unexpected operator `$node.op`')
+ g.n_error('Unexpected operator `${node.op}`')
}
}
}
@@ -2190,7 +2190,7 @@ fn (mut g Gen) assign_right_expr(node ast.AssignStmt, i int, right ast.Expr, nam
// a := arr[0]
offset := g.allocate_var(name, g.get_sizeof_ident(ident), 0)
if g.pref.is_verbose {
- println('infix assignment $name offset=$offset.hex2()')
+ println('infix assignment ${name} offset=${offset.hex2()}')
}
ie := right as ast.IndexExpr
var := ie.left as ast.Ident
@@ -2253,7 +2253,7 @@ fn (mut g Gen) assign_right_expr(node ast.AssignStmt, i int, right ast.Expr, nam
// dump(node)
size := g.get_type_size(node.left_types[i])
if size !in [1, 2, 4, 8] || node.op !in [.assign, .decl_assign] {
- g.v_error('unhandled assign_stmt expression: $right.type_name()', right.pos())
+ g.v_error('unhandled assign_stmt expression: ${right.type_name()}', right.pos())
}
if node.op == .decl_assign {
g.allocate_var(name, size, 0)
@@ -2481,7 +2481,7 @@ fn (mut g Gen) infix_expr(node ast.InfixExpr) {
g.write32(0xc1c20ff3)
g.write8(if node.op == .eq { 0x00 } else { 0x04 })
inst := if node.op == .eq { 'cmpeqss' } else { 'cmpneqss' }
- g.println('$inst xmm0, xmm1')
+ g.println('${inst} xmm0, xmm1')
g.write32(0xc07e0f66)
g.println('movd eax, xmm0')
g.write([u8(0x83), 0xe0, 0x01])
@@ -2511,7 +2511,7 @@ fn (mut g Gen) infix_expr(node ast.InfixExpr) {
g.div_sse(.xmm0, .xmm1, typ)
}
else {
- g.n_error('`$node.op` expression is not supported right now')
+ g.n_error('`${node.op}` expression is not supported right now')
}
}
return
@@ -2536,7 +2536,7 @@ fn (mut g Gen) infix_expr(node ast.InfixExpr) {
}
if node.left_type !in ast.integer_type_idxs && node.left_type != ast.bool_type_idx
&& g.table.sym(node.left_type).info !is ast.Enum {
- g.n_error('unsupported type for `$node.op`: $node.left_type')
+ g.n_error('unsupported type for `${node.op}`: ${node.left_type}')
}
// left: rax, right: rdx
match node.op {
@@ -2604,7 +2604,7 @@ fn (mut g Gen) infix_expr(node ast.InfixExpr) {
g.shr_reg(.rax, .rcx)
}
else {
- g.n_error('`$node.op` expression is not supported right now')
+ g.n_error('`${node.op}` expression is not supported right now')
}
}
}
@@ -2675,7 +2675,7 @@ fn (mut g Gen) gen_asm_stmt_amd64(asm_node ast.AsmStmt) {
}
}
}
- g.println(': $line')
+ g.println(': ${line}')
match t.name {
'nop' {
g.write8(u8(0x90))
@@ -2719,10 +2719,10 @@ fn (mut g Gen) gen_asm_stmt_amd64(asm_node ast.AsmStmt) {
g.write8(byt(imm, 1))
g.write8(byt(imm, 2))
g.write8(byt(imm, 3))
- g.println('mov $reg, $imm')
+ g.println('mov ${reg}, ${imm}')
}
else {
- g.v_error('unsupported instruction $t.name', asm_node.pos)
+ g.v_error('unsupported instruction ${t.name}', asm_node.pos)
}
}
}
@@ -2737,11 +2737,11 @@ fn (mut g Gen) gen_assert(assert_node ast.AssertStmt) {
id: label
pos: cjmp_addr
}
- g.println('; jump to label $label')
+ g.println('; jump to label ${label}')
g.expr(assert_node.expr)
g.trap()
g.labels.addrs[label] = g.pos()
- g.println('; label $label')
+ g.println('; label ${label}')
}
fn (mut g Gen) cjmp_notop(op token.Kind) int {
@@ -2823,7 +2823,7 @@ fn (mut g Gen) if_expr(node ast.IfExpr) {
id: label
pos: cjmp_addr
}
- g.println('; jump to label $label')
+ g.println('; jump to label ${label}')
g.stmts(branch.stmts)
if has_endif {
jump_addr := g.jmp(0)
@@ -2831,16 +2831,16 @@ fn (mut g Gen) if_expr(node ast.IfExpr) {
id: endif_label
pos: jump_addr
}
- g.println('; jump to label $endif_label')
+ g.println('; jump to label ${endif_label}')
}
// println('after if g.pos=$g.pos() jneaddr=$cjmp_addr')
g.labels.addrs[label] = g.pos()
- g.println('; label $label')
+ g.println('; label ${label}')
}
}
if has_endif {
g.labels.addrs[endif_label] = g.pos()
- g.println('; label $endif_label')
+ g.println('; label ${endif_label}')
}
}
@@ -2864,7 +2864,7 @@ fn (mut g Gen) for_stmt(node ast.ForStmt) {
start := g.pos()
start_label := g.labels.new_label()
g.labels.addrs[start_label] = start
- g.println('; label $start_label')
+ g.println('; label ${start_label}')
end_label := g.labels.new_label()
g.labels.branches << BranchLabel{
name: node.label
@@ -2876,7 +2876,7 @@ fn (mut g Gen) for_stmt(node ast.ForStmt) {
g.jmp(int(0xffffffff - (g.pos() + 5 - start) + 1))
g.println('jmp after infinite for')
g.labels.addrs[end_label] = g.pos()
- g.println('; label $end_label')
+ g.println('; label ${end_label}')
return
}
infix_expr := node.cond as ast.InfixExpr
@@ -2884,7 +2884,7 @@ fn (mut g Gen) for_stmt(node ast.ForStmt) {
start := g.pos()
start_label := g.labels.new_label()
g.labels.addrs[start_label] = start
- g.println('; label $start_label')
+ g.println('; label ${start_label}')
match infix_expr.left {
ast.Ident {
match infix_expr.right {
@@ -2921,7 +2921,7 @@ fn (mut g Gen) for_stmt(node ast.ForStmt) {
id: end_label
pos: jump_addr
}
- g.println('; jump to label $end_label')
+ g.println('; jump to label ${end_label}')
g.labels.branches << BranchLabel{
name: node.label
start: start_label
@@ -2934,7 +2934,7 @@ fn (mut g Gen) for_stmt(node ast.ForStmt) {
g.jmp(int(0xffffffff - (g.pos() + 5 - start) + 1))
// Update the jump addr to current pos
g.labels.addrs[end_label] = g.pos()
- g.println('; label $end_label')
+ g.println('; label ${end_label}')
g.println('jmp after for')
}
@@ -3024,7 +3024,7 @@ fn (mut g Gen) fn_decl_amd64(node ast.FnDecl) {
}
// define defer vars
for i in 0 .. node.defer_stmts.len {
- name := '_defer$i'
+ name := '_defer${i}'
g.allocate_var(name, 8, 0)
}
// body
@@ -3033,7 +3033,7 @@ fn (mut g Gen) fn_decl_amd64(node ast.FnDecl) {
g.stack_var_pos += 23
g.stack_var_pos /= 16
g.stack_var_pos *= 16
- g.println('; stack frame size: $g.stack_var_pos')
+ g.println('; stack frame size: ${g.stack_var_pos}')
g.write32_at(local_alloc_pos + 3, g.stack_var_pos)
is_main := node.name == 'main.main'
if is_main && g.pref.os != .linux {
@@ -3058,7 +3058,7 @@ pub fn (mut g Gen) builtin_decl_amd64(builtin BuiltinFn) {
g.stack_var_pos += 7
g.stack_var_pos /= 16
g.stack_var_pos *= 16
- g.println('; stack frame size: $g.stack_var_pos')
+ g.println('; stack frame size: ${g.stack_var_pos}')
g.write32_at(local_alloc_pos + 3, g.stack_var_pos)
g.labels.addrs[0] = g.pos()
@@ -3105,7 +3105,7 @@ pub fn (mut g Gen) allocate_var(name string, size int, initial_val int) int {
g.write8(0x45 + far_var_offset)
}
else {
- g.n_error('allocate_var: bad size $size')
+ g.n_error('allocate_var: bad size ${size}')
}
}
// Generate N in `[rbp-N]`
@@ -3130,12 +3130,12 @@ pub fn (mut g Gen) allocate_var(name string, size int, initial_val int) int {
g.write32(initial_val) // fixme: 64-bit segfaulting
}
else {
- g.n_error('allocate_var: bad size $size')
+ g.n_error('allocate_var: bad size ${size}')
}
}
// println('allocate_var(size=$size, initial_val=$initial_val)')
- g.println('mov [rbp-$n.hex2()], $initial_val ; Allocate var `$name`')
+ g.println('mov [rbp-${n.hex2()}], ${initial_val} ; Allocate var `${name}`')
return g.stack_var_pos
}
@@ -3217,7 +3217,7 @@ fn (mut g Gen) init_struct(var Var, init ast.StructInit) {
}
for f in init.fields {
field := ts.find_field(f.name) or {
- g.n_error('Could not find field `$f.name` on init')
+ g.n_error('Could not find field `${f.name}` on init')
}
offset := g.structs[var.typ.idx()].offsets[field.i]
@@ -3240,7 +3240,7 @@ fn (mut g Gen) convert_bool_to_string(reg Register) {
id: false_label
pos: false_cjmp_addr
}
- g.println('; jump to label $false_label')
+ g.println('; jump to label ${false_label}')
g.learel(reg, g.allocate_string('true', 3, .rel32))
@@ -3250,14 +3250,14 @@ fn (mut g Gen) convert_bool_to_string(reg Register) {
id: end_label
pos: end_jmp_addr
}
- g.println('; jump to label $end_label')
+ g.println('; jump to label ${end_label}')
g.labels.addrs[false_label] = g.pos()
- g.println('; label $false_label')
+ g.println('; label ${false_label}')
g.learel(reg, g.allocate_string('false', 3, .rel32))
g.labels.addrs[end_label] = g.pos()
- g.println('; label $end_label')
+ g.println('; label ${end_label}')
}
fn (mut g Gen) convert_int_to_string(r1 Register, r2 Register) {
@@ -3277,7 +3277,7 @@ fn (mut g Gen) convert_int_to_string(r1 Register, r2 Register) {
id: skip_zero_label
pos: skip_zero_cjmp_addr
}
- g.println('; jump to label $skip_zero_label')
+ g.println('; jump to label ${skip_zero_label}')
// handle zeros seperately
// g.mov_int_to_var(LocalVar{buffer, ast.u8_type_idx, ''}, '0'[0])
@@ -3293,10 +3293,10 @@ fn (mut g Gen) convert_int_to_string(r1 Register, r2 Register) {
id: end_label
pos: end_jmp_addr
}
- g.println('; jump to label $end_label')
+ g.println('; jump to label ${end_label}')
g.labels.addrs[skip_zero_label] = g.pos()
- g.println('; label $skip_zero_label')
+ g.println('; label ${skip_zero_label}')
// load a pointer to the string to rdi
// g.lea_var_to_reg(.rdi, buffer)
@@ -3309,7 +3309,7 @@ fn (mut g Gen) convert_int_to_string(r1 Register, r2 Register) {
id: skip_minus_label
pos: skip_minus_cjmp_addr
}
- g.println('; jump to label $skip_minus_label')
+ g.println('; jump to label ${skip_minus_label}')
// add a `-` sign as the first character
g.write8(0xc6)
@@ -3320,13 +3320,13 @@ fn (mut g Gen) convert_int_to_string(r1 Register, r2 Register) {
g.neg(.rax) // negate our integer to make it positive
g.inc(.rdi) // increment rdi to skip the `-` character
g.labels.addrs[skip_minus_label] = g.pos()
- g.println('; label $skip_minus_label')
+ g.println('; label ${skip_minus_label}')
g.mov_reg(.r12, .rdi) // copy the buffer position to r12
loop_label := g.labels.new_label()
loop_start := g.pos()
- g.println('; label $loop_label')
+ g.println('; label ${loop_label}')
g.push(.rax)
@@ -3359,7 +3359,7 @@ fn (mut g Gen) convert_int_to_string(r1 Register, r2 Register) {
id: loop_label
pos: loop_cjmp_addr
}
- g.println('; jump to label $skip_minus_label')
+ g.println('; jump to label ${skip_minus_label}')
g.labels.addrs[loop_label] = loop_start
// after all was converted, reverse the string
@@ -3368,7 +3368,7 @@ fn (mut g Gen) convert_int_to_string(r1 Register, r2 Register) {
g.call_builtin('reverse_string')
g.labels.addrs[end_label] = g.pos()
- g.println('; label $end_label')
+ g.println('; label ${end_label}')
}
fn (mut g Gen) reverse_string(reg Register) {
@@ -3525,7 +3525,7 @@ fn (mut g Gen) mov_ssereg_to_var(var Var, reg SSERegister, config VarConfig) {
g.write8((0xff - offset + 1) % 0x100)
}
inst := if typ == ast.f32_type_idx { 'movss' } else { 'movsd' }
- g.println('$inst [rbp-$offset.hex2()], $reg')
+ g.println('${inst} [rbp-${offset.hex2()}], ${reg}')
}
GlobalVar {
// TODO
@@ -3565,7 +3565,7 @@ fn (mut g Gen) mov_var_to_ssereg(reg SSERegister, var Var, config VarConfig) {
g.write8((0xff - offset + 1) % 0x100)
}
inst := if typ == ast.f32_type_idx { 'movss' } else { 'movsd' }
- g.println('$inst $reg, [rbp-$offset.hex2()]')
+ g.println('${inst} ${reg}, [rbp-${offset.hex2()}]')
}
GlobalVar {
// TODO
@@ -3580,7 +3580,7 @@ fn (mut g Gen) mov_ssereg(a SSERegister, b SSERegister) {
}
g.write16(0x100f)
g.write8(0xc0 + int(a) % 8 * 8 + int(b) % 8)
- g.println('movsd $a, $b')
+ g.println('movsd ${a}, ${b}')
}
fn (mut g Gen) add_sse(a SSERegister, b SSERegister, typ ast.Type) {
@@ -3591,7 +3591,7 @@ fn (mut g Gen) add_sse(a SSERegister, b SSERegister, typ ast.Type) {
g.write16(0x580f)
g.write8(0xc0 + int(a) % 8 * 8 + int(b) % 8)
inst := if typ == ast.f32_type_idx { 'addss' } else { 'addsd' }
- g.println('$inst $a, $b')
+ g.println('${inst} ${a}, ${b}')
}
fn (mut g Gen) sub_sse(a SSERegister, b SSERegister, typ ast.Type) {
@@ -3602,7 +3602,7 @@ fn (mut g Gen) sub_sse(a SSERegister, b SSERegister, typ ast.Type) {
g.write16(0x5c0f)
g.write8(0xc0 + int(a) % 8 * 8 + int(b) % 8)
inst := if typ == ast.f32_type_idx { 'subss' } else { 'subsd' }
- g.println('$inst $a, $b')
+ g.println('${inst} ${a}, ${b}')
}
fn (mut g Gen) mul_sse(a SSERegister, b SSERegister, typ ast.Type) {
@@ -3613,7 +3613,7 @@ fn (mut g Gen) mul_sse(a SSERegister, b SSERegister, typ ast.Type) {
g.write16(0x590f)
g.write8(0xc0 + int(a) % 8 * 8 + int(b) % 8)
inst := if typ == ast.f32_type_idx { 'mulss' } else { 'mulsd' }
- g.println('$inst $a, $b')
+ g.println('${inst} ${a}, ${b}')
}
fn (mut g Gen) div_sse(a SSERegister, b SSERegister, typ ast.Type) {
@@ -3624,7 +3624,7 @@ fn (mut g Gen) div_sse(a SSERegister, b SSERegister, typ ast.Type) {
g.write16(0x5e0f)
g.write8(0xc0 + int(a) % 8 * 8 + int(b) % 8)
inst := if typ == ast.f32_type_idx { 'divss' } else { 'divsd' }
- g.println('$inst $a, $b')
+ g.println('${inst} ${a}, ${b}')
}
fn (mut g Gen) cmp_sse(a SSERegister, b SSERegister, typ ast.Type) {
@@ -3637,7 +3637,7 @@ fn (mut g Gen) cmp_sse(a SSERegister, b SSERegister, typ ast.Type) {
g.write16(0x2e0f)
g.write8(0xc0 + int(a) % 8 * 8 + int(b) % 8)
inst := if typ == ast.f32_type_idx { 'ucomiss' } else { 'ucomisd' }
- g.println('$inst $a, $b')
+ g.println('${inst} ${a}, ${b}')
}
pub fn (mut g Gen) push_sse(reg SSERegister) {
@@ -3650,11 +3650,11 @@ pub fn (mut g Gen) push_sse(reg SSERegister) {
g.write16(0x110f)
g.write8(0x04 + int(reg) % 8 * 8)
g.write8(0x24)
- g.println('movsd [rsp], $reg')
+ g.println('movsd [rsp], ${reg}')
if mut g.code_gen is Amd64 {
g.code_gen.is_16bit_aligned = !g.code_gen.is_16bit_aligned
}
- g.println('; push $reg')
+ g.println('; push ${reg}')
}
pub fn (mut g Gen) pop_sse(reg SSERegister) {
@@ -3665,13 +3665,13 @@ pub fn (mut g Gen) pop_sse(reg SSERegister) {
g.write16(0x100f)
g.write8(0x04 + int(reg) % 8 * 8)
g.write8(0x24)
- g.println('movsd $reg, [rsp]')
+ g.println('movsd ${reg}, [rsp]')
g.write32(0x08c48348)
g.println('add rsp, 0x8')
if mut g.code_gen is Amd64 {
g.code_gen.is_16bit_aligned = !g.code_gen.is_16bit_aligned
}
- g.println('; pop $reg')
+ g.println('; pop ${reg}')
}
fn (mut g Gen) gen_cast_expr_amd64(expr ast.CastExpr) {
diff --git a/vlib/v/gen/native/arm64.v b/vlib/v/gen/native/arm64.v
index f122736587..b54a6c3f82 100644
--- a/vlib/v/gen/native/arm64.v
+++ b/vlib/v/gen/native/arm64.v
@@ -43,7 +43,7 @@ mut:
}
fn (mut x Arm64) allocate_var(name string, size int, initial_val int) {
- eprintln('TODO: allocating var on arm64 ($name) = $size = $initial_val')
+ eprintln('TODO: allocating var on arm64 (${name}) = ${size} = ${initial_val}')
}
fn (mut g Gen) mov_arm(reg Arm64Register, val u64) {
@@ -56,7 +56,7 @@ fn (mut g Gen) mov_arm(reg Arm64Register, val u64) {
r := int(reg)
if r >= 0 && r <= 16 {
g.write32(int(u32(0xd2800000 + u32(r) + (u32(val) << 5))))
- g.println('mov x$r, $val')
+ g.println('mov x${r}, ${val}')
} else {
g.n_error('mov_arm unsupported values')
}
@@ -81,9 +81,9 @@ fn (mut g Gen) neg_arm(r Arm64Register) {
fn (mut g Gen) neg_regs_arm(a Arm64Register, b Arm64Register) {
if u32(a) < 0x0f && u32(b) < 0x0f {
g.write32(int(0xe2600000 | (u32(a) << 16) | u32(b) << 12))
- g.println('neg $a, $b')
+ g.println('neg ${a}, ${b}')
} else {
- g.n_error('unhandled neg $a, $b')
+ g.n_error('unhandled neg ${a}, ${b}')
}
}
@@ -186,7 +186,7 @@ pub fn (mut g Gen) call_fn_arm64(node ast.CallExpr) {
// println('call fn $name')
addr := g.fn_addr[name]
if addr == 0 {
- g.n_error('fn addr of `$name` = 0')
+ g.n_error('fn addr of `${name}` = 0')
}
// Copy values to registers (calling convention)
// g.mov_arm(.eax, 0)
@@ -209,7 +209,7 @@ pub fn (mut g Gen) call_fn_arm64(node ast.CallExpr) {
}
*/
else {
- g.n_error('unhandled call_fn (name=$name) node: ' + expr.type_name())
+ g.n_error('unhandled call_fn (name=${name}) node: ' + expr.type_name())
}
}
}
@@ -248,7 +248,7 @@ fn (mut g Gen) gen_arm64_helloworld() {
fn (mut g Gen) adr(r Arm64Register, delta int) {
g.write32(int(0x10000000 | int(r) | int(u32(delta) << 4)))
- g.println('adr $r, $delta')
+ g.println('adr ${r}, ${delta}')
}
fn (mut g Gen) bl() {
@@ -288,7 +288,7 @@ pub fn (mut c Arm64) gen_exit(mut g Gen, expr ast.Expr) {
c.g.mov_arm(.x0, 0)
}
else {
- g.n_error('unsupported os $c.g.pref.os')
+ g.n_error('unsupported os ${c.g.pref.os}')
}
}
g.svc()
diff --git a/vlib/v/gen/native/builtins.v b/vlib/v/gen/native/builtins.v
index cb5d95d4f3..00f1e868ea 100644
--- a/vlib/v/gen/native/builtins.v
+++ b/vlib/v/gen/native/builtins.v
@@ -46,7 +46,7 @@ pub fn (mut g Gen) generate_builtins() {
}
if g.pref.is_verbose {
- println(term.green('\n(builtin) $name:'))
+ println(term.green('\n(builtin) ${name}:'))
}
g.stack_var_pos = 0
@@ -71,9 +71,9 @@ pub fn (mut g Gen) generate_builtins() {
}
pub fn (mut g Gen) get_builtin_arg_reg(name string, index int) Register {
- builtin := g.builtins[name] or { panic('undefined builtin function $name') }
+ builtin := g.builtins[name] or { panic('undefined builtin function ${name}') }
if index >= builtin.arg_regs.len {
- g.n_error('builtin $name does only have $builtin.arg_regs.len arguments, wanted $index')
+ g.n_error('builtin ${name} does only have ${builtin.arg_regs.len} arguments, wanted ${index}')
}
return builtin.arg_regs[index]
}
diff --git a/vlib/v/gen/native/comptime.v b/vlib/v/gen/native/comptime.v
index 09af47d3ea..8a24409c33 100644
--- a/vlib/v/gen/native/comptime.v
+++ b/vlib/v/gen/native/comptime.v
@@ -38,7 +38,7 @@ fn (mut g Gen) comptime_is_truthy(cond ast.Expr) bool {
return !g.comptime_is_truthy(cond.right)
}
else {
- g.n_error('Compile time infix expr `$cond` is not handled by the native backed.')
+ g.n_error('Compile time infix expr `${cond}` is not handled by the native backed.')
}
}
}
@@ -60,7 +60,7 @@ fn (mut g Gen) comptime_is_truthy(cond ast.Expr) bool {
return g.comptime_is_truthy(cond.left) != g.comptime_is_truthy(cond.right)
}
else {
- g.n_error('Compile time infix expr `$cond` is not handled by the native backend.')
+ g.n_error('Compile time infix expr `${cond}` is not handled by the native backend.')
}
}
}
@@ -72,7 +72,7 @@ fn (mut g Gen) comptime_is_truthy(cond ast.Expr) bool {
}
else {
// should be unreachable
- g.n_error('Compile time conditional `$cond` is not handled by the native backend.')
+ g.n_error('Compile time conditional `${cond}` is not handled by the native backend.')
}
}
return false
@@ -208,7 +208,7 @@ fn (mut g Gen) comptime_ident(name string, is_comptime_optional bool) bool {
|| (g.pref.compile_defines_all.len > 0 && name in g.pref.compile_defines_all) {
true
} else {
- g.n_error('Unhandled os ifdef name "$name".')
+ g.n_error('Unhandled os ifdef name "${name}".')
false
}
}
diff --git a/vlib/v/gen/native/elf.v b/vlib/v/gen/native/elf.v
index be85a920fb..f48d0956b7 100644
--- a/vlib/v/gen/native/elf.v
+++ b/vlib/v/gen/native/elf.v
@@ -560,7 +560,7 @@ fn (mut g Gen) gen_section_header(mut sh SectionHeader) {
fn (mut g Gen) gen_sections(mut sections []Section) {
for mut section in sections {
g.gen_section_header(mut section.header)
- g.println('; ^^^ section header (64) "$section.name"')
+ g.println('; ^^^ section header (64) "${section.name}"')
}
}
@@ -576,7 +576,7 @@ fn (mut g Gen) gen_symtab_data(section Section, data []SymbolTableSection) {
g.write16(symbol.shndx)
g.write64(symbol.value)
g.write64(symbol.size)
- g.println('; SHT_SYMTAB $symbol.str_name')
+ g.println('; SHT_SYMTAB ${symbol.str_name}')
}
size := native.elf_symtab_size * data.len
@@ -598,7 +598,7 @@ fn (mut g Gen) gen_section_data(sections []Section) {
for str in data.strings {
g.write(str.bytes())
g.write8(0) // null-terminate string
- g.println('; "$str"')
+ g.println('; "${str}"')
}
g.write8(0) // null-postfixed
@@ -621,7 +621,7 @@ fn (mut g Gen) gen_section_data(sections []Section) {
g.fn_addr[rela.name] = rela.offset
g.write64(rela.info)
g.write64(rela.addend)
- g.println('; SHT_RELA `$rela.name` ($rela.offset, $rela.info, $rela.addend)')
+ g.println('; SHT_RELA `${rela.name}` (${rela.offset}, ${rela.info}, ${rela.addend})')
}
size := native.elf_rela_size * data.len
@@ -635,7 +635,7 @@ fn (mut g Gen) gen_section_data(sections []Section) {
for dyn in data {
g.write64(dyn.tag)
g.write64(dyn.un)
- g.println('; SHT_DYNAMIC ($dyn.tag, $dyn.un)')
+ g.println('; SHT_DYNAMIC (${dyn.tag}, ${dyn.un})')
}
size := native.elf_dynamic_size * data.len
@@ -665,7 +665,7 @@ fn (mut g Gen) gen_section_data(sections []Section) {
RelSection {
g.write64(data.offset)
g.write64(data.info)
- g.println('; SHT_REL ($data.offset, $data.info)')
+ g.println('; SHT_REL (${data.offset}, ${data.info})')
size := native.elf_rel_size
g.write64_at(section.header.pos + 32, i64(size))
@@ -747,7 +747,7 @@ pub fn (mut g Gen) generate_linkable_elf_header() {
// user code starts here
if g.pref.is_verbose {
- eprintln('code_start_pos = $g.buf.len.hex()')
+ eprintln('code_start_pos = ${g.buf.len.hex()}')
}
g.code_start_pos = g.pos()
@@ -795,7 +795,7 @@ pub fn (mut g Gen) generate_simple_elf_header() {
// user code starts here
if g.pref.is_verbose {
- eprintln('code_start_pos = $g.buf.len.hex()')
+ eprintln('code_start_pos = ${g.buf.len.hex()}')
}
g.code_start_pos = g.pos()
@@ -817,7 +817,7 @@ pub fn (mut g Gen) generate_simple_elf_header() {
g.syscall()
}
else {
- g.n_error('unsupported platform $g.pref.arch')
+ g.n_error('unsupported platform ${g.pref.arch}')
}
}
}
@@ -908,7 +908,7 @@ pub fn (mut g Gen) find_o_path(fname string) string {
g.prepend_vobjpath(['/usr/lib/aarch64-linux-gnu', '/usr/lib'])
}
else {
- g.n_error('unknown architecture $g.pref.arch')
+ g.n_error('unknown architecture ${g.pref.arch}')
['/dev/null']
}
}
@@ -932,11 +932,11 @@ pub fn (mut g Gen) get_lpaths() string {
g.prepend_vobjpath(['/usr/lib/aarch64-linux-gnu', '/usr/lib', '/lib'])
}
else {
- g.n_error('unknown architecture $g.pref.arch')
+ g.n_error('unknown architecture ${g.pref.arch}')
['/dev/null']
}
}
- return lpaths.map('-L$it').join(' ')
+ return lpaths.map('-L${it}').join(' ')
}
pub fn (mut g Gen) link_elf_file(obj_file string) {
@@ -953,7 +953,7 @@ pub fn (mut g Gen) link_elf_file(obj_file string) {
'aarch64elf'
}
else {
- g.n_error('unknown architecture $g.pref.arch')
+ g.n_error('unknown architecture ${g.pref.arch}')
'elf_x86_64' // default to x86_64
}
}
@@ -967,7 +967,7 @@ pub fn (mut g Gen) link_elf_file(obj_file string) {
linker_args := [
'-v',
lpaths,
- '-m $arch',
+ '-m ${arch}',
'-dynamic-linker',
dynamic_linker,
crt1,
@@ -976,8 +976,8 @@ pub fn (mut g Gen) link_elf_file(obj_file string) {
'-lm',
'-lpthread',
crtn,
- '$obj_file',
- '-o $g.out_name',
+ '${obj_file}',
+ '-o ${g.out_name}',
]
slinker_args := linker_args.join(' ')
@@ -988,18 +988,18 @@ pub fn (mut g Gen) link_elf_file(obj_file string) {
ld = custom_linker
}
linker_path := os.real_path(ld)
- linker_cmd := '${os.quoted_path(linker_path)} $slinker_args'
+ linker_cmd := '${os.quoted_path(linker_path)} ${slinker_args}'
if g.pref.is_verbose {
println(linker_cmd)
}
res := os.execute(linker_cmd)
if res.exit_code != 0 {
- g.n_error('ELF linking failed ($ld):\n$res.output')
+ g.n_error('ELF linking failed (${ld}):\n${res.output}')
return
}
if g.pref.is_verbose {
- println('linking with $ld finished successfully:\n$res.output')
+ println('linking with ${ld} finished successfully:\n${res.output}')
}
}
diff --git a/vlib/v/gen/native/gen.v b/vlib/v/gen/native/gen.v
index f71eb001de..25b4e84e79 100644
--- a/vlib/v/gen/native/gen.v
+++ b/vlib/v/gen/native/gen.v
@@ -153,7 +153,7 @@ type IdentVar = GlobalVar | LocalVar | Register
fn (mut g Gen) get_var_from_ident(ident ast.Ident) IdentVar {
mut obj := ident.obj
if obj !in [ast.Var, ast.ConstField, ast.GlobalField, ast.AsmRegister] {
- obj = ident.scope.find(ident.name) or { g.n_error('unknown variable $ident.name') }
+ obj = ident.scope.find(ident.name) or { g.n_error('unknown variable ${ident.name}') }
}
match obj {
ast.Var {
@@ -166,7 +166,7 @@ fn (mut g Gen) get_var_from_ident(ident ast.Ident) IdentVar {
}
}
else {
- g.n_error('unsupported variable type type:$obj name:$ident.name')
+ g.n_error('unsupported variable type type:${obj} name:${ident.name}')
}
}
}
@@ -341,7 +341,7 @@ pub fn (mut g Gen) create_executable() {
os.chmod(g.out_name, 0o775) or { panic(err) } // make it executable
if g.pref.is_verbose {
- eprintln('\n$g.out_name: native binary has been successfully generated')
+ eprintln('\n${g.out_name}: native binary has been successfully generated')
}
}
@@ -373,7 +373,7 @@ pub fn (mut g Gen) link(obj_name string) {
g.link_elf_file(obj_name)
}
else {
- g.n_error('native linking is not implemented for $g.pref.os')
+ g.n_error('native linking is not implemented for ${g.pref.os}')
}
}
}
@@ -512,14 +512,14 @@ fn (mut g Gen) try_var_offset(var_name string) int {
fn (mut g Gen) get_var_offset(var_name string) int {
r := g.try_var_offset(var_name)
if r == -1 {
- g.n_error('unknown variable `$var_name`')
+ g.n_error('unknown variable `${var_name}`')
}
return r
}
fn (mut g Gen) get_field_offset(typ ast.Type, name string) int {
ts := g.table.sym(typ)
- field := ts.find_field(name) or { g.n_error('Could not find field `$name` on init') }
+ field := ts.find_field(name) or { g.n_error('Could not find field `${name}` on init') }
return g.structs[typ.idx()].offsets[field.i]
}
@@ -613,7 +613,7 @@ fn (mut g Gen) get_sizeof_ident(ident ast.Ident) int {
return g.get_type_size(typ)
}
size := g.var_alloc_size[ident.name] or {
- g.n_error('unknown variable `$ident`')
+ g.n_error('unknown variable `${ident}`')
return 0
}
return size
@@ -622,7 +622,7 @@ fn (mut g Gen) get_sizeof_ident(ident ast.Ident) int {
fn (mut g Gen) gen_typeof_expr(it ast.TypeOf, newline bool) {
nl := if newline { '\n' } else { '' }
r := g.typ(it.expr_type).name
- g.learel(.rax, g.allocate_string('$r$nl', 3, .rel32))
+ g.learel(.rax, g.allocate_string('${r}${nl}', 3, .rel32))
}
fn (mut g Gen) call_fn(node ast.CallExpr) {
@@ -743,7 +743,7 @@ fn (mut g Gen) gen_to_string(reg Register, typ ast.Type) {
g.mov_reg(.rax, reg)
}
} else {
- g.n_error('int-to-string conversion not implemented for type $typ')
+ g.n_error('int-to-string conversion not implemented for type ${typ}')
}
}
@@ -761,7 +761,7 @@ fn (mut g Gen) gen_var_to_string(reg Register, var Var, config VarConfig) {
} else if typ.is_string() {
g.mov_var_to_reg(.rax, var, config)
} else {
- g.n_error('int-to-string conversion not implemented for type $typ')
+ g.n_error('int-to-string conversion not implemented for type ${typ}')
}
}
@@ -792,9 +792,9 @@ pub fn (mut g Gen) gen_print_from_expr(expr ast.Expr, typ ast.Type, name string)
}
ast.IntegerLiteral {
if newline {
- g.gen_print('$expr.val\n', fd)
+ g.gen_print('${expr.val}\n', fd)
} else {
- g.gen_print('$expr.val', fd)
+ g.gen_print('${expr.val}', fd)
}
}
ast.BoolLiteral {
@@ -811,9 +811,9 @@ pub fn (mut g Gen) gen_print_from_expr(expr ast.Expr, typ ast.Type, name string)
ast.SizeOf {
size := g.get_type_size(expr.typ)
if newline {
- g.gen_print('$size\n', fd)
+ g.gen_print('${size}\n', fd)
} else {
- g.gen_print('$size', fd)
+ g.gen_print('${size}', fd)
}
}
ast.OffsetOf {
@@ -822,9 +822,9 @@ pub fn (mut g Gen) gen_print_from_expr(expr ast.Expr, typ ast.Type, name string)
if styp.kind == .struct_ {
off := g.get_field_offset(expr.struct_type, field_name)
if newline {
- g.gen_print('$off\n', fd)
+ g.gen_print('${off}\n', fd)
} else {
- g.gen_print('$off', fd)
+ g.gen_print('${off}', fd)
}
} else {
g.v_error('_offsetof expects a struct Type as first argument', expr.pos)
@@ -877,7 +877,7 @@ pub fn (mut g Gen) gen_print_from_expr(expr ast.Expr, typ ast.Type, name string)
fn (mut g Gen) fn_decl(node ast.FnDecl) {
name := if node.is_method {
- '${g.table.get_type_name(node.receiver.typ)}.$node.name'
+ '${g.table.get_type_name(node.receiver.typ)}.${node.name}'
} else {
node.name
}
@@ -885,13 +885,13 @@ fn (mut g Gen) fn_decl(node ast.FnDecl) {
return
}
if g.pref.is_verbose {
- println(term.green('\n$name:'))
+ println(term.green('\n${name}:'))
}
if node.is_deprecated {
- g.warning('fn_decl: $name is deprecated', node.pos)
+ g.warning('fn_decl: ${name} is deprecated', node.pos)
}
if node.is_builtin {
- g.warning('fn_decl: $name is builtin', node.pos)
+ g.warning('fn_decl: ${name} is builtin', node.pos)
}
g.stack_var_pos = 0
@@ -938,7 +938,7 @@ fn (mut g Gen) println(comment string) {
colored := sb.str()
plain := term.strip_ansi(colored)
padding := ' '.repeat(mu.max(1, 40 - plain.len))
- final := '$colored$padding$comment'
+ final := '${colored}${padding}${comment}'
println(final)
}
@@ -985,7 +985,7 @@ fn (mut g Gen) gen_forc_stmt(node ast.ForCStmt) {
id: end_label
pos: int(jump_addr)
}
- g.println('; jump to label $end_label')
+ g.println('; jump to label ${end_label}')
g.labels.branches << BranchLabel{
name: node.label
start: start_label
@@ -993,14 +993,14 @@ fn (mut g Gen) gen_forc_stmt(node ast.ForCStmt) {
}
g.stmts(node.stmts)
g.labels.addrs[start_label] = g.pos()
- g.println('; label $start_label')
+ g.println('; label ${start_label}')
if node.has_inc {
g.stmts([node.inc])
}
g.labels.branches.pop()
g.jmp(int(0xffffffff - (g.pos() + 5 - start) + 1))
g.labels.addrs[end_label] = g.pos()
- g.println('; jump to label $end_label')
+ g.println('; jump to label ${end_label}')
// loop back
}
@@ -1026,7 +1026,7 @@ fn (mut g Gen) for_in_stmt(node ast.ForInStmt) {
id: end_label
pos: jump_addr
}
- g.println('; jump to label $end_label')
+ g.println('; jump to label ${end_label}')
g.labels.branches << BranchLabel{
name: node.label
start: start_label
@@ -1034,12 +1034,12 @@ fn (mut g Gen) for_in_stmt(node ast.ForInStmt) {
}
g.stmts(node.stmts)
g.labels.addrs[start_label] = g.pos()
- g.println('; label $start_label')
+ g.println('; label ${start_label}')
g.inc_var(LocalVar{i, ast.i64_type_idx, node.val_var})
g.labels.branches.pop()
g.jmp(int(0xffffffff - (g.pos() + 5 - start) + 1))
g.labels.addrs[end_label] = g.pos()
- g.println('; label $end_label')
+ g.println('; label ${end_label}')
/*
} else if node.kind == .array {
} else if node.kind == .array_fixed {
@@ -1082,14 +1082,14 @@ fn (mut g Gen) stmt(node ast.Stmt) {
id: label
pos: jump_addr
}
- g.println('; jump to $label: $node.kind')
+ g.println('; jump to ${label}: ${node.kind}')
break
}
}
}
ast.ConstDecl {}
ast.DeferStmt {
- name := '_defer$g.defer_stmts.len'
+ name := '_defer${g.defer_stmts.len}'
defer_var := g.get_var_offset(name)
g.mov_int_to_var(LocalVar{defer_var, ast.i64_type_idx, name}, 1)
g.defer_stmts << node
@@ -1204,7 +1204,7 @@ fn (mut g Gen) stmt(node ast.Stmt) {
id: label
pos: pos
}
- g.println('; jump to label $label')
+ g.println('; jump to label ${label}')
}
ast.AsmStmt {
g.gen_asm_stmt(node)
@@ -1261,7 +1261,7 @@ fn (mut g Gen) gen_syscall(node ast.CallExpr) {
}
}
if !done {
- g.v_error('Unknown selector in syscall argument type $expr', node.pos)
+ g.v_error('Unknown selector in syscall argument type ${expr}', node.pos)
}
}
ast.StringLiteral {
@@ -1274,7 +1274,8 @@ fn (mut g Gen) gen_syscall(node ast.CallExpr) {
g.mov64(ra[i], 1)
}
else {
- g.v_error('Unknown syscall $expr.type_name() argument type $expr', node.pos)
+ g.v_error('Unknown syscall ${expr.type_name()} argument type ${expr}',
+ node.pos)
return
}
}
@@ -1321,7 +1322,7 @@ fn (mut g Gen) expr(node ast.Expr) {
if g.pref.arch == .arm64 {
} else {
g.movabs(.rax, val)
- g.println('; $node.val')
+ g.println('; ${node.val}')
g.push(.rax)
g.pop_sse(.xmm0)
}
@@ -1416,7 +1417,7 @@ fn (mut g Gen) expr(node ast.Expr) {
g.expr(node.expr)
}
else {
- g.n_error('expr: unhandled node type: $node.type_name()')
+ g.n_error('expr: unhandled node type: ${node.type_name()}')
}
}
}
diff --git a/vlib/v/gen/native/macho.v b/vlib/v/gen/native/macho.v
index eac5ce8968..19d556e4d7 100644
--- a/vlib/v/gen/native/macho.v
+++ b/vlib/v/gen/native/macho.v
@@ -340,7 +340,7 @@ pub fn (mut g Gen) generate_macho_object_header() {
g.write32(0)
}
if g.pref.is_verbose {
- println('commands size = $g.buf.len')
+ println('commands size = ${g.buf.len}')
if g.buf.len != 0x138 {
println('macho: invalid header size')
}
@@ -374,7 +374,7 @@ pub fn (mut g Gen) generate_macho_footer() {
} else {
call_delta := int(g.main_fn_addr - g.code_start_pos)
if (call_delta % 4) != 0 || call_delta < 0 {
- g.n_error('Invalid entrypoint->main delta ($call_delta)')
+ g.n_error('Invalid entrypoint->main delta (${call_delta})')
} else {
blop := (0x94 << 24) | (call_delta / 4)
g.write32_at(g.code_start_pos, int(blop))
@@ -441,7 +441,7 @@ pub fn (mut g Gen) zeroes(n int) {
fn (mut g Gen) write_relocs() {
if g.pref.is_verbose {
- println('relocs at $g.buf.len should be 0x160')
+ println('relocs at ${g.buf.len} should be 0x160')
}
g.write32(0x8)
g.write32(0x2d000003)
diff --git a/vlib/v/gen/native/pe.v b/vlib/v/gen/native/pe.v
index 23fee81c32..dd306fedfd 100644
--- a/vlib/v/gen/native/pe.v
+++ b/vlib/v/gen/native/pe.v
@@ -136,7 +136,7 @@ pub fn (mut g Gen) write_pe_header() {
// optional here comes here
p_opthdr := g.buf.len // should be 0x110
if p_opthdr != 0x98 {
- eprintln('Invalid optdr location $p_opthdr != 0x98')
+ eprintln('Invalid optdr location ${p_opthdr} != 0x98')
}
g.write16(0x20b) // magic (0x10b=pe32, 0x20b=pe32+)
g.write8(0x1) // major linker version
diff --git a/vlib/v/gen/native/tests/native_test.v b/vlib/v/gen/native/tests/native_test.v
index b99cc572fd..7248626018 100644
--- a/vlib/v/gen/native/tests/native_test.v
+++ b/vlib/v/gen/native/tests/native_test.v
@@ -37,9 +37,9 @@ fn test_native() {
full_test_path := os.real_path(os.join_path(dir, test))
test_file_name := os.file_name(test)
relative_test_path := full_test_path.replace(vroot + '/', '')
- work_test_path := '$wrkdir/$test_file_name'
- exe_test_path := '$wrkdir/${test_file_name}.exe'
- tmperrfile := '$dir/${test}.tmperr'
+ work_test_path := '${wrkdir}/${test_file_name}'
+ exe_test_path := '${wrkdir}/${test_file_name}.exe'
+ tmperrfile := '${dir}/${test}.tmperr'
cmd := '${os.quoted_path(vexe)} -o ${os.quoted_path(exe_test_path)} -b native ${os.quoted_path(full_test_path)} -d custom_define 2> ${os.quoted_path(tmperrfile)}'
if is_verbose {
println(cmd)
@@ -59,23 +59,23 @@ fn test_native() {
res := os.execute('${os.quoted_path(exe_test_path)} 2> ${os.quoted_path(tmperrfile)}')
if res.exit_code != 0 {
bench.fail()
- eprintln(bench.step_message_fail('$full_test_path failed to run'))
+ eprintln(bench.step_message_fail('${full_test_path} failed to run'))
eprintln(res.output)
continue
}
- mut expected := os.read_file('$dir/${test}.out') or { panic(err) }
- errfile := '$dir/${test}.err'
+ mut expected := os.read_file('${dir}/${test}.out') or { panic(err) }
+ errfile := '${dir}/${test}.err'
if os.exists(errfile) {
- mut err_expected := os.read_file('$dir/${test}.err') or { panic(err) }
+ mut err_expected := os.read_file('${dir}/${test}.err') or { panic(err) }
err_expected = err_expected.trim_right('\r\n').replace('\r\n', '\n')
errstr := os.read_file(tmperrfile) or { panic(err) }
mut err_found := errstr.trim_right('\r\n').replace('\r\n', '\n')
if err_expected != err_found {
println(term.red('FAIL'))
println('============')
- println('stderr expected: "$err_expected" len=$err_expected.len')
+ println('stderr expected: "${err_expected}" len=${err_expected.len}')
println('============')
- println('stderr found:"$err_found" len=$err_found.len')
+ println('stderr found:"${err_found}" len=${err_found.len}')
println('============\n')
bench.fail()
continue
@@ -88,9 +88,9 @@ fn test_native() {
if expected != found {
println(term.red('FAIL'))
println('============')
- println('expected: "$expected" len=$expected.len')
+ println('expected: "${expected}" len=${expected.len}')
println('============')
- println('found:"$found" len=$found.len')
+ println('found:"${found}" len=${found.len}')
println('============\n')
bench.fail()
continue
diff --git a/vlib/v/live/executable/reloader.v b/vlib/v/live/executable/reloader.v
index f8be274123..95f464c020 100644
--- a/vlib/v/live/executable/reloader.v
+++ b/vlib/v/live/executable/reloader.v
@@ -24,7 +24,7 @@ pub fn new_live_reload_info(original string, vexe string, vopts string, live_fn_
live_fn_mutex: live_fn_mutex
live_linkfn: live_linkfn
so_extension: so_extension
- so_name_template: '$so_dir/tmp.%d.$file_base'
+ so_name_template: '${so_dir}/tmp.%d.${file_base}'
live_lib: 0
reloads: 0
reload_time_ms: 0
@@ -53,7 +53,7 @@ pub fn start_reloader(mut r live.LiveReloadInfo) {
pub fn add_live_monitored_file(mut lri live.LiveReloadInfo, path string) {
mtime := os.file_last_mod_unix(path)
lri.monitored_files << path
- elog(lri, '${@FN} mtime: ${mtime:12} path: $path')
+ elog(lri, '${@FN} mtime: ${mtime:12} path: ${path}')
if lri.last_mod_ts < mtime {
lri.last_mod_ts = mtime
}
@@ -66,8 +66,8 @@ fn elog(r &live.LiveReloadInfo, s string) {
fn compile_and_reload_shared_lib(mut r live.LiveReloadInfo) !bool {
sw := time.new_stopwatch()
- new_lib_path := compile_lib(mut r) or { return error('errors while compiling $r.original') }
- elog(r, '> compile_and_reload_shared_lib compiled: $new_lib_path')
+ new_lib_path := compile_lib(mut r) or { return error('errors while compiling ${r.original}') }
+ elog(r, '> compile_and_reload_shared_lib compiled: ${new_lib_path}')
load_lib(mut r, new_lib_path)
r.reload_time_ms = int(sw.elapsed().milliseconds())
return true
@@ -75,8 +75,8 @@ fn compile_and_reload_shared_lib(mut r live.LiveReloadInfo) !bool {
fn compile_lib(mut r live.LiveReloadInfo) ?string {
new_lib_path, new_lib_path_with_extension := current_shared_library_path(mut r)
- cmd := '${os.quoted_path(r.vexe)} $r.vopts -o ${os.quoted_path(new_lib_path)} ${os.quoted_path(r.original)}'
- elog(r, '> compilation cmd: $cmd')
+ cmd := '${os.quoted_path(r.vexe)} ${r.vopts} -o ${os.quoted_path(new_lib_path)} ${os.quoted_path(r.original)}'
+ elog(r, '> compilation cmd: ${cmd}')
cwatch := time.new_stopwatch()
recompilation_result := os.execute(cmd)
elog(r, 'compilation took: ${cwatch.elapsed().milliseconds()}ms')
@@ -86,7 +86,7 @@ fn compile_lib(mut r live.LiveReloadInfo) ?string {
return none
}
if !os.exists(new_lib_path_with_extension) {
- eprintln('new_lib_path: $new_lib_path_with_extension does not exist')
+ eprintln('new_lib_path: ${new_lib_path_with_extension} does not exist')
return none
}
return new_lib_path_with_extension
@@ -126,11 +126,11 @@ fn protected_load_lib(mut r live.LiveReloadInfo, new_lib_path string) {
}
r.live_lib = dl.open(new_lib_path, dl.rtld_lazy)
if r.live_lib == 0 {
- eprintln('opening $new_lib_path failed')
+ eprintln('opening ${new_lib_path} failed')
exit(1)
}
r.live_linkfn(r.live_lib)
- elog(r, '> load_lib OK, new live_lib: $r.live_lib')
+ elog(r, '> load_lib OK, new live_lib: ${r.live_lib}')
// removing the .so file from the filesystem after dlopen-ing
// it is safe, since it will still be mapped in memory
os.rm(new_lib_path) or {}
@@ -151,7 +151,7 @@ fn reloader(mut r live.LiveReloadInfo) {
sw := time.new_stopwatch()
now_ts := get_latest_ts_from_monitored_files(monitored_file_paths, last_ts)
$if trace_check_monitored_files ? {
- eprintln('check if last_ts: $last_ts < now_ts: $now_ts , took $sw.elapsed().microseconds() microseconds')
+ eprintln('check if last_ts: ${last_ts} < now_ts: ${now_ts} , took ${sw.elapsed().microseconds()} microseconds')
}
if last_ts < now_ts {
r.reloads++
diff --git a/vlib/v/live/live_test.v b/vlib/v/live/live_test.v
index c0a2f4489c..a670b4b1f5 100644
--- a/vlib/v/live/live_test.v
+++ b/vlib/v/live/live_test.v
@@ -88,18 +88,18 @@ fn vprintln(s string) {
fn testsuite_end() {
// os.system('tree $vtmp_folder') exit(1)
- vprintln('source: $source_file')
- vprintln('output: $output_file')
+ vprintln('source: ${source_file}')
+ vprintln('output: ${output_file}')
vprintln('---------------------------------------------------------------------------')
output_lines := os.read_lines(output_file) or {
- panic('could not read $output_file, error: $err')
+ panic('could not read ${output_file}, error: ${err}')
}
mut histogram := map[string]int{}
for line in output_lines {
histogram[line] = histogram[line] + 1
}
for k, v in histogram {
- eprintln('> found ${v:5d} times: $k')
+ eprintln('> found ${v:5d} times: ${k}')
}
vprintln('---------------------------------------------------------------------------')
assert histogram['START'] > 0
@@ -111,7 +111,7 @@ fn testsuite_end() {
fn change_source(new string) {
time.sleep(100 * time.millisecond)
- vprintln('> change ORIGINAL to: $new')
+ vprintln('> change ORIGINAL to: ${new}')
atomic_write_source(live_program_source.replace('ORIGINAL', new))
wait_for_file(new)
}
@@ -119,12 +119,12 @@ fn change_source(new string) {
fn wait_for_file(new string) {
time.sleep(100 * time.millisecond)
expected_file := os.join_path(vtmp_folder, new + '.txt')
- eprintln('waiting for $expected_file ...')
+ eprintln('waiting for ${expected_file} ...')
// os.system('tree $vtmp_folder')
max_wait_cycles := os.getenv_opt('WAIT_CYCLES') or { '1' }.int()
for i := 0; i <= max_wait_cycles; i++ {
if i % 25 == 0 {
- vprintln(' checking ${i:-10d} for $expected_file ...')
+ vprintln(' checking ${i:-10d} for ${expected_file} ...')
}
if os.exists(expected_file) {
assert true
@@ -143,8 +143,8 @@ fn setup_cycles_environment() {
// max_live_cycles *= 5
// max_wait_cycles *= 5
}
- os.setenv('LIVE_CYCLES', '$max_live_cycles', true)
- os.setenv('WAIT_CYCLES', '$max_wait_cycles', true)
+ os.setenv('LIVE_CYCLES', '${max_live_cycles}', true)
+ os.setenv('WAIT_CYCLES', '${max_wait_cycles}', true)
}
//
@@ -152,11 +152,11 @@ fn test_live_program_can_be_compiled() {
setup_cycles_environment()
eprintln('Compiling...')
compile_cmd := '${os.quoted_path(vexe)} -cg -keepc -nocolor -live -o ${os.quoted_path(genexe_file)} ${os.quoted_path(main_source_file)}'
- eprintln('> compile_cmd: $compile_cmd')
+ eprintln('> compile_cmd: ${compile_cmd}')
os.system(compile_cmd)
//
cmd := '${os.quoted_path(genexe_file)} > /dev/null &'
- eprintln('Running with: $cmd')
+ eprintln('Running with: ${cmd}')
res := os.system(cmd)
assert res == 0
eprintln('... running in the background')
diff --git a/vlib/v/markused/markused.v b/vlib/v/markused/markused.v
index a52761cfbc..5642e5306e 100644
--- a/vlib/v/markused/markused.v
+++ b/vlib/v/markused/markused.v
@@ -141,7 +141,7 @@ pub fn mark_used(mut table ast.Table, pref &pref.Preferences, ast_files []&ast.F
for k, mut mfn in all_fns {
$if trace_skip_unused_all_fns ? {
- println('k: $k | mfn: $mfn.name')
+ println('k: ${k} | mfn: ${mfn.name}')
}
// _noscan functions/methods are selected when the `-gc boehm` is on:
if is_noscan_whitelisted && mfn.name.ends_with('_noscan') {
@@ -275,9 +275,9 @@ pub fn mark_used(mut table ast.Table, pref &pref.Preferences, ast_files []&ast.F
interface_types := [ptype, ntype]
for method in interface_info.methods {
for typ in interface_types {
- interface_implementation_method_name := '${int(typ)}.$method.name'
+ interface_implementation_method_name := '${int(typ)}.${method.name}'
$if trace_skip_unused_interface_methods ? {
- eprintln('>> isym.name: $isym.name | interface_implementation_method_name: $interface_implementation_method_name')
+ eprintln('>> isym.name: ${isym.name} | interface_implementation_method_name: ${interface_implementation_method_name}')
}
all_fn_root_names << interface_implementation_method_name
}
@@ -308,7 +308,7 @@ pub fn mark_used(mut table ast.Table, pref &pref.Preferences, ast_files []&ast.F
}
pvgt := vgt.set_nr_muls(1)
// eprintln('vgt: $vgt | pvgt: $pvgt | sym_app.name: $sym_app.name | m.name: $m.name')
- all_fn_root_names << '${int(pvgt)}.$m.name'
+ all_fn_root_names << '${int(pvgt)}.${m.name}'
}
}
}
@@ -392,7 +392,7 @@ pub fn mark_used(mut table ast.Table, pref &pref.Preferences, ast_files []&ast.F
$if trace_skip_unused_fn_names ? {
for key, _ in walker.used_fns {
- println('> used fn key: $key')
+ println('> used fn key: ${key}')
}
}
@@ -410,10 +410,10 @@ pub fn mark_used(mut table ast.Table, pref &pref.Preferences, ast_files []&ast.F
table.used_globals = walker.used_globals.move()
$if trace_skip_unused ? {
- eprintln('>> t.used_fns: $table.used_fns.keys()')
- eprintln('>> t.used_consts: $table.used_consts.keys()')
- eprintln('>> t.used_globals: $table.used_globals.keys()')
- eprintln('>> walker.table.used_maps: $walker.table.used_maps')
+ eprintln('>> t.used_fns: ${table.used_fns.keys()}')
+ eprintln('>> t.used_consts: ${table.used_consts.keys()}')
+ eprintln('>> t.used_globals: ${table.used_globals.keys()}')
+ eprintln('>> walker.table.used_maps: ${walker.table.used_maps}')
}
}
diff --git a/vlib/v/markused/walker.v b/vlib/v/markused/walker.v
index bc2ba19c76..3647c8957e 100644
--- a/vlib/v/markused/walker.v
+++ b/vlib/v/markused/walker.v
@@ -25,14 +25,14 @@ mut:
pub fn (mut w Walker) mark_fn_as_used(fkey string) {
$if trace_skip_unused_marked ? {
- eprintln(' fn > |$fkey|')
+ eprintln(' fn > |${fkey}|')
}
w.used_fns[fkey] = true
}
pub fn (mut w Walker) mark_const_as_used(ckey string) {
$if trace_skip_unused_marked ? {
- eprintln(' const > |$ckey|')
+ eprintln(' const > |${ckey}|')
}
if w.used_consts[ckey] {
return
@@ -44,7 +44,7 @@ pub fn (mut w Walker) mark_const_as_used(ckey string) {
pub fn (mut w Walker) mark_global_as_used(ckey string) {
$if trace_skip_unused_marked ? {
- eprintln(' global > |$ckey|')
+ eprintln(' global > |${ckey}|')
}
if w.used_globals[ckey] {
return
@@ -58,7 +58,7 @@ pub fn (mut w Walker) mark_root_fns(all_fn_root_names []string) {
for fn_name in all_fn_root_names {
if fn_name !in w.used_fns {
$if trace_skip_unused_roots ? {
- println('>>>> $fn_name uses: ')
+ println('>>>> ${fn_name} uses: ')
}
w.fn_decl(mut w.all_fns[fn_name])
}
diff --git a/vlib/v/parser/assign.v b/vlib/v/parser/assign.v
index 9e1acc0a19..c5129dc4f6 100644
--- a/vlib/v/parser/assign.v
+++ b/vlib/v/parser/assign.v
@@ -26,14 +26,14 @@ fn (mut p Parser) check_undefined_variables(names []string, val ast.Expr) ! {
p.expr_level--
}
if p.expr_level > parser.max_expr_level {
- return error('expr level > $parser.max_expr_level')
+ return error('expr level > ${parser.max_expr_level}')
}
match val {
ast.Ident {
for name in names {
if name == val.name && val.kind != .blank_ident {
- p.error_with_pos('undefined variable: `$val.name`', val.pos)
- return error('undefined variable: `$val.name`')
+ p.error_with_pos('undefined variable: `${val.name}`', val.pos)
+ return error('undefined variable: `${val.name}`')
}
}
}
@@ -203,7 +203,7 @@ fn (mut p Parser) partial_assign_stmt(left []ast.Expr, left_comments []ast.Comme
ast.Ident {
if op == .decl_assign {
if p.scope.known_var(lx.name) {
- return p.error_with_pos('redefinition of `$lx.name`', lx.pos)
+ return p.error_with_pos('redefinition of `${lx.name}`', lx.pos)
}
mut share := unsafe { ast.ShareType(0) }
if mut lx.info is ast.IdentVar {
@@ -247,7 +247,7 @@ fn (mut p Parser) partial_assign_stmt(left []ast.Expr, left_comments []ast.Comme
}
ast.IndexExpr {
if op == .decl_assign {
- return p.error_with_pos('non-name `$lx.left[$lx.index]` on left side of `:=`',
+ return p.error_with_pos('non-name `${lx.left}[${lx.index}]` on left side of `:=`',
lx.pos)
}
lx.is_setter = true
diff --git a/vlib/v/parser/comptime.v b/vlib/v/parser/comptime.v
index 9f52bb92b0..0e2784eff7 100644
--- a/vlib/v/parser/comptime.v
+++ b/vlib/v/parser/comptime.v
@@ -21,7 +21,7 @@ pub fn (mut p Parser) parse_comptime_type() ast.ComptimeType {
p.check(.dollar)
name := p.check_name()
if name !in parser.comptime_types {
- p.error('unsupported compile-time type `$name`: only $parser.comptime_types are supported')
+ p.error('unsupported compile-time type `${name}`: only ${parser.comptime_types} are supported')
}
mut cty := ast.ComptimeTypeKind.map_
match name {
@@ -63,7 +63,7 @@ fn (mut p Parser) hash() ast.HashStmt {
p.next()
mut main_str := ''
mut msg := ''
- content := val.all_after('$kind ').all_before('//')
+ content := val.all_after('${kind} ').all_before('//')
if content.contains(' #') {
main_str = content.all_before(' #').trim_space()
msg = content.all_after(' #').trim_space()
@@ -190,9 +190,9 @@ fn (mut p Parser) comptime_call() ast.ComptimeCall {
}
}
if is_html {
- p.error_with_pos('vweb HTML template "$tmpl_path" not found', arg_pos)
+ p.error_with_pos('vweb HTML template "${tmpl_path}" not found', arg_pos)
} else {
- p.error_with_pos('template file "$tmpl_path" not found', arg_pos)
+ p.error_with_pos('template file "${tmpl_path}" not found', arg_pos)
}
return err_node
}
@@ -200,18 +200,18 @@ fn (mut p Parser) comptime_call() ast.ComptimeCall {
}
tmp_fn_name := p.cur_fn_name.replace('.', '__') + start_pos.pos.str()
$if trace_comptime ? {
- println('>>> compiling comptime template file "$path" for $tmp_fn_name')
+ println('>>> compiling comptime template file "${path}" for ${tmp_fn_name}')
}
v_code := p.compile_template_file(path, tmp_fn_name)
$if print_vweb_template_expansions ? {
lines := v_code.split('\n')
for i, line in lines {
- println('$path:${i + 1}: $line')
+ println('${path}:${i + 1}: ${line}')
}
}
$if trace_comptime ? {
println('')
- println('>>> template for $path:')
+ println('>>> template for ${path}:')
println(v_code)
println('>>> end of template END')
println('')
@@ -269,7 +269,7 @@ fn (mut p Parser) comptime_for() ast.ComptimeFor {
})
kind = .attributes
} else {
- p.error_with_pos('unknown kind `$for_val`, available are: `methods`, `fields` or `attributes`',
+ p.error_with_pos('unknown kind `${for_val}`, available are: `methods`, `fields` or `attributes`',
p.prev_tok.pos())
return ast.ComptimeFor{}
}
diff --git a/vlib/v/parser/containers.v b/vlib/v/parser/containers.v
index 7f3c624888..790d9b5646 100644
--- a/vlib/v/parser/containers.v
+++ b/vlib/v/parser/containers.v
@@ -84,7 +84,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
p.error_with_pos('`len` and `cap` are invalid attributes for fixed array dimension',
pos)
} else {
- p.error_with_pos('expected `init:`, not `$n`', pos)
+ p.error_with_pos('expected `init:`, not `${n}`', pos)
}
return ast.ArrayInit{}
}
@@ -163,7 +163,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
p.close_scope()
}
else {
- p.error('wrong field `$key`, expecting `len`, `cap`, or `init`')
+ p.error('wrong field `${key}`, expecting `len`, `cap`, or `init`')
return ast.ArrayInit{}
}
}
diff --git a/vlib/v/parser/expr.v b/vlib/v/parser/expr.v
index 0517506c11..10becc96c1 100644
--- a/vlib/v/parser/expr.v
+++ b/vlib/v/parser/expr.v
@@ -17,7 +17,7 @@ pub fn (mut p Parser) expr(precedence int) ast.Expr {
}
pub fn (mut p Parser) check_expr(precedence int) !ast.Expr {
- p.trace_parser('expr($precedence)')
+ p.trace_parser('expr(${precedence})')
mut node := ast.empty_expr
is_stmt_ident := p.is_stmt_ident
p.is_stmt_ident = false
@@ -294,7 +294,7 @@ pub fn (mut p Parser) check_expr(precedence int) !ast.Expr {
st := p.parse_type()
p.check(.comma)
if p.tok.kind != .name {
- return p.unexpected(got: '`$p.tok.lit`', additional_msg: 'expecting struct field')
+ return p.unexpected(got: '`${p.tok.lit}`', additional_msg: 'expecting struct field')
}
field := p.tok.lit
p.next()
@@ -464,12 +464,12 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
// detect `f(x++)`, `a[x++]`
if p.peek_tok.kind in [.rpar, .rsbr] {
if !p.inside_ct_if_expr {
- p.warn_with_pos('`$p.tok.kind` operator can only be used as a statement',
+ p.warn_with_pos('`${p.tok.kind}` operator can only be used as a statement',
p.tok.pos())
}
}
if p.tok.kind in [.inc, .dec] && p.prev_tok.line_nr != p.tok.line_nr {
- p.error_with_pos('$p.tok must be on the same line as the previous token',
+ p.error_with_pos('${p.tok} must be on the same line as the previous token',
p.tok.pos())
}
if mut node is ast.IndexExpr {
@@ -598,7 +598,7 @@ fn (mut p Parser) prefix_expr() ast.Expr {
}
if mut right is ast.ParExpr {
if right.expr is ast.StructInit {
- p.note_with_pos('unnecessary `()`, use `&$right.expr` instead of `&($right.expr)`',
+ p.note_with_pos('unnecessary `()`, use `&${right.expr}` instead of `&(${right.expr})`',
right.pos)
right = right.expr
}
diff --git a/vlib/v/parser/fn.v b/vlib/v/parser/fn.v
index 1302977232..103b5daa9e 100644
--- a/vlib/v/parser/fn.v
+++ b/vlib/v/parser/fn.v
@@ -11,11 +11,11 @@ import os
pub fn (mut p Parser) call_expr(language ast.Language, mod string) ast.CallExpr {
first_pos := p.tok.pos()
mut fn_name := if language == .c {
- 'C.$p.check_name()'
+ 'C.${p.check_name()}'
} else if language == .js {
- 'JS.$p.check_js_name()'
+ 'JS.${p.check_js_name()}'
} else if mod.len > 0 {
- '${mod}.$p.check_name()'
+ '${mod}.${p.check_name()}'
} else {
p.check_name()
}
@@ -303,7 +303,7 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
}
}
if is_duplicate {
- p.error_with_pos('duplicate method `$name`', name_pos)
+ p.error_with_pos('duplicate method `${name}`', name_pos)
return ast.FnDecl{
scope: 0
}
@@ -311,7 +311,7 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
}
if !p.pref.is_fmt {
if name in p.imported_symbols {
- p.error_with_pos('cannot redefine imported function `$name`', name_pos)
+ p.error_with_pos('cannot redefine imported function `${name}`', name_pos)
return ast.FnDecl{
scope: 0
}
@@ -357,7 +357,7 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
if !are_args_type_only {
for param in params {
if p.scope.known_var(param.name) {
- p.error_with_pos('redefinition of parameter `$param.name`', param.pos)
+ p.error_with_pos('redefinition of parameter `${param.name}`', param.pos)
return ast.FnDecl{
scope: 0
}
@@ -424,7 +424,7 @@ run them via `v file.v` instead',
&& elem_type_sym.language == .v
}
if is_non_local {
- p.error_with_pos('cannot define new methods on non-local type $type_sym.name',
+ p.error_with_pos('cannot define new methods on non-local type ${type_sym.name}',
rec.type_pos)
return ast.FnDecl{
scope: 0
@@ -459,9 +459,9 @@ run them via `v file.v` instead',
})
} else {
if language == .c {
- name = 'C.$name'
+ name = 'C.${name}'
} else if language == .js {
- name = 'JS.$name'
+ name = 'JS.${name}'
} else {
name = p.prepend_mod(name)
}
@@ -471,7 +471,7 @@ run them via `v file.v` instead',
if file_mode == .v && existing.file_mode != .v {
// a definition made in a .c.v file, should have a priority over a .v file definition of the same function
if !p.pref.is_fmt {
- name = p.prepend_mod('pure_v_but_overriden_by_${existing.file_mode}_$short_fn_name')
+ name = p.prepend_mod('pure_v_but_overriden_by_${existing.file_mode}_${short_fn_name}')
}
} else {
p.table.redefined_fns << name
@@ -715,7 +715,7 @@ fn (mut p Parser) anon_fn() ast.AnonFn {
return_type = p.parse_type()
return_type_pos = return_type_pos.extend(p.tok.pos())
} else if p.tok.kind != .lcbr {
- p.error_with_pos('expected return type, not $p.tok for anonymous function',
+ p.error_with_pos('expected return type, not ${p.tok} for anonymous function',
p.tok.pos())
}
}
@@ -723,7 +723,7 @@ fn (mut p Parser) anon_fn() ast.AnonFn {
no_body := p.tok.kind != .lcbr
same_line = p.tok.line_nr == p.prev_tok.line_nr
if no_body && same_line {
- p.unexpected(got: '$p.tok after anonymous function signature', expecting: '`{`')
+ p.unexpected(got: '${p.tok} after anonymous function signature', expecting: '`{`')
}
mut label_names := []string{}
mut func := ast.Fn{
@@ -732,7 +732,7 @@ fn (mut p Parser) anon_fn() ast.AnonFn {
return_type: return_type
is_method: false
}
- name := 'anon_fn_${p.unique_prefix}_${p.table.fn_type_signature(func)}_$p.tok.pos'
+ name := 'anon_fn_${p.unique_prefix}_${p.table.fn_type_signature(func)}_${p.tok.pos}'
keep_fn_name := p.cur_fn_name
p.cur_fn_name = name
if p.tok.kind == .lcbr {
@@ -752,7 +752,7 @@ fn (mut p Parser) anon_fn() ast.AnonFn {
for arg in args {
for var in inherited_vars {
if arg.name == var.name {
- p.error_with_pos('the parameter name `$arg.name` conflicts with the captured value name',
+ p.error_with_pos('the parameter name `${arg.name}` conflicts with the captured value name',
arg.pos)
break
}
@@ -868,7 +868,7 @@ fn (mut p Parser) fn_args() ([]ast.Param, bool, bool) {
if p.tok.kind == .comma {
if is_variadic {
- p.error_with_pos('cannot use ...(variadic) with non-final parameter no $arg_no',
+ p.error_with_pos('cannot use ...(variadic) with non-final parameter no ${arg_no}',
pos)
return []ast.Param{}, false, false
}
@@ -916,7 +916,7 @@ fn (mut p Parser) fn_args() ([]ast.Param, bool, bool) {
if !p.pref.is_fmt {
p.warn(
'`fn f(x, y Type)` syntax has been deprecated and will soon be removed. ' +
- 'Use `fn f(x Type, y Type)` instead. You can run `v fmt -w "$p.scanner.file_path"` to automatically fix your code.')
+ 'Use `fn f(x Type, y Type)` instead. You can run `v fmt -w "${p.scanner.file_path}"` to automatically fix your code.')
}
p.next()
arg_pos << p.tok.pos()
@@ -984,7 +984,7 @@ fn (mut p Parser) fn_args() ([]ast.Param, bool, bool) {
}
// if typ.typ.kind == .variadic && p.tok.kind == .comma {
if is_variadic && p.tok.kind == .comma && p.peek_tok.kind != .rpar {
- p.error_with_pos('cannot use ...(variadic) with non-final parameter $arg_name',
+ p.error_with_pos('cannot use ...(variadic) with non-final parameter ${arg_name}',
arg_pos[i])
return []ast.Param{}, false, false
}
@@ -1039,11 +1039,11 @@ fn (mut p Parser) closure_vars() []ast.Param {
var_name := p.prev_tok.lit
mut var := p.scope.parent.find_var(var_name) or {
if p.table.global_scope.known_global(var_name) {
- p.error_with_pos('no need to capture global variable `$var_name` in closure',
+ p.error_with_pos('no need to capture global variable `${var_name}` in closure',
p.prev_tok.pos())
continue
}
- p.error_with_pos('undefined ident: `$var_name`', p.prev_tok.pos())
+ p.error_with_pos('undefined ident: `${var_name}`', p.prev_tok.pos())
continue
}
var.is_used = true
@@ -1092,7 +1092,7 @@ fn (mut p Parser) check_fn_mutable_arguments(typ ast.Type, pos token.Pos) {
}
p.error_with_pos(
'mutable arguments are only allowed for arrays, interfaces, maps, pointers, structs or their aliases\n' +
- 'return values instead: `fn foo(mut n $sym.name) {` => `fn foo(n $sym.name) $sym.name {`',
+ 'return values instead: `fn foo(mut n ${sym.name}) {` => `fn foo(n ${sym.name}) ${sym.name} {`',
pos)
}
@@ -1108,7 +1108,7 @@ fn (mut p Parser) check_fn_atomic_arguments(typ ast.Type, pos token.Pos) {
sym := p.table.sym(typ)
if sym.kind !in [.u32, .int, .u64] {
p.error_with_pos('atomic arguments are only allowed for 32/64 bit integers\n' +
- 'use shared arguments instead: `fn foo(atomic n $sym.name) {` => `fn foo(shared n $sym.name) {`',
+ 'use shared arguments instead: `fn foo(atomic n ${sym.name}) {` => `fn foo(shared n ${sym.name}) {`',
pos)
}
}
diff --git a/vlib/v/parser/for.v b/vlib/v/parser/for.v
index f17b155d9c..bb995da7a3 100644
--- a/vlib/v/parser/for.v
+++ b/vlib/v/parser/for.v
@@ -56,7 +56,7 @@ fn (mut p Parser) for_stmt() ast.Stmt {
if p.tok.kind != .semicolon {
// Disallow `for i := 0; i++; i < ...`
if p.tok.kind == .name && p.peek_tok.kind in [.inc, .dec] {
- return p.error('cannot use $p.tok.lit$p.peek_tok.kind as value')
+ return p.error('cannot use ${p.tok.lit}${p.peek_tok.kind} as value')
}
comments << p.eat_comments()
cond = p.expr(0)
@@ -121,10 +121,10 @@ fn (mut p Parser) for_stmt() ast.Stmt {
val_var_pos)
}
if p.scope.known_var(key_var_name) {
- return p.error('redefinition of key iteration variable `$key_var_name`')
+ return p.error('redefinition of key iteration variable `${key_var_name}`')
}
if p.scope.known_var(val_var_name) {
- return p.error('redefinition of value iteration variable `$val_var_name`')
+ return p.error('redefinition of value iteration variable `${val_var_name}`')
}
p.scope.register(ast.Var{
name: key_var_name
@@ -134,13 +134,13 @@ fn (mut p Parser) for_stmt() ast.Stmt {
is_stack_obj: true
})
} else if p.scope.known_var(val_var_name) {
- return p.error_with_pos('redefinition of value iteration variable `$val_var_name`, use `for ($val_var_name in array) {` if you want to check for a condition instead',
+ return p.error_with_pos('redefinition of value iteration variable `${val_var_name}`, use `for (${val_var_name} in array) {` if you want to check for a condition instead',
val_var_pos)
}
comments << p.eat_comments()
p.check(.key_in)
if p.tok.kind == .name && p.tok.lit in [key_var_name, val_var_name] {
- return p.error('in a `for x in array` loop, the key or value iteration variable `$p.tok.lit` can not be the same as the array variable')
+ return p.error('in a `for x in array` loop, the key or value iteration variable `${p.tok.lit}` can not be the same as the array variable')
}
comments << p.eat_comments()
// arr_expr
diff --git a/vlib/v/parser/if_match.v b/vlib/v/parser/if_match.v
index 908ef9a7f0..47eae874f1 100644
--- a/vlib/v/parser/if_match.v
+++ b/vlib/v/parser/if_match.v
@@ -101,7 +101,7 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
var_names << var.name
if p.scope.known_var(var.name) {
- p.error_with_pos('redefinition of `$var.name`', var.pos)
+ p.error_with_pos('redefinition of `${var.name}`', var.pos)
}
vars << var
if p.tok.kind != .comma {
diff --git a/vlib/v/parser/lock.v b/vlib/v/parser/lock.v
index 1c828337fa..1f5dbe0025 100644
--- a/vlib/v/parser/lock.v
+++ b/vlib/v/parser/lock.v
@@ -10,7 +10,7 @@ fn (mut p Parser) lockable() ast.Expr {
mut pos := p.tok.pos()
for {
if p.tok.kind != .name {
- p.unexpected(got: '`$p.tok.lit`', expecting: 'field or variable name')
+ p.unexpected(got: '`${p.tok.lit}`', expecting: 'field or variable name')
}
names << p.tok.lit
positions << pos
@@ -83,7 +83,7 @@ fn (mut p Parser) lock_expr() ast.LockExpr {
exprs, comms := p.lockable_list()
for e in exprs {
if !e.is_lockable() {
- p.error_with_pos('`$e` cannot be locked - only `x` or `x.y` are supported',
+ p.error_with_pos('`${e}` cannot be locked - only `x` or `x.y` are supported',
e.pos())
}
lockeds << e
diff --git a/vlib/v/parser/module.v b/vlib/v/parser/module.v
index f6f37110b6..ff86640841 100644
--- a/vlib/v/parser/module.v
+++ b/vlib/v/parser/module.v
@@ -59,8 +59,8 @@ fn (mut p Parser) check_unused_imports() {
alias := import_m.alias
mod := import_m.mod
if !p.is_used_import(alias) {
- mod_alias := if alias == mod { alias } else { '$alias ($mod)' }
- p.warn_with_pos("module '$mod_alias' is imported but never used", import_m.mod_pos)
+ mod_alias := if alias == mod { alias } else { '${alias} (${mod})' }
+ p.warn_with_pos("module '${mod_alias}' is imported but never used", import_m.mod_pos)
}
}
}
diff --git a/vlib/v/parser/parse_type.v b/vlib/v/parser/parse_type.v
index f56955de7f..40ff213e4a 100644
--- a/vlib/v/parser/parse_type.v
+++ b/vlib/v/parser/parse_type.v
@@ -25,7 +25,7 @@ pub fn (mut p Parser) parse_array_type(expecting token.Kind) ast.Type {
}
ast.Ident {
mut show_non_const_error := false
- if mut const_field := p.table.global_scope.find_const('${p.mod}.$size_expr.name') {
+ if mut const_field := p.table.global_scope.find_const('${p.mod}.${size_expr.name}') {
if mut const_field.expr is ast.IntegerLiteral {
fixed_size = const_field.expr.val.int()
} else {
@@ -53,7 +53,7 @@ pub fn (mut p Parser) parse_array_type(expecting token.Kind) ast.Type {
}
}
if show_non_const_error {
- p.error_with_pos('non-constant array bound `$size_expr.name`',
+ p.error_with_pos('non-constant array bound `${size_expr.name}`',
size_expr.pos)
}
}
@@ -125,7 +125,7 @@ pub fn (mut p Parser) parse_map_type() ast.Type {
return 0
}
s := p.table.type_to_str(key_type)
- p.error_with_pos('maps only support string, integer, float, rune, enum or voidptr keys for now (not `$s`)',
+ p.error_with_pos('maps only support string, integer, float, rune, enum or voidptr keys for now (not `${s}`)',
p.tok.pos())
return 0
}
@@ -311,7 +311,7 @@ pub fn (mut p Parser) parse_inline_sum_type() ast.Type {
if variants.len > 1 {
if variants.len > parser.maximum_inline_sum_type_variants {
pos := variants[0].pos.extend(variants.last().pos)
- p.warn_with_pos('an inline sum type expects a maximum of $parser.maximum_inline_sum_type_variants types ($variants.len were given)',
+ p.warn_with_pos('an inline sum type expects a maximum of ${parser.maximum_inline_sum_type_variants} types (${variants.len} were given)',
pos)
}
mut variant_names := variants.map(p.table.sym(it.typ).name)
@@ -483,9 +483,9 @@ If you need to modify an array in a function, use a mutable argument instead: `f
pub fn (mut p Parser) parse_any_type(language ast.Language, is_ptr bool, check_dot bool) ast.Type {
mut name := p.tok.lit
if language == .c {
- name = 'C.$name'
+ name = 'C.${name}'
} else if language == .js {
- name = 'JS.$name'
+ name = 'JS.${name}'
} else if p.peek_tok.kind == .dot && check_dot {
// `module.Type`
mut mod := name
@@ -496,14 +496,14 @@ pub fn (mut p Parser) parse_any_type(language ast.Language, is_ptr bool, check_d
for p.peek_tok.kind == .dot {
mod_pos = mod_pos.extend(p.tok.pos())
mod_last_part = p.tok.lit
- mod += '.$mod_last_part'
+ mod += '.${mod_last_part}'
p.next()
p.check(.dot)
}
if !p.known_import(mod) && !p.pref.is_fmt {
- mut msg := 'unknown module `$mod`'
+ mut msg := 'unknown module `${mod}`'
if mod.len > mod_last_part.len && p.known_import(mod_last_part) {
- msg += '; did you mean `$mod_last_part`?'
+ msg += '; did you mean `${mod_last_part}`?'
}
p.error_with_pos(msg, mod_pos)
return 0
@@ -513,7 +513,7 @@ pub fn (mut p Parser) parse_any_type(language ast.Language, is_ptr bool, check_d
mod = p.imports[mod]
}
// prefix with full module
- name = '${mod}.$p.tok.lit'
+ name = '${mod}.${p.tok.lit}'
if p.tok.lit.len > 0 && !p.tok.lit[0].is_capital() {
p.error('imported types must start with a capital letter')
return 0
@@ -725,28 +725,28 @@ pub fn (mut p Parser) parse_generic_inst_type(name string) ast.Type {
match parent_sym.info {
ast.Struct {
if parent_sym.info.generic_types.len == 0 {
- p.error_with_pos('struct `$parent_sym.name` is not a generic struct, cannot instantiate to the concrete types',
+ p.error_with_pos('struct `${parent_sym.name}` is not a generic struct, cannot instantiate to the concrete types',
concrete_types_pos)
} else if parent_sym.info.generic_types.len != concrete_types.len {
- p.error_with_pos('the number of generic types of struct `$parent_sym.name` is inconsistent with the concrete types',
+ p.error_with_pos('the number of generic types of struct `${parent_sym.name}` is inconsistent with the concrete types',
concrete_types_pos)
}
}
ast.Interface {
if parent_sym.info.generic_types.len == 0 {
- p.error_with_pos('interface `$parent_sym.name` is not a generic interface, cannot instantiate to the concrete types',
+ p.error_with_pos('interface `${parent_sym.name}` is not a generic interface, cannot instantiate to the concrete types',
concrete_types_pos)
} else if parent_sym.info.generic_types.len != concrete_types.len {
- p.error_with_pos('the number of generic types of interfce `$parent_sym.name` is inconsistent with the concrete types',
+ p.error_with_pos('the number of generic types of interface `${parent_sym.name}` is inconsistent with the concrete types',
concrete_types_pos)
}
}
ast.SumType {
if parent_sym.info.generic_types.len == 0 {
- p.error_with_pos('sumtype `$parent_sym.name` is not a generic sumtype, cannot instantiate to the concrete types',
+ p.error_with_pos('sumtype `${parent_sym.name}` is not a generic sumtype, cannot instantiate to the concrete types',
concrete_types_pos)
} else if parent_sym.info.generic_types.len != concrete_types.len {
- p.error_with_pos('the number of generic types of sumtype `$parent_sym.name` is inconsistent with the concrete types',
+ p.error_with_pos('the number of generic types of sumtype `${parent_sym.name}` is inconsistent with the concrete types',
concrete_types_pos)
}
}
diff --git a/vlib/v/parser/parser.v b/vlib/v/parser/parser.v
index e77be9842e..c8811e865e 100644
--- a/vlib/v/parser/parser.v
+++ b/vlib/v/parser/parser.v
@@ -105,7 +105,7 @@ __global codegen_files = unsafe { []&ast.File{} }
// for tests
pub fn parse_stmt(text string, table &ast.Table, scope &ast.Scope) ast.Stmt {
$if trace_parse_stmt ? {
- eprintln('> ${@MOD}.${@FN} text: $text')
+ eprintln('> ${@MOD}.${@FN} text: ${text}')
}
mut p := Parser{
scanner: scanner.new_scanner(text, .skip_comments, &pref.Preferences{})
@@ -125,7 +125,7 @@ pub fn parse_stmt(text string, table &ast.Table, scope &ast.Scope) ast.Stmt {
pub fn parse_comptime(tmpl_path string, text string, table &ast.Table, pref &pref.Preferences, scope &ast.Scope) &ast.File {
$if trace_parse_comptime ? {
- eprintln('> ${@MOD}.${@FN} text: $text')
+ eprintln('> ${@MOD}.${@FN} text: ${text}')
}
mut p := Parser{
file_name: tmpl_path
@@ -143,7 +143,7 @@ pub fn parse_comptime(tmpl_path string, text string, table &ast.Table, pref &pre
pub fn parse_text(text string, path string, table &ast.Table, comments_mode scanner.CommentsMode, pref &pref.Preferences) &ast.File {
$if trace_parse_text ? {
- eprintln('> ${@MOD}.${@FN} comments_mode: ${comments_mode:-20} | path: ${path:-20} | text: $text')
+ eprintln('> ${@MOD}.${@FN} comments_mode: ${comments_mode:-20} | path: ${path:-20} | text: ${text}')
}
mut p := Parser{
scanner: scanner.new_scanner(text, comments_mode, pref)
@@ -220,7 +220,7 @@ pub fn parse_file(path string, table &ast.Table, comments_mode scanner.CommentsM
// all the tricky inner comments. This is needed because we do not have a good general solution
// for handling them, and should be removed when we do (the general solution is also needed for vfmt)
$if trace_parse_file ? {
- eprintln('> ${@MOD}.${@FN} comments_mode: ${comments_mode:-20} | path: $path')
+ eprintln('> ${@MOD}.${@FN} comments_mode: ${comments_mode:-20} | path: ${path}')
}
mut p := Parser{
scanner: scanner.new_scanner_file(path, comments_mode, pref) or { panic(err) }
@@ -242,7 +242,7 @@ pub fn parse_file(path string, table &ast.Table, comments_mode scanner.CommentsM
pub fn parse_vet_file(path string, table_ &ast.Table, pref &pref.Preferences) (&ast.File, []vet.Error) {
$if trace_parse_vet_file ? {
- eprintln('> ${@MOD}.${@FN} path: $path')
+ eprintln('> ${@MOD}.${@FN} path: ${path}')
}
global_scope := &ast.Scope{
parent: 0
@@ -399,7 +399,7 @@ fn (mut q Queue) run() {
}
*/
pub fn parse_files(paths []string, table &ast.Table, pref &pref.Preferences) []&ast.File {
- mut timers := util.new_timers(should_print: false, label: 'parse_files: $paths')
+ mut timers := util.new_timers(should_print: false, label: 'parse_files: ${paths}')
$if time_parsing ? {
timers.should_print = true
}
@@ -429,9 +429,9 @@ pub fn parse_files(paths []string, table &ast.Table, pref &pref.Preferences) []&
unsafe {
mut files := []&ast.File{cap: paths.len}
for path in paths {
- timers.start('parse_file $path')
+ timers.start('parse_file ${path}')
files << parse_file(path, table, .skip_comments, pref)
- timers.show('parse_file $path')
+ timers.show('parse_file ${path}')
}
if codegen_files.len > 0 {
files << codegen_files
@@ -445,7 +445,7 @@ pub fn parse_files(paths []string, table &ast.Table, pref &pref.Preferences) []&
// checked, markused, cgen-ed etc further, just like user's V code.
pub fn (mut p Parser) codegen(code string) {
$if debug_codegen ? {
- eprintln('parser.codegen:\n $code')
+ eprintln('parser.codegen:\n ${code}')
}
p.codegen_text += '\n' + code
}
@@ -575,10 +575,10 @@ pub fn (mut p Parser) parse_block_no_scope(is_top_level bool) []ast.Stmt {
stmts << p.stmt(is_top_level)
count++
if count % 100000 == 0 {
- eprintln('parsed $count statements so far from fn $p.cur_fn_name ...')
+ eprintln('parsed ${count} statements so far from fn ${p.cur_fn_name} ...')
}
if count > 1000000 {
- p.error_with_pos('parsed over $count statements from fn $p.cur_fn_name, the parser is probably stuck',
+ p.error_with_pos('parsed over ${count} statements from fn ${p.cur_fn_name}, the parser is probably stuck',
p.tok.pos())
return []
}
@@ -608,7 +608,7 @@ fn (mut p Parser) check(expected token.Kind) {
mut s := expected.str()
// quote keywords, punctuation, operators
if token.is_key(s) || (s.len > 0 && !s[0].is_letter()) {
- s = '`$s`'
+ s = '`${s}`'
}
p.unexpected(expecting: s)
}
@@ -628,18 +628,18 @@ fn (mut p Parser) unexpected(params ParamsForUnexpected) ast.NodeError {
fn (mut p Parser) unexpected_with_pos(pos token.Pos, params ParamsForUnexpected) ast.NodeError {
mut msg := if params.got != '' {
- 'unexpected $params.got'
+ 'unexpected ${params.got}'
} else {
- 'unexpected $p.tok'
+ 'unexpected ${p.tok}'
}
if params.expecting != '' {
- msg += ', expecting $params.expecting'
+ msg += ', expecting ${params.expecting}'
}
if params.prepend_msg != '' {
- msg = '$params.prepend_msg ' + msg
+ msg = '${params.prepend_msg} ' + msg
}
if params.additional_msg != '' {
- msg += ', $params.additional_msg'
+ msg += ', ${params.additional_msg}'
}
return p.error_with_pos(msg, pos)
}
@@ -670,7 +670,7 @@ fn (mut p Parser) check_name() string {
[if trace_parser ?]
fn (p &Parser) trace_parser(label string) {
- eprintln('parsing: ${p.file_name:-30}|tok.pos: ${p.tok.pos().line_str():-39}|tok.kind: ${p.tok.kind:-10}|tok.lit: ${p.tok.lit:-10}|$label')
+ eprintln('parsing: ${p.file_name:-30}|tok.pos: ${p.tok.pos().line_str():-39}|tok.kind: ${p.tok.kind:-10}|tok.lit: ${p.tok.lit:-10}|${label}')
}
pub fn (mut p Parser) top_stmt() ast.Stmt {
@@ -893,7 +893,7 @@ pub fn (mut p Parser) eat_comments(cfg EatCommentsConfig) []ast.Comment {
}
pub fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
- p.trace_parser('stmt($is_top_level)')
+ p.trace_parser('stmt(${is_top_level})')
p.is_stmt_ident = p.tok.kind == .name
match p.tok.kind {
.lcbr {
@@ -939,7 +939,7 @@ pub fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
spos := p.tok.pos()
name := p.check_name()
if name in p.label_names {
- return p.error_with_pos('duplicate label `$name`', spos)
+ return p.error_with_pos('duplicate label `${name}`', spos)
}
p.label_names << name
p.next()
@@ -969,10 +969,10 @@ pub fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
pos: spos.extend(p.tok.pos())
}
} else if p.peek_tok.kind == .name {
- return p.unexpected(got: 'name `$p.tok.lit`')
+ return p.unexpected(got: 'name `${p.tok.lit}`')
} else if !p.inside_if_expr && !p.inside_match_body && !p.inside_or_expr
&& p.peek_tok.kind in [.rcbr, .eof] && !p.mark_var_as_used(p.tok.lit) {
- return p.error_with_pos('`$p.tok.lit` evaluated but not used', p.tok.pos())
+ return p.error_with_pos('`${p.tok.lit}` evaluated but not used', p.tok.pos())
}
return p.parse_multi_expr(is_top_level)
}
@@ -1211,7 +1211,7 @@ fn (mut p Parser) asm_stmt(is_top_level bool) ast.AsmStmt {
}
}
else {
- verror('p.parse_number_literal() invalid output: `$number_lit`')
+ verror('p.parse_number_literal() invalid output: `${number_lit}`')
}
}
}
@@ -1738,12 +1738,12 @@ fn (mut p Parser) attributes() {
start_pos := p.tok.pos()
attr := p.parse_attr()
if p.attrs.contains(attr.name) && attr.name != 'wasm_export' {
- p.error_with_pos('duplicate attribute `$attr.name`', start_pos.extend(p.prev_tok.pos()))
+ p.error_with_pos('duplicate attribute `${attr.name}`', start_pos.extend(p.prev_tok.pos()))
return
}
if attr.kind == .comptime_define {
if has_ctdefine {
- p.error_with_pos('only one `[if flag]` may be applied at a time `$attr.name`',
+ p.error_with_pos('only one `[if flag]` may be applied at a time `${attr.name}`',
start_pos.extend(p.prev_tok.pos()))
return
} else {
@@ -1845,13 +1845,13 @@ fn (mut p Parser) parse_attr() ast.Attr {
pub fn (mut p Parser) language_not_allowed_error(language ast.Language, pos token.Pos) {
upcase_language := language.str().to_upper()
- p.error_with_pos('$upcase_language code is not allowed in .${p.file_backend_mode}.v files, please move it to a .${language}.v file',
+ p.error_with_pos('${upcase_language} code is not allowed in .${p.file_backend_mode}.v files, please move it to a .${language}.v file',
pos)
}
pub fn (mut p Parser) language_not_allowed_warning(language ast.Language, pos token.Pos) {
upcase_language := language.str().to_upper()
- p.warn_with_pos('$upcase_language code will not be allowed in pure .v files, please move it to a .${language}.v file instead',
+ p.warn_with_pos('${upcase_language} code will not be allowed in pure .v files, please move it to a .${language}.v file instead',
pos)
}
@@ -2108,9 +2108,9 @@ pub fn (mut p Parser) parse_ident(language ast.Language) ast.Ident {
}
if p.tok.kind != .name {
if is_mut || is_static || is_volatile {
- p.error_with_pos('the `$modifier_kind` keyword is invalid here', mut_pos)
+ p.error_with_pos('the `${modifier_kind}` keyword is invalid here', mut_pos)
} else {
- p.unexpected(got: 'token `$p.tok.lit`')
+ p.unexpected(got: 'token `${p.tok.lit}`')
}
return ast.Ident{
scope: p.scope
@@ -2137,7 +2137,7 @@ pub fn (mut p Parser) parse_ident(language ast.Language) ast.Ident {
// p.warn('it')
}
if p.expr_mod.len > 0 {
- name = '${p.expr_mod}.$name'
+ name = '${p.expr_mod}.${name}'
}
return ast.Ident{
tok_kind: p.tok.kind
@@ -2331,10 +2331,10 @@ pub fn (mut p Parser) name_expr() ast.Expr {
cap_expr = p.expr(0)
}
'len', 'init' {
- return p.error('`$key` cannot be initialized for `chan`. Did you mean `cap`?')
+ return p.error('`${key}` cannot be initialized for `chan`. Did you mean `cap`?')
}
else {
- return p.error('wrong field `$key`, expecting `cap`')
+ return p.error('wrong field `${key}`, expecting `cap`')
}
}
last_pos = p.tok.pos()
@@ -2354,13 +2354,13 @@ pub fn (mut p Parser) name_expr() ast.Expr {
return p.string_expr()
} else {
// don't allow any other string prefix except `r`, `js` and `c`
- return p.error('only `c`, `r`, `js` are recognized string prefixes, but you tried to use `$p.tok.lit`')
+ return p.error('only `c`, `r`, `js` are recognized string prefixes, but you tried to use `${p.tok.lit}`')
}
}
// don't allow r`byte` and c`byte`
if p.peek_tok.kind == .chartoken && p.tok.lit.len == 1 && p.tok.lit[0] in [`r`, `c`] {
opt := if p.tok.lit == 'r' { '`r` (raw string)' } else { '`c` (c string)' }
- return p.error('cannot use $opt with `byte` and `rune`')
+ return p.error('cannot use ${opt} with `byte` and `rune`')
}
// Make sure that the var is not marked as used in assignments: `x = 1`, `x += 2` etc
// but only when it's actually used (e.g. `println(x)`)
@@ -2431,7 +2431,7 @@ pub fn (mut p Parser) name_expr() ast.Expr {
// foo(), foo() or type() cast
mut name := if is_optional { p.peek_tok.lit } else { p.tok.lit }
if mod.len > 0 {
- name = '${mod}.$name'
+ name = '${mod}.${name}'
}
name_w_mod := p.prepend_mod(name)
// type cast. TODO: finish
@@ -2477,7 +2477,7 @@ pub fn (mut p Parser) name_expr() ast.Expr {
// fn call
if is_optional {
p.unexpected_with_pos(p.prev_tok.pos(),
- got: '$p.prev_tok'
+ got: '${p.prev_tok}'
)
}
node = p.call_expr(language, mod)
@@ -2971,10 +2971,10 @@ fn (mut p Parser) parse_generic_types() ([]ast.Type, []string) {
p.error('generic parameter name needs to be exactly one char')
}
if !util.is_generic_type_name(p.tok.lit) {
- p.error('`$p.tok.lit` is a reserved name and cannot be used for generics')
+ p.error('`${p.tok.lit}` is a reserved name and cannot be used for generics')
}
if name in param_names {
- p.error('duplicated generic parameter `$name`')
+ p.error('duplicated generic parameter `${name}`')
}
if count > 8 {
p.error('cannot have more than 9 generic parameters')
@@ -3206,7 +3206,7 @@ fn (mut p Parser) module_decl() ast.Module {
pos: module_pos
}
if module_pos.line_nr != name_pos.line_nr {
- p.error_with_pos('`module` and `$name` must be at same line', name_pos)
+ p.error_with_pos('`module` and `${name}` must be at same line', name_pos)
return mod_node
}
// Note: this shouldn't be reassigned into name_pos
@@ -3216,14 +3216,14 @@ fn (mut p Parser) module_decl() ast.Module {
if module_pos.line_nr == n_pos.line_nr && p.tok.kind != .comment && p.tok.kind != .eof {
if p.tok.kind == .name {
p.unexpected_with_pos(n_pos,
- prepend_msg: '`module $name`, you can only declare one module,'
- got: '`$p.tok.lit`'
+ prepend_msg: '`module ${name}`, you can only declare one module,'
+ got: '`${p.tok.lit}`'
)
return mod_node
} else {
p.unexpected_with_pos(n_pos,
- prepend_msg: '`module $name`,'
- got: '`$p.tok.kind` after module name'
+ prepend_msg: '`module ${name}`,'
+ got: '`${p.tok.kind}` after module name'
)
return mod_node
}
@@ -3266,7 +3266,7 @@ fn (mut p Parser) module_decl() ast.Module {
p.is_translated = true
}
else {
- p.error_with_pos('unknown module attribute `[$ma.name]`', ma.pos)
+ p.error_with_pos('unknown module attribute `[${ma.name}]`', ma.pos)
return mod_node
}
}
@@ -3336,7 +3336,8 @@ fn (mut p Parser) import_stmt() ast.Import {
alias_pos := p.tok.pos()
mod_alias = p.check_name()
if mod_alias == mod_name_arr.last() {
- p.error_with_pos('import alias `$mod_name as $mod_alias` is redundant', p.prev_tok.pos())
+ p.error_with_pos('import alias `${mod_name} as ${mod_alias}` is redundant',
+ p.prev_tok.pos())
return import_node
}
import_node = ast.Import{
@@ -3380,7 +3381,7 @@ fn (mut p Parser) import_syms(mut parent ast.Import) {
p.next()
pos_t := p.tok.pos()
if p.tok.kind == .rcbr { // closed too early
- p.error_with_pos('empty `$parent.mod` import set, remove `{}`', pos_t)
+ p.error_with_pos('empty `${parent.mod}` import set, remove `{}`', pos_t)
return
}
if p.tok.kind != .name { // not a valid inner name
@@ -3660,7 +3661,7 @@ fn (mut p Parser) enum_decl() ast.EnumDecl {
return ast.EnumDecl{}
}
if enum_name in p.imported_symbols {
- p.error_with_pos('cannot register enum `$enum_name`, this type was already imported',
+ p.error_with_pos('cannot register enum `${enum_name}`, this type was already imported',
end_pos)
return ast.EnumDecl{}
}
@@ -3718,12 +3719,12 @@ fn (mut p Parser) enum_decl() ast.EnumDecl {
pubfn := if p.mod == 'main' { 'fn' } else { 'pub fn' }
p.codegen('
//
-[inline] $pubfn ( e &$enum_name) is_empty() bool { return ${senum_type}(*e) == 0 }
-[inline] $pubfn ( e &$enum_name) has(flag $enum_name) bool { return (${senum_type}(*e) & (${senum_type}(flag))) != 0 }
-[inline] $pubfn ( e &$enum_name) all(flag $enum_name) bool { return (${senum_type}(*e) & (${senum_type}(flag))) == ${senum_type}(flag) }
-[inline] $pubfn (mut e $enum_name) set(flag $enum_name) { unsafe{ *e = ${enum_name}(${senum_type}(*e) | (${senum_type}(flag))) } }
-[inline] $pubfn (mut e $enum_name) clear(flag $enum_name) { unsafe{ *e = ${enum_name}(${senum_type}(*e) & ~(${senum_type}(flag))) } }
-[inline] $pubfn (mut e $enum_name) toggle(flag $enum_name) { unsafe{ *e = ${enum_name}(${senum_type}(*e) ^ (${senum_type}(flag))) } }
+[inline] ${pubfn} ( e &${enum_name}) is_empty() bool { return ${senum_type}(*e) == 0 }
+[inline] ${pubfn} ( e &${enum_name}) has(flag ${enum_name}) bool { return (${senum_type}(*e) & (${senum_type}(flag))) != 0 }
+[inline] ${pubfn} ( e &${enum_name}) all(flag ${enum_name}) bool { return (${senum_type}(*e) & (${senum_type}(flag))) == ${senum_type}(flag) }
+[inline] ${pubfn} (mut e ${enum_name}) set(flag ${enum_name}) { unsafe{ *e = ${enum_name}(${senum_type}(*e) | (${senum_type}(flag))) } }
+[inline] ${pubfn} (mut e ${enum_name}) clear(flag ${enum_name}) { unsafe{ *e = ${enum_name}(${senum_type}(*e) & ~(${senum_type}(flag))) } }
+[inline] ${pubfn} (mut e ${enum_name}) toggle(flag ${enum_name}) { unsafe{ *e = ${enum_name}(${senum_type}(*e) ^ (${senum_type}(flag))) } }
//
')
}
@@ -3741,7 +3742,7 @@ fn (mut p Parser) enum_decl() ast.EnumDecl {
is_pub: is_pub
})
if idx == -1 {
- p.error_with_pos('cannot register enum `$name`, another type with this name exists',
+ p.error_with_pos('cannot register enum `${name}`, another type with this name exists',
end_pos)
}
@@ -3782,7 +3783,7 @@ fn (mut p Parser) type_decl() ast.TypeDecl {
return ast.FnTypeDecl{}
}
if name in p.imported_symbols {
- p.error_with_pos('cannot register alias `$name`, this type was already imported',
+ p.error_with_pos('cannot register alias `${name}`, this type was already imported',
end_pos)
return ast.AliasTypeDecl{}
}
@@ -3837,7 +3838,7 @@ fn (mut p Parser) type_decl() ast.TypeDecl {
is_pub: is_pub
})
if typ == ast.invalid_type_idx {
- p.error_with_pos('cannot register sum type `$name`, another type with this name exists',
+ p.error_with_pos('cannot register sum type `${name}`, another type with this name exists',
name_pos)
return ast.SumTypeDecl{}
}
@@ -3879,13 +3880,13 @@ fn (mut p Parser) type_decl() ast.TypeDecl {
})
type_end_pos := p.prev_tok.pos()
if idx == ast.invalid_type_idx {
- p.error_with_pos('cannot register alias `$name`, another type with this name exists',
+ p.error_with_pos('cannot register alias `${name}`, another type with this name exists',
name_pos)
return ast.AliasTypeDecl{}
}
if idx == pidx {
type_alias_pos := sum_variants[0].pos
- p.error_with_pos('a type alias can not refer to itself: $name', decl_pos.extend(type_alias_pos))
+ p.error_with_pos('a type alias can not refer to itself: ${name}', decl_pos.extend(type_alias_pos))
return ast.AliasTypeDecl{}
}
comments = p.eat_comments(same_line: true)
@@ -3903,7 +3904,7 @@ fn (mut p Parser) assoc() ast.Assoc {
var_name := p.check_name()
pos := p.tok.pos()
mut v := p.scope.find_var(var_name) or {
- p.error('unknown variable `$var_name`')
+ p.error('unknown variable `${var_name}`')
return ast.Assoc{
scope: 0
}
@@ -3950,7 +3951,7 @@ fn (mut p Parser) top_level_statement_start() {
p.scanner.set_is_inside_toplevel_statement(true)
p.rewind_scanner_to_current_token_in_new_mode()
$if debugscanner ? {
- eprintln('>> p.top_level_statement_start | tidx:${p.tok.tidx:-5} | p.tok.kind: ${p.tok.kind:-10} | p.tok.lit: $p.tok.lit $p.peek_tok.lit ${p.peek_token(2).lit} ${p.peek_token(3).lit} ...')
+ eprintln('>> p.top_level_statement_start | tidx:${p.tok.tidx:-5} | p.tok.kind: ${p.tok.kind:-10} | p.tok.lit: ${p.tok.lit} ${p.peek_tok.lit} ${p.peek_token(2).lit} ${p.peek_token(3).lit} ...')
}
}
}
@@ -3960,7 +3961,7 @@ fn (mut p Parser) top_level_statement_end() {
p.scanner.set_is_inside_toplevel_statement(false)
p.rewind_scanner_to_current_token_in_new_mode()
$if debugscanner ? {
- eprintln('>> p.top_level_statement_end | tidx:${p.tok.tidx:-5} | p.tok.kind: ${p.tok.kind:-10} | p.tok.lit: $p.tok.lit $p.peek_tok.lit ${p.peek_token(2).lit} ${p.peek_token(3).lit} ...')
+ eprintln('>> p.top_level_statement_end | tidx:${p.tok.tidx:-5} | p.tok.kind: ${p.tok.kind:-10} | p.tok.lit: ${p.tok.lit} ${p.peek_tok.lit} ${p.peek_token(2).lit} ${p.peek_token(3).lit} ...')
}
}
}
@@ -4078,6 +4079,6 @@ fn (mut p Parser) disallow_declarations_in_script_mode() bool {
fn (mut p Parser) trace(fbase string, message string) {
if p.file_base == fbase {
- println('> p.trace | ${fbase:-10s} | $message')
+ println('> p.trace | ${fbase:-10s} | ${message}')
}
}
diff --git a/vlib/v/parser/sql.v b/vlib/v/parser/sql.v
index ebd36cfc6f..8f08ed5d19 100644
--- a/vlib/v/parser/sql.v
+++ b/vlib/v/parser/sql.v
@@ -196,7 +196,7 @@ fn (mut p Parser) parse_sql_stmt_line() ast.SqlStmtLine {
kind = .create
table := p.check_name()
if table != 'table' {
- p.error('expected `table` got `$table`')
+ p.error('expected `table` got `${table}`')
return ast.SqlStmtLine{}
}
typ := p.parse_type()
@@ -213,7 +213,7 @@ fn (mut p Parser) parse_sql_stmt_line() ast.SqlStmtLine {
kind = .drop
table := p.check_name()
if table != 'table' {
- p.error('expected `table` got `$table`')
+ p.error('expected `table` got `${table}`')
return ast.SqlStmtLine{}
}
typ := p.parse_type()
@@ -299,7 +299,7 @@ fn (mut p Parser) parse_sql_stmt_line() ast.SqlStmtLine {
fn (mut p Parser) check_sql_keyword(name string) ?bool {
if p.check_name() != name {
- p.error('orm: expecting `$name`')
+ p.error('orm: expecting `${name}`')
return none
}
return true
diff --git a/vlib/v/parser/struct.v b/vlib/v/parser/struct.v
index 77e058dcc6..a6a5235830 100644
--- a/vlib/v/parser/struct.v
+++ b/vlib/v/parser/struct.v
@@ -44,7 +44,7 @@ fn (mut p Parser) struct_decl(is_anon bool) ast.StructDecl {
}
mut name := if is_anon {
p.table.anon_struct_counter++
- '_VAnonStruct$p.table.anon_struct_counter'
+ '_VAnonStruct${p.table.anon_struct_counter}'
} else {
p.check_name()
}
@@ -65,12 +65,12 @@ fn (mut p Parser) struct_decl(is_anon bool) ast.StructDecl {
generic_types, _ := p.parse_generic_types()
no_body := p.tok.kind != .lcbr
if language == .v && no_body {
- p.error('`$p.tok.lit` lacks body')
+ p.error('`${p.tok.lit}` lacks body')
return ast.StructDecl{}
}
if language == .v && !p.builtin_mod && !p.is_translated && name.len > 0 && !name[0].is_capital()
&& !p.pref.translated && !p.is_translated && !is_anon {
- p.error_with_pos('struct name `$name` must begin with capital letter', name_pos)
+ p.error_with_pos('struct name `${name}` must begin with capital letter', name_pos)
return ast.StructDecl{}
}
if name.len == 1 {
@@ -78,16 +78,16 @@ fn (mut p Parser) struct_decl(is_anon bool) ast.StructDecl {
return ast.StructDecl{}
}
if name in p.imported_symbols {
- p.error_with_pos('cannot register struct `$name`, this type was already imported',
+ p.error_with_pos('cannot register struct `${name}`, this type was already imported',
name_pos)
return ast.StructDecl{}
}
mut orig_name := name
if language == .c {
- name = 'C.$name'
+ name = 'C.${name}'
orig_name = name
} else if language == .js {
- name = 'JS.$name'
+ name = 'JS.${name}'
orig_name = name
} else {
name = p.prepend_mod(name)
@@ -215,12 +215,12 @@ fn (mut p Parser) struct_decl(is_anon bool) ast.StructDecl {
}
sym := p.table.sym(typ)
if typ in embed_types {
- p.error_with_pos('cannot embed `$sym.name` more than once', type_pos)
+ p.error_with_pos('cannot embed `${sym.name}` more than once', type_pos)
return ast.StructDecl{}
}
field_name = sym.embed_name()
if field_name in embed_field_names {
- p.error_with_pos('duplicate field `$field_name`', type_pos)
+ p.error_with_pos('duplicate field `${field_name}`', type_pos)
return ast.StructDecl{}
}
if p.tok.kind == .lsbr {
@@ -351,7 +351,7 @@ fn (mut p Parser) struct_decl(is_anon bool) ast.StructDecl {
is_pub: is_pub
}
if p.table.has_deep_child_no_ref(&sym, name) {
- p.error_with_pos('invalid recursive struct `$orig_name`', name_pos)
+ p.error_with_pos('invalid recursive struct `${orig_name}`', name_pos)
return ast.StructDecl{}
}
mut ret := p.table.register_sym(sym)
@@ -360,7 +360,7 @@ fn (mut p Parser) struct_decl(is_anon bool) ast.StructDecl {
}
// allow duplicate c struct declarations
if ret == -1 && language != .c {
- p.error_with_pos('cannot register struct `$name`, another type with this name exists',
+ p.error_with_pos('cannot register struct `${name}`, another type with this name exists',
name_pos)
return ast.StructDecl{}
}
@@ -526,7 +526,7 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
p.check(.lcbr)
pre_comments := p.eat_comments()
if modless_name in p.imported_symbols {
- p.error_with_pos('cannot register interface `$interface_name`, this type was already imported',
+ p.error_with_pos('cannot register interface `${interface_name}`, this type was already imported',
name_pos)
return ast.InterfaceDecl{}
}
@@ -545,7 +545,7 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
language: language
)
if reg_idx == -1 {
- p.error_with_pos('cannot register interface `$interface_name`, another type with this name exists',
+ p.error_with_pos('cannot register interface `${interface_name}`, another type with this name exists',
name_pos)
return ast.InterfaceDecl{}
}
@@ -586,12 +586,12 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
// Check embedded interface from external module
if p.tok.kind == .name && p.peek_tok.kind == .dot {
if p.tok.lit !in p.imports {
- p.error_with_pos('mod `$p.tok.lit` not imported', p.tok.pos())
+ p.error_with_pos('mod `${p.tok.lit}` not imported', p.tok.pos())
break
}
mod_name := p.tok.lit
from_mod_typ := p.parse_type()
- from_mod_name := '${mod_name}.$p.prev_tok.lit'
+ from_mod_name := '${mod_name}.${p.prev_tok.lit}'
if from_mod_name.is_lower() {
p.error_with_pos('The interface name need to have the pascal case', p.prev_tok.pos())
break
@@ -629,11 +629,11 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
name := p.check_name()
if name in ['type_name', 'type_idx'] {
- p.error_with_pos('cannot override built-in method `$name`', method_start_pos)
+ p.error_with_pos('cannot override built-in method `${name}`', method_start_pos)
return ast.InterfaceDecl{}
}
if ts.has_method(name) {
- p.error_with_pos('duplicate method `$name`', method_start_pos)
+ p.error_with_pos('duplicate method `${name}`', method_start_pos)
return ast.InterfaceDecl{}
}
// field_names << name
diff --git a/vlib/v/parser/tmpl.v b/vlib/v/parser/tmpl.v
index 58936e8477..b117fbf062 100644
--- a/vlib/v/parser/tmpl.v
+++ b/vlib/v/parser/tmpl.v
@@ -66,7 +66,7 @@ fn is_html_open_tag(name string, s string) bool {
if sub.len <= len { // `` or ``
return false
}
- if sub[..len + 1] != '$name ' { // not ``
+ if sub[..len + 1] != '${name} ' { // not ``
return false
}
return true
@@ -94,7 +94,7 @@ fn insert_template_code(fn_name string, tmpl_str_start string, line string) stri
// compile_file compiles the content of a file by the given path as a template
pub fn (mut p Parser) compile_template_file(template_file string, fn_name string) string {
mut lines := os.read_lines(template_file) or {
- p.error('reading from $template_file failed')
+ p.error('reading from ${template_file} failed')
return ''
}
basepath := os.dir(template_file)
@@ -105,7 +105,7 @@ pub fn (mut p Parser) compile_template_file(template_file string, fn_name string
import strings
// === vweb html template ===
fn vweb_tmpl_${fn_name}() string {
- mut sb_$fn_name := strings.new_builder($lstartlength)\n
+ mut sb_${fn_name} := strings.new_builder(${lstartlength})\n
')
source.write_string(tmpl_str_start)
@@ -129,7 +129,7 @@ fn vweb_tmpl_${fn_name}() string {
state.update(line)
}
$if trace_tmpl ? {
- eprintln('>>> tfile: $template_file, spos: ${start_of_line_pos:6}, epos:${end_of_line_pos:6}, fi: ${tline_number:5}, i: ${i:5}, state: ${state:10}, line: $line')
+ eprintln('>>> tfile: ${template_file}, spos: ${start_of_line_pos:6}, epos:${end_of_line_pos:6}, fi: ${tline_number:5}, i: ${i:5}, state: ${state:10}, line: ${line}')
}
if line.contains('@header') {
position := line.index('@header') or { 0 }
@@ -175,15 +175,15 @@ fn vweb_tmpl_${fn_name}() string {
// an absolute path
templates_folder = ''
}
- file_path := os.real_path(os.join_path_single(templates_folder, '$file_name$file_ext'))
+ file_path := os.real_path(os.join_path_single(templates_folder, '${file_name}${file_ext}'))
$if trace_tmpl ? {
- eprintln('>>> basepath: "$basepath" , template_file: "$template_file" , fn_name: "$fn_name" , @include line: "$line" , file_name: "$file_name" , file_ext: "$file_ext" , templates_folder: "$templates_folder" , file_path: "$file_path"')
+ eprintln('>>> basepath: "${basepath}" , template_file: "${template_file}" , fn_name: "${fn_name}" , @include line: "${line}" , file_name: "${file_name}" , file_ext: "${file_ext}" , templates_folder: "${templates_folder}" , file_path: "${file_path}"')
}
file_content := os.read_file(file_path) or {
position := line.index('@include ') or { 0 } + '@include '.len
p.error_with_error(errors.Error{
- message: 'Reading file $file_name from path: $file_path failed'
- details: "Failed to @include '$file_name'"
+ message: 'Reading file ${file_name} from path: ${file_path} failed'
+ details: "Failed to @include '${file_name}'"
file_path: template_file
pos: token.Pos{
len: '@include '.len + file_name.len
@@ -263,7 +263,7 @@ fn vweb_tmpl_${fn_name}() string {
if line_t.starts_with('span.') && line.ends_with('{') {
// `span.header {` => `