Mirror of https://github.com/vlang/v.git (synced 2023-08-10 21:13:21 +03:00)

vfmt: change all '$expr' to '${expr}' (#16428)

parent 56239b4a23
commit 017ace6ea7
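For context, a minimal sketch (not part of the diff) of the interpolation style this commit standardizes on, reusing a line that appears in the diff below; at the time of this commit the older `$expr` form was still accepted by the compiler, vfmt simply rewrites it to the explicit `${expr}` form:

fn main() {
	sample_size := 10000000
	// before vfmt: println('Generating $sample_size strings ...')
	// after vfmt:
	println('Generating ${sample_size} strings ...')
}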
@@ -10,7 +10,7 @@ fn main() {
sample_size := 10000000
min_str_len := 20
max_str_len := 40
println('Generating $sample_size strings between $min_str_len - $max_str_len chars long...')
println('Generating ${sample_size} strings between ${min_str_len} - ${max_str_len} chars long...')
mut checksum := u64(0)
mut start_pos := 0
mut bgenerating := benchmark.start()
@@ -52,7 +52,7 @@ fn main() {
}
for mname in app.modules {
if !app.is_verbose {
eprintln('Checking module: $mname ...')
eprintln('Checking module: ${mname} ...')
}
api_base := app.gen_api_for_module_in_os(mname, base_os)
for oname in os_names {
@@ -65,9 +65,9 @@ fn main() {
}
howmany := app.api_differences.len
if howmany > 0 {
eprintln(term.header('Found $howmany modules with different APIs', '='))
eprintln(term.header('Found ${howmany} modules with different APIs', '='))
for m in app.api_differences.keys() {
eprintln('Module: $m')
eprintln('Module: ${m}')
}
exit(1)
}
@@ -108,7 +108,7 @@ fn (app App) gen_api_for_module_in_os(mod_name string, os_name string) string {
fn_signature := s.stringify(b.table, mod_name, map[string]string{})
fn_mod := s.modname()
if fn_mod == mod_name {
fline := '$fn_mod: $fn_signature'
fline := '${fn_mod}: ${fn_signature}'
res << fline
}
}
@@ -122,7 +122,7 @@ fn (app App) gen_api_for_module_in_os(mod_name string, os_name string) string {
fn (mut app App) compare_api(api_base string, api_os string, mod_name string, os_base string, os_target string) {
res := diff.color_compare_strings(app.diff_cmd, rand.ulid(), api_base, api_os)
if res.len > 0 {
summary := 'Different APIs found for module: `$mod_name`, between OS base: `$os_base` and OS: `$os_target`'
summary := 'Different APIs found for module: `${mod_name}`, between OS base: `${os_base}` and OS: `${os_target}`'
eprintln(term.header(summary, '-'))
eprintln(res)
eprintln(term.h_divider('-'))
@@ -18,7 +18,7 @@ const fast_dir = os.dir(@FILE)
const vdir = os.dir(os.dir(os.dir(fast_dir)))

fn elog(msg string) {
eprintln('$time.now().format_ss_micro() $msg')
eprintln('${time.now().format_ss_micro()} ${msg}')
}

fn main() {
@@ -35,10 +35,10 @@ fn main() {
if os.args.contains('-clang') {
ccompiler_path = 'clang'
}
elog('fast_dir: $fast_dir | vdir: $vdir | compiler: $ccompiler_path')
elog('fast_dir: ${fast_dir} | vdir: ${vdir} | compiler: ${ccompiler_path}')

os.chdir(fast_dir)!
if !os.exists('$vdir/v') && !os.is_dir('$vdir/vlib') {
if !os.exists('${vdir}/v') && !os.is_dir('${vdir}/vlib') {
elog('fast.html generator needs to be located in `v/cmd/tools/fast`')
exit(1)
}
@@ -48,9 +48,9 @@ fn main() {

if !os.args.contains('-noupdate') {
elog('Fetching updates...')
ret := os.system('$vdir/v up')
ret := os.system('${vdir}/v up')
if ret != 0 {
elog('failed to update V, exit_code: $ret')
elog('failed to update V, exit_code: ${ret}')
return
}
}
@@ -59,8 +59,8 @@ fn main() {
commit := exec('git rev-parse HEAD')[..8]
if os.exists('website/index.html') {
uploaded_index := os.read_file('website/index.html')!
if uploaded_index.contains('>$commit<') {
elog('NOTE: commit $commit had been benchmarked already.')
if uploaded_index.contains('>${commit}<') {
elog('NOTE: commit ${commit} had been benchmarked already.')
if !os.args.contains('-force') {
elog('nothing more to do')
return
@@ -69,16 +69,16 @@ fn main() {
}

os.chdir(vdir)!
message := exec('git log --pretty=format:"%s" -n1 $commit')
commit_date := exec('git log -n1 --pretty="format:%at" $commit')
message := exec('git log --pretty=format:"%s" -n1 ${commit}')
commit_date := exec('git log -n1 --pretty="format:%at" ${commit}')
date := time.unix(commit_date.i64())

elog('Benchmarking commit $commit , with commit message: "$message", commit_date: $commit_date, date: $date')
elog('Benchmarking commit ${commit} , with commit message: "${message}", commit_date: ${commit_date}, date: ${date}')

// build an optimized V
if os.args.contains('-do-not-rebuild-vprod') {
if !os.exists('vprod') {
elog('Exiting, since if you use `-do-not-rebuild-vprod`, you should already have a `$vdir/vprod` executable, but it is missing!')
elog('Exiting, since if you use `-do-not-rebuild-vprod`, you should already have a `${vdir}/vprod` executable, but it is missing!')
return
}
} else {
@@ -93,15 +93,15 @@ fn main() {
if !os.args.contains('-do-not-rebuild-caches') {
elog('clearing caches...')
// cache vlib modules
exec('$vdir/v wipe-cache')
exec('$vdir/v -o vwarm_caches -cc $ccompiler_path cmd/v')
exec('${vdir}/v wipe-cache')
exec('${vdir}/v -o vwarm_caches -cc ${ccompiler_path} cmd/v')
}

// measure
diff1 := measure('$vdir/vprod $voptions -o v.c cmd/v', 'v.c')
diff2 := measure('$vdir/vprod $voptions -cc $ccompiler_path -o v2 cmd/v', 'v2')
diff1 := measure('${vdir}/vprod ${voptions} -o v.c cmd/v', 'v.c')
diff2 := measure('${vdir}/vprod ${voptions} -cc ${ccompiler_path} -o v2 cmd/v', 'v2')
diff3 := 0 // measure('$vdir/vprod -native $vdir/cmd/tools/1mil.v', 'native 1mil')
diff4 := measure('$vdir/vprod $voptions -cc $ccompiler_path examples/hello_world.v',
diff4 := measure('${vdir}/vprod ${voptions} -cc ${ccompiler_path} examples/hello_world.v',
'hello.v')
vc_size := os.file_size('v.c') / 1000
scan, parse, check, cgen, vlines := measure_steps_minimal(vdir)!
@@ -113,19 +113,19 @@ fn main() {
table := os.read_file('table.html')!
new_table :=
' <tr>
<td>$date.format()</td>
<td><a target=_blank href="https://github.com/vlang/v/commit/$commit">$commit</a></td>
<td>$html_message</td>
<td>${date.format()}</td>
<td><a target=_blank href="https://github.com/vlang/v/commit/${commit}">${commit}</a></td>
<td>${html_message}</td>
<td>${diff1}ms</td>
<td>${diff2}ms</td>
<td>${diff3}ms</td>
<td>${diff4}ms</td>
<td>$vc_size KB</td>
<td>${vc_size} KB</td>
<td>${parse}ms</td>
<td>${check}ms</td>
<td>${cgen}ms</td>
<td>${scan}ms</td>
<td>$vlines</td>
<td>${vlines}</td>
<td>${int(f64(vlines) / f64(diff1) * 1000.0)}</td>
</tr>\n' +
table.trim_space() + '\n'
@@ -159,7 +159,7 @@ fn exec(s string) string {

// measure returns milliseconds
fn measure(cmd string, description string) int {
elog(' Measuring $description, warmups: $warmup_samples, samples: $max_samples, discard: $discard_highest_samples, with cmd: `$cmd`')
elog(' Measuring ${description}, warmups: ${warmup_samples}, samples: ${max_samples}, discard: ${discard_highest_samples}, with cmd: `${cmd}`')
for _ in 0 .. warmup_samples {
exec(cmd)
}
@@ -170,23 +170,23 @@ fn measure(cmd string, description string) int {
exec(cmd)
sample := int(sw.elapsed().milliseconds())
runs << sample
println('$sample ms')
println('${sample} ms')
flush_stdout()
}
runs.sort()
elog(' runs before discarding: $runs, avg: ${f64(arrays.sum(runs) or { 0 }) / runs.len:5.2f}')
elog(' runs before discarding: ${runs}, avg: ${f64(arrays.sum(runs) or { 0 }) / runs.len:5.2f}')
// Discard the highest times, since on AWS, they are caused by random load spikes,
// that are unpredictable, add noise and skew the statistics, without adding useful
// insights:
for _ in 0 .. discard_highest_samples {
runs.pop()
}
elog(' runs after discarding: $runs, avg: ${f64(arrays.sum(runs) or { 0 }) / runs.len:5.2f}')
elog(' runs after discarding: ${runs}, avg: ${f64(arrays.sum(runs) or { 0 }) / runs.len:5.2f}')
return int(f64(arrays.sum(runs) or { 0 }) / runs.len)
}

fn measure_steps_minimal(vdir string) !(int, int, int, int, int) {
elog('measure_steps_minimal $vdir, samples: $max_samples')
elog('measure_steps_minimal ${vdir}, samples: ${max_samples}')
mut scans, mut parses, mut checks, mut cgens, mut vliness := []int{}, []int{}, []int{}, []int{}, []int{}
for i in 0 .. max_samples {
scan, parse, check, cgen, vlines := measure_steps_one_sample(vdir)
@@ -195,15 +195,15 @@ fn measure_steps_minimal(vdir string) !(int, int, int, int, int) {
checks << check
cgens << cgen
vliness << vlines
elog(' [${i:2}/${max_samples:2}] scan: $scan ms, min parse: $parse ms, min check: $check ms, min cgen: $cgen ms, min vlines: $vlines ms')
elog(' [${i:2}/${max_samples:2}] scan: ${scan} ms, min parse: ${parse} ms, min check: ${check} ms, min cgen: ${cgen} ms, min vlines: ${vlines} ms')
}
scan, parse, check, cgen, vlines := arrays.min(scans)!, arrays.min(parses)!, arrays.min(checks)!, arrays.min(cgens)!, arrays.min(vliness)!
elog('measure_steps_minimal => min scan: $scan ms, min parse: $parse ms, min check: $check ms, min cgen: $cgen ms, min vlines: $vlines ms')
elog('measure_steps_minimal => min scan: ${scan} ms, min parse: ${parse} ms, min check: ${check} ms, min cgen: ${cgen} ms, min vlines: ${vlines} ms')
return scan, parse, check, cgen, vlines
}

fn measure_steps_one_sample(vdir string) (int, int, int, int, int) {
resp := os.execute_or_exit('$vdir/vprod $voptions -o v.c cmd/v')
resp := os.execute_or_exit('${vdir}/vprod ${voptions} -o v.c cmd/v')

mut scan, mut parse, mut check, mut cgen, mut vlines := 0, 0, 0, 0, 0
lines := resp.output.split_into_lines()
@@ -13,11 +13,11 @@ const vexe = os.join_path(vdir, 'v')
const sleep_period = 120

fn elog(msg string) {
eprintln('$time.now().format_ss_micro() $msg')
eprintln('${time.now().format_ss_micro()} ${msg}')
}

fn delay() {
elog('Sleeping for $sleep_period seconds...')
elog('Sleeping for ${sleep_period} seconds...')
time.sleep(sleep_period * time.second)
}

@@ -25,11 +25,11 @@ fn delay() {
// runs fast.v, pushes the HTML result to the fast.vlang.io GH pages repo.
fn main() {
os.setenv('LANG', 'C', true)
elog('fast_job fast_dir: $fast_dir | vdir: $vdir | vexe: $vexe')
elog('fast_job fast_dir: ${fast_dir} | vdir: ${vdir} | vexe: ${vexe}')

os.chdir(fast_dir)!

elog('fast_job start in os.getwd(): $os.getwd()')
elog('fast_job start in os.getwd(): ${os.getwd()}')
defer {
elog('fast_job end')
}
@@ -41,7 +41,7 @@ fn main() {
for {
elog('------------------- Checking for updates ... -------------------')
res_pull := os.execute('git pull --rebase')
elog('> res_pull.output: $res_pull.output')
elog('> res_pull.output: ${res_pull.output}')
if res_pull.exit_code != 0 {
elog('Git pull failed. You may have uncommitted changes?')
delay()
@@ -72,7 +72,7 @@ fn main() {
elog('running ./fast -upload')
fast_exit_code := os.system('./fast -upload')
if fast_exit_code != 0 {
println('fast_exit_code = $fast_exit_code, != 0')
println('fast_exit_code = ${fast_exit_code}, != 0')
}

delay()
@@ -133,7 +133,7 @@ fn fuzz6() {

fn main() {
seed := u32(time.ticks())
println('seed: $seed.hex()')
println('seed: ${seed.hex()}')
rand.seed([seed, seed])
fuzz1()
fuzz2()
@@ -2,7 +2,7 @@ fn main() {
for i in 0 .. 100000 {
println('
fn foo${i}() {
x := $i
x := ${i}
mut a := 1 + x
a += 2
print(a)
@@ -64,7 +64,7 @@ const (
// server port
server_port = 7171
// log file
log_file = '$work_dir/log.txt'
log_file = '${work_dir}/log.txt'
// log_to is either 'file' or 'terminal'
log_to = 'terminal'
)
@@ -209,7 +209,7 @@ fn (mut gen_vc GenVC) generate() {
os.mkdir(gen_vc.options.work_dir) or { panic(err) }
// still dosen't exist... we have a problem
if !os.is_dir(gen_vc.options.work_dir) {
gen_vc.logger.error('error creating directory: $gen_vc.options.work_dir')
gen_vc.logger.error('error creating directory: ${gen_vc.options.work_dir}')
gen_vc.gen_error = true
return
}
@@ -221,10 +221,10 @@ fn (mut gen_vc GenVC) generate() {
// first check to see if the local v repo is behind master
// if it isn't behind theres no point continuing further
if !gen_vc.options.serve && os.is_dir(git_repo_dir_v) {
gen_vc.cmd_exec('git -C $git_repo_dir_v checkout master')
gen_vc.cmd_exec('git -C ${git_repo_dir_v} checkout master')
// fetch the remote repo just in case there are newer commits there
gen_vc.cmd_exec('git -C $git_repo_dir_v fetch')
git_status := gen_vc.cmd_exec('git -C $git_repo_dir_v status')
gen_vc.cmd_exec('git -C ${git_repo_dir_v} fetch')
git_status := gen_vc.cmd_exec('git -C ${git_repo_dir_v} status')
if !git_status.contains('behind') && !gen_vc.options.force {
gen_vc.logger.warn('v repository is already up to date.')
return
@@ -233,11 +233,11 @@ fn (mut gen_vc GenVC) generate() {
// delete repos
gen_vc.purge_repos()
// clone repos
gen_vc.cmd_exec('git clone --depth 1 https://$git_repo_v $git_repo_dir_v')
gen_vc.cmd_exec('git clone --depth 1 https://$git_repo_vc $git_repo_dir_vc')
gen_vc.cmd_exec('git clone --depth 1 https://${git_repo_v} ${git_repo_dir_v}')
gen_vc.cmd_exec('git clone --depth 1 https://${git_repo_vc} ${git_repo_dir_vc}')
// get output of git log -1 (last commit)
git_log_v := gen_vc.cmd_exec('git -C $git_repo_dir_v log -1 --format="commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
git_log_vc := gen_vc.cmd_exec('git -C $git_repo_dir_vc log -1 --format="Commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
git_log_v := gen_vc.cmd_exec('git -C ${git_repo_dir_v} log -1 --format="commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
git_log_vc := gen_vc.cmd_exec('git -C ${git_repo_dir_vc} log -1 --format="Commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
// date of last commit in each repo
ts_v := git_log_v.find_between('Date:', '\n').trim_space()
ts_vc := git_log_vc.find_between('Date:', '\n').trim_space()
@@ -255,45 +255,45 @@ fn (mut gen_vc GenVC) generate() {
last_commit_subject := git_log_v.find_between('Subject:', '\n').trim_space().replace("'",
'"')
// log some info
gen_vc.logger.debug('last commit time ($git_repo_v): ' + last_commit_time_v.format_ss())
gen_vc.logger.debug('last commit time ($git_repo_vc): ' + last_commit_time_vc.format_ss())
gen_vc.logger.debug('last commit hash ($git_repo_v): $last_commit_hash_v')
gen_vc.logger.debug('last commit subject ($git_repo_v): $last_commit_subject')
gen_vc.logger.debug('last commit time (${git_repo_v}): ' + last_commit_time_v.format_ss())
gen_vc.logger.debug('last commit time (${git_repo_vc}): ' + last_commit_time_vc.format_ss())
gen_vc.logger.debug('last commit hash (${git_repo_v}): ${last_commit_hash_v}')
gen_vc.logger.debug('last commit subject (${git_repo_v}): ${last_commit_subject}')
// if vc repo already has a newer commit than the v repo, assume it's up to date
if t_unix_vc >= t_unix_v && !gen_vc.options.force {
gen_vc.logger.warn('vc repository is already up to date.')
return
}
// try build v for current os (linux in this case)
gen_vc.cmd_exec('make -C $git_repo_dir_v')
v_exec := '$git_repo_dir_v/v'
gen_vc.cmd_exec('make -C ${git_repo_dir_v}')
v_exec := '${git_repo_dir_v}/v'
// check if make was successful
gen_vc.assert_file_exists_and_is_not_too_short(v_exec, err_msg_make)
// build v.c for each os
for os_name in vc_build_oses {
c_file := if os_name == 'nix' { 'v.c' } else { 'v_win.c' }
v_flags := if os_name == 'nix' { '-os cross' } else { '-os $os_name' }
v_flags := if os_name == 'nix' { '-os cross' } else { '-os ${os_name}' }
// try generate .c file
gen_vc.cmd_exec('$v_exec $v_flags -o $c_file $git_repo_dir_v/cmd/v')
gen_vc.cmd_exec('${v_exec} ${v_flags} -o ${c_file} ${git_repo_dir_v}/cmd/v')
// check if the c file seems ok
gen_vc.assert_file_exists_and_is_not_too_short(c_file, err_msg_gen_c)
// embed the latest v commit hash into the c file
gen_vc.cmd_exec('sed -i \'1s/^/#define V_COMMIT_HASH "$last_commit_hash_v_short"\\n/\' $c_file')
gen_vc.cmd_exec('sed -i \'1s/^/#define V_COMMIT_HASH "${last_commit_hash_v_short}"\\n/\' ${c_file}')
// move to vc repo
gen_vc.cmd_exec('mv $c_file $git_repo_dir_vc/$c_file')
gen_vc.cmd_exec('mv ${c_file} ${git_repo_dir_vc}/${c_file}')
// add new .c file to local vc repo
gen_vc.cmd_exec('git -C $git_repo_dir_vc add $c_file')
gen_vc.cmd_exec('git -C ${git_repo_dir_vc} add ${c_file}')
}
// check if the vc repo actually changed
git_status := gen_vc.cmd_exec('git -C $git_repo_dir_vc status')
git_status := gen_vc.cmd_exec('git -C ${git_repo_dir_vc} status')
if git_status.contains('nothing to commit') {
gen_vc.logger.error('no changes to vc repo: something went wrong.')
gen_vc.gen_error = true
}
// commit changes to local vc repo
gen_vc.cmd_exec_safe("git -C $git_repo_dir_vc commit -m '[v:master] $last_commit_hash_v_short - $last_commit_subject'")
gen_vc.cmd_exec_safe("git -C ${git_repo_dir_vc} commit -m '[v:master] ${last_commit_hash_v_short} - ${last_commit_subject}'")
// push changes to remote vc repo
gen_vc.cmd_exec_safe('git -C $git_repo_dir_vc push https://${urllib.query_escape(git_username)}:${urllib.query_escape(git_password)}@$git_repo_vc master')
gen_vc.cmd_exec_safe('git -C ${git_repo_dir_vc} push https://${urllib.query_escape(git_username)}:${urllib.query_escape(git_password)}@${git_repo_vc} master')
}

// only execute when dry_run option is false, otherwise just log
@@ -312,10 +312,10 @@ fn (mut gen_vc GenVC) command_execute(cmd string, dry bool) string {
if dry {
return gen_vc.command_execute_dry(cmd)
}
gen_vc.logger.info('cmd: $cmd')
gen_vc.logger.info('cmd: ${cmd}')
r := os.execute(cmd)
if r.exit_code < 0 {
gen_vc.logger.error('$err_msg_cmd_x: "$cmd" could not start.')
gen_vc.logger.error('${err_msg_cmd_x}: "${cmd}" could not start.')
gen_vc.logger.error(r.output)
// something went wrong, better start fresh next time
gen_vc.purge_repos()
@@ -323,7 +323,7 @@ fn (mut gen_vc GenVC) command_execute(cmd string, dry bool) string {
return ''
}
if r.exit_code != 0 {
gen_vc.logger.error('$err_msg_cmd_x: "$cmd" failed.')
gen_vc.logger.error('${err_msg_cmd_x}: "${cmd}" failed.')
gen_vc.logger.error(r.output)
// something went wrong, better start fresh next time
gen_vc.purge_repos()
@@ -335,35 +335,35 @@ fn (mut gen_vc GenVC) command_execute(cmd string, dry bool) string {

// just log cmd, dont execute
fn (mut gen_vc GenVC) command_execute_dry(cmd string) string {
gen_vc.logger.info('cmd (dry): "$cmd"')
gen_vc.logger.info('cmd (dry): "${cmd}"')
return ''
}

// delete repo directories
fn (mut gen_vc GenVC) purge_repos() {
// delete old repos (better to be fully explicit here, since these are destructive operations)
mut repo_dir := '$gen_vc.options.work_dir/$git_repo_dir_v'
mut repo_dir := '${gen_vc.options.work_dir}/${git_repo_dir_v}'
if os.is_dir(repo_dir) {
gen_vc.logger.info('purging local repo: "$repo_dir"')
gen_vc.cmd_exec('rm -rf $repo_dir')
gen_vc.logger.info('purging local repo: "${repo_dir}"')
gen_vc.cmd_exec('rm -rf ${repo_dir}')
}
repo_dir = '$gen_vc.options.work_dir/$git_repo_dir_vc'
repo_dir = '${gen_vc.options.work_dir}/${git_repo_dir_vc}'
if os.is_dir(repo_dir) {
gen_vc.logger.info('purging local repo: "$repo_dir"')
gen_vc.cmd_exec('rm -rf $repo_dir')
gen_vc.logger.info('purging local repo: "${repo_dir}"')
gen_vc.cmd_exec('rm -rf ${repo_dir}')
}
}

// check if file size is too short
fn (mut gen_vc GenVC) assert_file_exists_and_is_not_too_short(f string, emsg string) {
if !os.exists(f) {
gen_vc.logger.error('$err_msg_build: $emsg .')
gen_vc.logger.error('${err_msg_build}: ${emsg} .')
gen_vc.gen_error = true
return
}
fsize := os.file_size(f)
if fsize < too_short_file_limit {
gen_vc.logger.error('$err_msg_build: $f exists, but is too short: only $fsize bytes.')
gen_vc.logger.error('${err_msg_build}: ${f} exists, but is too short: only ${fsize} bytes.')
gen_vc.gen_error = true
return
}
@@ -39,12 +39,14 @@ fn main() {
}
exit(verify_result.exit_code)
} else {
eprintln('The V pre commit hook will format $vfiles.len V file(s):')
eprintln('The V pre commit hook will format ${vfiles.len} V file(s):')
// vfmt off
for vfile in vfiles {
eprintln('  ${term.bold('$vfile')}')
}
// vfmt on
all_vfiles_on_a_line := vfiles.map(os.quoted_path(it)).join(' ')
os.system('v fmt -w $all_vfiles_on_a_line')
os.system('git add $all_vfiles_on_a_line')
os.system('v fmt -w ${all_vfiles_on_a_line}')
os.system('git add ${all_vfiles_on_a_line}')
}
}
@@ -44,7 +44,7 @@ fn process_files(files []string) ! {
total_us += f_us
total_bytes += p.scanner.text.len
total_tokens += p.scanner.all_tokens.len
println('${f_us:10}us ${p.scanner.all_tokens.len:10} ${p.scanner.text.len:10} ${(f64(p.scanner.text.len) / p.scanner.all_tokens.len):7.3} ${p.errors.len:4} $f')
println('${f_us:10}us ${p.scanner.all_tokens.len:10} ${p.scanner.text.len:10} ${(f64(p.scanner.text.len) / p.scanner.all_tokens.len):7.3} ${p.errors.len:4} ${f}')
}
println('${total_us:10}us ${total_tokens:10} ${total_bytes:10} ${(f64(total_tokens) / total_bytes):7.3} | speed: ${(f64(total_bytes) / total_us):2.5f} MB/s')
}
@@ -36,7 +36,7 @@ fn process_files(files []string) ! {
total_us += f_us
total_bytes += s.text.len
total_tokens += s.all_tokens.len
println('${f_us:10}us ${s.all_tokens.len:10} ${s.text.len:10} ${(f64(s.text.len) / s.all_tokens.len):7.3f} $f')
println('${f_us:10}us ${s.all_tokens.len:10} ${s.text.len:10} ${(f64(s.text.len) / s.all_tokens.len):7.3f} ${f}')
}
println('${total_us:10}us ${total_tokens:10} ${total_bytes:10} ${(f64(total_tokens) / total_bytes):7.3f} | speed: ${(f64(total_bytes) / total_us):2.5f} MB/s')
}
@@ -50,19 +50,19 @@ pub fn cprintln_strong(omessage string) {

pub fn verbose_trace(label string, message string) {
if os.getenv('VERBOSE').len > 0 {
slabel := '$time.now().format_ss_milli() $label'
cprintln('# ${slabel:-43s} : $message')
slabel := '${time.now().format_ss_milli()} ${label}'
cprintln('# ${slabel:-43s} : ${message}')
}
}

pub fn verbose_trace_strong(label string, omessage string) {
if os.getenv('VERBOSE').len > 0 {
slabel := '$time.now().format_ss_milli() $label'
slabel := '${time.now().format_ss_milli()} ${label}'
mut message := omessage
if scripting.term_colors {
message = term.bright_green(message)
}
cprintln('# ${slabel:-43s} : $message')
cprintln('# ${slabel:-43s} : ${message}')
}
}

@@ -76,7 +76,7 @@ pub fn verbose_trace_exec_result(x os.Result) {
if scripting.term_colors {
line = term.bright_green(line)
}
cprintln('# ${lnum:3d}: $line')
cprintln('# ${lnum:3d}: ${line}')
lnum++
}
cprintln('# ----------------------------------------------------------------------')
@@ -84,11 +84,11 @@ pub fn verbose_trace_exec_result(x os.Result) {
}

fn modfn(mname string, fname string) string {
return '${mname}.$fname'
return '${mname}.${fname}'
}

pub fn chdir(path string) {
verbose_trace_strong(modfn(@MOD, @FN), 'cd $path')
verbose_trace_strong(modfn(@MOD, @FN), 'cd ${path}')
os.chdir(path) or {
verbose_trace(modfn(@MOD, @FN), '## failed.')
return
@@ -96,7 +96,7 @@ pub fn chdir(path string) {
}

pub fn mkdir(path string) ? {
verbose_trace_strong(modfn(@MOD, @FN), 'mkdir $path')
verbose_trace_strong(modfn(@MOD, @FN), 'mkdir ${path}')
os.mkdir(path) or {
verbose_trace(modfn(@MOD, @FN), '## failed.')
return err
@@ -104,7 +104,7 @@ pub fn mkdir(path string) ? {
}

pub fn mkdir_all(path string) ? {
verbose_trace_strong(modfn(@MOD, @FN), 'mkdir -p $path')
verbose_trace_strong(modfn(@MOD, @FN), 'mkdir -p ${path}')
os.mkdir_all(path) or {
verbose_trace(modfn(@MOD, @FN), '## failed.')
return err
@@ -112,7 +112,7 @@ pub fn mkdir_all(path string) ? {
}

pub fn rmrf(path string) {
verbose_trace_strong(modfn(@MOD, @FN), 'rm -rf $path')
verbose_trace_strong(modfn(@MOD, @FN), 'rm -rf ${path}')
if os.exists(path) {
if os.is_dir(path) {
os.rmdir_all(path) or { panic(err) }
@@ -165,10 +165,10 @@ pub fn exit_0_status(cmd string) bool {

pub fn tool_must_exist(toolcmd string) {
verbose_trace(modfn(@MOD, @FN), toolcmd)
if exit_0_status('type $toolcmd') {
if exit_0_status('type ${toolcmd}') {
return
}
eprintln('Missing tool: $toolcmd')
eprintln('Missing tool: ${toolcmd}')
eprintln('Please try again after you install it.')
exit(1)
}
@@ -182,6 +182,6 @@ pub fn used_tools_must_exist(tools []string) {
pub fn show_sizes_of_files(files []string) {
for f in files {
size := os.file_size(f)
println('$size $f') // println('${size:10d} $f')
println('${size} ${f}') // println('${size:10d} $f')
}
}
@@ -80,7 +80,7 @@ pub fn (mut ts TestSession) add_failed_cmd(cmd string) {

pub fn (mut ts TestSession) show_list_of_failed_tests() {
for i, cmd in ts.failed_cmds {
eprintln(term.failed('Failed command ${i + 1}:') + ' $cmd')
eprintln(term.failed('Failed command ${i + 1}:') + ' ${cmd}')
}
}

@@ -132,12 +132,12 @@ pub fn (mut ts TestSession) print_messages() {
if ts.progress_mode {
// progress mode, the last line is rewritten many times:
if is_ok && !ts.silent_mode {
print('\r$empty\r$msg')
print('\r${empty}\r${msg}')
flush_stdout()
} else {
// the last \n is needed, so SKIP/FAIL messages
// will not get overwritten by the OK ones
eprint('\r$empty\r$msg\n')
eprint('\r${empty}\r${msg}\n')
}
continue
}
@@ -393,10 +393,10 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
mut status := os.system(cmd)
if status != 0 {
details := get_test_details(file)
os.setenv('VTEST_RETRY_MAX', '$details.retry', true)
os.setenv('VTEST_RETRY_MAX', '${details.retry}', true)
for retry := 1; retry <= details.retry; retry++ {
ts.append_message(.info, ' [stats] retrying $retry/$details.retry of $relative_file ; known flaky: $details.flaky ...')
os.setenv('VTEST_RETRY', '$retry', true)
ts.append_message(.info, ' [stats] retrying ${retry}/${details.retry} of ${relative_file} ; known flaky: ${details.flaky} ...')
os.setenv('VTEST_RETRY', '${retry}', true)
status = os.system(cmd)
if status == 0 {
unsafe {
@@ -406,7 +406,7 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
time.sleep(500 * time.millisecond)
}
if details.flaky && !testing.fail_flaky {
ts.append_message(.info, ' *FAILURE* of the known flaky test file $relative_file is ignored, since VTEST_FAIL_FLAKY is 0 . Retry count: $details.retry .')
ts.append_message(.info, ' *FAILURE* of the known flaky test file ${relative_file} is ignored, since VTEST_FAIL_FLAKY is 0 . Retry count: ${details.retry} .')
unsafe {
goto test_passed_system
}
@@ -422,7 +422,7 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
}
} else {
if testing.show_start {
ts.append_message(.info, ' starting $relative_file ...')
ts.append_message(.info, ' starting ${relative_file} ...')
}
mut r := os.execute(cmd)
if r.exit_code < 0 {
@@ -434,10 +434,10 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
}
if r.exit_code != 0 {
details := get_test_details(file)
os.setenv('VTEST_RETRY_MAX', '$details.retry', true)
os.setenv('VTEST_RETRY_MAX', '${details.retry}', true)
for retry := 1; retry <= details.retry; retry++ {
ts.append_message(.info, ' retrying $retry/$details.retry of $relative_file ; known flaky: $details.flaky ...')
os.setenv('VTEST_RETRY', '$retry', true)
ts.append_message(.info, ' retrying ${retry}/${details.retry} of ${relative_file} ; known flaky: ${details.flaky} ...')
os.setenv('VTEST_RETRY', '${retry}', true)
r = os.execute(cmd)
if r.exit_code == 0 {
unsafe {
@@ -446,7 +446,7 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
}
}
if details.flaky && !testing.fail_flaky {
ts.append_message(.info, ' *FAILURE* of the known flaky test file $relative_file is ignored, since VTEST_FAIL_FLAKY is 0 . Retry count: $details.retry .')
ts.append_message(.info, ' *FAILURE* of the known flaky test file ${relative_file} is ignored, since VTEST_FAIL_FLAKY is 0 . Retry count: ${details.retry} .')
unsafe {
goto test_passed_execute
}
@@ -454,7 +454,7 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
ts.benchmark.fail()
tls_bench.fail()
ending_newline := if r.output.ends_with('\n') { '\n' } else { '' }
ts.append_message(.fail, tls_bench.step_message_fail('$normalised_relative_file\n$r.output.trim_space()$ending_newline'))
ts.append_message(.fail, tls_bench.step_message_fail('${normalised_relative_file}\n${r.output.trim_space()}${ending_newline}'))
ts.add_failed_cmd(cmd)
} else {
test_passed_execute:
@@ -474,7 +474,7 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
pub fn vlib_should_be_present(parent_dir string) {
vlib_dir := os.join_path_single(parent_dir, 'vlib')
if !os.is_dir(vlib_dir) {
eprintln('$vlib_dir is missing, it must be next to the V executable')
eprintln('${vlib_dir} is missing, it must be next to the V executable')
exit(1)
}
}
@@ -486,7 +486,7 @@ pub fn prepare_test_session(zargs string, folder string, oskipped []string, main
vargs := zargs.replace(vexe, '')
eheader(main_label)
if vargs.len > 0 {
eprintln('v compiler args: "$vargs"')
eprintln('v compiler args: "${vargs}"')
}
mut session := new_test_session(vargs, true)
files := os.walk_ext(os.join_path_single(parent_dir, folder), '.v')
@@ -532,8 +532,8 @@ pub fn prepare_test_session(zargs string, folder string, oskipped []string, main
pub type FnTestSetupCb = fn (mut session TestSession)

pub fn v_build_failing_skipped(zargs string, folder string, oskipped []string, cb FnTestSetupCb) bool {
main_label := 'Building $folder ...'
finish_label := 'building $folder'
main_label := 'Building ${folder} ...'
finish_label := 'building ${folder}'
mut session := prepare_test_session(zargs, folder, oskipped, main_label)
cb(mut session)
session.test()
@@ -562,22 +562,22 @@ pub fn building_any_v_binaries_failed() bool {
vlib_should_be_present(parent_dir)
os.chdir(parent_dir) or { panic(err) }
mut failed := false
v_build_commands := ['$vexe -o v_g -g cmd/v', '$vexe -o v_prod_g -prod -g cmd/v',
'$vexe -o v_cg -cg cmd/v', '$vexe -o v_prod_cg -prod -cg cmd/v',
'$vexe -o v_prod -prod cmd/v']
v_build_commands := ['${vexe} -o v_g -g cmd/v',
'${vexe} -o v_prod_g -prod -g cmd/v', '${vexe} -o v_cg -cg cmd/v',
'${vexe} -o v_prod_cg -prod -cg cmd/v', '${vexe} -o v_prod -prod cmd/v']
mut bmark := benchmark.new_benchmark()
for cmd in v_build_commands {
bmark.step()
if build_v_cmd_failed(cmd) {
bmark.fail()
failed = true
eprintln(bmark.step_message_fail('command: $cmd . See details above ^^^^^^^'))
eprintln(bmark.step_message_fail('command: ${cmd} . See details above ^^^^^^^'))
eprintln('')
continue
}
bmark.ok()
if !testing.hide_oks {
eprintln(bmark.step_message_ok('command: $cmd'))
eprintln(bmark.step_message_ok('command: ${cmd}'))
}
}
bmark.stop()
@@ -600,7 +600,7 @@ pub fn header(msg string) {
// The new nested folder, and its contents, will get removed after all tests/programs succeed.
pub fn setup_new_vtmp_folder() string {
now := time.sys_mono_now()
new_vtmp_dir := os.join_path(os.vtmp_dir(), 'tsession_${sync.thread_id().hex()}_$now')
new_vtmp_dir := os.join_path(os.vtmp_dir(), 'tsession_${sync.thread_id().hex()}_${now}')
os.mkdir_all(new_vtmp_dir) or { panic(err) }
os.setenv('VTMP', new_vtmp_dir, true)
return new_vtmp_dir
@@ -632,5 +632,5 @@ pub fn find_started_process(pname string) ?string {
return line
}
}
return error('could not find process matching $pname')
return error('could not find process matching ${pname}')
}
@@ -22,9 +22,9 @@ pub fn validate_commit_exists(commit string) {
if commit.len == 0 {
return
}
cmd := "git cat-file -t '$commit' "
cmd := "git cat-file -t '${commit}' "
if !scripting.exit_0_status(cmd) {
eprintln('Commit: "$commit" does not exist in the current repository.')
eprintln('Commit: "${commit}" does not exist in the current repository.')
exit(3)
}
}
@@ -50,25 +50,25 @@ pub fn prepare_vc_source(vcdir string, cdir string, commit string) (string, stri
// Building a historic v with the latest vc is not always possible ...
// It is more likely, that the vc *at the time of the v commit*,
// or slightly before that time will be able to build the historic v:
vline := scripting.run('git rev-list -n1 --timestamp "$commit" ')
vline := scripting.run('git rev-list -n1 --timestamp "${commit}" ')
v_timestamp, v_commithash := line_to_timestamp_and_commit(vline)
scripting.verbose_trace(@FN, 'v_timestamp: $v_timestamp | v_commithash: $v_commithash')
scripting.verbose_trace(@FN, 'v_timestamp: ${v_timestamp} | v_commithash: ${v_commithash}')
check_v_commit_timestamp_before_self_rebuilding(v_timestamp)
scripting.chdir(vcdir)
scripting.run('git checkout --quiet master')
//
mut vccommit := ''
vcbefore_subject_match := scripting.run('git rev-list HEAD -n1 --timestamp --grep=${v_commithash[0..7]} ')
scripting.verbose_trace(@FN, 'vcbefore_subject_match: $vcbefore_subject_match')
scripting.verbose_trace(@FN, 'vcbefore_subject_match: ${vcbefore_subject_match}')
if vcbefore_subject_match.len > 3 {
_, vccommit = line_to_timestamp_and_commit(vcbefore_subject_match)
} else {
scripting.verbose_trace(@FN, 'the v commit did not match anything in the vc log; try --timestamp instead.')
vcbefore := scripting.run('git rev-list HEAD -n1 --timestamp --before=$v_timestamp ')
vcbefore := scripting.run('git rev-list HEAD -n1 --timestamp --before=${v_timestamp} ')
_, vccommit = line_to_timestamp_and_commit(vcbefore)
}
scripting.verbose_trace(@FN, 'vccommit: $vccommit')
scripting.run('git checkout --quiet "$vccommit" ')
scripting.verbose_trace(@FN, 'vccommit: ${vccommit}')
scripting.run('git checkout --quiet "${vccommit}" ')
scripting.run('wc *.c')
scripting.chdir(cdir)
return v_commithash, vccommit
@@ -78,11 +78,11 @@ pub fn clone_or_pull(remote_git_url string, local_worktree_path string) {
// Note: after clone_or_pull, the current repo branch is === HEAD === master
if os.is_dir(local_worktree_path) && os.is_dir(os.join_path_single(local_worktree_path, '.git')) {
// Already existing ... Just pulling in this case is faster usually.
scripting.run('git -C "$local_worktree_path" checkout --quiet master')
scripting.run('git -C "$local_worktree_path" pull --quiet ')
scripting.run('git -C "${local_worktree_path}" checkout --quiet master')
scripting.run('git -C "${local_worktree_path}" pull --quiet ')
} else {
// Clone a fresh
scripting.run('git clone --quiet "$remote_git_url" "$local_worktree_path" ')
scripting.run('git clone --quiet "${remote_git_url}" "${local_worktree_path}" ')
}
}

@@ -111,17 +111,17 @@ pub fn (mut vgit_context VGitContext) compile_oldv_if_needed() {
mut command_for_building_v_from_c_source := ''
mut command_for_selfbuilding := ''
if 'windows' == os.user_os() {
command_for_building_v_from_c_source = '$vgit_context.cc -std=c99 -I ./thirdparty/stdatomic/win -municode -w -o cv.exe "$vgit_context.path_vc/v_win.c" '
command_for_selfbuilding = './cv.exe -o $vgit_context.vexename {SOURCE}'
command_for_building_v_from_c_source = '${vgit_context.cc} -std=c99 -I ./thirdparty/stdatomic/win -municode -w -o cv.exe "${vgit_context.path_vc}/v_win.c" '
command_for_selfbuilding = './cv.exe -o ${vgit_context.vexename} {SOURCE}'
} else {
command_for_building_v_from_c_source = '$vgit_context.cc -std=gnu11 -I ./thirdparty/stdatomic/nix -w -o cv "$vgit_context.path_vc/v.c" -lm -lpthread'
command_for_selfbuilding = './cv -o $vgit_context.vexename {SOURCE}'
command_for_building_v_from_c_source = '${vgit_context.cc} -std=gnu11 -I ./thirdparty/stdatomic/nix -w -o cv "${vgit_context.path_vc}/v.c" -lm -lpthread'
command_for_selfbuilding = './cv -o ${vgit_context.vexename} {SOURCE}'
}
scripting.chdir(vgit_context.workdir)
clone_or_pull(vgit_context.v_repo_url, vgit_context.path_v)
clone_or_pull(vgit_context.vc_repo_url, vgit_context.path_vc)
scripting.chdir(vgit_context.path_v)
scripting.run('git checkout --quiet $vgit_context.commit_v')
scripting.run('git checkout --quiet ${vgit_context.commit_v}')
if os.is_dir(vgit_context.path_v) && os.exists(vgit_context.vexepath) {
// already compiled, so no need to compile v again
vgit_context.commit_v__hash = get_current_folder_commit_hash()
@@ -163,7 +163,7 @@ pub mut:

pub fn add_common_tool_options(mut context VGitOptions, mut fp flag.FlagParser) []string {
tdir := os.temp_dir()
context.workdir = os.real_path(fp.string('workdir', `w`, context.workdir, 'A writable base folder. Default: $tdir'))
context.workdir = os.real_path(fp.string('workdir', `w`, context.workdir, 'A writable base folder. Default: ${tdir}'))
context.v_repo_url = fp.string('vrepo', 0, context.v_repo_url, 'The url of the V repository. You can clone it locally too. See also --vcrepo below.')
context.vc_repo_url = fp.string('vcrepo', 0, context.vc_repo_url, 'The url of the vc repository. You can clone it
${flag.space}beforehand, and then just give the local folder
@@ -187,7 +187,7 @@ ${flag.space}to script it/run it in a restrictive vps/docker.
context.vc_repo_url = os.real_path(context.vc_repo_url)
}
commits := fp.finalize() or {
eprintln('Error: $err')
eprintln('Error: ${err}')
exit(1)
}
for commit in commits {
@@ -7,7 +7,7 @@ pub fn show_topic(topic string) {
vroot := os.dir(vexe)
target_topic := os.join_path(vroot, 'cmd', 'v', 'help', '${topic}.txt')
content := os.read_file(target_topic) or {
eprintln('Unknown topic: $topic')
eprintln('Unknown topic: ${topic}')
exit(1)
}
println(content)
@@ -72,7 +72,7 @@ const cache_oldv_folder_vc = os.join_path(cache_oldv_folder, 'vc')
fn sync_cache() {
scripting.verbose_trace(@FN, 'start')
if !os.exists(cache_oldv_folder) {
scripting.verbose_trace(@FN, 'creating $cache_oldv_folder')
scripting.verbose_trace(@FN, 'creating ${cache_oldv_folder}')
scripting.mkdir_all(cache_oldv_folder) or {
scripting.verbose_trace(@FN, '## failed.')
exit(1)
@@ -82,16 +82,16 @@ fn sync_cache() {
for reponame in ['v', 'vc'] {
repofolder := os.join_path(cache_oldv_folder, reponame)
if !os.exists(repofolder) {
scripting.verbose_trace(@FN, 'cloning to $repofolder')
scripting.exec('git clone --quiet https://github.com/vlang/$reponame $repofolder') or {
scripting.verbose_trace(@FN, '## error during clone: $err')
scripting.verbose_trace(@FN, 'cloning to ${repofolder}')
scripting.exec('git clone --quiet https://github.com/vlang/${reponame} ${repofolder}') or {
scripting.verbose_trace(@FN, '## error during clone: ${err}')
exit(1)
}
}
scripting.chdir(repofolder)
scripting.exec('git pull --quiet') or {
scripting.verbose_trace(@FN, 'pulling to $repofolder')
scripting.verbose_trace(@FN, '## error during pull: $err')
scripting.verbose_trace(@FN, 'pulling to ${repofolder}')
scripting.verbose_trace(@FN, '## error during pull: ${err}')
exit(1)
}
}
@@ -150,11 +150,11 @@ fn main() {
} else {
context.commit_v = scripting.run('git rev-list -n1 HEAD')
}
scripting.cprintln('################# context.commit_v: $context.commit_v #####################')
scripting.cprintln('################# context.commit_v: ${context.commit_v} #####################')
context.path_v = vgit.normalized_workpath_for_commit(context.vgo.workdir, context.commit_v)
context.path_vc = vgit.normalized_workpath_for_commit(context.vgo.workdir, 'vc')
if !os.is_dir(context.vgo.workdir) {
eprintln('Work folder: $context.vgo.workdir , does not exist.')
eprintln('Work folder: ${context.vgo.workdir} , does not exist.')
exit(2)
}
ecc := os.getenv('CC')
@@ -168,7 +168,7 @@ fn main() {
context.compile_oldv_if_needed()
scripting.chdir(context.path_v)
shorter_hash := context.commit_v_hash[0..10]
scripting.cprintln('# v commit hash: $shorter_hash | folder: $context.path_v')
scripting.cprintln('# v commit hash: ${shorter_hash} | folder: ${context.path_v}')
if context.cmd_to_run.len > 0 {
scripting.cprintln_strong('# command: ${context.cmd_to_run:-34s}')
cmdres := os.execute_or_exit(context.cmd_to_run)
@@ -37,10 +37,10 @@ fn (c Context) compare_versions() {
// Input is validated at this point...
// Cleanup artifacts from previous runs of this tool:
scripting.chdir(c.vgo.workdir)
scripting.run('rm -rf "$c.a" "$c.b" "$c.vc" ')
scripting.run('rm -rf "${c.a}" "${c.b}" "${c.vc}" ')
// clone the VC source *just once per comparison*, and reuse it:
scripting.run('git clone --quiet "$c.vgo.vc_repo_url" "$c.vc" ')
println('Comparing V performance of commit $c.commit_before (before) vs commit $c.commit_after (after) ...')
scripting.run('git clone --quiet "${c.vgo.vc_repo_url}" "${c.vc}" ')
println('Comparing V performance of commit ${c.commit_before} (before) vs commit ${c.commit_after} (after) ...')
c.prepare_v(c.b, c.commit_before)
c.prepare_v(c.a, c.commit_after)
scripting.chdir(c.vgo.workdir)
@@ -72,7 +72,7 @@ fn (c Context) compare_versions() {
])
println('All performance files:')
for f in perf_files {
println('   $f')
println('   ${f}')
}
}

@@ -92,11 +92,11 @@ fn (c &Context) prepare_v(cdir string, commit string) {
}
vgit_context.compile_oldv_if_needed()
scripting.chdir(cdir)
println('Making a v compiler in $cdir')
scripting.run('./v -cc $cc -o v $vgit_context.vvlocation')
println('Making a vprod compiler in $cdir')
scripting.run('./v -cc $cc -prod -o vprod $vgit_context.vvlocation')
println('Stripping and compressing cv v and vprod binaries in $cdir')
println('Making a v compiler in ${cdir}')
scripting.run('./v -cc ${cc} -o v ${vgit_context.vvlocation}')
println('Making a vprod compiler in ${cdir}')
scripting.run('./v -cc ${cc} -prod -o vprod ${vgit_context.vvlocation}')
println('Stripping and compressing cv v and vprod binaries in ${cdir}')
scripting.run('cp cv cv_stripped')
scripting.run('cp v v_stripped')
scripting.run('cp vprod vprod_stripped')
@@ -107,13 +107,13 @@ fn (c &Context) prepare_v(cdir string, commit string) {
scripting.run('upx -qqq --lzma cv_stripped_upxed')
scripting.run('upx -qqq --lzma v_stripped_upxed')
scripting.run('upx -qqq --lzma vprod_stripped_upxed')
scripting.show_sizes_of_files(['$cdir/cv', '$cdir/cv_stripped', '$cdir/cv_stripped_upxed'])
scripting.show_sizes_of_files(['$cdir/v', '$cdir/v_stripped', '$cdir/v_stripped_upxed'])
scripting.show_sizes_of_files(['$cdir/vprod', '$cdir/vprod_stripped',
'$cdir/vprod_stripped_upxed'])
vversion := scripting.run('$cdir/v -version')
scripting.show_sizes_of_files(['${cdir}/cv', '${cdir}/cv_stripped', '${cdir}/cv_stripped_upxed'])
scripting.show_sizes_of_files(['${cdir}/v', '${cdir}/v_stripped', '${cdir}/v_stripped_upxed'])
scripting.show_sizes_of_files(['${cdir}/vprod', '${cdir}/vprod_stripped',
'${cdir}/vprod_stripped_upxed'])
vversion := scripting.run('${cdir}/v -version')
vcommit := scripting.run('git rev-parse --short --verify HEAD')
println('V version is: $vversion , local source commit: $vcommit')
println('V version is: ${vversion} , local source commit: ${vcommit}')
if vgit_context.vvlocation == 'cmd/v' {
if os.exists('vlib/v/ast/ast.v') {
println('Source lines of the compiler: ' +
@@ -132,21 +132,21 @@ fn (c &Context) prepare_v(cdir string, commit string) {

fn (c Context) compare_v_performance(label string, commands []string) string {
println('---------------------------------------------------------------------------------')
println('Compare v performance when doing the following commands ($label):')
println('Compare v performance when doing the following commands (${label}):')
mut source_location_a := ''
mut source_location_b := ''
if os.exists('$c.a/cmd/v') {
if os.exists('${c.a}/cmd/v') {
source_location_a = 'cmd/v'
} else {
source_location_a = if os.exists('$c.a/v.v') { 'v.v ' } else { 'compiler/ ' }
source_location_a = if os.exists('${c.a}/v.v') { 'v.v ' } else { 'compiler/ ' }
}
if os.exists('$c.b/cmd/v') {
if os.exists('${c.b}/cmd/v') {
source_location_b = 'cmd/v'
} else {
source_location_b = if os.exists('$c.b/v.v') { 'v.v ' } else { 'compiler/ ' }
source_location_b = if os.exists('${c.b}/v.v') { 'v.v ' } else { 'compiler/ ' }
}
timestamp_a, _ := vgit.line_to_timestamp_and_commit(scripting.run('cd $c.a/ ; git rev-list -n1 --timestamp HEAD'))
timestamp_b, _ := vgit.line_to_timestamp_and_commit(scripting.run('cd $c.b/ ; git rev-list -n1 --timestamp HEAD'))
timestamp_a, _ := vgit.line_to_timestamp_and_commit(scripting.run('cd ${c.a}/ ; git rev-list -n1 --timestamp HEAD'))
timestamp_b, _ := vgit.line_to_timestamp_and_commit(scripting.run('cd ${c.b}/ ; git rev-list -n1 --timestamp HEAD'))
debug_option_a := if timestamp_a > 1570877641 { '-cg ' } else { '-debug ' }
debug_option_b := if timestamp_b > 1570877641 { '-cg ' } else { '-debug ' }
mut hyperfine_commands_arguments := []string{}
@@ -154,7 +154,7 @@ fn (c Context) compare_v_performance(label string, commands []string) string {
println(cmd)
}
for cmd in commands {
hyperfine_commands_arguments << ' \'cd ${c.b:-34s} ; ./$cmd \' '.replace_each([
hyperfine_commands_arguments << ' \'cd ${c.b:-34s} ; ./${cmd} \' '.replace_each([
'@COMPILER@',
source_location_b,
'@DEBUG@',
@@ -162,7 +162,7 @@ fn (c Context) compare_v_performance(label string, commands []string) string {
])
}
for cmd in commands {
hyperfine_commands_arguments << ' \'cd ${c.a:-34s} ; ./$cmd \' '.replace_each([
hyperfine_commands_arguments << ' \'cd ${c.a:-34s} ; ./${cmd} \' '.replace_each([
'@COMPILER@',
source_location_a,
'@DEBUG@',
@@ -171,15 +171,15 @@ fn (c Context) compare_v_performance(label string, commands []string) string {
}
// /////////////////////////////////////////////////////////////////////////////
cmd_stats_file := os.real_path([c.vgo.workdir, 'v_performance_stats_${label}.json'].join(os.path_separator))
comparison_cmd := 'hyperfine $c.hyperfineopts ' + '--export-json $cmd_stats_file ' +
'--time-unit millisecond ' + '--style full --warmup $c.warmups ' +
comparison_cmd := 'hyperfine ${c.hyperfineopts} ' + '--export-json ${cmd_stats_file} ' +
'--time-unit millisecond ' + '--style full --warmup ${c.warmups} ' +
hyperfine_commands_arguments.join(' ')
// /////////////////////////////////////////////////////////////////////////////
if c.vgo.verbose {
println(comparison_cmd)
}
os.system(comparison_cmd)
println('The detailed performance comparison report was saved to: $cmd_stats_file .')
println('The detailed performance comparison report was saved to: ${cmd_stats_file} .')
println('')
return cmd_stats_file
}
@@ -198,7 +198,7 @@ fn main() {
context.vflags = fp.string('vflags', 0, '', 'Additional options to pass to the v commands, for example "-cc tcc"')
context.hyperfineopts = fp.string('hyperfine_options', 0, '', 'Additional options passed to hyperfine.
${flag.space}For example on linux, you may want to pass:
$flag.space--hyperfine_options "--prepare \'sync; echo 3 | sudo tee /proc/sys/vm/drop_caches\'"
${flag.space}--hyperfine_options "--prepare \'sync; echo 3 | sudo tee /proc/sys/vm/drop_caches\'"
')
commits := vgit.add_common_tool_options(mut context.vgo, mut fp)
context.commit_before = commits[0]
@@ -56,24 +56,24 @@ fn main() {
}
if !os.exists(oldvexe) {
if 0 != execute('${os.quoted_path(vexe)} -o ${os.quoted_path(oldvexe)} ${os.quoted_path(oldv_source)}') {
panic('can not compile $oldvexe')
panic('can not compile ${oldvexe}')
}
}
os.execute('git checkout master')
os.execute('git bisect reset')
os.execute('git checkout $context.new_commit')
os.execute('git checkout ${context.new_commit}')
os.execute('git bisect start')
os.execute('git bisect new')
os.execute('git checkout $context.old_commit')
os.execute('git checkout ${context.old_commit}')
os.execute('git bisect old')
println(term.colorize(term.bright_yellow, term.header('', '-')))
execute('git bisect run ${os.quoted_path(oldvexe)} --bisect -c "$context.command"')
execute('git bisect run ${os.quoted_path(oldvexe)} --bisect -c "${context.command}"')
println(term.colorize(term.bright_yellow, term.header('', '-')))
os.execute('git bisect reset')
os.execute('git checkout master')
}

fn execute(cmd string) int {
eprintln('### $cmd')
eprintln('### ${cmd}')
return os.system(cmd)
}
@@ -183,7 +183,7 @@ fn (mut context Context) parse_options() ! {
scripting.set_verbose(true)
}
commands := fp.finalize() or {
eprintln('Error: $err')
eprintln('Error: ${err}')
exit(1)
}
context.commands = context.expand_all_commands(commands)
@@ -249,7 +249,7 @@ fn (mut context Context) run() {
mut duration := 0
mut sum := 0
mut oldres := ''
println('Series: ${si:4}/${context.series:-4}, command: $cmd')
println('Series: ${si:4}/${context.series:-4}, command: ${cmd}')
if context.warmup > 0 && run_warmups < context.commands.len {
for i in 1 .. context.warmup + 1 {
flushed_print('${context.cgoback}warming up run: ${i:4}/${context.warmup:-4} for ${cmd:-50s} took ${duration:6} ms ...')
@@ -273,7 +273,7 @@ fn (mut context Context) run() {
res := scripting.exec(cmd) or { continue }
duration = int(sw.elapsed().milliseconds())
if res.exit_code != 0 {
eprintln('${i:10} non 0 exit code for cmd: $cmd')
eprintln('${i:10} non 0 exit code for cmd: ${cmd}')
continue
}
trimed_output := res.output.trim_right('\r\n')
@@ -308,7 +308,7 @@ fn (mut context Context) run() {
for k, v in m {
// show a temporary summary for the current series/cmd cycle
s := new_aints(v, context.nmins, context.nmaxs)
println('  $k: $s')
println('  ${k}: ${s}')
summary[k] = s
}
// merge current raw results to the previous ones
@@ -346,7 +346,7 @@ fn (mut context Context) show_diff_summary() {
}
return 0
})
println('Summary (commands are ordered by ascending mean time), after $context.series series of $context.count repetitions:')
println('Summary (commands are ordered by ascending mean time), after ${context.series} series of ${context.count} repetitions:')
base := context.results[0].atiming.average
mut first_cmd_percentage := f64(100.0)
mut first_marker := ''
@@ -357,14 +357,14 @@ fn (mut context Context) show_diff_summary() {
first_marker = bold('>')
first_cmd_percentage = cpercent
}
println(' $first_marker${(i + 1):3} | ${cpercent:5.1f}% slower | ${r.cmd:-57s} | $r.atiming')
println(' ${first_marker}${(i + 1):3} | ${cpercent:5.1f}% slower | ${r.cmd:-57s} | ${r.atiming}')
}
$if debugcontext ? {
println('context: $context')
println('context: ${context}')
}
if int(base) > context.fail_on_maxtime {
flushed_print(performance_regression_label)
println('average time: ${base:6.1f} ms > $context.fail_on_maxtime ms threshold.')
println('average time: ${base:6.1f} ms > ${context.fail_on_maxtime} ms threshold.')
exit(2)
}
if context.fail_on_regress_percent == max_fail_percent || context.results.len < 2 {
@ -33,7 +33,7 @@ fn main() {
|
||||
known_skip_patterns = known_skip_patterns_env.split(',').filter(it != '')
|
||||
}
|
||||
for path in places {
|
||||
eprintln('> Checking folder: `$path` ...')
|
||||
eprintln('> Checking folder: `${path}` ...')
|
||||
mut found := 0
|
||||
files := os.walk_ext(path, '.v')
|
||||
mut v_files := map[string]int{}
|
||||
@ -54,15 +54,15 @@ fn main() {
|
||||
for folder, n_v_files in v_files {
|
||||
n_test_v_files := v_test_files[folder]
|
||||
if n_v_files > 1 && n_test_v_files == 0 {
|
||||
println('> ${n_test_v_files:5} _test.v files, with ${n_v_files:5} .v files, in folder: $folder')
|
||||
println('> ${n_test_v_files:5} _test.v files, with ${n_v_files:5} .v files, in folder: ${folder}')
|
||||
compilation := os.execute('${os.quoted_path(vexe)} -shared -W -Wfatal-errors -check ${os.quoted_path(folder)}')
|
||||
if compilation.exit_code != 0 {
|
||||
eprintln('> $folder has parser/checker errors!')
|
||||
eprintln('> ${folder} has parser/checker errors!')
|
||||
eprintln(compilation.output)
|
||||
}
|
||||
found++
|
||||
}
|
||||
}
|
||||
eprintln('> Found $found module folders without _test.v files in `$path` .')
|
||||
eprintln('> Found ${found} module folders without _test.v files in `${path}` .')
|
||||
}
|
||||
}
|
||||
|
@ -17,7 +17,7 @@ fn get_vexe_path() string {
|
||||
return env_vexe
|
||||
}
|
||||
me := os.executable()
|
||||
eprintln('me: $me')
|
||||
eprintln('me: ${me}')
|
||||
mut vexe_ := os.join_path(os.dir(os.dir(os.dir(me))), 'v')
|
||||
if os.user_os() == 'windows' {
|
||||
vexe_ += '.exe'
|
||||
@ -34,7 +34,7 @@ fn new_tdir() string {
|
||||
}
|
||||
|
||||
fn cleanup_tdir() {
|
||||
println('... removing tdir: $tdir')
|
||||
println('... removing tdir: ${tdir}')
|
||||
os.rmdir_all(tdir) or { eprintln(err) }
|
||||
}
|
||||
|
||||
@ -42,20 +42,20 @@ type MyResult = string
|
||||
|
||||
[noreturn]
|
||||
fn (result MyResult) fail(reason string) {
|
||||
eprintln('> $reason, but it does not. Result:\n$result')
|
||||
eprintln('> ${reason}, but it does not. Result:\n${result}')
|
||||
exit(1)
|
||||
}
|
||||
|
||||
fn (result MyResult) has(sub string) MyResult {
|
||||
if !result.contains(sub) {
|
||||
result.fail(' result should have the substring `$sub`')
|
||||
result.fail(' result should have the substring `${sub}`')
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
fn (result MyResult) matches(gpattern string) MyResult {
|
||||
if !result.match_glob(gpattern) {
|
||||
result.fail('result should match the glob pattern `$gpattern`')
|
||||
result.fail('result should match the glob pattern `${gpattern}`')
|
||||
}
|
||||
return result
|
||||
}
|
||||
@ -63,14 +63,14 @@ fn (result MyResult) matches(gpattern string) MyResult {
|
||||
fn create_test(tname string, tcontent string) !string {
|
||||
tpath := os.join_path(tdir, tname)
|
||||
os.write_file(tpath, tcontent)!
|
||||
eprintln('>>>>>>>> tpath: $tpath | tcontent: $tcontent')
|
||||
eprintln('>>>>>>>> tpath: ${tpath} | tcontent: ${tcontent}')
|
||||
return os.quoted_path(tpath)
|
||||
}
|
||||
|
||||
fn check_assert_continues_works() ! {
|
||||
os.chdir(tdir)!
|
||||
create_test('assert_continues_option_works_test.v', 'fn test_fail1() { assert 2==4\nassert 2==1\nassert 2==0 }\nfn test_ok(){ assert true }\nfn test_fail2() { assert false }')!
|
||||
result := check_fail('$vexe -assert continues assert_continues_option_works_test.v')
|
||||
result := check_fail('${vexe} -assert continues assert_continues_option_works_test.v')
|
||||
result.has('assert_continues_option_works_test.v:1: fn test_fail1')
|
||||
result.has('assert_continues_option_works_test.v:2: fn test_fail1')
|
||||
result.has('assert_continues_option_works_test.v:3: fn test_fail1')
|
||||
@ -78,7 +78,7 @@ fn check_assert_continues_works() ! {
|
||||
result.has('> assert 2 == 4').has('> assert 2 == 1').has('> assert 2 == 0')
|
||||
// Check if a test function, tagged with [assert_continues], has the same behaviour, without needing additional options
|
||||
create_test('assert_continues_tag_works_test.v', '[assert_continues]fn test_fail1() { assert 2==4\nassert 2==1\nassert 2==0 }\nfn test_ok(){ assert true }\nfn test_fail2() { assert false\n assert false }')!
|
||||
tag_res := check_fail('$vexe assert_continues_tag_works_test.v')
|
||||
tag_res := check_fail('${vexe} assert_continues_tag_works_test.v')
|
||||
tag_res.has('assert_continues_tag_works_test.v:1: fn test_fail1')
|
||||
tag_res.has('assert_continues_tag_works_test.v:2: fn test_fail1')
|
||||
tag_res.has('assert_continues_tag_works_test.v:3: fn test_fail1')
|
||||
@ -89,20 +89,20 @@ fn check_assert_continues_works() ! {
|
||||
}
|
||||
|
||||
fn check_ok(cmd string) MyResult {
|
||||
println('> check_ok cmd: $cmd')
|
||||
println('> check_ok cmd: ${cmd}')
|
||||
res := os.execute(cmd)
|
||||
if res.exit_code != 0 {
|
||||
eprintln('> check_ok failed.\n$res.output')
|
||||
eprintln('> check_ok failed.\n${res.output}')
|
||||
exit(1)
|
||||
}
|
||||
return res.output
|
||||
}
|
||||
|
||||
fn check_fail(cmd string) MyResult {
|
||||
println('> check_fail cmd: $cmd')
|
||||
println('> check_fail cmd: ${cmd}')
|
||||
res := os.execute(cmd)
|
||||
if res.exit_code == 0 {
|
||||
eprintln('> check_fail succeeded, but it should have failed.\n$res.output')
|
||||
eprintln('> check_fail succeeded, but it should have failed.\n${res.output}')
|
||||
exit(1)
|
||||
}
|
||||
return res.output
|
||||
@ -112,23 +112,23 @@ fn main() {
|
||||
defer {
|
||||
os.chdir(os.wd_at_startup) or {}
|
||||
}
|
||||
println('> vroot: $vroot | vexe: $vexe | tdir: $tdir')
|
||||
println('> vroot: ${vroot} | vexe: ${vexe} | tdir: ${tdir}')
|
||||
ok_fpath := create_test('a_single_ok_test.v', 'fn test_ok(){ assert true }')!
|
||||
if check_ok('$vexe $ok_fpath') != '' {
|
||||
if check_ok('${vexe} ${ok_fpath}') != '' {
|
||||
exit(1)
|
||||
}
|
||||
check_ok('$vexe test $ok_fpath').matches('*OK*a_single_ok_test.v*')
|
||||
check_ok('$vexe test "$tdir"').matches('*OK*a_single_ok_test.v*')
|
||||
check_ok('${vexe} test ${ok_fpath}').matches('*OK*a_single_ok_test.v*')
|
||||
check_ok('${vexe} test "${tdir}"').matches('*OK*a_single_ok_test.v*')
|
||||
//
|
||||
fail_fpath := create_test('a_single_failing_test.v', 'fn test_fail(){ assert 1 == 2 }')!
|
||||
check_fail('$vexe $fail_fpath').has('> assert 1 == 2').has('a_single_failing_test.v:1: fn test_fail')
|
||||
check_fail('$vexe test $fail_fpath').has('> assert 1 == 2').has('a_single_failing_test.v:1: fn test_fail')
|
||||
check_fail('$vexe test "$tdir"').has('> assert 1 == 2')
|
||||
check_fail('${vexe} ${fail_fpath}').has('> assert 1 == 2').has('a_single_failing_test.v:1: fn test_fail')
|
||||
check_fail('${vexe} test ${fail_fpath}').has('> assert 1 == 2').has('a_single_failing_test.v:1: fn test_fail')
|
||||
check_fail('${vexe} test "${tdir}"').has('> assert 1 == 2')
|
||||
rel_dir := os.join_path(tdir, rand.ulid())
|
||||
os.mkdir(rel_dir)!
|
||||
os.chdir(rel_dir)!
|
||||
relative_path := '..' + os.path_separator + 'a_single_ok_test.v'
|
||||
check_ok('$vexe test ${os.quoted_path(relative_path)}').has('OK').has('a_single_ok_test.v')
|
||||
check_ok('${vexe} test ${os.quoted_path(relative_path)}').has('OK').has('a_single_ok_test.v')
|
||||
//
|
||||
check_assert_continues_works()!
|
||||
println('> all done')
|
||||
|
@ -35,10 +35,10 @@ fn (mut ctx Context) println(s string) {
|
||||
ctx.omode = if ctx.omode == .stderr { Target.stdout } else { Target.stderr }
|
||||
}
|
||||
if ctx.target in [.both, .stdout] || ctx.omode == .stdout {
|
||||
println('stdout, $s')
|
||||
println('stdout, ${s}')
|
||||
}
|
||||
if ctx.target in [.both, .stderr] || ctx.omode == .stderr {
|
||||
eprintln('stderr, $s')
|
||||
eprintln('stderr, ${s}')
|
||||
}
|
||||
}
|
||||
|
||||
@ -71,11 +71,11 @@ fn main() {
|
||||
ctx.omode = .stdout
|
||||
}
|
||||
if ctx.is_verbose {
|
||||
eprintln('> args: $args | context: $ctx')
|
||||
eprintln('> args: ${args} | context: ${ctx}')
|
||||
}
|
||||
spawn do_timeout(&ctx)
|
||||
for i := 1; true; i++ {
|
||||
ctx.println('$i')
|
||||
ctx.println('${i}')
|
||||
time.sleep(ctx.period_ms * time.millisecond)
|
||||
}
|
||||
time.sleep(100 * time.second)
|
||||
|
@ -17,7 +17,7 @@ fn main() {
|
||||
// Git clone c2v
|
||||
if !os.exists(c2v_dir) {
|
||||
os.mkdir_all(vmodules)!
|
||||
println('C2V is not installed. Cloning C2V to $c2v_dir ...')
|
||||
println('C2V is not installed. Cloning C2V to ${c2v_dir} ...')
|
||||
os.chdir(vmodules)!
|
||||
res := os.execute('git clone https://github.com/vlang/c2v')
|
||||
if res.exit_code != 0 {
|
||||
@ -43,10 +43,10 @@ fn main() {
|
||||
passed_args := util.args_quote_paths(os.args[2..])
|
||||
// println(passed_args)
|
||||
os.chdir(os.wd_at_startup)!
|
||||
c2v_cmd := '${os.quoted_path(c2v_bin)} $passed_args'
|
||||
c2v_cmd := '${os.quoted_path(c2v_bin)} ${passed_args}'
|
||||
res := os.system(c2v_cmd)
|
||||
if res != 0 {
|
||||
eprintln('C2V command: $c2v_cmd')
|
||||
eprintln('C2V command: ${c2v_cmd}')
|
||||
eprintln('C2V failed to translate the C files. Please report it via GitHub.')
|
||||
exit(4)
|
||||
}
|
||||
|
@ -60,7 +60,7 @@ fn (ctx Context) write_file_or_print(file string) {
|
||||
if ctx.is_print {
|
||||
println(json(file))
|
||||
} else {
|
||||
println('$time.now(): AST written to: ' + json_file(file))
|
||||
println('${time.now()}: AST written to: ' + json_file(file))
|
||||
}
|
||||
}
|
||||
|
||||
@ -74,7 +74,7 @@ fn (ctx Context) watch_for_changes(file string) {
|
||||
ctx.write_file_or_print(file)
|
||||
if ctx.is_compile {
|
||||
file_name := file[0..(file.len - os.file_ext(file).len)]
|
||||
os.system('v -o ${file_name}.c $file')
|
||||
os.system('v -o ${file_name}.c ${file}')
|
||||
}
|
||||
}
|
||||
timestamp = new_timestamp
|
||||
@ -96,11 +96,11 @@ fn get_abs_path(path string) string {
|
||||
// check file is v file and exists
|
||||
fn check_file(file string) {
|
||||
if os.file_ext(file) !in ['.v', '.vv', '.vsh'] {
|
||||
eprintln('the file `$file` must be a v file or vsh file')
|
||||
eprintln('the file `${file}` must be a v file or vsh file')
|
||||
exit(1)
|
||||
}
|
||||
if !os.exists(file) {
|
||||
eprintln('the v file `$file` does not exist')
|
||||
eprintln('the v file `${file}` does not exist')
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
@ -224,12 +224,12 @@ fn (t Tree) type_node(typ ast.Type) &Node {
|
||||
|
||||
// token type node
|
||||
fn (t Tree) token_node(tok_kind token.Kind) &Node {
|
||||
return t.string_node('token:${int(tok_kind)}($tok_kind.str())')
|
||||
return t.string_node('token:${int(tok_kind)}(${tok_kind.str()})')
|
||||
}
|
||||
|
||||
// enum type node
|
||||
fn (t Tree) enum_node<T>(value T) &Node {
|
||||
return t.string_node('enum:${int(value)}($value)')
|
||||
return t.string_node('enum:${int(value)}(${value})')
|
||||
}
|
||||
|
||||
// for [][]comment
|
||||
|
@ -20,22 +20,22 @@ mut:
|
||||
|
||||
fn (context Context) header() string {
|
||||
mut header_s := ''
|
||||
header_s += 'module $context.module_name\n'
|
||||
header_s += 'module ${context.module_name}\n'
|
||||
header_s += '\n'
|
||||
allfiles := context.files.join(' ')
|
||||
mut options := []string{}
|
||||
if context.prefix.len > 0 {
|
||||
options << '-p $context.prefix'
|
||||
options << '-p ${context.prefix}'
|
||||
}
|
||||
if context.module_name.len > 0 {
|
||||
options << '-m $context.module_name'
|
||||
options << '-m ${context.module_name}'
|
||||
}
|
||||
if context.write_file.len > 0 {
|
||||
options << '-w $context.write_file'
|
||||
options << '-w ${context.write_file}'
|
||||
}
|
||||
soptions := options.join(' ')
|
||||
header_s += '// File generated by:\n'
|
||||
header_s += '// v bin2v $allfiles $soptions\n'
|
||||
header_s += '// v bin2v ${allfiles} ${soptions}\n'
|
||||
header_s += '// Please, do not edit this file.\n'
|
||||
header_s += '// Your changes may be overwritten.\n'
|
||||
header_s += 'const (\n'
|
||||
@ -49,9 +49,9 @@ fn (context Context) footer() string {
|
||||
fn (context Context) file2v(bname string, fbytes []u8, bn_max int) string {
|
||||
mut sb := strings.new_builder(1000)
|
||||
bn_diff_len := bn_max - bname.len
|
||||
sb.write_string('\t${bname}_len' + ' '.repeat(bn_diff_len - 4) + ' = $fbytes.len\n')
|
||||
sb.write_string('\t${bname}_len' + ' '.repeat(bn_diff_len - 4) + ' = ${fbytes.len}\n')
|
||||
fbyte := fbytes[0]
|
||||
bnmae_line := '\t$bname' + ' '.repeat(bn_diff_len) + ' = [u8($fbyte), '
|
||||
bnmae_line := '\t${bname}' + ' '.repeat(bn_diff_len) + ' = [u8(${fbyte}), '
|
||||
sb.write_string(bnmae_line)
|
||||
mut line_len := bnmae_line.len + 3
|
||||
for i := 1; i < fbytes.len; i++ {
|
||||
@ -65,7 +65,7 @@ fn (context Context) file2v(bname string, fbytes []u8, bn_max int) string {
|
||||
sb.write_string(b)
|
||||
line_len += b.len
|
||||
} else {
|
||||
sb.write_string('$b, ')
|
||||
sb.write_string('${b}, ')
|
||||
line_len += b.len + 2
|
||||
}
|
||||
}
|
||||
@ -76,8 +76,8 @@ fn (context Context) file2v(bname string, fbytes []u8, bn_max int) string {
|
||||
fn (context Context) bname_and_bytes(file string) !(string, []u8) {
|
||||
fname := os.file_name(file)
|
||||
fname_escaped := fname.replace_each(['.', '_', '-', '_'])
|
||||
byte_name := '$context.prefix$fname_escaped'.to_lower()
|
||||
fbytes := os.read_bytes(file) or { return error('Error: $err.msg()') }
|
||||
byte_name := '${context.prefix}${fname_escaped}'.to_lower()
|
||||
fbytes := os.read_bytes(file) or { return error('Error: ${err.msg()}') }
|
||||
return byte_name, fbytes
|
||||
}
|
||||
|
||||
@ -108,7 +108,7 @@ fn main() {
|
||||
exit(0)
|
||||
}
|
||||
files := fp.finalize() or {
|
||||
eprintln('Error: $err.msg()')
|
||||
eprintln('Error: ${err.msg()}')
|
||||
exit(1)
|
||||
}
|
||||
real_files := files.filter(it != 'bin2v')
|
||||
|
@ -8,9 +8,9 @@ const vroot = @VMODROOT
|
||||
fn get_vdoctor_output(is_verbose bool) string {
|
||||
vexe := os.getenv('VEXE')
|
||||
verbose_flag := if is_verbose { '-v' } else { '' }
|
||||
result := os.execute('${os.quoted_path(vexe)} $verbose_flag doctor')
|
||||
result := os.execute('${os.quoted_path(vexe)} ${verbose_flag} doctor')
|
||||
if result.exit_code != 0 {
|
||||
eprintln('unable to get `v doctor` output: $result.output')
|
||||
eprintln('unable to get `v doctor` output: ${result.output}')
|
||||
return ''
|
||||
}
|
||||
return result.output
|
||||
@ -23,21 +23,21 @@ fn get_v_build_output(is_verbose bool, is_yes bool, file_path string) string {
|
||||
wd := os.getwd()
|
||||
os.chdir(vroot) or {}
|
||||
verbose_flag := if is_verbose { '-v' } else { '' }
|
||||
vdbg_path := $if windows { '$vroot/vdbg.exe' } $else { '$vroot/vdbg' }
|
||||
vdbg_compilation_cmd := '${os.quoted_path(vexe)} $verbose_flag -g -o ${os.quoted_path(vdbg_path)} cmd/v'
|
||||
vdbg_path := $if windows { '${vroot}/vdbg.exe' } $else { '${vroot}/vdbg' }
|
||||
vdbg_compilation_cmd := '${os.quoted_path(vexe)} ${verbose_flag} -g -o ${os.quoted_path(vdbg_path)} cmd/v'
|
||||
vdbg_result := os.execute(vdbg_compilation_cmd)
|
||||
os.chdir(wd) or {}
|
||||
if vdbg_result.exit_code == 0 {
|
||||
vexe = vdbg_path
|
||||
} else {
|
||||
eprintln('unable to compile V in debug mode: $vdbg_result.output\ncommand: $vdbg_compilation_cmd\n')
|
||||
eprintln('unable to compile V in debug mode: ${vdbg_result.output}\ncommand: ${vdbg_compilation_cmd}\n')
|
||||
}
|
||||
//
|
||||
mut result := os.execute('${os.quoted_path(vexe)} $verbose_flag ${os.quoted_path(file_path)}')
|
||||
mut result := os.execute('${os.quoted_path(vexe)} ${verbose_flag} ${os.quoted_path(file_path)}')
|
||||
defer {
|
||||
os.rm(vdbg_path) or {
|
||||
if is_verbose {
|
||||
eprintln('unable to delete `vdbg`: $err')
|
||||
eprintln('unable to delete `vdbg`: ${err}')
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -49,14 +49,14 @@ fn get_v_build_output(is_verbose bool, is_yes bool, file_path string) string {
|
||||
}
|
||||
os.rm(generated_file) or {
|
||||
if is_verbose {
|
||||
eprintln('unable to delete generated file: $err')
|
||||
eprintln('unable to delete generated file: ${err}')
|
||||
}
|
||||
}
|
||||
}
|
||||
run := is_yes
|
||||
|| ask('It looks like the compilation went well, do you want to run the file?')
|
||||
if run {
|
||||
result = os.execute('${os.quoted_path(vexe)} $verbose_flag run ${os.quoted_path(file_path)}')
|
||||
result = os.execute('${os.quoted_path(vexe)} ${verbose_flag} run ${os.quoted_path(file_path)}')
|
||||
if result.exit_code == 0 && !is_yes {
|
||||
confirm_or_exit('It looks like the file ran correctly as well, are you sure you want to continue?')
|
||||
}
|
||||
@ -66,7 +66,7 @@ fn get_v_build_output(is_verbose bool, is_yes bool, file_path string) string {
|
||||
}
|
||||
|
||||
fn ask(msg string) bool {
|
||||
prompt := os.input_opt('$msg [Y/n] ') or { 'y' }
|
||||
prompt := os.input_opt('${msg} [Y/n] ') or { 'y' }
|
||||
return prompt == '' || prompt[0].ascii_str().to_lower() != 'n'
|
||||
}
|
||||
|
||||
@ -90,7 +90,7 @@ fn main() {
|
||||
}
|
||||
else {
|
||||
if !arg.ends_with('.v') && !arg.ends_with('.vsh') && !arg.ends_with('.vv') {
|
||||
eprintln('unknown argument: `$arg`')
|
||||
eprintln('unknown argument: `${arg}`')
|
||||
exit(1)
|
||||
}
|
||||
if file_path != '' {
|
||||
@ -111,7 +111,7 @@ fn main() {
|
||||
vdoctor_output := get_vdoctor_output(is_verbose)
|
||||
// file content
|
||||
file_content := os.read_file(file_path) or {
|
||||
eprintln('unable to get file "$file_path" content: $err')
|
||||
eprintln('unable to get file "${file_path}" content: ${err}')
|
||||
''
|
||||
}
|
||||
// output from `v -g -o vdbg cmd/v && vdbg file.v`
|
||||
@ -136,26 +136,26 @@ fn main() {
|
||||
raw_body := '<!-- It is advisable to update all relevant modules using `v outdated` and `v install` -->
|
||||
**V doctor:**
|
||||
```
|
||||
$vdoctor_output
|
||||
${vdoctor_output}
|
||||
```
|
||||
|
||||
**What did you do?**
|
||||
`v -g -o vdbg cmd/v && vdbg $file_path`
|
||||
`v -g -o vdbg cmd/v && vdbg ${file_path}`
|
||||
{file_content}
|
||||
|
||||
**What did you expect to see?**
|
||||
|
||||
$expected_result
|
||||
${expected_result}
|
||||
|
||||
**What did you see instead?**
|
||||
```
|
||||
$build_output```'
|
||||
mut encoded_body := urllib.query_escape(raw_body.replace_once('{file_content}', '```v\n$file_content\n```'))
|
||||
mut generated_uri := 'https://github.com/vlang/v/issues/new?labels=Bug&body=$encoded_body'
|
||||
${build_output}```'
|
||||
mut encoded_body := urllib.query_escape(raw_body.replace_once('{file_content}', '```v\n${file_content}\n```'))
|
||||
mut generated_uri := 'https://github.com/vlang/v/issues/new?labels=Bug&body=${encoded_body}'
|
||||
if generated_uri.len > 8192 {
|
||||
// GitHub doesn't support URLs longer than 8192 characters
|
||||
encoded_body = urllib.query_escape(raw_body.replace_once('{file_content}', 'See attached file `$file_path`'))
|
||||
generated_uri = 'https://github.com/vlang/v/issues/new?labels=Bug&body=$encoded_body'
|
||||
encoded_body = urllib.query_escape(raw_body.replace_once('{file_content}', 'See attached file `${file_path}`'))
|
||||
generated_uri = 'https://github.com/vlang/v/issues/new?labels=Bug&body=${encoded_body}'
|
||||
println('Your file is too big to be submitted. Head over to the following URL and attach your file.')
|
||||
println(generated_uri)
|
||||
} else {
|
||||
|
@ -26,8 +26,8 @@ fn main() {
|
||||
os.chdir(vroot)!
|
||||
folder := os.join_path('cmd', 'tools')
|
||||
tfolder := os.join_path(vroot, 'cmd', 'tools')
|
||||
main_label := 'Building $folder ...'
|
||||
finish_label := 'building $folder'
|
||||
main_label := 'Building ${folder} ...'
|
||||
finish_label := 'building ${folder}'
|
||||
//
|
||||
mut skips := []string{}
|
||||
for stool in tools_in_subfolders {
|
||||
@ -68,7 +68,7 @@ fn main() {
|
||||
os.mv_by_cp(tpath, target_path) or {
|
||||
emsg := err.msg()
|
||||
if !emsg.contains('vbuild-tools') && !emsg.contains('vtest-all') {
|
||||
eprintln('error while moving $tpath to $target_path: $emsg')
|
||||
eprintln('error while moving ${tpath} to ${target_path}: ${emsg}')
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
@ -68,7 +68,7 @@ fn get_replacement_function(options Options) ReplacementFunction {
|
||||
}
|
||||
|
||||
fn process_file(input_file string, options Options) {
|
||||
lines := os.read_lines(input_file) or { panic('Failed to read file: $input_file') }
|
||||
lines := os.read_lines(input_file) or { panic('Failed to read file: ${input_file}') }
|
||||
|
||||
mut re := regex.regex_opt(semver_query) or { panic('Could not create a RegEx parser.') }
|
||||
|
||||
@ -103,28 +103,28 @@ fn process_file(input_file string, options Options) {
|
||||
os.rm(backup_file) or {}
|
||||
|
||||
// Rename the original to the backup.
|
||||
os.mv(input_file, backup_file) or { panic('Failed to copy file: $input_file') }
|
||||
os.mv(input_file, backup_file) or { panic('Failed to copy file: ${input_file}') }
|
||||
|
||||
// Process the old file and write it back to the original.
|
||||
os.write_file(input_file, new_lines.join_lines()) or {
|
||||
panic('Failed to write file: $input_file')
|
||||
panic('Failed to write file: ${input_file}')
|
||||
}
|
||||
|
||||
// Remove the backup file.
|
||||
os.rm(backup_file) or {}
|
||||
|
||||
if replacement_complete {
|
||||
println('Bumped version in $input_file')
|
||||
println('Bumped version in ${input_file}')
|
||||
} else {
|
||||
println('No changes made in $input_file')
|
||||
println('No changes made in ${input_file}')
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
if os.args.len < 2 {
|
||||
println('Usage: $tool_name [options] [file1 file2 ...]
|
||||
$tool_description
|
||||
Try $tool_name -h for more help...')
|
||||
println('Usage: ${tool_name} [options] [file1 file2 ...]
|
||||
${tool_description}
|
||||
Try ${tool_name} -h for more help...')
|
||||
exit(1)
|
||||
}
|
||||
|
||||
@ -162,7 +162,7 @@ Try $tool_name -h for more help...')
|
||||
|
||||
for input_file in files {
|
||||
if !os.exists(input_file) {
|
||||
println('File not found: $input_file')
|
||||
println('File not found: ${input_file}')
|
||||
exit(1)
|
||||
}
|
||||
process_file(input_file, options)
|
||||
|
@ -21,7 +21,7 @@ const (
|
||||
show_progress = os.getenv('GITHUB_JOB') == '' && '-silent' !in os.args
|
||||
non_option_args = cmdline.only_non_options(os.args[2..])
|
||||
is_verbose = os.getenv('VERBOSE') != ''
|
||||
vcheckfolder = os.join_path(os.vtmp_dir(), 'v', 'vcheck_$os.getuid()')
|
||||
vcheckfolder = os.join_path(os.vtmp_dir(), 'v', 'vcheck_${os.getuid()}')
|
||||
should_autofix = os.getenv('VAUTOFIX') != ''
|
||||
vexe = @VEXE
|
||||
)
|
||||
@ -71,7 +71,7 @@ fn main() {
|
||||
}
|
||||
real_path := os.real_path(file_path)
|
||||
lines := os.read_lines(real_path) or {
|
||||
println('"$file_path" does not exist')
|
||||
println('"${file_path}" does not exist')
|
||||
res.warnings++
|
||||
continue
|
||||
}
|
||||
@ -85,7 +85,7 @@ fn main() {
|
||||
clear_previous_line()
|
||||
}
|
||||
if res.warnings > 0 || res.errors > 0 || res.oks > 0 {
|
||||
println('\nWarnings: $res.warnings | Errors: $res.errors | OKs: $res.oks')
|
||||
println('\nWarnings: ${res.warnings} | Errors: ${res.errors} | OKs: ${res.oks}')
|
||||
}
|
||||
if res.errors > 0 {
|
||||
exit(1)
|
||||
@ -130,12 +130,12 @@ fn rtext(s string) string {
|
||||
}
|
||||
|
||||
fn wline(file_path string, lnumber int, column int, message string) string {
|
||||
return btext('$file_path:${lnumber + 1}:${column + 1}:') + btext(mtext(' warn:')) +
|
||||
rtext(' $message')
|
||||
return btext('${file_path}:${lnumber + 1}:${column + 1}:') + btext(mtext(' warn:')) +
|
||||
rtext(' ${message}')
|
||||
}
|
||||
|
||||
fn eline(file_path string, lnumber int, column int, message string) string {
|
||||
return btext('$file_path:${lnumber + 1}:${column + 1}:') + btext(rtext(' error: $message'))
|
||||
return btext('${file_path}:${lnumber + 1}:${column + 1}:') + btext(rtext(' error: ${message}'))
|
||||
}
|
||||
|
||||
const default_command = 'compile'
|
||||
@ -166,7 +166,7 @@ mut:
|
||||
fn (mut f MDFile) progress(message string) {
|
||||
if show_progress {
|
||||
clear_previous_line()
|
||||
println('File: ${f.path:-30s}, Lines: ${f.lines.len:5}, $message')
|
||||
println('File: ${f.path:-30s}, Lines: ${f.lines.len:5}, ${message}')
|
||||
}
|
||||
}
|
||||
|
||||
@ -177,30 +177,30 @@ fn (mut f MDFile) check() CheckResult {
|
||||
// f.progress('line: $j')
|
||||
if f.state == .vexample {
|
||||
if line.len > too_long_line_length_example {
|
||||
wprintln(wline(f.path, j, line.len, 'example lines must be less than $too_long_line_length_example characters'))
|
||||
wprintln(wline(f.path, j, line.len, 'example lines must be less than ${too_long_line_length_example} characters'))
|
||||
wprintln(line)
|
||||
res.warnings++
|
||||
}
|
||||
} else if f.state == .codeblock {
|
||||
if line.len > too_long_line_length_codeblock {
|
||||
wprintln(wline(f.path, j, line.len, 'code lines must be less than $too_long_line_length_codeblock characters'))
|
||||
wprintln(wline(f.path, j, line.len, 'code lines must be less than ${too_long_line_length_codeblock} characters'))
|
||||
wprintln(line)
|
||||
res.warnings++
|
||||
}
|
||||
} else if line.starts_with('|') {
|
||||
if line.len > too_long_line_length_table {
|
||||
wprintln(wline(f.path, j, line.len, 'table lines must be less than $too_long_line_length_table characters'))
|
||||
wprintln(wline(f.path, j, line.len, 'table lines must be less than ${too_long_line_length_table} characters'))
|
||||
wprintln(line)
|
||||
res.warnings++
|
||||
}
|
||||
} else if line.contains('http') {
|
||||
if line.all_after('https').len > too_long_line_length_link {
|
||||
wprintln(wline(f.path, j, line.len, 'link lines must be less than $too_long_line_length_link characters'))
|
||||
wprintln(wline(f.path, j, line.len, 'link lines must be less than ${too_long_line_length_link} characters'))
|
||||
wprintln(line)
|
||||
res.warnings++
|
||||
}
|
||||
} else if line.len > too_long_line_length_other {
|
||||
eprintln(eline(f.path, j, line.len, 'must be less than $too_long_line_length_other characters'))
|
||||
eprintln(eline(f.path, j, line.len, 'must be less than ${too_long_line_length_other} characters'))
|
||||
eprintln(line)
|
||||
res.errors++
|
||||
}
|
||||
@ -224,7 +224,7 @@ fn (mut f MDFile) parse_line(lnumber int, line string) {
|
||||
if command == '' {
|
||||
command = default_command
|
||||
} else if command == 'nofmt' {
|
||||
command += ' $default_command'
|
||||
command += ' ${default_command}'
|
||||
}
|
||||
f.current = VCodeExample{
|
||||
sline: lnumber
|
||||
@ -331,14 +331,14 @@ fn (mut ad AnchorData) check_link_target_match(fpath string, mut res CheckResult
|
||||
found_error_warning = true
|
||||
res.errors++
|
||||
for anchordata in ad.anchors[link] {
|
||||
eprintln(eline(fpath, anchordata.line, 0, 'multiple link targets of existing link (#$link)'))
|
||||
eprintln(eline(fpath, anchordata.line, 0, 'multiple link targets of existing link (#${link})'))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
found_error_warning = true
|
||||
res.errors++
|
||||
for brokenlink in linkdata {
|
||||
eprintln(eline(fpath, brokenlink.line, 0, 'no link target found for existing link [$brokenlink.lable](#$link)'))
|
||||
eprintln(eline(fpath, brokenlink.line, 0, 'no link target found for existing link [${brokenlink.lable}](#${link})'))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -354,7 +354,7 @@ fn (mut ad AnchorData) check_link_target_match(fpath string, mut res CheckResult
|
||||
anchor.line
|
||||
}
|
||||
}
|
||||
wprintln(wline(fpath, line, 0, 'multiple link target for non existing link (#$link)'))
|
||||
wprintln(wline(fpath, line, 0, 'multiple link target for non existing link (#${link})'))
|
||||
found_error_warning = true
|
||||
res.warnings++
|
||||
}
|
||||
@ -394,7 +394,7 @@ fn create_ref_link(s string) string {
|
||||
|
||||
fn (mut f MDFile) debug() {
|
||||
for e in f.examples {
|
||||
eprintln('f.path: $f.path | example: $e')
|
||||
eprintln('f.path: ${f.path} | example: ${e}')
|
||||
}
|
||||
}
|
||||
|
||||
@ -442,7 +442,7 @@ fn (mut f MDFile) check_examples() CheckResult {
|
||||
mut acommands := e.command.split(' ')
|
||||
nofmt := 'nofmt' in acommands
|
||||
for command in acommands {
|
||||
f.progress('example from $e.sline to $e.eline, command: $command')
|
||||
f.progress('example from ${e.sline} to ${e.eline}, command: ${command}')
|
||||
fmt_res := if nofmt { 0 } else { get_fmt_exit_code(vfile, vexe) }
|
||||
match command {
|
||||
'compile' {
|
||||
@ -598,7 +598,7 @@ fn (mut f MDFile) check_examples() CheckResult {
|
||||
}
|
||||
'nofmt' {}
|
||||
else {
|
||||
eprintln(eline(f.path, e.sline, 0, 'unrecognized command: "$command", use one of: wip/ignore/compile/failcompile/okfmt/nofmt/oksyntax/badsyntax/cgen/globals/live/shared'))
|
||||
eprintln(eline(f.path, e.sline, 0, 'unrecognized command: "${command}", use one of: wip/ignore/compile/failcompile/okfmt/nofmt/oksyntax/badsyntax/cgen/globals/live/shared'))
|
||||
should_cleanup_vfile = false
|
||||
errors++
|
||||
}
|
||||
@ -639,10 +639,10 @@ fn (mut f MDFile) report_not_formatted_example_if_needed(e VCodeExample, fmt_res
|
||||
}
|
||||
f.autofix_example(e, vfile) or {
|
||||
if err is ExampleWasRewritten {
|
||||
eprintln('>> f.path: $f.path | example from $e.sline to $e.eline was re-formated by vfmt')
|
||||
eprintln('>> f.path: ${f.path} | example from ${e.sline} to ${e.eline} was re-formated by vfmt')
|
||||
return err
|
||||
}
|
||||
eprintln('>> f.path: $f.path | encountered error while autofixing the example: $err')
|
||||
eprintln('>> f.path: ${f.path} | encountered error while autofixing the example: ${err}')
|
||||
}
|
||||
}
|
||||
|
||||
@ -651,7 +651,7 @@ struct ExampleWasRewritten {
|
||||
}
|
||||
|
||||
fn (mut f MDFile) autofix_example(e VCodeExample, vfile string) ! {
|
||||
eprintln('>>> AUTOFIXING f.path: $f.path | e.sline: $e.sline | vfile: $vfile')
|
||||
eprintln('>>> AUTOFIXING f.path: ${f.path} | e.sline: ${e.sline} | vfile: ${vfile}')
|
||||
res := cmdexecute('${os.quoted_path(vexe)} fmt -w ${os.quoted_path(vfile)}')
|
||||
if res != 0 {
|
||||
return error('could not autoformat the example')
|
||||
|
@ -295,12 +295,12 @@ fn auto_complete(args []string) {
|
||||
println(setup_for_shell(shell_name))
|
||||
exit(0)
|
||||
}
|
||||
eprintln('Unknown shell ${shell_name}. Supported shells are: $auto_complete_shells')
|
||||
eprintln('Unknown shell ${shell_name}. Supported shells are: ${auto_complete_shells}')
|
||||
exit(1)
|
||||
}
|
||||
eprintln('auto completion require arguments to work.')
|
||||
} else {
|
||||
eprintln('auto completion failed for "$args".')
|
||||
eprintln('auto completion failed for "${args}".')
|
||||
}
|
||||
exit(1)
|
||||
}
|
||||
@ -309,7 +309,7 @@ fn auto_complete(args []string) {
|
||||
match sub {
|
||||
'setup' {
|
||||
if sub_args.len <= 1 || sub_args[1] !in auto_complete_shells {
|
||||
eprintln('please specify a shell to setup auto completion for ($auto_complete_shells).')
|
||||
eprintln('please specify a shell to setup auto completion for (${auto_complete_shells}).')
|
||||
exit(1)
|
||||
}
|
||||
shell := sub_args[1]
|
||||
@ -322,7 +322,7 @@ fn auto_complete(args []string) {
|
||||
mut lines := []string{}
|
||||
list := auto_complete_request(sub_args[1..])
|
||||
for entry in list {
|
||||
lines << "COMPREPLY+=('$entry')"
|
||||
lines << "COMPREPLY+=('${entry}')"
|
||||
}
|
||||
println(lines.join('\n'))
|
||||
}
|
||||
@ -333,7 +333,7 @@ fn auto_complete(args []string) {
|
||||
mut lines := []string{}
|
||||
list := auto_complete_request(sub_args[1..])
|
||||
for entry in list {
|
||||
lines << '$entry'
|
||||
lines << '${entry}'
|
||||
}
|
||||
println(lines.join('\n'))
|
||||
}
|
||||
@ -344,7 +344,7 @@ fn auto_complete(args []string) {
|
||||
mut lines := []string{}
|
||||
list := auto_complete_request(sub_args[1..])
|
||||
for entry in list {
|
||||
lines << 'compadd -U -S' + '""' + ' -- ' + "'$entry';"
|
||||
lines << 'compadd -U -S' + '""' + ' -- ' + "'${entry}';"
|
||||
}
|
||||
println(lines.join('\n'))
|
||||
}
|
||||
@ -542,7 +542,7 @@ _v_completions() {
|
||||
local limit
|
||||
# Send all words up to the word the cursor is currently on
|
||||
let limit=1+\$COMP_CWORD
|
||||
src=\$($vexe complete bash \$(printf "%s\\n" \${COMP_WORDS[@]: 0:\$limit}))
|
||||
src=\$(${vexe} complete bash \$(printf "%s\\n" \${COMP_WORDS[@]: 0:\$limit}))
|
||||
if [[ \$? == 0 ]]; then
|
||||
eval \${src}
|
||||
#echo \${src}
|
||||
@ -556,7 +556,7 @@ complete -o nospace -F _v_completions v
|
||||
setup = '
|
||||
function __v_completions
|
||||
# Send all words up to the one before the cursor
|
||||
$vexe complete fish (commandline -cop)
|
||||
${vexe} complete fish (commandline -cop)
|
||||
end
|
||||
complete -f -c v -a "(__v_completions)"
|
||||
'
|
||||
@ -567,7 +567,7 @@ complete -f -c v -a "(__v_completions)"
|
||||
_v() {
|
||||
local src
|
||||
# Send all words up to the word the cursor is currently on
|
||||
src=\$($vexe complete zsh \$(printf "%s\\n" \${(@)words[1,\$CURRENT]}))
|
||||
src=\$(${vexe} complete zsh \$(printf "%s\\n" \${(@)words[1,\$CURRENT]}))
|
||||
if [[ \$? == 0 ]]; then
|
||||
eval \${src}
|
||||
#echo \${src}
|
||||
@ -580,7 +580,7 @@ compdef _v v
|
||||
setup = '
|
||||
Register-ArgumentCompleter -Native -CommandName v -ScriptBlock {
|
||||
param(\$commandName, \$wordToComplete, \$cursorPosition)
|
||||
$vexe complete powershell "\$wordToComplete" | ForEach-Object {
|
||||
${vexe} complete powershell "\$wordToComplete" | ForEach-Object {
|
||||
[System.Management.Automation.CompletionResult]::new(\$_, \$_, \'ParameterValue\', \$_)
|
||||
}
|
||||
}
|
||||
|
@ -24,13 +24,13 @@ fn main() {
|
||||
}
|
||||
path := os.args[3]
|
||||
content := os.read_bytes(path) or {
|
||||
eprintln('unable to read "$path": $err')
|
||||
eprintln('unable to read "${path}": ${err}')
|
||||
exit(1)
|
||||
}
|
||||
compressed := match compression_type {
|
||||
.zlib {
|
||||
zlib.compress(content) or {
|
||||
eprintln('compression error: $err')
|
||||
eprintln('compression error: ${err}')
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
@ -38,7 +38,7 @@ fn main() {
|
||||
out_path := os.args[4]
|
||||
|
||||
os.write_file_array(out_path, compressed) or {
|
||||
eprintln('failed to write "$out_path": $err')
|
||||
eprintln('failed to write "${out_path}": ${err}')
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
|
@ -17,7 +17,7 @@ mut:
|
||||
}
|
||||
|
||||
fn cerror(e string) {
|
||||
eprintln('\nerror: $e')
|
||||
eprintln('\nerror: ${e}')
|
||||
}
|
||||
|
||||
fn check_name(name string) string {
|
||||
@ -30,12 +30,12 @@ fn check_name(name string) string {
|
||||
if cname.contains(' ') {
|
||||
cname = cname.replace(' ', '_')
|
||||
}
|
||||
eprintln('warning: the project name cannot be capitalized, the name will be changed to `$cname`')
|
||||
eprintln('warning: the project name cannot be capitalized, the name will be changed to `${cname}`')
|
||||
return cname
|
||||
}
|
||||
if name.contains(' ') {
|
||||
cname := name.replace(' ', '_')
|
||||
eprintln('warning: the project name cannot contain spaces, the name will be changed to `$cname`')
|
||||
eprintln('warning: the project name cannot contain spaces, the name will be changed to `${cname}`')
|
||||
return cname
|
||||
}
|
||||
return name
|
||||
@ -43,10 +43,10 @@ fn check_name(name string) string {
|
||||
|
||||
fn vmod_content(c Create) string {
|
||||
return "Module {
|
||||
name: '$c.name'
|
||||
description: '$c.description'
|
||||
version: '$c.version'
|
||||
license: '$c.license'
|
||||
name: '${c.name}'
|
||||
description: '${c.description}'
|
||||
version: '${c.version}'
|
||||
license: '${c.license}'
|
||||
dependencies: []
|
||||
}
|
||||
"
|
||||
@ -64,7 +64,7 @@ fn main() {
|
||||
fn gen_gitignore(name string) string {
|
||||
return '# Binaries for programs and plugins
|
||||
main
|
||||
$name
|
||||
${name}
|
||||
*.exe
|
||||
*.exe~
|
||||
*.so
|
||||
@ -104,7 +104,7 @@ indent_size = 4
|
||||
}
|
||||
|
||||
fn (c &Create) write_vmod(new bool) {
|
||||
vmod_path := if new { '$c.name/v.mod' } else { 'v.mod' }
|
||||
vmod_path := if new { '${c.name}/v.mod' } else { 'v.mod' }
|
||||
os.write_file(vmod_path, vmod_content(c)) or { panic(err) }
|
||||
}
|
||||
|
||||
@ -112,12 +112,12 @@ fn (c &Create) write_main(new bool) {
|
||||
if !new && (os.exists('${c.name}.v') || os.exists('src/${c.name}.v')) {
|
||||
return
|
||||
}
|
||||
main_path := if new { '$c.name/${c.name}.v' } else { '${c.name}.v' }
|
||||
main_path := if new { '${c.name}/${c.name}.v' } else { '${c.name}.v' }
|
||||
os.write_file(main_path, main_content()) or { panic(err) }
|
||||
}
|
||||
|
||||
fn (c &Create) write_gitattributes(new bool) {
|
||||
gitattributes_path := if new { '$c.name/.gitattributes' } else { '.gitattributes' }
|
||||
gitattributes_path := if new { '${c.name}/.gitattributes' } else { '.gitattributes' }
|
||||
if !new && os.exists(gitattributes_path) {
|
||||
return
|
||||
}
|
||||
@ -125,7 +125,7 @@ fn (c &Create) write_gitattributes(new bool) {
|
||||
}
|
||||
|
||||
fn (c &Create) write_editorconfig(new bool) {
|
||||
editorconfig_path := if new { '$c.name/.editorconfig' } else { '.editorconfig' }
|
||||
editorconfig_path := if new { '${c.name}/.editorconfig' } else { '.editorconfig' }
|
||||
if !new && os.exists(editorconfig_path) {
|
||||
return
|
||||
}
|
||||
@ -134,14 +134,14 @@ fn (c &Create) write_editorconfig(new bool) {
|
||||
|
||||
fn (c &Create) create_git_repo(dir string) {
|
||||
// Create Git Repo and .gitignore file
|
||||
if !os.is_dir('$dir/.git') {
|
||||
res := os.execute('git init $dir')
|
||||
if !os.is_dir('${dir}/.git') {
|
||||
res := os.execute('git init ${dir}')
|
||||
if res.exit_code != 0 {
|
||||
cerror('Unable to create git repo')
|
||||
exit(4)
|
||||
}
|
||||
}
|
||||
gitignore_path := '$dir/.gitignore'
|
||||
gitignore_path := '${dir}/.gitignore'
|
||||
if !os.exists(gitignore_path) {
|
||||
os.write_file(gitignore_path, gen_gitignore(c.name)) or {}
|
||||
}
|
||||
@ -155,21 +155,21 @@ fn create(args []string) {
|
||||
exit(1)
|
||||
}
|
||||
if c.name.contains('-') {
|
||||
cerror('"$c.name" should not contain hyphens')
|
||||
cerror('"${c.name}" should not contain hyphens')
|
||||
exit(1)
|
||||
}
|
||||
if os.is_dir(c.name) {
|
||||
cerror('$c.name folder already exists')
|
||||
cerror('${c.name} folder already exists')
|
||||
exit(3)
|
||||
}
|
||||
c.description = if args.len > 1 { args[1] } else { os.input('Input your project description: ') }
|
||||
default_version := '0.0.0'
|
||||
c.version = os.input('Input your project version: ($default_version) ')
|
||||
c.version = os.input('Input your project version: (${default_version}) ')
|
||||
if c.version == '' {
|
||||
c.version = default_version
|
||||
}
|
||||
default_license := os.getenv_opt('VLICENSE') or { 'MIT' }
|
||||
c.license = os.input('Input your project license: ($default_license) ')
|
||||
c.license = os.input('Input your project license: (${default_license}) ')
|
||||
if c.license == '' {
|
||||
c.license = default_license
|
||||
}
|
||||
@ -206,7 +206,7 @@ fn main() {
|
||||
init_project()
|
||||
}
|
||||
else {
|
||||
cerror('unknown command: $cmd')
|
||||
cerror('unknown command: ${cmd}')
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
|
@ -59,8 +59,8 @@ fn (vd VDoc) render_search_index(out Output) {
|
||||
js_search_data.write_string('var searchModuleData = [')
|
||||
for i, title in vd.search_module_index {
|
||||
data := vd.search_module_data[i]
|
||||
js_search_index.write_string('"$title",')
|
||||
js_search_data.write_string('["$data.description","$data.link"],')
|
||||
js_search_index.write_string('"${title}",')
|
||||
js_search_data.write_string('["${data.description}","${data.link}"],')
|
||||
}
|
||||
js_search_index.writeln('];')
|
||||
js_search_index.write_string('var searchIndex = [')
|
||||
@ -68,9 +68,9 @@ fn (vd VDoc) render_search_index(out Output) {
|
||||
js_search_data.write_string('var searchData = [')
|
||||
for i, title in vd.search_index {
|
||||
data := vd.search_data[i]
|
||||
js_search_index.write_string('"$title",')
|
||||
js_search_index.write_string('"${title}",')
|
||||
// array instead of object to reduce file size
|
||||
js_search_data.write_string('["$data.badge","$data.description","$data.link","$data.prefix"],')
|
||||
js_search_data.write_string('["${data.badge}","${data.description}","${data.link}","${data.prefix}"],')
|
||||
}
|
||||
js_search_index.writeln('];')
|
||||
js_search_data.writeln('];')
|
||||
@ -94,7 +94,7 @@ fn (mut vd VDoc) render_static_html(out Output) {
|
||||
fn (vd VDoc) get_resource(name string, out Output) string {
|
||||
cfg := vd.cfg
|
||||
path := os.join_path(cfg.theme_dir, name)
|
||||
mut res := os.read_file(path) or { panic('vdoc: could not read $path') }
|
||||
mut res := os.read_file(path) or { panic('vdoc: could not read ${path}') }
|
||||
/*
|
||||
if minify {
|
||||
if name.ends_with('.js') {
|
||||
@ -110,7 +110,7 @@ fn (vd VDoc) get_resource(name string, out Output) string {
|
||||
} else {
|
||||
output_path := os.join_path(out.path, name)
|
||||
if !os.exists(output_path) {
|
||||
println('Generating $out.typ in "$output_path"')
|
||||
println('Generating ${out.typ} in "${output_path}"')
|
||||
os.write_file(output_path, res) or { panic(err) }
|
||||
}
|
||||
return name
|
||||
@ -150,7 +150,7 @@ fn (mut vd VDoc) create_search_results(mod string, dn doc.DocNode, out Output) {
|
||||
dn_description := trim_doc_node_description(comments)
|
||||
vd.search_index << dn.name
|
||||
vd.search_data << SearchResult{
|
||||
prefix: if dn.parent_name != '' { '$dn.kind ($dn.parent_name)' } else { '$dn.kind ' }
|
||||
prefix: if dn.parent_name != '' { '${dn.kind} (${dn.parent_name})' } else { '${dn.kind} ' }
|
||||
description: dn_description
|
||||
badge: mod
|
||||
link: vd.get_file_name(mod, out) + '#' + get_node_id(dn)
|
||||
@ -164,7 +164,7 @@ fn (vd VDoc) write_content(cn &doc.DocNode, d &doc.Doc, mut hw strings.Builder)
|
||||
cfg := vd.cfg
|
||||
base_dir := os.dir(os.real_path(cfg.input_path))
|
||||
file_path_name := if cfg.is_multi {
|
||||
cn.file_path.replace('$base_dir/', '')
|
||||
cn.file_path.replace('${base_dir}/', '')
|
||||
} else {
|
||||
os.file_name(cn.file_path)
|
||||
}
|
||||
@ -173,7 +173,7 @@ fn (vd VDoc) write_content(cn &doc.DocNode, d &doc.Doc, mut hw strings.Builder)
|
||||
hw.write_string(doc_node_html(cn, src_link, false, cfg.include_examples, d.table))
|
||||
}
|
||||
for child in cn.children {
|
||||
child_file_path_name := child.file_path.replace('$base_dir/', '')
|
||||
child_file_path_name := child.file_path.replace('${base_dir}/', '')
|
||||
child_src_link := get_src_link(vd.manifest.repo_url, child_file_path_name,
|
||||
child.pos.line_nr + 1)
|
||||
hw.write_string(doc_node_html(child, child_src_link, false, cfg.include_examples,
|
||||
@ -223,7 +223,7 @@ fn (vd VDoc) gen_html(d doc.Doc) string {
|
||||
submodules := vd.docs.filter(it.head.name.starts_with(submod_prefix + '.'))
|
||||
dropdown := if submodules.len > 0 { vd.assets['arrow_icon'] } else { '' }
|
||||
active_class := if dc.head.name == d.head.name { ' active' } else { '' }
|
||||
modules_toc.write_string('<li class="open$active_class"><div class="menu-row">$dropdown<a href="$href_name">$submod_prefix</a></div>')
|
||||
modules_toc.write_string('<li class="open${active_class}"><div class="menu-row">${dropdown}<a href="${href_name}">${submod_prefix}</a></div>')
|
||||
for j, cdoc in submodules {
|
||||
if j == 0 {
|
||||
modules_toc.write_string('<ul>')
|
||||
@ -234,7 +234,7 @@ fn (vd VDoc) gen_html(d doc.Doc) string {
|
||||
} else {
|
||||
''
|
||||
}
|
||||
modules_toc.write_string('<li$sub_selected_classes><a href="./${cdoc.head.name}.html">$submod_name</a></li>')
|
||||
modules_toc.write_string('<li${sub_selected_classes}><a href="./${cdoc.head.name}.html">${submod_name}</a></li>')
|
||||
if j == submodules.len - 1 {
|
||||
modules_toc.write_string('</ul>')
|
||||
}
|
||||
@ -280,15 +280,15 @@ fn get_src_link(repo_url string, file_name string, line_nr int) string {
|
||||
return ''
|
||||
}
|
||||
url.path = url.path.trim_right('/') + match url.host {
|
||||
'github.com' { '/blob/master/$file_name' }
|
||||
'gitlab.com' { '/-/blob/master/$file_name' }
|
||||
'git.sir.ht' { '/tree/master/$file_name' }
|
||||
'github.com' { '/blob/master/${file_name}' }
|
||||
'gitlab.com' { '/-/blob/master/${file_name}' }
|
||||
'git.sir.ht' { '/tree/master/${file_name}' }
|
||||
else { '' }
|
||||
}
|
||||
if url.path == '/' {
|
||||
return ''
|
||||
}
|
||||
url.fragment = 'L$line_nr'
|
||||
url.fragment = 'L${line_nr}'
|
||||
return url.str()
|
||||
}
|
||||
|
||||
@ -299,18 +299,18 @@ fn html_highlight(code string, tb &ast.Table) string {
|
||||
lit := if typ in [.unone, .operator, .punctuation] {
|
||||
tok.kind.str()
|
||||
} else if typ == .string {
|
||||
"'$tok.lit'"
|
||||
"'${tok.lit}'"
|
||||
} else if typ == .char {
|
||||
'`$tok.lit`'
|
||||
'`${tok.lit}`'
|
||||
} else if typ == .comment {
|
||||
if tok.lit != '' && tok.lit[0] == 1 { '//${tok.lit[1..]}' } else { '//$tok.lit' }
|
||||
if tok.lit != '' && tok.lit[0] == 1 { '//${tok.lit[1..]}' } else { '//${tok.lit}' }
|
||||
} else {
|
||||
tok.lit
|
||||
}
|
||||
if typ in [.unone, .name] {
|
||||
return lit
|
||||
}
|
||||
return '<span class="token $typ">$lit</span>'
|
||||
return '<span class="token ${typ}">${lit}</span>'
|
||||
}
|
||||
mut s := scanner.new_scanner(code, .parse_comments, &pref.Preferences{})
|
||||
mut tok := s.scan()
|
||||
@ -398,44 +398,44 @@ fn doc_node_html(dn doc.DocNode, link string, head bool, include_examples bool,
|
||||
mut tags := dn.tags.filter(!it.starts_with('deprecated'))
|
||||
tags.sort()
|
||||
mut node_id := get_node_id(dn)
|
||||
mut hash_link := if !head { ' <a href="#$node_id">#</a>' } else { '' }
|
||||
mut hash_link := if !head { ' <a href="#${node_id}">#</a>' } else { '' }
|
||||
if head && is_module_readme(dn) {
|
||||
node_id = 'readme_$node_id'
|
||||
hash_link = ' <a href="#$node_id">#</a>'
|
||||
node_id = 'readme_${node_id}'
|
||||
hash_link = ' <a href="#${node_id}">#</a>'
|
||||
}
|
||||
dnw.writeln('${tabs[1]}<section id="$node_id" class="doc-node$node_class">')
|
||||
dnw.writeln('${tabs[1]}<section id="${node_id}" class="doc-node${node_class}">')
|
||||
if dn.name.len > 0 {
|
||||
if dn.kind == .const_group {
|
||||
dnw.write_string('${tabs[2]}<div class="title"><$head_tag>$sym_name$hash_link</$head_tag>')
|
||||
dnw.write_string('${tabs[2]}<div class="title"><${head_tag}>${sym_name}${hash_link}</${head_tag}>')
|
||||
} else {
|
||||
dnw.write_string('${tabs[2]}<div class="title"><$head_tag>$dn.kind $sym_name$hash_link</$head_tag>')
|
||||
dnw.write_string('${tabs[2]}<div class="title"><${head_tag}>${dn.kind} ${sym_name}${hash_link}</${head_tag}>')
|
||||
}
|
||||
if link.len != 0 {
|
||||
dnw.write_string('<a class="link" rel="noreferrer" target="_blank" href="$link">$link_svg</a>')
|
||||
dnw.write_string('<a class="link" rel="noreferrer" target="_blank" href="${link}">${link_svg}</a>')
|
||||
}
|
||||
dnw.write_string('</div>')
|
||||
}
|
||||
if deprecated_tags.len > 0 {
|
||||
attributes := deprecated_tags.map('<div class="attribute attribute-deprecated">${no_quotes(it)}</div>').join('')
|
||||
dnw.writeln('<div class="attributes">$attributes</div>')
|
||||
dnw.writeln('<div class="attributes">${attributes}</div>')
|
||||
}
|
||||
if tags.len > 0 {
|
||||
attributes := tags.map('<div class="attribute">$it</div>').join('')
|
||||
dnw.writeln('<div class="attributes">$attributes</div>')
|
||||
attributes := tags.map('<div class="attribute">${it}</div>').join('')
|
||||
dnw.writeln('<div class="attributes">${attributes}</div>')
|
||||
}
|
||||
if !head && dn.content.len > 0 {
|
||||
dnw.writeln('<pre class="signature"><code>$highlighted_code</code></pre>')
|
||||
dnw.writeln('<pre class="signature"><code>${highlighted_code}</code></pre>')
|
||||
}
|
||||
// do not mess with md_content further, its formatting is important, just output it 1:1 !
|
||||
dnw.writeln('$md_content\n')
|
||||
dnw.writeln('${md_content}\n')
|
||||
// Write examples if any found
|
||||
examples := dn.examples()
|
||||
if include_examples && examples.len > 0 {
|
||||
example_title := if examples.len > 1 { 'Examples' } else { 'Example' }
|
||||
dnw.writeln('<section class="doc-node examples"><h4>$example_title</h4>')
|
||||
dnw.writeln('<section class="doc-node examples"><h4>${example_title}</h4>')
|
||||
for example in examples {
|
||||
hl_example := html_highlight(example, tb)
|
||||
dnw.writeln('<pre><code class="language-v">$hl_example</code></pre>')
|
||||
dnw.writeln('<pre><code class="language-v">${hl_example}</code></pre>')
|
||||
}
|
||||
dnw.writeln('</section>')
|
||||
}
|
||||
@ -488,17 +488,17 @@ fn write_toc(dn doc.DocNode, mut toc strings.Builder) {
|
||||
if dn.comments.len == 0 || (dn.comments.len > 0 && dn.comments[0].text.len == 0) {
|
||||
return
|
||||
}
|
||||
toc.write_string('<li class="open"><a href="#readme_$toc_slug">README</a>')
|
||||
toc.write_string('<li class="open"><a href="#readme_${toc_slug}">README</a>')
|
||||
} else if dn.name != 'Constants' {
|
||||
toc.write_string('<li class="open"><a href="#$toc_slug">$dn.kind $dn.name</a>')
|
||||
toc.write_string('<li class="open"><a href="#${toc_slug}">${dn.kind} ${dn.name}</a>')
|
||||
toc.writeln(' <ul>')
|
||||
for child in dn.children {
|
||||
cname := dn.name + '.' + child.name
|
||||
toc.writeln('<li><a href="#${slug(cname)}">$child.kind $child.name</a></li>')
|
||||
toc.writeln('<li><a href="#${slug(cname)}">${child.kind} ${child.name}</a></li>')
|
||||
}
|
||||
toc.writeln('</ul>')
|
||||
} else {
|
||||
toc.write_string('<li class="open"><a href="#$toc_slug">$dn.name</a>')
|
||||
toc.write_string('<li class="open"><a href="#${toc_slug}">${dn.name}</a>')
|
||||
}
|
||||
toc.writeln('</li>')
|
||||
}
|
||||
|
@ -10,40 +10,40 @@ fn markdown_escape_script_tags(str string) string {
|
||||
fn (vd VDoc) gen_markdown(d doc.Doc, with_toc bool) string {
|
||||
mut hw := strings.new_builder(200)
|
||||
mut cw := strings.new_builder(200)
|
||||
hw.writeln('# $d.head.content\n')
|
||||
hw.writeln('# ${d.head.content}\n')
|
||||
if d.head.comments.len > 0 {
|
||||
comments := if vd.cfg.include_examples {
|
||||
d.head.merge_comments()
|
||||
} else {
|
||||
d.head.merge_comments_without_examples()
|
||||
}
|
||||
hw.writeln('$comments\n')
|
||||
hw.writeln('${comments}\n')
|
||||
}
|
||||
if with_toc {
|
||||
hw.writeln('## Contents')
|
||||
}
|
||||
vd.write_markdown_content(d.contents.arr(), mut cw, mut hw, 0, with_toc)
|
||||
footer_text := gen_footer_text(d, !vd.cfg.no_timestamp)
|
||||
cw.writeln('#### $footer_text')
|
||||
cw.writeln('#### ${footer_text}')
|
||||
return hw.str() + '\n' + cw.str()
|
||||
}
|
||||
|
||||
fn (vd VDoc) write_markdown_content(contents []doc.DocNode, mut cw strings.Builder, mut hw strings.Builder, indent int, with_toc bool) {
|
||||
for cn in contents {
|
||||
if with_toc && cn.name.len > 0 {
|
||||
hw.writeln(' '.repeat(2 * indent) + '- [${slug(cn.name)}](#$cn.name)')
|
||||
cw.writeln('## $cn.name')
|
||||
hw.writeln(' '.repeat(2 * indent) + '- [${slug(cn.name)}](#${cn.name})')
|
||||
cw.writeln('## ${cn.name}')
|
||||
}
|
||||
if cn.content.len > 0 {
|
||||
comments := cn.merge_comments_without_examples()
|
||||
cw.writeln('```v\n$cn.content\n```\n$comments\n')
|
||||
cw.writeln('```v\n${cn.content}\n```\n${comments}\n')
|
||||
// Write examples if any found
|
||||
examples := cn.examples()
|
||||
if vd.cfg.include_examples && examples.len > 0 {
|
||||
example_title := if examples.len > 1 { 'Examples' } else { 'Example' }
|
||||
cw.writeln('$example_title\n```v\n')
|
||||
cw.writeln('${example_title}\n```v\n')
|
||||
for example in examples {
|
||||
cw.writeln('$example\n')
|
||||
cw.writeln('${example}\n')
|
||||
}
|
||||
cw.writeln('```\n')
|
||||
}
|
||||
|
@ -20,7 +20,7 @@ fn escape(str string) string {
|
||||
|
||||
fn get_sym_name(dn doc.DocNode) string {
|
||||
sym_name := if dn.parent_name.len > 0 && dn.parent_name != 'void' {
|
||||
'($dn.parent_name) $dn.name'
|
||||
'(${dn.parent_name}) ${dn.name}'
|
||||
} else {
|
||||
dn.name
|
||||
}
|
||||
@ -29,7 +29,7 @@ fn get_sym_name(dn doc.DocNode) string {
|
||||
|
||||
fn get_node_id(dn doc.DocNode) string {
|
||||
tag := if dn.parent_name.len > 0 && dn.parent_name != 'void' {
|
||||
'${dn.parent_name}.$dn.name'
|
||||
'${dn.parent_name}.${dn.name}'
|
||||
} else {
|
||||
dn.name
|
||||
}
|
||||
@ -37,7 +37,7 @@ fn get_node_id(dn doc.DocNode) string {
|
||||
}
|
||||
|
||||
fn is_module_readme(dn doc.DocNode) bool {
|
||||
if dn.comments.len > 0 && dn.content == 'module $dn.name' {
|
||||
if dn.comments.len > 0 && dn.content == 'module ${dn.name}' {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
@ -133,8 +133,8 @@ fn gen_footer_text(d &doc.Doc, include_timestamp bool) string {
|
||||
return footer_text
|
||||
}
|
||||
generated_time := d.time_generated
|
||||
time_str := '$generated_time.day $generated_time.smonth() $generated_time.year $generated_time.hhmmss()'
|
||||
return '$footer_text Generated on: $time_str'
|
||||
time_str := '${generated_time.day} ${generated_time.smonth()} ${generated_time.year} ${generated_time.hhmmss()}'
|
||||
return '${footer_text} Generated on: ${time_str}'
|
||||
}
|
||||
|
||||
fn color_highlight(code string, tb &ast.Table) string {
|
||||
@ -152,20 +152,20 @@ fn color_highlight(code string, tb &ast.Table) string {
|
||||
'"'])
|
||||
if use_double_quote {
|
||||
s := unescaped_val.replace_each(['\x01', '\\\\', '"', '\\"'])
|
||||
lit = term.yellow('"$s"')
|
||||
lit = term.yellow('"${s}"')
|
||||
} else {
|
||||
s := unescaped_val.replace_each(['\x01', '\\\\', "'", "\\'"])
|
||||
lit = term.yellow("'$s'")
|
||||
lit = term.yellow("'${s}'")
|
||||
}
|
||||
}
|
||||
.char {
|
||||
lit = term.yellow('`$tok.lit`')
|
||||
lit = term.yellow('`${tok.lit}`')
|
||||
}
|
||||
.comment {
|
||||
lit = if tok.lit != '' && tok.lit[0] == 1 {
|
||||
'//${tok.lit[1..]}'
|
||||
} else {
|
||||
'//$tok.lit'
|
||||
'//${tok.lit}'
|
||||
}
|
||||
}
|
||||
.keyword {
|
||||
|
@ -82,9 +82,9 @@ fn (vd VDoc) gen_json(d doc.Doc) string {
} else {
d.head.merge_comments_without_examples()
}
jw.write_string('{"module_name":"$d.head.name","description":"${escape(comments)}","contents":')
jw.write_string('{"module_name":"${d.head.name}","description":"${escape(comments)}","contents":')
jw.write_string(json.encode(d.contents.keys().map(d.contents[it])))
jw.write_string(',"generator":"vdoc","time_generated":"$d.time_generated.str()"}')
jw.write_string(',"generator":"vdoc","time_generated":"${d.time_generated.str()}"}')
return jw.str()
}

@ -95,7 +95,7 @@ fn (vd VDoc) gen_plaintext(d doc.Doc) string {
content_arr := d.head.content.split(' ')
pw.writeln('${term.bright_blue(content_arr[0])} ${term.green(content_arr[1])}\n')
} else {
pw.writeln('$d.head.content\n')
pw.writeln('${d.head.content}\n')
}
if cfg.include_comments {
comments := if cfg.include_examples {
@ -145,7 +145,7 @@ fn (vd VDoc) write_plaintext_content(contents []doc.DocNode, mut pw strings.Buil
}
}
if cfg.show_loc {
pw.writeln('Location: $cn.file_path:${cn.pos.line_nr + 1}\n')
pw.writeln('Location: ${cn.file_path}:${cn.pos.line_nr + 1}\n')
}
}
vd.write_plaintext_content(cn.children, mut pw)
@ -193,7 +193,7 @@ fn (vd VDoc) work_processor(mut work sync.Channel, mut wg sync.WaitGroup) {
}
file_name, content := vd.render_doc(pdoc.d, pdoc.out)
output_path := os.join_path(pdoc.out.path, file_name)
println('Generating $pdoc.out.typ in "$output_path"')
println('Generating ${pdoc.out.typ} in "${output_path}"')
os.write_file(output_path, content) or { panic(err) }
}
wg.done()
@ -237,7 +237,7 @@ fn (vd VDoc) get_readme(path string) string {
return ''
}
readme_path := os.join_path(path, '${fname}.md')
vd.vprintln('Reading README file from $readme_path')
vd.vprintln('Reading README file from ${readme_path}')
readme_contents := os.read_file(readme_path) or { '' }
return readme_contents
}
@ -287,7 +287,7 @@ fn (mut vd VDoc) generate_docs_from_file() {
}
manifest_path := os.join_path(dir_path, 'v.mod')
if os.exists(manifest_path) {
vd.vprintln('Reading v.mod info from $manifest_path')
vd.vprintln('Reading v.mod info from ${manifest_path}')
if manifest := vmod.from_file(manifest_path) {
vd.manifest = manifest
}
@ -313,7 +313,7 @@ fn (mut vd VDoc) generate_docs_from_file() {
cfg.input_path,
] }
for dirpath in dirs {
vd.vprintln('Generating $out.typ docs for "$dirpath"')
vd.vprintln('Generating ${out.typ} docs for "${dirpath}"')
mut dcs := doc.generate(dirpath, cfg.pub_only, true, cfg.platform, cfg.symbol_name) or {
vd.emit_generate_err(err)
exit(1)
@ -410,7 +410,7 @@ fn (mut vd VDoc) generate_docs_from_file() {

fn (vd VDoc) vprintln(str string) {
if vd.cfg.is_verbose {
println('vdoc: $str')
println('vdoc: ${str}')
}
}

@ -428,7 +428,7 @@ fn parse_arguments(args []string) Config {
format := cmdline.option(current_args, '-f', '')
if format !in allowed_formats {
allowed_str := allowed_formats.join(', ')
eprintln('vdoc: "$format" is not a valid format. Only $allowed_str are allowed.')
eprintln('vdoc: "${format}" is not a valid format. Only ${allowed_str} are allowed.')
exit(1)
}
cfg.output_type = set_output_type_from_str(format)
@ -517,7 +517,7 @@ fn parse_arguments(args []string) Config {
} else if !is_path {
// TODO vd.vprintln('Input "$cfg.input_path" is not a valid path. Looking for modules named "$cfg.input_path"...')
mod_path := doc.lookup_module(cfg.input_path) or {
eprintln('vdoc: $err')
eprintln('vdoc: ${err}')
exit(1)
}
cfg.input_path = mod_path
@ -544,6 +544,6 @@ fn main() {
repo_url: ''
}
}
vd.vprintln('Setting output type to "$cfg.output_type"')
vd.vprintln('Setting output type to "${cfg.output_type}"')
vd.generate_docs_from_file()
}
@ -17,7 +17,7 @@ fn (mut a App) println(s string) {
fn (mut a App) collect_info() {
mut os_kind := os.user_os()
mut arch_details := []string{}
arch_details << '$runtime.nr_cpus() cpus'
arch_details << '${runtime.nr_cpus()} cpus'
if runtime.is_32bit() {
arch_details << '32bit'
}
@ -89,12 +89,12 @@ fn (mut a App) collect_info() {
)
p := a.parse(wmic_info, '=')
caption, build_number, os_arch := p['caption'], p['buildnumber'], p['osarchitecture']
os_details = '$caption v$build_number $os_arch'
os_details = '${caption} v${build_number} ${os_arch}'
} else {
ouname := os.uname()
os_details = '$ouname.release, $ouname.version'
os_details = '${ouname.release}, ${ouname.version}'
}
a.line('OS', '$os_kind, $os_details')
a.line('OS', '${os_kind}, ${os_details}')
a.line('Processor', arch_details.join(', '))
a.line('CC version', a.cmd(command: 'cc --version'))
a.println('')
@ -113,11 +113,11 @@ fn (mut a App) collect_info() {
a.line('V full version', version.full_v_version(true))
vtmp := os.getenv('VTMP')
if vtmp != '' {
a.line('env VTMP', '"$vtmp"')
a.line('env VTMP', '"${vtmp}"')
}
vflags := os.getenv('VFLAGS')
if vflags != '' {
a.line('env VFLAGS', '"$vflags"')
a.line('env VFLAGS', '"${vflags}"')
}
a.println('')
a.line('Git version', a.cmd(command: 'git --version'))
@ -146,11 +146,11 @@ fn (mut a App) cmd(c CmdConfig) string {
return output[c.line]
}
}
return 'Error: $x.output'
return 'Error: ${x.output}'
}

fn (mut a App) line(label string, value string) {
a.println('$label: ${term.colorize(term.bold, value)}')
a.println('${label}: ${term.colorize(term.bold, value)}')
}

fn (app &App) parse(config string, sep string) map[string]string {
@ -204,7 +204,7 @@ fn (mut a App) get_linux_os_name() string {
}
'uname' {
ouname := os.uname()
os_details = '$ouname.release, $ouname.version'
os_details = '${ouname.release}, ${ouname.version}'
break
}
else {}
@ -231,7 +231,7 @@ fn (mut a App) git_info() string {
os.execute('git -C . fetch V_REPO')
commit_count := a.cmd(command: 'git rev-list @{0}...V_REPO/master --right-only --count').int()
if commit_count > 0 {
out += ' ($commit_count commit(s) behind V master)'
out += ' (${commit_count} commit(s) behind V master)'
}
return out
}
@ -247,7 +247,7 @@ fn (mut a App) report_tcc_version(tccfolder string) {
tcc_commit := a.cmd(
command: 'git -C ${os.quoted_path(tccfolder)} describe --abbrev=8 --dirty --always --tags'
)
a.line('$tccfolder status', '$tcc_branch_name $tcc_commit')
a.line('${tccfolder} status', '${tcc_branch_name} ${tcc_commit}')
}

fn (mut a App) report_info() {
@ -61,7 +61,7 @@ fn main() {
if term_colors {
os.setenv('VCOLORS', 'always', true)
}
foptions.vlog('vfmt foptions: $foptions')
foptions.vlog('vfmt foptions: ${foptions}')
if foptions.is_worker {
// -worker should be added by a parent vfmt process.
// We launch a sub process for each file because
@ -74,7 +74,7 @@ fn main() {
// we are NOT a worker at this stage, i.e. we are a parent vfmt process
possible_files := cmdline.only_non_options(cmdline.options_after(args, ['fmt']))
if foptions.is_verbose {
eprintln('vfmt toolexe: $toolexe')
eprintln('vfmt toolexe: ${toolexe}')
eprintln('vfmt args: ' + os.args.str())
eprintln('vfmt env_vflags_and_os_args: ' + args.str())
eprintln('vfmt possible_files: ' + possible_files.str())
@ -107,7 +107,7 @@ fn main() {
mut worker_command_array := cli_args_no_files.clone()
worker_command_array << ['-worker', util.quote_path(fpath)]
worker_cmd := worker_command_array.join(' ')
foptions.vlog('vfmt worker_cmd: $worker_cmd')
foptions.vlog('vfmt worker_cmd: ${worker_cmd}')
worker_result := os.execute(worker_cmd)
// Guard against a possibly crashing worker process.
if worker_result.exit_code != 0 {
@ -133,7 +133,7 @@ fn main() {
errors++
}
if errors > 0 {
eprintln('Encountered a total of: $errors errors.')
eprintln('Encountered a total of: ${errors} errors.')
if foptions.is_noerror {
exit(0)
}
@ -162,17 +162,17 @@ fn (foptions &FormatOptions) vlog(msg string) {
}

fn (foptions &FormatOptions) format_file(file string) {
foptions.vlog('vfmt2 running fmt.fmt over file: $file')
foptions.vlog('vfmt2 running fmt.fmt over file: ${file}')
prefs, table := setup_preferences_and_table()
file_ast := parser.parse_file(file, table, .parse_comments, prefs)
// checker.new_checker(table, prefs).check(file_ast)
formatted_content := fmt.fmt(file_ast, table, prefs, foptions.is_debug)
file_name := os.file_name(file)
ulid := rand.ulid()
vfmt_output_path := os.join_path(vtmp_folder, 'vfmt_${ulid}_$file_name')
vfmt_output_path := os.join_path(vtmp_folder, 'vfmt_${ulid}_${file_name}')
os.write_file(vfmt_output_path, formatted_content) or { panic(err) }
foptions.vlog('fmt.fmt worked and $formatted_content.len bytes were written to $vfmt_output_path .')
eprintln('$formatted_file_token$vfmt_output_path')
foptions.vlog('fmt.fmt worked and ${formatted_content.len} bytes were written to ${vfmt_output_path} .')
eprintln('${formatted_file_token}${vfmt_output_path}')
}

fn (foptions &FormatOptions) format_pipe() {
@ -184,20 +184,20 @@ fn (foptions &FormatOptions) format_pipe() {
formatted_content := fmt.fmt(file_ast, table, prefs, foptions.is_debug, source_text: input_text)
print(formatted_content)
flush_stdout()
foptions.vlog('fmt.fmt worked and $formatted_content.len bytes were written to stdout.')
foptions.vlog('fmt.fmt worked and ${formatted_content.len} bytes were written to stdout.')
}

fn print_compiler_options(compiler_params &pref.Preferences) {
eprintln(' os: ' + compiler_params.os.str())
eprintln(' ccompiler: $compiler_params.ccompiler')
eprintln(' path: $compiler_params.path ')
eprintln(' out_name: $compiler_params.out_name ')
eprintln(' vroot: $compiler_params.vroot ')
eprintln('lookup_path: $compiler_params.lookup_path ')
eprintln(' out_name: $compiler_params.out_name ')
eprintln(' cflags: $compiler_params.cflags ')
eprintln(' is_test: $compiler_params.is_test ')
eprintln(' is_script: $compiler_params.is_script ')
eprintln(' ccompiler: ${compiler_params.ccompiler}')
eprintln(' path: ${compiler_params.path} ')
eprintln(' out_name: ${compiler_params.out_name} ')
eprintln(' vroot: ${compiler_params.vroot} ')
eprintln('lookup_path: ${compiler_params.lookup_path} ')
eprintln(' out_name: ${compiler_params.out_name} ')
eprintln(' cflags: ${compiler_params.cflags} ')
eprintln(' is_test: ${compiler_params.is_test} ')
eprintln(' is_script: ${compiler_params.is_script} ')
}

fn (mut foptions FormatOptions) find_diff_cmd() string {
@ -218,11 +218,11 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa
return
}
fc := os.read_file(file) or {
eprintln('File $file could not be read')
eprintln('File ${file} could not be read')
return
}
formatted_fc := os.read_file(formatted_file_path) or {
eprintln('File $formatted_file_path could not be read')
eprintln('File ${formatted_file_path} could not be read')
return
}
is_formatted_different := fc != formatted_fc
@ -231,7 +231,7 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa
return
}
diff_cmd := foptions.find_diff_cmd()
foptions.vlog('Using diff command: $diff_cmd')
foptions.vlog('Using diff command: ${diff_cmd}')
diff := diff.color_compare_files(diff_cmd, file, formatted_file_path)
if diff.len > 0 {
println(diff)
@ -242,19 +242,19 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa
if !is_formatted_different {
return
}
println("$file is not vfmt'ed")
println("${file} is not vfmt'ed")
return error('')
}
if foptions.is_c {
if is_formatted_different {
eprintln('File is not formatted: $file')
eprintln('File is not formatted: ${file}')
return error('')
}
return
}
if foptions.is_l {
if is_formatted_different {
eprintln('File needs formatting: $file')
eprintln('File needs formatting: ${file}')
}
return
}
@ -273,9 +273,9 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa
$if !windows {
os.chmod(file, int(perms_to_restore)) or { panic(err) }
}
eprintln('Reformatted file: $file')
eprintln('Reformatted file: ${file}')
} else {
eprintln('Already formatted file: $file')
eprintln('Already formatted file: ${file}')
}
return
}
@ -285,9 +285,9 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa

fn (f FormatOptions) str() string {
return
'FormatOptions{ is_l: $f.is_l, is_w: $f.is_w, is_diff: $f.is_diff, is_verbose: $f.is_verbose,' +
' is_all: $f.is_all, is_worker: $f.is_worker, is_debug: $f.is_debug, is_noerror: $f.is_noerror,' +
' is_verify: $f.is_verify" }'
'FormatOptions{ is_l: ${f.is_l}, is_w: ${f.is_w}, is_diff: ${f.is_diff}, is_verbose: ${f.is_verbose},' +
' is_all: ${f.is_all}, is_worker: ${f.is_worker}, is_debug: ${f.is_debug}, is_noerror: ${f.is_noerror},' +
' is_verify: ${f.is_verify}" }'
}

fn file_to_mod_name_and_is_module_file(file string) (string, bool) {
@ -308,7 +308,7 @@ fn file_to_mod_name_and_is_module_file(file string) (string, bool) {
}

fn read_source_lines(file string) ?[]string {
source_lines := os.read_lines(file) or { return error('can not read $file') }
source_lines := os.read_lines(file) or { return error('can not read ${file}') }
return source_lines
}
@ -106,7 +106,7 @@ mut:
|
||||
|
||||
fn (co CaptureOptions) validate() ! {
|
||||
if co.method !in supported_capture_methods {
|
||||
return error('capture method "$co.method" is not supported. Supported methods are: $supported_capture_methods')
|
||||
return error('capture method "${co.method}" is not supported. Supported methods are: ${supported_capture_methods}')
|
||||
}
|
||||
}
|
||||
|
||||
@ -129,7 +129,7 @@ mut:
|
||||
}
|
||||
|
||||
fn (opt Options) verbose_execute(cmd string) os.Result {
|
||||
opt.verbose_eprintln('Running `$cmd`')
|
||||
opt.verbose_eprintln('Running `${cmd}`')
|
||||
return os.execute(cmd)
|
||||
}
|
||||
|
||||
@ -141,11 +141,11 @@ fn (opt Options) verbose_eprintln(msg string) {
|
||||
|
||||
fn main() {
|
||||
if runtime_os !in supported_hosts {
|
||||
eprintln('$tool_name is currently only supported on $supported_hosts hosts')
|
||||
eprintln('${tool_name} is currently only supported on ${supported_hosts} hosts')
|
||||
exit(1)
|
||||
}
|
||||
if os.args.len == 1 {
|
||||
eprintln('Usage: $tool_name PATH \n$tool_description\n$tool_name -h for more help...')
|
||||
eprintln('Usage: ${tool_name} PATH \n${tool_description}\n${tool_name} -h for more help...')
|
||||
exit(1)
|
||||
}
|
||||
|
||||
@ -194,12 +194,12 @@ fn main() {
|
||||
all_paths_in_use := [path, gen_in_path, target_path]
|
||||
for path_in_use in all_paths_in_use {
|
||||
if !os.is_dir(path_in_use) {
|
||||
eprintln('`$path_in_use` is not a directory')
|
||||
eprintln('`${path_in_use}` is not a directory')
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
if path == target_path || gen_in_path == target_path || gen_in_path == path {
|
||||
eprintln('Compare paths can not be the same directory `$path`/`$target_path`/`$gen_in_path`')
|
||||
eprintln('Compare paths can not be the same directory `${path}`/`${target_path}`/`${gen_in_path}`')
|
||||
exit(1)
|
||||
}
|
||||
compare_screenshots(opt, gen_in_path, target_path)!
|
||||
@ -212,7 +212,7 @@ fn generate_screenshots(mut opt Options, output_path string) ! {
|
||||
dst_path := output_path.trim_right('/')
|
||||
|
||||
if !os.is_dir(path) {
|
||||
return error('`$path` is not a directory')
|
||||
return error('`${path}` is not a directory')
|
||||
}
|
||||
|
||||
for mut app_config in opt.config.apps {
|
||||
@ -227,29 +227,29 @@ fn generate_screenshots(mut opt Options, output_path string) ! {
|
||||
}
|
||||
|
||||
if app_config.capture.method == 'gg_record' {
|
||||
opt.verbose_eprintln('Compiling shaders (if needed) for `$file`')
|
||||
opt.verbose_eprintln('Compiling shaders (if needed) for `${file}`')
|
||||
sh_result := opt.verbose_execute('${os.quoted_path(v_exe)} shader ${os.quoted_path(app_path)}')
|
||||
if sh_result.exit_code != 0 {
|
||||
opt.verbose_eprintln('Skipping shader compile for `$file` v shader failed with:\n$sh_result.output')
|
||||
opt.verbose_eprintln('Skipping shader compile for `${file}` v shader failed with:\n${sh_result.output}')
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if !os.exists(dst_path) {
|
||||
opt.verbose_eprintln('Creating output path `$dst_path`')
|
||||
os.mkdir_all(dst_path) or { return error('Failed making directory `$dst_path`') }
|
||||
opt.verbose_eprintln('Creating output path `${dst_path}`')
|
||||
os.mkdir_all(dst_path) or { return error('Failed making directory `${dst_path}`') }
|
||||
}
|
||||
|
||||
screenshot_path := os.join_path(dst_path, rel_out_path)
|
||||
if !os.exists(screenshot_path) {
|
||||
os.mkdir_all(screenshot_path) or {
|
||||
return error('Failed making screenshot path `$screenshot_path`')
|
||||
return error('Failed making screenshot path `${screenshot_path}`')
|
||||
}
|
||||
}
|
||||
|
||||
app_config.screenshots_path = screenshot_path
|
||||
app_config.screenshots = take_screenshots(opt, app_config) or {
|
||||
return error('Failed taking screenshots of `$app_path`:\n$err.msg()')
|
||||
return error('Failed taking screenshots of `${app_path}`:\n${err.msg()}')
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -259,28 +259,28 @@ fn compare_screenshots(opt Options, output_path string, target_path string) ! {
|
||||
mut warns := map[string]string{}
|
||||
for app_config in opt.config.apps {
|
||||
screenshots := app_config.screenshots
|
||||
opt.verbose_eprintln('Comparing $screenshots.len screenshots in `$output_path` with `$target_path`')
|
||||
opt.verbose_eprintln('Comparing ${screenshots.len} screenshots in `${output_path}` with `${target_path}`')
|
||||
for screenshot in screenshots {
|
||||
relative_screenshot := screenshot.all_after(output_path + os.path_separator)
|
||||
|
||||
src := screenshot
|
||||
target := os.join_path(target_path, relative_screenshot)
|
||||
opt.verbose_eprintln('Comparing `$src` with `$target` with $app_config.compare.method')
|
||||
opt.verbose_eprintln('Comparing `${src}` with `${target}` with ${app_config.compare.method}')
|
||||
|
||||
if app_config.compare.method == 'idiff' {
|
||||
if idiff_exe == '' {
|
||||
return error('$tool_name need the `idiff` tool installed. It can be installed on Ubuntu with `sudo apt install openimageio-tools`')
|
||||
return error('${tool_name} need the `idiff` tool installed. It can be installed on Ubuntu with `sudo apt install openimageio-tools`')
|
||||
}
|
||||
diff_file := os.join_path(os.vtmp_dir(), os.file_name(src).all_before_last('.') +
|
||||
'.diff.tif')
|
||||
flags := app_config.compare.flags.join(' ')
|
||||
diff_cmd := '${os.quoted_path(idiff_exe)} $flags -abs -od -o ${os.quoted_path(diff_file)} -abs ${os.quoted_path(src)} ${os.quoted_path(target)}'
|
||||
diff_cmd := '${os.quoted_path(idiff_exe)} ${flags} -abs -od -o ${os.quoted_path(diff_file)} -abs ${os.quoted_path(src)} ${os.quoted_path(target)}'
|
||||
result := opt.verbose_execute(diff_cmd)
|
||||
if result.exit_code == 0 {
|
||||
opt.verbose_eprintln('OUTPUT: \n$result.output')
|
||||
opt.verbose_eprintln('OUTPUT: \n${result.output}')
|
||||
}
|
||||
if result.exit_code != 0 {
|
||||
eprintln('OUTPUT: \n$result.output')
|
||||
eprintln('OUTPUT: \n${result.output}')
|
||||
if result.exit_code == 1 {
|
||||
warns[src] = target
|
||||
} else {
|
||||
@ -295,30 +295,30 @@ fn compare_screenshots(opt Options, output_path string, target_path string) ! {
|
||||
eprintln('--- WARNINGS ---')
|
||||
eprintln('The following files had warnings when compared to their targets')
|
||||
for warn_src, warn_target in warns {
|
||||
eprintln('$warn_src ~= $warn_target')
|
||||
eprintln('${warn_src} ~= ${warn_target}')
|
||||
}
|
||||
}
|
||||
if fails.len > 0 {
|
||||
eprintln('--- ERRORS ---')
|
||||
eprintln('The following files did not match their targets')
|
||||
for fail_src, fail_target in fails {
|
||||
eprintln('$fail_src != $fail_target')
|
||||
eprintln('${fail_src} != ${fail_target}')
|
||||
}
|
||||
first := fails.keys()[0]
|
||||
fail_copy := os.join_path(os.vtmp_dir(), 'fail.' + first.all_after_last('.'))
|
||||
os.cp(first, fail_copy)!
|
||||
eprintln('First failed file `$first` is copied to `$fail_copy`')
|
||||
eprintln('First failed file `${first}` is copied to `${fail_copy}`')
|
||||
|
||||
diff_file := os.join_path(os.vtmp_dir(), os.file_name(first).all_before_last('.') +
|
||||
'.diff.tif')
|
||||
diff_copy := os.join_path(os.vtmp_dir(), 'diff.tif')
|
||||
if os.is_file(diff_file) {
|
||||
os.cp(diff_file, diff_copy)!
|
||||
eprintln('First failed diff file `$diff_file` is copied to `$diff_copy`')
|
||||
eprintln('Removing alpha channel from $diff_copy ...')
|
||||
eprintln('First failed diff file `${diff_file}` is copied to `${diff_copy}`')
|
||||
eprintln('Removing alpha channel from ${diff_copy} ...')
|
||||
final_fail_result_file := os.join_path(os.vtmp_dir(), 'diff.png')
|
||||
opt.verbose_execute('convert ${os.quoted_path(diff_copy)} -alpha off ${os.quoted_path(final_fail_result_file)}')
|
||||
eprintln('Final diff file: `$final_fail_result_file`')
|
||||
eprintln('Final diff file: `${final_fail_result_file}`')
|
||||
}
|
||||
exit(1)
|
||||
}
|
||||
@ -327,26 +327,26 @@ fn compare_screenshots(opt Options, output_path string, target_path string) ! {
|
||||
fn take_screenshots(opt Options, app AppConfig) ![]string {
|
||||
out_path := app.screenshots_path
|
||||
if !opt.compare_only {
|
||||
opt.verbose_eprintln('Taking screenshot(s) of `$app.path` to `$out_path`')
|
||||
opt.verbose_eprintln('Taking screenshot(s) of `${app.path}` to `${out_path}`')
|
||||
match app.capture.method {
|
||||
'gg_record' {
|
||||
for k, v in app.capture.env {
|
||||
rv := v.replace('\$OUT_PATH', out_path)
|
||||
opt.verbose_eprintln('Setting ENV `$k` = $rv ...')
|
||||
os.setenv('$k', rv, true)
|
||||
opt.verbose_eprintln('Setting ENV `${k}` = ${rv} ...')
|
||||
os.setenv('${k}', rv, true)
|
||||
}
|
||||
|
||||
flags := app.capture.flags.join(' ')
|
||||
result := opt.verbose_execute('${os.quoted_path(v_exe)} $flags -d gg_record run ${os.quoted_path(app.abs_path)}')
|
||||
result := opt.verbose_execute('${os.quoted_path(v_exe)} ${flags} -d gg_record run ${os.quoted_path(app.abs_path)}')
|
||||
if result.exit_code != 0 {
|
||||
return error('Failed taking screenshot of `$app.abs_path`:\n$result.output')
|
||||
return error('Failed taking screenshot of `${app.abs_path}`:\n${result.output}')
|
||||
}
|
||||
}
|
||||
'generic_screenshot' {
|
||||
for k, v in app.capture.env {
|
||||
rv := v.replace('\$OUT_PATH', out_path)
|
||||
opt.verbose_eprintln('Setting ENV `$k` = $rv ...')
|
||||
os.setenv('$k', rv, true)
|
||||
opt.verbose_eprintln('Setting ENV `${k}` = ${rv} ...')
|
||||
os.setenv('${k}', rv, true)
|
||||
}
|
||||
|
||||
existing_screenshots := get_app_screenshots(out_path, app)!
|
||||
@ -354,9 +354,9 @@ fn take_screenshots(opt Options, app AppConfig) ![]string {
|
||||
flags := app.capture.flags
|
||||
|
||||
if !os.exists(app.abs_path) {
|
||||
return error('Failed starting app `$app.abs_path`, the path does not exist')
|
||||
return error('Failed starting app `${app.abs_path}`, the path does not exist')
|
||||
}
|
||||
opt.verbose_eprintln('Running $app.abs_path $flags')
|
||||
opt.verbose_eprintln('Running ${app.abs_path} ${flags}')
|
||||
mut p_app := os.new_process(app.abs_path)
|
||||
p_app.set_args(flags)
|
||||
p_app.set_redirect_stdio()
|
||||
@ -364,56 +364,56 @@ fn take_screenshots(opt Options, app AppConfig) ![]string {
|
||||
|
||||
if !p_app.is_alive() {
|
||||
output := p_app.stdout_read() + '\n' + p_app.stderr_read()
|
||||
return error('Failed starting app `$app.abs_path` (before screenshot):\n$output')
|
||||
return error('Failed starting app `${app.abs_path}` (before screenshot):\n${output}')
|
||||
}
|
||||
if app.capture.wait_ms > 0 {
|
||||
opt.verbose_eprintln('Waiting $app.capture.wait_ms before capturing')
|
||||
opt.verbose_eprintln('Waiting ${app.capture.wait_ms} before capturing')
|
||||
time.sleep(app.capture.wait_ms * time.millisecond)
|
||||
}
|
||||
if !p_app.is_alive() {
|
||||
output := p_app.stdout_slurp() + '\n' + p_app.stderr_slurp()
|
||||
return error('App `$app.abs_path` exited ($p_app.code) before a screenshot could be captured:\n$output')
|
||||
return error('App `${app.abs_path}` exited (${p_app.code}) before a screenshot could be captured:\n${output}')
|
||||
}
|
||||
// Use ImageMagick's `import` tool to take the screenshot
|
||||
out_file := os.join_path(out_path, os.file_name(app.path) +
|
||||
'_screenshot_${existing_screenshots.len:02}.png')
|
||||
result := opt.verbose_execute('import -window root "$out_file"')
|
||||
result := opt.verbose_execute('import -window root "${out_file}"')
|
||||
if result.exit_code != 0 {
|
||||
p_app.signal_kill()
|
||||
return error('Failed taking screenshot of `$app.abs_path` to "$out_file":\n$result.output')
|
||||
return error('Failed taking screenshot of `${app.abs_path}` to "${out_file}":\n${result.output}')
|
||||
}
|
||||
|
||||
// When using regions the capture is split up into regions.len
|
||||
// And name the output based on each region's properties
|
||||
if app.capture.regions.len > 0 {
|
||||
for region in app.capture.regions {
|
||||
region_id := 'x${region.x}y${region.y}w${region.width}h$region.height'
|
||||
region_id := 'x${region.x}y${region.y}w${region.width}h${region.height}'
|
||||
region_out_file := os.join_path(out_path, os.file_name(app.path) +
|
||||
'_screenshot_${existing_screenshots.len:02}_region_${region_id}.png')
|
||||
// If the region is empty (w, h == 0, 0) infer a full screenshot,
|
||||
// This allows for capturing both regions *and* the complete screen
|
||||
if region.is_empty() {
|
||||
os.cp(out_file, region_out_file) or {
|
||||
return error('Failed copying original screenshot "$out_file" to region file "$region_out_file"')
|
||||
return error('Failed copying original screenshot "${out_file}" to region file "${region_out_file}"')
|
||||
}
|
||||
continue
|
||||
}
|
||||
extract_result := opt.verbose_execute('convert -extract ${region.width}x$region.height+$region.x+$region.y "$out_file" "$region_out_file"')
|
||||
extract_result := opt.verbose_execute('convert -extract ${region.width}x${region.height}+${region.x}+${region.y} "${out_file}" "${region_out_file}"')
|
||||
if extract_result.exit_code != 0 {
|
||||
p_app.signal_kill()
|
||||
return error('Failed extracting region $region_id from screenshot of `$app.abs_path` to "$region_out_file":\n$result.output')
|
||||
return error('Failed extracting region ${region_id} from screenshot of `${app.abs_path}` to "${region_out_file}":\n${result.output}')
|
||||
}
|
||||
}
|
||||
// When done, remove the original file that was split into regions.
|
||||
opt.verbose_eprintln('Removing "$out_file" (region mode)')
|
||||
opt.verbose_eprintln('Removing "${out_file}" (region mode)')
|
||||
os.rm(out_file) or {
|
||||
return error('Failed removing original screenshot "$out_file"')
|
||||
return error('Failed removing original screenshot "${out_file}"')
|
||||
}
|
||||
}
|
||||
p_app.signal_kill()
|
||||
}
|
||||
else {
|
||||
return error('Unsupported capture method "$app.capture.method"')
|
||||
return error('Unsupported capture method "${app.capture.method}"')
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -422,7 +422,7 @@ fn take_screenshots(opt Options, app AppConfig) ![]string {
|
||||
|
||||
fn get_app_screenshots(path string, app AppConfig) ![]string {
|
||||
mut screenshots := []string{}
|
||||
shots := os.ls(path) or { return error('Failed listing dir `$path`') }
|
||||
shots := os.ls(path) or { return error('Failed listing dir `${path}`') }
|
||||
for shot in shots {
|
||||
if shot.starts_with(os.file_name(app.path).all_before_last('.')) {
|
||||
screenshots << os.join_path(path, shot)
|
||||
|
@ -226,7 +226,7 @@ fn (upd VlsUpdater) download_prebuilt() ! {
}

fn (upd VlsUpdater) print_new_vls_version(new_vls_exec_path string) {
exec_version := os.execute('$new_vls_exec_path --version')
exec_version := os.execute('${new_vls_exec_path} --version')
if exec_version.exit_code == 0 {
upd.log('VLS was updated to version: ${exec_version.output.all_after('vls version ').trim_space()}')
}
@ -242,13 +242,13 @@ fn (upd VlsUpdater) compile_from_source() ! {

if !os.exists(vls_src_folder) {
upd.log('Cloning VLS repo...')
clone_result := os.execute('$git clone https://github.com/nedpals/vls $vls_src_folder')
clone_result := os.execute('${git} clone https://github.com/nedpals/vls ${vls_src_folder}')
if clone_result.exit_code != 0 {
return error('Failed to build VLS from source. Reason: $clone_result.output')
return error('Failed to build VLS from source. Reason: ${clone_result.output}')
}
} else {
upd.log('Updating VLS repo...')
pull_result := os.execute('$git -C $vls_src_folder pull')
pull_result := os.execute('${git} -C ${vls_src_folder} pull')
if !upd.is_force && pull_result.output.trim_space() == 'Already up to date.' {
upd.log("VLS was already updated to it's latest version.")
return
@ -271,7 +271,7 @@ fn (upd VlsUpdater) compile_from_source() ! {

compile_result := os.execute('v run ${os.join_path(vls_src_folder, 'build.vsh')} ${possible_compilers[selected_compiler_idx]}')
if compile_result.exit_code != 0 {
return error('Cannot compile VLS from source: $compile_result.output')
return error('Cannot compile VLS from source: ${compile_result.output}')
}

exec_path := os.join_path(vls_src_folder, 'bin', 'vls')
@ -372,10 +372,10 @@ fn (mut upd VlsUpdater) parse(mut fp flag.FlagParser) ! {
fn (upd VlsUpdater) log(msg string) {
match upd.output {
.text {
println('> $msg')
println('> ${msg}')
}
.json {
print('{"message":"$msg"}')
print('{"message":"${msg}"}')
flush_stdout()
}
.silent {}
@ -397,7 +397,7 @@ fn (upd VlsUpdater) error_details(err IError) string {
the specified path exists and is a valid executable.
- If you have an existing installation of VLS, be sure
to remove "vls.config.json" and "bin" located inside
"$vls_dir_shortened" and re-install.
"${vls_dir_shortened}" and re-install.

If none of the options listed have solved your issue,
please report it at https://github.com/vlang/v/issues
@ -413,7 +413,7 @@ fn (upd VlsUpdater) error_details(err IError) string {
fn (upd VlsUpdater) cli_error(err IError) {
match upd.output {
.text {
eprintln('v ls error: $err.msg() ($err.code())')
eprintln('v ls error: ${err.msg()} (${err.code()})')
if err !is none {
eprintln(upd.error_details(err))
}
@ -421,7 +421,7 @@ fn (upd VlsUpdater) cli_error(err IError) {
print_backtrace()
}
.json {
print('{"error":{"message":${json.encode(err.msg())},"code":"$err.code()","details":${json.encode(upd.error_details(err).trim_space())}}}')
print('{"error":{"message":${json.encode(err.msg())},"code":"${err.code()}","details":${json.encode(upd.error_details(err).trim_space())}}}')
flush_stdout()
}
.silent {}
@ -433,7 +433,7 @@ fn (upd VlsUpdater) check_installation() {
if upd.ls_path.len == 0 {
upd.log('Language server is not installed')
} else {
upd.log('Language server is installed at: $upd.ls_path')
upd.log('Language server is installed at: ${upd.ls_path}')
}
}

@ -457,7 +457,7 @@ fn (upd VlsUpdater) run(fp flag.FlagParser) ! {
}
}
} else if upd.pass_to_ls {
exit(os.system('$upd.ls_path ${upd.args.join(' ')}'))
exit(os.system('${upd.ls_path} ${upd.args.join(' ')}'))
} else if upd.is_help {
println(fp.usage())
exit(0)
@ -110,12 +110,12 @@ fn (opt &Options) report_undocumented_functions_in_path(path string) int {
fn (opt &Options) report_undocumented_functions(list []UndocumentedFN) {
if list.len > 0 {
for undocumented_fn in list {
mut line_numbers := '$undocumented_fn.line:0:'
mut line_numbers := '${undocumented_fn.line}:0:'
if opt.no_line_numbers {
line_numbers = ''
}
tags_str := if opt.collect_tags && undocumented_fn.tags.len > 0 {
'$undocumented_fn.tags'
'${undocumented_fn.tags}'
} else {
''
}
@ -126,7 +126,7 @@ fn (opt &Options) report_undocumented_functions(list []UndocumentedFN) {
os.real_path(file)
}
if opt.deprecated {
println('$ofile:$line_numbers$undocumented_fn.signature $tags_str')
println('${ofile}:${line_numbers}${undocumented_fn.signature} ${tags_str}')
} else {
mut has_deprecation_tag := false
for tag in undocumented_fn.tags {
@ -136,7 +136,7 @@ fn (opt &Options) report_undocumented_functions(list []UndocumentedFN) {
}
}
if !has_deprecation_tag {
println('$ofile:$line_numbers$undocumented_fn.signature $tags_str')
println('${ofile}:${line_numbers}${undocumented_fn.signature} ${tags_str}')
}
}
}
@ -247,7 +247,7 @@ fn main() {
}
if opt.additional_args.len == 0 {
println(fp.usage())
eprintln('Error: $tool_name is missing PATH input')
eprintln('Error: ${tool_name} is missing PATH input')
exit(1)
}
// Allow short-long versions to prevent false positive situations, should
@ -262,7 +262,7 @@ fn main() {
if opt.diff {
if opt.additional_args.len < 2 {
println(fp.usage())
eprintln('Error: $tool_name --diff needs two valid PATH inputs')
eprintln('Error: ${tool_name} --diff needs two valid PATH inputs')
exit(1)
}
path_old := opt.additional_args[0]
@ -270,7 +270,7 @@ fn main() {
if !(os.is_file(path_old) || os.is_dir(path_old)) || !(os.is_file(path_new)
|| os.is_dir(path_new)) {
println(fp.usage())
eprintln('Error: $tool_name --diff needs two valid PATH inputs')
eprintln('Error: ${tool_name} --diff needs two valid PATH inputs')
exit(1)
}
list := opt.diff_undocumented_functions_in_paths(path_old, path_new)
cmd/tools/vpm.v
@ -66,7 +66,7 @@ fn main() {
|
||||
// args are: vpm [options] SUBCOMMAND module names
|
||||
params := cmdline.only_non_options(os.args[1..])
|
||||
options := cmdline.only_options(os.args[1..])
|
||||
verbose_println('cli params: $params')
|
||||
verbose_println('cli params: ${params}')
|
||||
if params.len < 1 {
|
||||
vpm_help()
|
||||
exit(5)
|
||||
@ -126,10 +126,10 @@ fn main() {
|
||||
vpm_show(module_names)
|
||||
}
|
||||
else {
|
||||
eprintln('Error: you tried to run "v $vpm_command"')
|
||||
eprintln('Error: you tried to run "v ${vpm_command}"')
|
||||
eprintln('... but the v package management tool vpm only knows about these commands:')
|
||||
for validcmd in valid_vpm_commands {
|
||||
eprintln(' v $validcmd')
|
||||
eprintln(' v ${validcmd}')
|
||||
}
|
||||
exit(3)
|
||||
}
|
||||
@ -156,7 +156,7 @@ fn vpm_search(keywords []string) {
|
||||
continue
|
||||
}
|
||||
if index == 0 {
|
||||
println('Search results for "$joined":\n')
|
||||
println('Search results for "${joined}":\n')
|
||||
}
|
||||
index++
|
||||
mut parts := mod.split('.')
|
||||
@ -168,17 +168,17 @@ fn vpm_search(keywords []string) {
|
||||
parts[0] = ' by ${parts[0]} '
|
||||
}
|
||||
installed := if mod in installed_modules { ' (installed)' } else { '' }
|
||||
println('${index}. ${parts[1]}${parts[0]}[$mod]$installed')
|
||||
println('${index}. ${parts[1]}${parts[0]}[${mod}]${installed}')
|
||||
break
|
||||
}
|
||||
}
|
||||
if index == 0 {
|
||||
vexe := os.getenv('VEXE')
|
||||
vroot := os.real_path(os.dir(vexe))
|
||||
mut messages := ['No module(s) found for `$joined` .']
|
||||
mut messages := ['No module(s) found for `${joined}` .']
|
||||
for vlibmod in search_keys {
|
||||
if os.is_dir(os.join_path(vroot, 'vlib', vlibmod)) {
|
||||
messages << 'There is already an existing "$vlibmod" module in vlib, so you can just `import $vlibmod` .'
|
||||
messages << 'There is already an existing "${vlibmod}" module in vlib, so you can just `import ${vlibmod}` .'
|
||||
}
|
||||
}
|
||||
for m in messages {
|
||||
@ -195,7 +195,7 @@ fn vpm_install_from_vpm(module_names []string) {
|
||||
name := n.trim_space().replace('_', '-')
|
||||
mod := get_module_meta_info(name) or {
|
||||
errors++
|
||||
eprintln('Errors while retrieving meta data for module $name:')
|
||||
eprintln('Errors while retrieving meta data for module ${name}:')
|
||||
eprintln(err)
|
||||
continue
|
||||
}
|
||||
@ -205,12 +205,12 @@ fn vpm_install_from_vpm(module_names []string) {
|
||||
}
|
||||
if vcs !in supported_vcs_systems {
|
||||
errors++
|
||||
eprintln('Skipping module "$name", since it uses an unsupported VCS {$vcs} .')
|
||||
eprintln('Skipping module "${name}", since it uses an unsupported VCS {${vcs}} .')
|
||||
continue
|
||||
}
|
||||
if !ensure_vcs_is_installed(vcs) {
|
||||
errors++
|
||||
eprintln('VPM needs `$vcs` to be installed.')
|
||||
eprintln('VPM needs `${vcs}` to be installed.')
|
||||
continue
|
||||
}
|
||||
//
|
||||
@ -219,14 +219,14 @@ fn vpm_install_from_vpm(module_names []string) {
|
||||
vpm_update([name])
|
||||
continue
|
||||
}
|
||||
println('Installing module "$name" from "$mod.url" to "$minfo.final_module_path" ...')
|
||||
println('Installing module "${name}" from "${mod.url}" to "${minfo.final_module_path}" ...')
|
||||
vcs_install_cmd := supported_vcs_install_cmds[vcs]
|
||||
cmd := '$vcs_install_cmd "$mod.url" "$minfo.final_module_path"'
|
||||
verbose_println(' command: $cmd')
|
||||
cmd := '${vcs_install_cmd} "${mod.url}" "${minfo.final_module_path}"'
|
||||
verbose_println(' command: ${cmd}')
|
||||
cmdres := os.execute(cmd)
|
||||
if cmdres.exit_code != 0 {
|
||||
errors++
|
||||
eprintln('Failed installing module "$name" to "$minfo.final_module_path" .')
|
||||
eprintln('Failed installing module "${name}" to "${minfo.final_module_path}" .')
|
||||
print_failed_cmd(cmd, cmdres)
|
||||
continue
|
||||
}
|
||||
@ -238,8 +238,8 @@ fn vpm_install_from_vpm(module_names []string) {
|
||||
}
|
||||
|
||||
fn print_failed_cmd(cmd string, cmdres os.Result) {
|
||||
verbose_println('Failed command: $cmd')
|
||||
verbose_println('Failed command output:\n$cmdres.output')
|
||||
verbose_println('Failed command: ${cmd}')
|
||||
verbose_println('Failed command output:\n${cmdres.output}')
|
||||
}
|
||||
|
||||
fn ensure_vcs_is_installed(vcs string) bool {
|
||||
@ -260,7 +260,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
|
||||
|
||||
first_cut_pos := url.last_index('/') or {
|
||||
errors++
|
||||
eprintln('Errors while retrieving name for module "$url" :')
|
||||
eprintln('Errors while retrieving name for module "${url}" :')
|
||||
eprintln(err)
|
||||
continue
|
||||
}
|
||||
@ -269,7 +269,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
|
||||
|
||||
second_cut_pos := url.substr(0, first_cut_pos).last_index('/') or {
|
||||
errors++
|
||||
eprintln('Errors while retrieving name for module "$url" :')
|
||||
eprintln('Errors while retrieving name for module "${url}" :')
|
||||
eprintln(err)
|
||||
continue
|
||||
}
|
||||
@ -284,17 +284,17 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
|
||||
}
|
||||
if !ensure_vcs_is_installed(vcs_key) {
|
||||
errors++
|
||||
eprintln('VPM needs `$vcs_key` to be installed.')
|
||||
eprintln('VPM needs `${vcs_key}` to be installed.')
|
||||
continue
|
||||
}
|
||||
println('Installing module "$name" from "$url" to "$final_module_path" ...')
|
||||
println('Installing module "${name}" from "${url}" to "${final_module_path}" ...')
|
||||
vcs_install_cmd := supported_vcs_install_cmds[vcs_key]
|
||||
cmd := '$vcs_install_cmd "$url" "$final_module_path"'
|
||||
verbose_println(' command: $cmd')
|
||||
cmd := '${vcs_install_cmd} "${url}" "${final_module_path}"'
|
||||
verbose_println(' command: ${cmd}')
|
||||
cmdres := os.execute(cmd)
|
||||
if cmdres.exit_code != 0 {
|
||||
errors++
|
||||
eprintln('Failed installing module "$name" to "$final_module_path" .')
|
||||
eprintln('Failed installing module "${name}" to "${final_module_path}" .')
|
||||
print_failed_cmd(cmd, cmdres)
|
||||
continue
|
||||
}
|
||||
@ -306,30 +306,30 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
|
||||
return
|
||||
}
|
||||
minfo := mod_name_info(vmod.name)
|
||||
println('Relocating module from "$name" to "$vmod.name" ( "$minfo.final_module_path" ) ...')
|
||||
println('Relocating module from "${name}" to "${vmod.name}" ( "${minfo.final_module_path}" ) ...')
|
||||
if os.exists(minfo.final_module_path) {
|
||||
eprintln('Warning module "$minfo.final_module_path" already exsits!')
|
||||
eprintln('Removing module "$minfo.final_module_path" ...')
|
||||
eprintln('Warning module "${minfo.final_module_path}" already exsits!')
|
||||
eprintln('Removing module "${minfo.final_module_path}" ...')
|
||||
os.rmdir_all(minfo.final_module_path) or {
|
||||
errors++
|
||||
println('Errors while removing "$minfo.final_module_path" :')
|
||||
println('Errors while removing "${minfo.final_module_path}" :')
|
||||
println(err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
os.mv(final_module_path, minfo.final_module_path) or {
|
||||
errors++
|
||||
eprintln('Errors while relocating module "$name" :')
|
||||
eprintln('Errors while relocating module "${name}" :')
|
||||
eprintln(err)
|
||||
os.rmdir_all(final_module_path) or {
|
||||
errors++
|
||||
eprintln('Errors while removing "$final_module_path" :')
|
||||
eprintln('Errors while removing "${final_module_path}" :')
|
||||
eprintln(err)
|
||||
continue
|
||||
}
|
||||
continue
|
||||
}
|
||||
println('Module "$name" relocated to "$vmod.name" successfully.')
|
||||
println('Module "${name}" relocated to "${vmod.name}" successfully.')
|
||||
final_module_path = minfo.final_module_path
|
||||
name = vmod.name
|
||||
}
|
||||
@ -387,23 +387,23 @@ fn vpm_update(m []string) {
|
||||
zname := url_to_module_name(modulename)
|
||||
final_module_path := valid_final_path_of_existing_module(modulename) or { continue }
|
||||
os.chdir(final_module_path) or {}
|
||||
println('Updating module "$zname" in "$final_module_path" ...')
|
||||
println('Updating module "${zname}" in "${final_module_path}" ...')
|
||||
vcs := vcs_used_in_dir(final_module_path) or { continue }
|
||||
if !ensure_vcs_is_installed(vcs[0]) {
|
||||
errors++
|
||||
println('VPM needs `$vcs` to be installed.')
|
||||
println('VPM needs `${vcs}` to be installed.')
|
||||
continue
|
||||
}
|
||||
vcs_cmd := supported_vcs_update_cmds[vcs[0]]
|
||||
verbose_println(' command: $vcs_cmd')
|
||||
vcs_res := os.execute('$vcs_cmd')
|
||||
verbose_println(' command: ${vcs_cmd}')
|
||||
vcs_res := os.execute('${vcs_cmd}')
|
||||
if vcs_res.exit_code != 0 {
|
||||
errors++
|
||||
println('Failed updating module "$zname" in "$final_module_path" .')
|
||||
println('Failed updating module "${zname}" in "${final_module_path}" .')
|
||||
print_failed_cmd(vcs_cmd, vcs_res)
|
||||
continue
|
||||
} else {
|
||||
verbose_println(' $vcs_res.output.trim_space()')
|
||||
verbose_println(' ${vcs_res.output.trim_space()}')
|
||||
}
|
||||
resolve_dependencies(modulename, final_module_path, module_names)
|
||||
}
|
||||
@ -424,9 +424,9 @@ fn get_outdated() ?[]string {
|
||||
for step in vcs_cmd_steps {
|
||||
res := os.execute(step)
|
||||
if res.exit_code < 0 {
|
||||
verbose_println('Error command: $step')
|
||||
verbose_println('Error details:\n$res.output')
|
||||
return error('Error while checking latest commits for "$name" .')
|
||||
verbose_println('Error command: ${step}')
|
||||
verbose_println('Error details:\n${res.output}')
|
||||
return error('Error while checking latest commits for "${name}" .')
|
||||
}
|
||||
if vcs[0] == 'hg' {
|
||||
if res.exit_code == 1 {
|
||||
@ -457,7 +457,7 @@ fn vpm_outdated() {
|
||||
if outdated.len > 0 {
|
||||
eprintln('Outdated modules:')
|
||||
for m in outdated {
|
||||
eprintln(' $m')
|
||||
eprintln(' ${m}')
|
||||
}
|
||||
} else {
|
||||
println('Modules are up to date.')
|
||||
@ -486,10 +486,10 @@ fn vpm_remove(module_names []string) {
|
||||
}
|
||||
for name in module_names {
|
||||
final_module_path := valid_final_path_of_existing_module(name) or { continue }
|
||||
eprintln('Removing module "$name" ...')
|
||||
verbose_println('removing folder $final_module_path')
|
||||
eprintln('Removing module "${name}" ...')
|
||||
verbose_println('removing folder ${final_module_path}')
|
||||
os.rmdir_all(final_module_path) or {
|
||||
verbose_println('error while removing "$final_module_path": $err.msg()')
|
||||
verbose_println('error while removing "${final_module_path}": ${err.msg()}')
|
||||
}
|
||||
// delete author directory if it is empty
|
||||
author := name.split('.')[0]
|
||||
@ -498,9 +498,9 @@ fn vpm_remove(module_names []string) {
|
||||
continue
|
||||
}
|
||||
if os.is_dir_empty(author_dir) {
|
||||
verbose_println('removing author folder $author_dir')
|
||||
verbose_println('removing author folder ${author_dir}')
|
||||
os.rmdir(author_dir) or {
|
||||
verbose_println('error while removing "$author_dir": $err.msg()')
|
||||
verbose_println('error while removing "${author_dir}": ${err.msg()}')
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -510,15 +510,15 @@ fn valid_final_path_of_existing_module(modulename string) ?string {
|
||||
name := if mod := get_mod_by_url(modulename) { mod.name } else { modulename }
|
||||
minfo := mod_name_info(name)
|
||||
if !os.exists(minfo.final_module_path) {
|
||||
eprintln('No module with name "$minfo.mname_normalised" exists at $minfo.final_module_path')
|
||||
eprintln('No module with name "${minfo.mname_normalised}" exists at ${minfo.final_module_path}')
|
||||
return none
|
||||
}
|
||||
if !os.is_dir(minfo.final_module_path) {
|
||||
eprintln('Skipping "$minfo.final_module_path", since it is not a folder.')
|
||||
eprintln('Skipping "${minfo.final_module_path}", since it is not a folder.')
|
||||
return none
|
||||
}
|
||||
vcs_used_in_dir(minfo.final_module_path) or {
|
||||
eprintln('Skipping "$minfo.final_module_path", since it does not use a supported vcs.')
|
||||
eprintln('Skipping "${minfo.final_module_path}", since it does not use a supported vcs.')
|
||||
return none
|
||||
}
|
||||
return minfo.final_module_path
|
||||
@ -526,7 +526,7 @@ fn valid_final_path_of_existing_module(modulename string) ?string {
|
||||
|
||||
fn ensure_vmodules_dir_exist() {
|
||||
if !os.is_dir(settings.vmodules_path) {
|
||||
println('Creating "$settings.vmodules_path/" ...')
|
||||
println('Creating "${settings.vmodules_path}/" ...')
|
||||
os.mkdir(settings.vmodules_path) or { panic(err) }
|
||||
}
|
||||
}
|
||||
@ -566,7 +566,7 @@ fn get_installed_modules() []string {
|
||||
mods := os.ls(adir) or { continue }
|
||||
for m in mods {
|
||||
vcs_used_in_dir(os.join_path(adir, m)) or { continue }
|
||||
modules << '${author}.$m'
|
||||
modules << '${author}.${m}'
|
||||
}
|
||||
}
|
||||
return modules
|
||||
@ -601,7 +601,7 @@ fn get_all_modules() []string {
|
||||
url := get_working_server_url()
|
||||
r := http.get(url) or { panic(err) }
|
||||
if r.status_code != 200 {
|
||||
eprintln('Failed to search vpm.vlang.io. Status code: $r.status_code')
|
||||
eprintln('Failed to search vpm.vlang.io. Status code: ${r.status_code}')
|
||||
exit(1)
|
||||
}
|
||||
s := r.body
|
||||
@ -656,14 +656,14 @@ fn resolve_dependencies(name string, module_path string, module_names []string)
|
||||
}
|
||||
}
|
||||
if deps.len > 0 {
|
||||
println('Resolving $deps.len dependencies for module "$name" ...')
|
||||
verbose_println('Found dependencies: $deps')
|
||||
println('Resolving ${deps.len} dependencies for module "${name}" ...')
|
||||
verbose_println('Found dependencies: ${deps}')
|
||||
vpm_install(deps, Source.vpm)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_vmod(data string) !Vmod {
|
||||
manifest := vmod.decode(data) or { return error('Parsing v.mod file failed, $err') }
|
||||
manifest := vmod.decode(data) or { return error('Parsing v.mod file failed, ${err}') }
|
||||
mut vmod := Vmod{}
|
||||
vmod.name = manifest.name
|
||||
vmod.version = manifest.version
|
||||
@ -678,9 +678,9 @@ fn get_working_server_url() string {
|
||||
vpm_server_urls
|
||||
}
|
||||
for url in server_urls {
|
||||
verbose_println('Trying server url: $url')
|
||||
verbose_println('Trying server url: ${url}')
|
||||
http.head(url) or {
|
||||
verbose_println(' $url failed.')
|
||||
verbose_println(' ${url} failed.')
|
||||
continue
|
||||
}
|
||||
return url
|
||||
@ -720,7 +720,7 @@ fn verbose_println(s string) {
|
||||
|
||||
fn get_mod_by_url(name string) ?Mod {
|
||||
if purl := urllib.parse(name) {
|
||||
verbose_println('purl: $purl')
|
||||
verbose_println('purl: ${purl}')
|
||||
mod := Mod{
|
||||
name: purl.path.trim_left('/').trim_right('/').replace('/', '.')
|
||||
url: name
|
||||
@ -728,7 +728,7 @@ fn get_mod_by_url(name string) ?Mod {
|
||||
verbose_println(mod.str())
|
||||
return mod
|
||||
}
|
||||
return error('invalid url: $name')
|
||||
return error('invalid url: ${name}')
|
||||
}
|
||||
|
||||
fn get_module_meta_info(name string) ?Mod {
|
||||
@ -738,19 +738,19 @@ fn get_module_meta_info(name string) ?Mod {
|
||||
mut errors := []string{}
|
||||
|
||||
for server_url in vpm_server_urls {
|
||||
modurl := server_url + '/jsmod/$name'
|
||||
verbose_println('Retrieving module metadata from: "$modurl" ...')
|
||||
modurl := server_url + '/jsmod/${name}'
|
||||
verbose_println('Retrieving module metadata from: "${modurl}" ...')
|
||||
r := http.get(modurl) or {
|
||||
errors << 'Http server did not respond to our request for "$modurl" .'
|
||||
errors << 'Error details: $err'
|
||||
errors << 'Http server did not respond to our request for "${modurl}" .'
|
||||
errors << 'Error details: ${err}'
|
||||
continue
|
||||
}
|
||||
if r.status_code == 404 || r.body.trim_space() == '404' {
|
||||
errors << 'Skipping module "$name", since "$server_url" reported that "$name" does not exist.'
|
||||
errors << 'Skipping module "${name}", since "${server_url}" reported that "${name}" does not exist.'
|
||||
continue
|
||||
}
|
||||
if r.status_code != 200 {
|
||||
errors << 'Skipping module "$name", since "$server_url" responded with $r.status_code http status code. Please try again later.'
|
||||
errors << 'Skipping module "${name}", since "${server_url}" responded with ${r.status_code} http status code. Please try again later.'
|
||||
continue
|
||||
}
|
||||
s := r.body
|
||||
@ -760,11 +760,11 @@ fn get_module_meta_info(name string) ?Mod {
|
||||
continue
|
||||
}
|
||||
mod := json.decode(Mod, s) or {
|
||||
errors << 'Skipping module "$name", since its information is not in json format.'
|
||||
errors << 'Skipping module "${name}", since its information is not in json format.'
|
||||
continue
|
||||
}
|
||||
if '' == mod.url || '' == mod.name {
|
||||
errors << 'Skipping module "$name", since it is missing name or url information.'
|
||||
errors << 'Skipping module "${name}", since it is missing name or url information.'
|
||||
continue
|
||||
}
|
||||
return mod
|
||||
@ -778,9 +778,9 @@ fn vpm_show(module_names []string) {
|
||||
if module_name !in installed_modules {
|
||||
module_meta_info := get_module_meta_info(module_name) or { continue }
|
||||
print('
|
||||
Name: $module_meta_info.name
|
||||
Homepage: $module_meta_info.url
|
||||
Downloads: $module_meta_info.nr_downloads
|
||||
Name: ${module_meta_info.name}
|
||||
Homepage: ${module_meta_info.url}
|
||||
Downloads: ${module_meta_info.nr_downloads}
|
||||
Installed: False
|
||||
--------
|
||||
')
|
||||
@ -788,13 +788,13 @@ Installed: False
|
||||
}
|
||||
path := os.join_path(os.vmodules_dir(), module_name.replace('.', os.path_separator))
|
||||
mod := vmod.from_file(os.join_path(path, 'v.mod')) or { continue }
|
||||
print('Name: $mod.name
|
||||
Version: $mod.version
|
||||
Description: $mod.description
|
||||
Homepage: $mod.repo_url
|
||||
Author: $mod.author
|
||||
License: $mod.license
|
||||
Location: $path
|
||||
print('Name: ${mod.name}
|
||||
Version: ${mod.version}
|
||||
Description: ${mod.description}
|
||||
Homepage: ${mod.repo_url}
|
||||
Author: ${mod.author}
|
||||
License: ${mod.license}
|
||||
Location: ${path}
|
||||
Requires: ${mod.dependencies.join(', ')}
|
||||
--------
|
||||
')
|
||||
|
@ -107,7 +107,7 @@ fn (mut r Repl) checks() bool {
|
||||
|
||||
fn (r &Repl) function_call(line string) (bool, FnType) {
|
||||
for function in r.functions_name {
|
||||
is_function_definition := line.replace(' ', '').starts_with('$function:=')
|
||||
is_function_definition := line.replace(' ', '').starts_with('${function}:=')
|
||||
if line.starts_with(function) && !is_function_definition {
|
||||
// TODO(vincenzopalazzo) store the type of the function here
|
||||
fntype := r.check_fn_type_kind(line)
|
||||
@ -141,7 +141,7 @@ fn (r &Repl) is_function_call(line string) bool {
|
||||
fn (r &Repl) import_to_source_code() []string {
|
||||
mut imports_line := []string{}
|
||||
for mod in r.modules {
|
||||
mut import_str := 'import $mod'
|
||||
mut import_str := 'import ${mod}'
|
||||
if mod in r.alias {
|
||||
import_str += ' as ${r.alias[mod]}'
|
||||
}
|
||||
@ -177,7 +177,7 @@ fn (r &Repl) current_source_code(should_add_temp_lines bool, not_add_print bool)
|
||||
// do not return anything, while others return results.
|
||||
// This function checks which one we have:
|
||||
fn (r &Repl) check_fn_type_kind(new_line string) FnType {
|
||||
source_code := r.current_source_code(true, false) + '\nprintln($new_line)'
|
||||
source_code := r.current_source_code(true, false) + '\nprintln(${new_line})'
|
||||
check_file := os.join_path(r.folder, '${rand.ulid()}.vrepl.check.v')
|
||||
os.write_file(check_file, source_code) or { panic(err) }
|
||||
defer {
|
||||
@ -228,11 +228,11 @@ fn (mut r Repl) list_source() {
|
||||
}
|
||||
|
||||
fn highlight_console_command(command string) string {
|
||||
return term.bright_white(term.bright_bg_black(' $command '))
|
||||
return term.bright_white(term.bright_bg_black(' ${command} '))
|
||||
}
|
||||
|
||||
fn highlight_repl_command(command string) string {
|
||||
return term.bright_white(term.bg_blue(' $command '))
|
||||
return term.bright_white(term.bg_blue(' ${command} '))
|
||||
}
|
||||
|
||||
fn print_welcome_screen() {
|
||||
@ -253,19 +253,19 @@ fn print_welcome_screen() {
|
||||
term.bright_blue(r' \__/ '),
|
||||
]
|
||||
help_text := [
|
||||
'Welcome to the V REPL (for help with V itself, type $cmd_exit, then run $cmd_v_help).',
|
||||
'Welcome to the V REPL (for help with V itself, type ${cmd_exit}, then run ${cmd_v_help}).',
|
||||
'Note: the REPL is highly experimental. For best V experience, use a text editor, ',
|
||||
'save your code in a $file_main file and execute: $cmd_v_run',
|
||||
'${version.full_v_version(false)} . Use $cmd_list to see the accumulated program so far.',
|
||||
'Use Ctrl-C or $cmd_exit to exit, or $cmd_help to see other available commands.',
|
||||
'save your code in a ${file_main} file and execute: ${cmd_v_run}',
|
||||
'${version.full_v_version(false)} . Use ${cmd_list} to see the accumulated program so far.',
|
||||
'Use Ctrl-C or ${cmd_exit} to exit, or ${cmd_help} to see other available commands.',
|
||||
]
|
||||
if width >= 97 {
|
||||
eprintln('${vlogo[0]}')
|
||||
eprintln('${vlogo[1]} $vbar ${help_text[0]}')
|
||||
eprintln('${vlogo[2]} $vbar ${help_text[1]}')
|
||||
eprintln('${vlogo[3]} $vbar ${help_text[2]}')
|
||||
eprintln('${vlogo[4]} $vbar ${help_text[3]}')
|
||||
eprintln('${vlogo[5]} $vbar ${help_text[4]}')
|
||||
eprintln('${vlogo[1]} ${vbar} ${help_text[0]}')
|
||||
eprintln('${vlogo[2]} ${vbar} ${help_text[1]}')
|
||||
eprintln('${vlogo[3]} ${vbar} ${help_text[2]}')
|
||||
eprintln('${vlogo[4]} ${vbar} ${help_text[3]}')
|
||||
eprintln('${vlogo[5]} ${vbar} ${help_text[4]}')
|
||||
eprintln('')
|
||||
} else {
|
||||
if width >= 14 {
|
||||
@ -286,7 +286,7 @@ fn run_repl(workdir string, vrepl_prefix string) int {
|
||||
if vstartup != '' {
|
||||
result := repl_run_vfile(vstartup) or {
|
||||
os.Result{
|
||||
output: '$vstartup file not found'
|
||||
output: '${vstartup} file not found'
|
||||
}
|
||||
}
|
||||
print('\n')
|
||||
@ -359,7 +359,7 @@ fn run_repl(workdir string, vrepl_prefix string) int {
|
||||
r.line = ''
|
||||
}
|
||||
if r.line == 'debug_repl' {
|
||||
eprintln('repl: $r')
|
||||
eprintln('repl: ${r}')
|
||||
continue
|
||||
}
|
||||
if r.line == 'reset' {
|
||||
@ -385,7 +385,7 @@ fn run_repl(workdir string, vrepl_prefix string) int {
|
||||
r.line = 'println(' + r.line[1..] + ')'
|
||||
}
|
||||
if r.line.starts_with('print') {
|
||||
source_code := r.current_source_code(false, false) + '\n$r.line\n'
|
||||
source_code := r.current_source_code(false, false) + '\n${r.line}\n'
|
||||
os.write_file(temp_file, source_code) or { panic(err) }
|
||||
s := repl_run_vfile(temp_file) or { return 1 }
|
||||
if s.output.len > r.last_output.len {
|
||||
@ -438,8 +438,8 @@ fn run_repl(workdir string, vrepl_prefix string) int {
|
||||
is_statement = true
|
||||
}
|
||||
if !is_statement && (!func_call || fntype == FnType.fn_type) && r.line != '' {
|
||||
temp_line = 'println($r.line)'
|
||||
source_code := r.current_source_code(false, false) + '\n$temp_line\n'
temp_line = 'println(${r.line})'
source_code := r.current_source_code(false, false) + '\n${temp_line}\n'
os.write_file(temp_file, source_code) or { panic(err) }
s := repl_run_vfile(temp_file) or { return 1 }
if s.output.len > r.last_output.len {
@ -452,10 +452,10 @@ fn run_repl(workdir string, vrepl_prefix string) int {
if temp_line.starts_with('import ') {
mod := r.line.fields()[1]
if mod !in r.modules {
temp_source_code = '$temp_line\n' + r.current_source_code(false, true)
temp_source_code = '${temp_line}\n' + r.current_source_code(false, true)
}
} else if temp_line.starts_with('#include ') {
temp_source_code = '$temp_line\n' + r.current_source_code(false, false)
temp_source_code = '${temp_line}\n' + r.current_source_code(false, false)
} else {
for i, l in r.lines {
if (l.starts_with('for ') || l.starts_with('if ')) && l.contains('println') {
@ -463,7 +463,7 @@ fn run_repl(workdir string, vrepl_prefix string) int {
break
}
}
temp_source_code = r.current_source_code(true, false) + '\n$temp_line\n'
temp_source_code = r.current_source_code(true, false) + '\n${temp_line}\n'
}
os.write_file(temp_file, temp_source_code) or { panic(err) }
s := repl_run_vfile(temp_file) or { return 1 }
@ -547,7 +547,7 @@ fn main() {
}
fn rerror(s string) {
println('V repl error: $s')
println('V repl error: ${s}')
os.flush()
}
@ -578,7 +578,7 @@ fn cleanup_files(file string) {
fn repl_run_vfile(file string) ?os.Result {
$if trace_repl_temp_files ? {
eprintln('>> repl_run_vfile file: $file')
eprintln('>> repl_run_vfile file: ${file}')
}
s := os.execute('${os.quoted_path(vexe)} -repl run ${os.quoted_path(file)}')
if s.exit_code < 0 {
@ -21,8 +21,8 @@ fn main() {
for tok.kind != .eof {
tok = scanner.scan()
pos := tok.pos()
location := '$path:${pos.line_nr + 1}:${pos.col + 1}:'
println('${location:-32} | pos: ${pos.pos:-5} | $tok.debug()')
location := '${path}:${pos.line_nr + 1}:${pos.col + 1}:'
println('${location:-32} | pos: ${pos.pos:-5} | ${tok.debug()}')
}
}
}
@ -20,15 +20,15 @@ fn main() {
vexe_name := os.file_name(vexe)
short_v_name := vexe_name.all_before('.')
//
recompilation.must_be_enabled(vroot, 'Please install V from source, to use `$vexe_name self` .')
recompilation.must_be_enabled(vroot, 'Please install V from source, to use `${vexe_name} self` .')
os.chdir(vroot)!
os.setenv('VCOLORS', 'always', true)
args := os.args[1..].filter(it != 'self')
jargs := args.join(' ')
obinary := cmdline.option(args, '-o', '')
sargs := if obinary != '' { jargs } else { '$jargs -o v2' }
cmd := '${os.quoted_path(vexe)} $sargs ${os.quoted_path('cmd/v')}'
options := if args.len > 0 { '($sargs)' } else { '' }
sargs := if obinary != '' { jargs } else { '${jargs} -o v2' }
cmd := '${os.quoted_path(vexe)} ${sargs} ${os.quoted_path('cmd/v')}'
options := if args.len > 0 { '(${sargs})' } else { '' }
println('V self compiling ${options}...')
compile(vroot, cmd)
if obinary != '' {
@ -37,13 +37,13 @@ fn main() {
|
||||
return
|
||||
}
|
||||
backup_old_version_and_rename_newer(short_v_name) or { panic(err.msg()) }
|
||||
println('V built successfully as executable "$vexe_name".')
|
||||
println('V built successfully as executable "${vexe_name}".')
|
||||
}
|
||||
|
||||
fn compile(vroot string, cmd string) {
|
||||
result := os.execute_or_exit(cmd)
|
||||
if result.exit_code != 0 {
|
||||
eprintln('cannot compile to `$vroot`: \n$result.output')
|
||||
eprintln('cannot compile to `${vroot}`: \n${result.output}')
|
||||
exit(1)
|
||||
}
|
||||
if result.output.len > 0 {
|
||||
@ -59,34 +59,34 @@ fn list_folder(short_v_name string, bmessage string, message string) {
|
||||
println(bmessage)
|
||||
}
|
||||
if os.user_os() == 'windows' {
|
||||
os.system('dir $short_v_name*.exe')
|
||||
os.system('dir ${short_v_name}*.exe')
|
||||
} else {
|
||||
os.system('ls -lartd $short_v_name*')
|
||||
os.system('ls -lartd ${short_v_name}*')
|
||||
}
|
||||
println(message)
|
||||
}
|
||||
|
||||
fn backup_old_version_and_rename_newer(short_v_name string) !bool {
|
||||
mut errors := []string{}
|
||||
short_v_file := if os.user_os() == 'windows' { '${short_v_name}.exe' } else { '$short_v_name' }
|
||||
short_v_file := if os.user_os() == 'windows' { '${short_v_name}.exe' } else { '${short_v_name}' }
|
||||
short_v2_file := if os.user_os() == 'windows' { 'v2.exe' } else { 'v2' }
|
||||
short_bak_file := if os.user_os() == 'windows' { 'v_old.exe' } else { 'v_old' }
|
||||
v_file := os.real_path(short_v_file)
|
||||
v2_file := os.real_path(short_v2_file)
|
||||
bak_file := os.real_path(short_bak_file)
|
||||
|
||||
list_folder(short_v_name, 'before:', 'removing $bak_file ...')
|
||||
list_folder(short_v_name, 'before:', 'removing ${bak_file} ...')
|
||||
if os.exists(bak_file) {
|
||||
os.rm(bak_file) or { errors << 'failed removing $bak_file: $err.msg()' }
|
||||
os.rm(bak_file) or { errors << 'failed removing ${bak_file}: ${err.msg()}' }
|
||||
}
|
||||
|
||||
list_folder(short_v_name, '', 'moving $v_file to $bak_file ...')
|
||||
list_folder(short_v_name, '', 'moving ${v_file} to ${bak_file} ...')
|
||||
os.mv(v_file, bak_file) or { errors << err.msg() }
|
||||
|
||||
list_folder(short_v_name, '', 'removing $v_file ...')
|
||||
list_folder(short_v_name, '', 'removing ${v_file} ...')
|
||||
os.rm(v_file) or {}
|
||||
|
||||
list_folder(short_v_name, '', 'moving $v2_file to $v_file ...')
|
||||
list_folder(short_v_name, '', 'moving ${v2_file} to ${v_file} ...')
|
||||
os.mv_by_cp(v2_file, v_file) or { panic(err.msg()) }
|
||||
|
||||
list_folder(short_v_name, 'after:', '')
|
||||
|
@ -15,7 +15,7 @@ fn main() {
|
||||
if os.is_dir(freetype_folder) {
|
||||
println('Thirdparty "freetype" is already installed.')
|
||||
} else {
|
||||
s := os.execute('git clone --depth=1 $freetype_repo_url $freetype_folder')
|
||||
s := os.execute('git clone --depth=1 ${freetype_repo_url} ${freetype_folder}')
|
||||
if s.exit_code != 0 {
|
||||
panic(s.output)
|
||||
}
|
||||
|
@ -53,9 +53,9 @@ const (
|
||||
|
||||
shdc_version = shdc_full_hash[0..8]
|
||||
shdc_urls = {
|
||||
'windows': 'https://github.com/floooh/sokol-tools-bin/raw/$shdc_full_hash/bin/win32/sokol-shdc.exe'
|
||||
'macos': 'https://github.com/floooh/sokol-tools-bin/raw/$shdc_full_hash/bin/osx/sokol-shdc'
|
||||
'linux': 'https://github.com/floooh/sokol-tools-bin/raw/$shdc_full_hash/bin/linux/sokol-shdc'
|
||||
'windows': 'https://github.com/floooh/sokol-tools-bin/raw/${shdc_full_hash}/bin/win32/sokol-shdc.exe'
|
||||
'macos': 'https://github.com/floooh/sokol-tools-bin/raw/${shdc_full_hash}/bin/osx/sokol-shdc'
|
||||
'linux': 'https://github.com/floooh/sokol-tools-bin/raw/${shdc_full_hash}/bin/linux/sokol-shdc'
|
||||
}
|
||||
shdc_version_file = os.join_path(cache_dir, 'sokol-shdc.version')
|
||||
shdc = shdc_exe()
|
||||
@ -77,7 +77,7 @@ struct CompileOptions {
|
||||
|
||||
fn main() {
|
||||
if os.args.len == 1 {
|
||||
println('Usage: $tool_name PATH \n$tool_description\n$tool_name -h for more help...')
|
||||
println('Usage: ${tool_name} PATH \n${tool_description}\n${tool_name} -h for more help...')
|
||||
exit(1)
|
||||
}
|
||||
mut fp := flag.new_flag_parser(os.args[1..])
|
||||
@ -91,7 +91,7 @@ fn main() {
|
||||
show_help: fp.bool('help', `h`, false, 'Show this help text.')
|
||||
force_update: fp.bool('force-update', `u`, false, 'Force update of the sokol-shdc tool.')
|
||||
verbose: fp.bool('verbose', `v`, false, 'Be verbose about the tools progress.')
|
||||
slangs: fp.string_multi('slang', `l`, 'Shader dialects to generate code for. Default is all.\n Available dialects: $supported_slangs')
|
||||
slangs: fp.string_multi('slang', `l`, 'Shader dialects to generate code for. Default is all.\n Available dialects: ${supported_slangs}')
|
||||
}
|
||||
if opt.show_help {
|
||||
println(fp.usage())
|
||||
@ -124,7 +124,7 @@ fn shader_program_name(shader_file string) string {
|
||||
// validate_shader_file returns an error if `shader_file` isn't valid.
|
||||
fn validate_shader_file(shader_file string) ! {
|
||||
shader_program := os.read_lines(shader_file) or {
|
||||
return error('shader program at "$shader_file" could not be opened for reading')
|
||||
return error('shader program at "${shader_file}" could not be opened for reading')
|
||||
}
|
||||
mut has_program_directive := false
|
||||
for line in shader_program {
|
||||
@ -134,7 +134,7 @@ fn validate_shader_file(shader_file string) ! {
|
||||
}
|
||||
}
|
||||
if !has_program_directive {
|
||||
return error('shader program at "$shader_file" is missing a "@program" directive.')
|
||||
return error('shader program at "${shader_file}" is missing a "@program" directive.')
|
||||
}
|
||||
}
|
||||
|
||||
@ -152,7 +152,7 @@ fn compile_shaders(opt Options, input_path string) ! {
|
||||
|
||||
if shader_files.len == 0 {
|
||||
if opt.verbose {
|
||||
eprintln('$tool_name found no shader files to compile for "$path"')
|
||||
eprintln('${tool_name} found no shader files to compile for "${path}"')
|
||||
}
|
||||
return
|
||||
}
|
||||
@ -190,23 +190,23 @@ fn compile_shader(opt CompileOptions, shader_file string) ! {
|
||||
|
||||
header_name := os.file_name(out_file)
|
||||
if opt.verbose {
|
||||
eprintln('$tool_name generating shader code for $slangs in header "$header_name" in "$path" from $shader_file')
|
||||
eprintln('${tool_name} generating shader code for ${slangs} in header "${header_name}" in "${path}" from ${shader_file}')
|
||||
}
|
||||
|
||||
cmd :=
|
||||
'${os.quoted_path(shdc)} --input ${os.quoted_path(shader_file)} --output ${os.quoted_path(out_file)} --slang ' +
|
||||
os.quoted_path(slangs.join(':'))
|
||||
if opt.verbose {
|
||||
eprintln('$tool_name executing:\n$cmd')
|
||||
eprintln('${tool_name} executing:\n${cmd}')
|
||||
}
|
||||
res := os.execute(cmd)
|
||||
if res.exit_code != 0 {
|
||||
eprintln('$tool_name failed generating shader includes:\n $res.output\n $cmd')
|
||||
eprintln('${tool_name} failed generating shader includes:\n ${res.output}\n ${cmd}')
|
||||
exit(1)
|
||||
}
|
||||
if opt.verbose {
|
||||
program_name := shader_program_name(shader_file)
|
||||
eprintln('$tool_name usage example in V:\n\nimport sokol.gfx\n\n#include "$header_name"\n\nfn C.${program_name}_shader_desc(gfx.Backend) &gfx.ShaderDesc\n')
|
||||
eprintln('${tool_name} usage example in V:\n\nimport sokol.gfx\n\n#include "${header_name}"\n\nfn C.${program_name}_shader_desc(gfx.Backend) &gfx.ShaderDesc\n')
|
||||
}
|
||||
}
|
||||
|
||||
@ -245,7 +245,7 @@ fn ensure_external_tools(opt Options) ! {
|
||||
if is_shdc_available && is_shdc_executable {
|
||||
if opt.verbose {
|
||||
version := os.read_file(shdc_version_file) or { 'unknown' }
|
||||
eprintln('$tool_name using sokol-shdc version $version at "$shdc"')
|
||||
eprintln('${tool_name} using sokol-shdc version ${version} at "${shdc}"')
|
||||
}
|
||||
return
|
||||
}
|
||||
@ -265,15 +265,15 @@ fn download_shdc(opt Options) ! {
|
||||
// We want to use the same, runtime, OS type as this tool is invoked on.
|
||||
download_url := shdc_urls[runtime_os] or { '' }
|
||||
if download_url == '' {
|
||||
return error('$tool_name failed to download an external dependency "sokol-shdc" for ${runtime_os}.\nThe supported host platforms for shader compilation is $supported_hosts')
|
||||
return error('${tool_name} failed to download an external dependency "sokol-shdc" for ${runtime_os}.\nThe supported host platforms for shader compilation is ${supported_hosts}')
|
||||
}
|
||||
update_to_shdc_version := os.read_file(shdc_version_file) or { shdc_version }
|
||||
file := shdc_exe()
|
||||
if opt.verbose {
|
||||
if shdc_version != update_to_shdc_version && os.exists(file) {
|
||||
eprintln('$tool_name updating sokol-shdc to version $update_to_shdc_version ...')
|
||||
eprintln('${tool_name} updating sokol-shdc to version ${update_to_shdc_version} ...')
|
||||
} else {
|
||||
eprintln('$tool_name installing sokol-shdc version $update_to_shdc_version ...')
|
||||
eprintln('${tool_name} installing sokol-shdc version ${update_to_shdc_version} ...')
|
||||
}
|
||||
}
|
||||
if os.exists(file) {
|
||||
@ -283,11 +283,11 @@ fn download_shdc(opt Options) ! {
|
||||
mut dtmp_file, dtmp_path := util.temp_file(util.TempFileOptions{ path: os.dir(file) })!
|
||||
dtmp_file.close()
|
||||
if opt.verbose {
|
||||
eprintln('$tool_name downloading sokol-shdc from $download_url')
|
||||
eprintln('${tool_name} downloading sokol-shdc from ${download_url}')
|
||||
}
|
||||
http.download_file(download_url, dtmp_path) or {
|
||||
os.rm(dtmp_path)!
|
||||
return error('$tool_name failed to download sokol-shdc needed for shader compiling: $err')
|
||||
return error('${tool_name} failed to download sokol-shdc needed for shader compiling: ${err}')
|
||||
}
|
||||
// Make it executable
|
||||
os.chmod(dtmp_path, 0o775)!
|
||||
|
@ -29,17 +29,17 @@ fn main() {
|
||||
mut failed_commands := []string{}
|
||||
for idx, example in files {
|
||||
cmd := '${os.quoted_path(@VEXE)} ${os.quoted_path(example)}'
|
||||
println('> compiling ${idx + 1:4}/${files.len:-4}: $cmd')
|
||||
println('> compiling ${idx + 1:4}/${files.len:-4}: ${cmd}')
|
||||
if 0 != os.system(cmd) {
|
||||
failed_commands << cmd
|
||||
}
|
||||
}
|
||||
if failed_commands.len > 0 {
|
||||
for idx, fcmd in failed_commands {
|
||||
eprintln('>>> FAILED command ${idx + 1:4}/${failed_commands.len:-4}: $fcmd')
|
||||
eprintln('>>> FAILED command ${idx + 1:4}/${failed_commands.len:-4}: ${fcmd}')
|
||||
}
|
||||
println('Summary: ${failed_commands.len:4}/${files.len:-4} file(s) failed to compile.')
|
||||
exit(1)
|
||||
}
|
||||
println('Summary: all $files.len file(s) compiled successfully.')
|
||||
println('Summary: all ${files.len} file(s) compiled successfully.')
|
||||
}
|
||||
|
@ -44,7 +44,7 @@ fn setup_symlink_github() {
|
||||
mut content := os.read_file(os.getenv('GITHUB_PATH')) or {
|
||||
panic('Failed to read GITHUB_PATH.')
|
||||
}
|
||||
content += '\n$os.getwd()\n'
|
||||
content += '\n${os.getwd()}\n'
|
||||
os.write_file(os.getenv('GITHUB_PATH'), content) or { panic('Failed to write to GITHUB_PATH.') }
|
||||
}
|
||||
|
||||
@ -59,7 +59,7 @@ fn setup_symlink_unix(vexe string) {
|
||||
}
|
||||
os.rm(link_path) or {}
|
||||
os.symlink(vexe, link_path) or {
|
||||
eprintln('Failed to create symlink "$link_path". Try again with sudo.')
|
||||
eprintln('Failed to create symlink "${link_path}". Try again with sudo.')
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
@ -90,19 +90,19 @@ fn setup_symlink_windows(vexe string) {
|
||||
os.symlink(vsymlink, vexe) or {
|
||||
// typically only fails if you're on a network drive (VirtualBox)
|
||||
// do batch file creation instead
|
||||
eprintln('Could not create a native symlink: $err')
|
||||
eprintln('Could not create a native symlink: ${err}')
|
||||
eprintln('Creating a batch file instead...')
|
||||
vsymlink = os.join_path(vsymlinkdir, 'v.bat')
|
||||
if os.exists(vsymlink) {
|
||||
os.rm(vsymlink) or { panic(err) }
|
||||
}
|
||||
os.write_file(vsymlink, '@echo off\n"$vexe" %*') or { panic(err) }
|
||||
eprintln('$vsymlink file written.')
|
||||
os.write_file(vsymlink, '@echo off\n"${vexe}" %*') or { panic(err) }
|
||||
eprintln('${vsymlink} file written.')
|
||||
}
|
||||
if !os.exists(vsymlink) {
|
||||
warn_and_exit('Could not create $vsymlink')
|
||||
warn_and_exit('Could not create ${vsymlink}')
|
||||
}
|
||||
println('Symlink $vsymlink to $vexe created.')
|
||||
println('Symlink ${vsymlink} to ${vexe} created.')
|
||||
println('Checking system %PATH%...')
|
||||
reg_sys_env_handle := get_reg_sys_env_handle() or {
|
||||
warn_and_exit(err.msg())
|
||||
@ -114,7 +114,7 @@ fn setup_symlink_windows(vexe string) {
|
||||
// }
|
||||
// if the above succeeded, and we cannot get the value, it may simply be empty
|
||||
sys_env_path := get_reg_value(reg_sys_env_handle, 'Path') or { '' }
|
||||
current_sys_paths := sys_env_path.split(os.path_delimiter).map(it.trim('/$os.path_separator'))
|
||||
current_sys_paths := sys_env_path.split(os.path_delimiter).map(it.trim('/${os.path_separator}'))
|
||||
mut new_paths := [vsymlinkdir]
|
||||
for p in current_sys_paths {
|
||||
if p == '' {
|
||||
@ -161,7 +161,7 @@ fn get_reg_sys_env_handle() ?voidptr {
|
||||
reg_key_path := 'Environment'
|
||||
reg_env_key := unsafe { nil } // or HKEY (HANDLE)
|
||||
if C.RegOpenKeyEx(os.hkey_current_user, reg_key_path.to_wide(), 0, 1 | 2, ®_env_key) != 0 {
|
||||
return error('Could not open "$reg_key_path" in the registry')
|
||||
return error('Could not open "${reg_key_path}" in the registry')
|
||||
}
|
||||
return reg_env_key
|
||||
}
|
||||
@ -175,7 +175,7 @@ fn get_reg_value(reg_env_key voidptr, key string) ?string {
|
||||
reg_value_size := u32(4095) // this is the max length (not for the registry, but for the system %PATH%)
|
||||
mut reg_value := unsafe { &u16(malloc(int(reg_value_size))) }
|
||||
if C.RegQueryValueExW(reg_env_key, key.to_wide(), 0, 0, reg_value, ®_value_size) != 0 {
|
||||
return error('Unable to get registry value for "$key".')
|
||||
return error('Unable to get registry value for "${key}".')
|
||||
}
|
||||
return unsafe { string_from_wide(reg_value) }
|
||||
}
|
||||
@ -187,7 +187,7 @@ fn set_reg_value(reg_key voidptr, key string, value string) ?bool {
|
||||
$if windows {
|
||||
if C.RegSetValueExW(reg_key, key.to_wide(), 0, C.REG_EXPAND_SZ, value.to_wide(),
|
||||
value.len * 2) != 0 {
|
||||
return error('Unable to set registry value for "$key". %PATH% may be too long.')
|
||||
return error('Unable to set registry value for "${key}". %PATH% may be too long.')
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
@ -29,14 +29,14 @@ fn main() {
|
||||
flush_stdout()
|
||||
println('')
|
||||
println(term.header_left(term_highlight('Summary of `v test-all`:'), '-'))
|
||||
println(term_highlight('Total runtime: $spent ms'))
|
||||
println(term_highlight('Total runtime: ${spent} ms'))
|
||||
for ocmd in oks {
|
||||
msg := if ocmd.okmsg != '' { ocmd.okmsg } else { ocmd.line }
|
||||
println(term.colorize(term.green, '> OK: $msg '))
|
||||
println(term.colorize(term.green, '> OK: ${msg} '))
|
||||
}
|
||||
for fcmd in fails {
|
||||
msg := if fcmd.errmsg != '' { fcmd.errmsg } else { fcmd.line }
|
||||
println(term.failed('> Failed:') + ' $msg')
|
||||
println(term.failed('> Failed:') + ' ${msg}')
|
||||
}
|
||||
flush_stdout()
|
||||
if fails.len > 0 {
|
||||
@ -76,28 +76,28 @@ mut:
|
||||
fn get_all_commands() []Command {
|
||||
mut res := []Command{}
|
||||
res << Command{
|
||||
line: '$vexe examples/hello_world.v'
|
||||
line: '${vexe} examples/hello_world.v'
|
||||
okmsg: 'V can compile hello world.'
|
||||
rmfile: 'examples/hello_world'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -o hhww.c examples/hello_world.v'
|
||||
line: '${vexe} -o hhww.c examples/hello_world.v'
|
||||
okmsg: 'V can output a .c file, without compiling further.'
|
||||
rmfile: 'hhww.c'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -skip-unused examples/hello_world.v'
|
||||
line: '${vexe} -skip-unused examples/hello_world.v'
|
||||
okmsg: 'V can compile hello world with -skip-unused.'
|
||||
rmfile: 'examples/hello_world'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -skip-unused -profile - examples/hello_world.v'
|
||||
line: '${vexe} -skip-unused -profile - examples/hello_world.v'
|
||||
okmsg: 'V can compile hello world with both -skip-unused and -profile .'
|
||||
rmfile: 'examples/hello_world'
|
||||
}
|
||||
$if linux || macos {
|
||||
res << Command{
|
||||
line: '$vexe run examples/hello_world.v'
|
||||
line: '${vexe} run examples/hello_world.v'
|
||||
okmsg: 'V can run hello world.'
|
||||
runcmd: .execute
|
||||
expect: 'Hello, World!\n'
|
||||
@ -106,8 +106,8 @@ fn get_all_commands() []Command {
|
||||
for compiler_name in ['clang', 'gcc'] {
|
||||
if _ := os.find_abs_path_of_executable(compiler_name) {
|
||||
res << Command{
|
||||
line: '$vexe -cc $compiler_name -gc boehm run examples/hello_world.v'
|
||||
okmsg: '`v -cc $compiler_name -gc boehm run examples/hello_world.v` works'
|
||||
line: '${vexe} -cc ${compiler_name} -gc boehm run examples/hello_world.v'
|
||||
okmsg: '`v -cc ${compiler_name} -gc boehm run examples/hello_world.v` works'
|
||||
runcmd: .execute
|
||||
expect: 'Hello, World!\n'
|
||||
}
|
||||
@ -115,13 +115,13 @@ fn get_all_commands() []Command {
|
||||
}
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe interpret examples/hello_world.v'
|
||||
line: '${vexe} interpret examples/hello_world.v'
|
||||
okmsg: 'V can interpret hello world.'
|
||||
runcmd: .execute
|
||||
expect: 'Hello, World!\n'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe interpret examples/hanoi.v'
|
||||
line: '${vexe} interpret examples/hanoi.v'
|
||||
okmsg: 'V can interpret hanoi.v'
|
||||
runcmd: .execute
|
||||
starts_with: 'Disc 1 from A to C...\n'
|
||||
@ -129,121 +129,121 @@ fn get_all_commands() []Command {
|
||||
contains: 'Disc 7 from A to C...\n'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -o - examples/hello_world.v | grep "#define V_COMMIT_HASH" > /dev/null'
|
||||
line: '${vexe} -o - examples/hello_world.v | grep "#define V_COMMIT_HASH" > /dev/null'
|
||||
okmsg: 'V prints the generated source code to stdout with `-o -` .'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe run examples/v_script.vsh > /dev/null'
|
||||
line: '${vexe} run examples/v_script.vsh > /dev/null'
|
||||
okmsg: 'V can run the .VSH script file examples/v_script.vsh'
|
||||
}
|
||||
$if linux {
|
||||
res << Command{
|
||||
line: '$vexe -b native run examples/native/hello_world.v > /dev/null'
|
||||
line: '${vexe} -b native run examples/native/hello_world.v > /dev/null'
|
||||
okmsg: 'V compiles and runs examples/native/hello_world.v on the native backend for linux'
|
||||
}
|
||||
}
|
||||
// only compilation:
|
||||
res << Command{
|
||||
line: '$vexe -os linux -b native -o hw.linux examples/hello_world.v'
|
||||
line: '${vexe} -os linux -b native -o hw.linux examples/hello_world.v'
|
||||
okmsg: 'V compiles hello_world.v on the native backend for linux'
|
||||
rmfile: 'hw.linux'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -os macos -b native -o hw.macos examples/hello_world.v'
|
||||
line: '${vexe} -os macos -b native -o hw.macos examples/hello_world.v'
|
||||
okmsg: 'V compiles hello_world.v on the native backend for macos'
|
||||
rmfile: 'hw.macos'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -os windows -b native -o hw.exe examples/hello_world.v'
|
||||
line: '${vexe} -os windows -b native -o hw.exe examples/hello_world.v'
|
||||
okmsg: 'V compiles hello_world.v on the native backend for windows'
|
||||
rmfile: 'hw.exe'
|
||||
}
|
||||
//
|
||||
res << Command{
|
||||
line: '$vexe -b js -o hw.js examples/hello_world.v'
|
||||
line: '${vexe} -b js -o hw.js examples/hello_world.v'
|
||||
okmsg: 'V compiles hello_world.v on the JS backend'
|
||||
rmfile: 'hw.js'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -skip-unused -b js -o hw_skip_unused.js examples/hello_world.v'
|
||||
line: '${vexe} -skip-unused -b js -o hw_skip_unused.js examples/hello_world.v'
|
||||
okmsg: 'V compiles hello_world.v on the JS backend, with -skip-unused'
|
||||
rmfile: 'hw_skip_unused.js'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -skip-unused examples/2048'
|
||||
line: '${vexe} -skip-unused examples/2048'
|
||||
okmsg: 'V can compile 2048 with -skip-unused.'
|
||||
rmfile: 'examples/2048/2048'
|
||||
}
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -o vtmp cmd/v'
|
||||
line: '${vexe} -o vtmp cmd/v'
|
||||
okmsg: 'V can compile itself.'
|
||||
rmfile: 'vtmp'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -o vtmp_werror -cstrict cmd/v'
|
||||
line: '${vexe} -o vtmp_werror -cstrict cmd/v'
|
||||
okmsg: 'V can compile itself with -cstrict.'
|
||||
rmfile: 'vtmp_werror'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -o vtmp_autofree -autofree cmd/v'
|
||||
line: '${vexe} -o vtmp_autofree -autofree cmd/v'
|
||||
okmsg: 'V can compile itself with -autofree.'
|
||||
rmfile: 'vtmp_autofree'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -o vtmp_prealloc -prealloc cmd/v'
|
||||
line: '${vexe} -o vtmp_prealloc -prealloc cmd/v'
|
||||
okmsg: 'V can compile itself with -prealloc.'
|
||||
rmfile: 'vtmp_prealloc'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -o vtmp_unused -skip-unused cmd/v'
|
||||
line: '${vexe} -o vtmp_unused -skip-unused cmd/v'
|
||||
okmsg: 'V can compile itself with -skip-unused.'
|
||||
rmfile: 'vtmp_unused'
|
||||
}
|
||||
$if linux {
|
||||
res << Command{
|
||||
line: '$vexe -cc gcc -keepc -freestanding -o bel vlib/os/bare/bare_example_linux.v'
|
||||
line: '${vexe} -cc gcc -keepc -freestanding -o bel vlib/os/bare/bare_example_linux.v'
|
||||
okmsg: 'V can compile with -freestanding on Linux with GCC.'
|
||||
rmfile: 'bel'
|
||||
}
|
||||
|
||||
res << Command{
|
||||
line: '$vexe -cc gcc -keepc -freestanding -o str_array vlib/strconv/bare/str_array_example.v'
|
||||
line: '${vexe} -cc gcc -keepc -freestanding -o str_array vlib/strconv/bare/str_array_example.v'
|
||||
okmsg: 'V can compile & allocate memory with -freestanding on Linux with GCC.'
|
||||
rmfile: 'str_array'
|
||||
}
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe $vargs -progress test-cleancode'
|
||||
line: '${vexe} ${vargs} -progress test-cleancode'
|
||||
okmsg: 'All .v files are invariant when processed with `v fmt`'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe $vargs -progress test-fmt'
|
||||
line: '${vexe} ${vargs} -progress test-fmt'
|
||||
okmsg: 'All .v files can be processed with `v fmt`. Note: the result may not always be compilable, but `v fmt` should not crash.'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe $vargs -progress test-self'
|
||||
line: '${vexe} ${vargs} -progress test-self'
|
||||
okmsg: 'There are no _test.v file regressions.'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe $vargs -progress -W build-tools'
|
||||
line: '${vexe} ${vargs} -progress -W build-tools'
|
||||
okmsg: 'All tools can be compiled.'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe $vargs -progress -W build-examples'
|
||||
line: '${vexe} ${vargs} -progress -W build-examples'
|
||||
okmsg: 'All examples can be compiled.'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe check-md -hide-warnings .'
|
||||
line: '${vexe} check-md -hide-warnings .'
|
||||
label: 'Check ```v ``` code examples and formatting of .MD files...'
|
||||
okmsg: 'All .md files look good.'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe install nedpals.args'
|
||||
line: '${vexe} install nedpals.args'
|
||||
okmsg: '`v install` works.'
|
||||
}
|
||||
res << Command{
|
||||
line: '$vexe -usecache -cg examples/hello_world.v'
|
||||
line: '${vexe} -usecache -cg examples/hello_world.v'
|
||||
okmsg: '`v -usecache -cg` works.'
|
||||
rmfile: 'examples/hello_world'
|
||||
}
|
||||
@ -251,13 +251,13 @@ fn get_all_commands() []Command {
|
||||
// own #flags (tetris depends on gg, which uses sokol) can be compiled
|
||||
// with -usecache:
|
||||
res << Command{
|
||||
line: '$vexe -usecache examples/tetris/tetris.v'
|
||||
line: '${vexe} -usecache examples/tetris/tetris.v'
|
||||
okmsg: '`v -usecache` works.'
|
||||
rmfile: 'examples/tetris/tetris'
|
||||
}
|
||||
$if macos || linux {
|
||||
res << Command{
|
||||
line: '$vexe -o v.c cmd/v && cc -Werror v.c -lpthread -lm && rm -rf a.out'
|
||||
line: '${vexe} -o v.c cmd/v && cc -Werror v.c -lpthread -lm && rm -rf a.out'
|
||||
label: 'v.c should be buildable with no warnings...'
|
||||
okmsg: 'v.c can be compiled without warnings. This is good :)'
|
||||
rmfile: 'v.c'
|
||||
@ -265,7 +265,7 @@ fn get_all_commands() []Command {
|
||||
}
|
||||
$if linux {
|
||||
res << Command{
|
||||
line: '$vexe vlib/v/tests/bench/bench_stbi_load.v && prlimit -v10485760 vlib/v/tests/bench/bench_stbi_load'
|
||||
line: '${vexe} vlib/v/tests/bench/bench_stbi_load.v && prlimit -v10485760 vlib/v/tests/bench/bench_stbi_load'
|
||||
okmsg: 'STBI load does not leak with GC on, when loading images multiple times (use < 10MB)'
|
||||
runcmd: .execute
|
||||
contains: 'logo.png 1000 times.'
|
||||
@ -274,7 +274,7 @@ fn get_all_commands() []Command {
|
||||
}
|
||||
$if !windows {
|
||||
res << Command{
|
||||
line: '$vexe -raw-vsh-tmp-prefix tmp vlib/v/tests/script_with_no_extension'
|
||||
line: '${vexe} -raw-vsh-tmp-prefix tmp vlib/v/tests/script_with_no_extension'
|
||||
okmsg: 'V can crun a script, that lacks a .vsh extension'
|
||||
runcmd: .execute
|
||||
expect: 'Test\n'
|
||||
@ -282,7 +282,7 @@ fn get_all_commands() []Command {
|
||||
}
|
||||
|
||||
res << Command{
|
||||
line: '$vexe -raw-vsh-tmp-prefix tmp run vlib/v/tests/script_with_no_extension'
|
||||
line: '${vexe} -raw-vsh-tmp-prefix tmp run vlib/v/tests/script_with_no_extension'
|
||||
okmsg: 'V can run a script, that lacks a .vsh extension'
|
||||
runcmd: .execute
|
||||
expect: 'Test\n'
|
||||
@ -344,23 +344,23 @@ fn (mut cmd Command) run() {
|
||||
}
|
||||
//
|
||||
run_label := if is_failed { term.failed('FAILED') } else { term_highlight('OK') }
|
||||
println('> Running: "$cmd.line" took: $spent ms ... $run_label')
|
||||
println('> Running: "${cmd.line}" took: ${spent} ms ... ${run_label}')
|
||||
//
|
||||
if is_failed && is_failed_expected {
|
||||
eprintln('> expected:\n$cmd.expect')
|
||||
eprintln('> output:\n$cmd.output')
|
||||
eprintln('> expected:\n${cmd.expect}')
|
||||
eprintln('> output:\n${cmd.output}')
|
||||
}
|
||||
if is_failed && is_failed_starts_with {
|
||||
eprintln('> expected to start with:\n$cmd.starts_with')
|
||||
eprintln('> expected to start with:\n${cmd.starts_with}')
|
||||
eprintln('> output:\n${cmd.output#[..cmd.starts_with.len]}')
|
||||
}
|
||||
if is_failed && is_failed_ends_with {
|
||||
eprintln('> expected to end with:\n$cmd.ends_with')
|
||||
eprintln('> expected to end with:\n${cmd.ends_with}')
|
||||
eprintln('> output:\n${cmd.output#[-cmd.starts_with.len..]}')
|
||||
}
|
||||
if is_failed && is_failed_contains {
|
||||
eprintln('> expected to contain:\n$cmd.contains')
|
||||
eprintln('> output:\n$cmd.output')
|
||||
eprintln('> expected to contain:\n${cmd.contains}')
|
||||
eprintln('> output:\n${cmd.output}')
|
||||
}
|
||||
if vtest_nocleanup {
|
||||
return
|
||||
@ -371,7 +371,7 @@ fn (mut cmd Command) run() {
|
||||
file_existed = file_existed || rm_existing(cmd.rmfile + '.exe')
|
||||
}
|
||||
if !file_existed {
|
||||
eprintln('Expected file did not exist: $cmd.rmfile')
|
||||
eprintln('Expected file did not exist: ${cmd.rmfile}')
|
||||
cmd.ecode = 999
|
||||
}
|
||||
}
|
||||
|
@ -58,9 +58,9 @@ fn main() {
|
||||
|
||||
fn tsession(vargs string, tool_source string, tool_cmd string, tool_args string, flist []string, slist []string) testing.TestSession {
|
||||
os.chdir(vroot) or {}
|
||||
title_message := 'running $tool_cmd over most .v files'
|
||||
title_message := 'running ${tool_cmd} over most .v files'
|
||||
testing.eheader(title_message)
|
||||
mut test_session := testing.new_test_session('$vargs $tool_args', false)
|
||||
mut test_session := testing.new_test_session('${vargs} ${tool_args}', false)
|
||||
test_session.files << flist
|
||||
test_session.skip_files << slist
|
||||
util.prepare_tool_when_needed(tool_source)
|
||||
@ -93,10 +93,10 @@ fn v_test_vetting(vargs string) {
|
||||
if vet_session.benchmark.nfail > 0 || verify_session.benchmark.nfail > 0 {
|
||||
eprintln('\n')
|
||||
if vet_session.benchmark.nfail > 0 {
|
||||
eprintln('WARNING: `v vet` failed $vet_session.benchmark.nfail times.')
|
||||
eprintln('WARNING: `v vet` failed ${vet_session.benchmark.nfail} times.')
|
||||
}
|
||||
if verify_session.benchmark.nfail > 0 {
|
||||
eprintln('WARNING: `v fmt -verify` failed $verify_session.benchmark.nfail times.')
|
||||
eprintln('WARNING: `v fmt -verify` failed ${verify_session.benchmark.nfail} times.')
|
||||
}
|
||||
exit(1)
|
||||
}
|
||||
|
@ -19,13 +19,13 @@ fn v_test_formatting(vargs string) {
|
||||
all_v_files := v_files()
|
||||
util.prepare_tool_when_needed('vfmt.v')
|
||||
testing.eheader('Run "v fmt" over all .v files')
|
||||
mut vfmt_test_session := testing.new_test_session('$vargs fmt -worker', false)
|
||||
mut vfmt_test_session := testing.new_test_session('${vargs} fmt -worker', false)
|
||||
vfmt_test_session.files << all_v_files
|
||||
vfmt_test_session.skip_files << known_failing_exceptions
|
||||
vfmt_test_session.test()
|
||||
eprintln(vfmt_test_session.benchmark.total_message('running vfmt over V files'))
|
||||
if vfmt_test_session.benchmark.nfail > 0 {
|
||||
eprintln('\nWARNING: v fmt failed $vfmt_test_session.benchmark.nfail times.\n')
|
||||
eprintln('\nWARNING: v fmt failed ${vfmt_test_session.benchmark.nfail} times.\n')
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
|
@ -44,7 +44,7 @@ fn main() {
|
||||
mut context := process_cli_args()
|
||||
if context.is_worker {
|
||||
pid := os.getpid()
|
||||
context.log('> worker ${pid:5} starts parsing at cut_index: ${context.cut_index:5} | $context.path')
|
||||
context.log('> worker ${pid:5} starts parsing at cut_index: ${context.cut_index:5} | ${context.path}')
|
||||
// A worker's process job is to try to parse a single given file in context.path.
|
||||
// It can crash/panic freely.
|
||||
context.table = ast.new_table()
|
||||
@ -62,7 +62,7 @@ fn main() {
|
||||
exit(ecode_timeout)
|
||||
}(context.timeout_ms)
|
||||
_ := parser.parse_text(source, context.path, context.table, .skip_comments, context.pref)
|
||||
context.log('> worker ${pid:5} finished parsing $context.path')
|
||||
context.log('> worker ${pid:5} finished parsing ${context.path}')
|
||||
exit(0)
|
||||
} else {
|
||||
// The process supervisor should NOT crash/panic, unlike the workers.
|
||||
@ -161,17 +161,17 @@ fn (mut context Context) log(msg string) {
|
||||
if context.is_verbose {
|
||||
label := yellow('info')
|
||||
ts := time.now().format_ss_micro()
|
||||
eprintln('$label: $ts | $msg')
|
||||
eprintln('${label}: ${ts} | ${msg}')
|
||||
}
|
||||
}
|
||||
|
||||
fn (mut context Context) error(msg string) {
|
||||
label := red('error')
|
||||
eprintln('$label: $msg')
|
||||
eprintln('${label}: ${msg}')
|
||||
}
|
||||
|
||||
fn (mut context Context) expand_all_paths() {
|
||||
context.log('> context.all_paths before: $context.all_paths')
|
||||
context.log('> context.all_paths before: ${context.all_paths}')
|
||||
mut files := []string{}
|
||||
for path in context.all_paths {
|
||||
if os.is_dir(path) {
|
||||
@ -180,24 +180,24 @@ fn (mut context Context) expand_all_paths() {
|
||||
continue
|
||||
}
|
||||
if !path.ends_with('.v') && !path.ends_with('.vv') && !path.ends_with('.vsh') {
|
||||
context.error('`v test-parser` can only be used on .v/.vv/.vsh files.\nOffending file: "$path".')
|
||||
context.error('`v test-parser` can only be used on .v/.vv/.vsh files.\nOffending file: "${path}".')
|
||||
continue
|
||||
}
|
||||
if !os.exists(path) {
|
||||
context.error('"$path" does not exist.')
|
||||
context.error('"${path}" does not exist.')
|
||||
continue
|
||||
}
|
||||
files << path
|
||||
}
|
||||
context.all_paths = files
|
||||
context.log('> context.all_paths after: $context.all_paths')
|
||||
context.log('> context.all_paths after: ${context.all_paths}')
|
||||
}
|
||||
|
||||
fn (mut context Context) process_whole_file_in_worker(path string) (int, int) {
|
||||
context.path = path // needed for the progress bar
|
||||
context.log('> context.process_whole_file_in_worker path: $path')
|
||||
context.log('> context.process_whole_file_in_worker path: ${path}')
|
||||
if !(os.is_file(path) && os.is_readable(path)) {
|
||||
context.error('$path is not readable')
|
||||
context.error('${path} is not readable')
|
||||
return 1, 0
|
||||
}
|
||||
source := os.read_file(path) or { '' }
|
||||
@ -212,10 +212,10 @@ fn (mut context Context) process_whole_file_in_worker(path string) (int, int) {
|
||||
for i in 0 .. len {
|
||||
verbosity := if context.is_verbose { '-v' } else { '' }
|
||||
context.cut_index = i // needed for the progress bar
|
||||
cmd := '${os.quoted_path(context.myself)} $verbosity --worker --timeout_ms ${context.timeout_ms:5} --cut_index ${i:5} --path ${os.quoted_path(path)} '
|
||||
cmd := '${os.quoted_path(context.myself)} ${verbosity} --worker --timeout_ms ${context.timeout_ms:5} --cut_index ${i:5} --path ${os.quoted_path(path)} '
|
||||
context.log(cmd)
|
||||
mut res := os.execute(cmd)
|
||||
context.log('worker exit_code: $res.exit_code | worker output:\n$res.output')
|
||||
context.log('worker exit_code: ${res.exit_code} | worker output:\n${res.output}')
|
||||
if res.exit_code != 0 {
|
||||
fails++
|
||||
mut is_panic := false
|
||||
@ -232,10 +232,10 @@ fn (mut context Context) process_whole_file_in_worker(path string) (int, int) {
|
||||
} else {
|
||||
red('parser failure: crash, ${ecode_details[res.exit_code]}')
|
||||
}
|
||||
path_to_line := bold('$path:$line:$col:')
|
||||
path_to_line := bold('${path}:${line}:${col}:')
|
||||
err_line := last_line.trim_left('\t')
|
||||
println('$path_to_line $err')
|
||||
println('\t$line | $err_line')
|
||||
println('${path_to_line} ${err}')
|
||||
println('\t${line} | ${err_line}')
|
||||
println('')
|
||||
eprintln(res.output)
|
||||
}
|
||||
@ -269,7 +269,7 @@ fn (mut context Context) print_status() {
|
||||
return
|
||||
}
|
||||
term.cursor_up(1)
|
||||
eprint('\r $msg\n')
|
||||
eprint('\r ${msg}\n')
|
||||
}
|
||||
|
||||
fn (mut context Context) print_periodic_status() {
|
||||
|
@ -381,7 +381,7 @@ fn main() {
|
||||
tsession.test()
|
||||
eprintln(tsession.benchmark.total_message(title))
|
||||
if tsession.benchmark.nfail > 0 {
|
||||
eprintln('\nWARNING: failed $tsession.benchmark.nfail times.\n')
|
||||
eprintln('\nWARNING: failed ${tsession.benchmark.nfail} times.\n')
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
|
@ -61,7 +61,7 @@ fn main() {
|
||||
.ignore {}
|
||||
}
|
||||
} else {
|
||||
eprintln('\nUnrecognized test file `$targ`.\n `v test` can only be used with folders and/or _test.v files.\n')
|
||||
eprintln('\nUnrecognized test file `${targ}`.\n `v test` can only be used with folders and/or _test.v files.\n')
|
||||
show_usage()
|
||||
exit(1)
|
||||
}
|
||||
@ -198,7 +198,7 @@ fn (mut ctx Context) should_test_when_it_contains_matching_fns(path string, back
|
||||
}
|
||||
if tname.match_glob(pat) {
|
||||
if ctx.verbose {
|
||||
println('> compiling path: $path, since test fn `$tname` matches glob pattern `$pat`')
|
||||
println('> compiling path: ${path}, since test fn `${tname}` matches glob pattern `${pat}`')
|
||||
}
|
||||
return .test
|
||||
}
|
||||
|
@ -11,7 +11,7 @@ fn main() {
|
||||
self_idx := os.args.index('tracev')
|
||||
args := os.args[1..self_idx]
|
||||
args_str := args.join(' ')
|
||||
options := if args.len > 0 { '($args_str)' } else { '' }
|
||||
options := if args.len > 0 { '(${args_str})' } else { '' }
|
||||
println('Compiling a `tracev` executable ${options}...')
|
||||
os.system('${os.quoted_path(vexe)} -cg -d trace_parser -d trace_checker -d trace_gen -o tracev $args_str cmd/v')
|
||||
os.system('${os.quoted_path(vexe)} -cg -d trace_parser -d trace_checker -d trace_gen -o tracev ${args_str} cmd/v')
|
||||
}
|
||||
|
@ -44,7 +44,7 @@ fn main() {
|
||||
if !app.recompile_v() {
|
||||
app.show_current_v_version()
|
||||
eprintln('Recompiling V *failed*.')
|
||||
eprintln('Try running `$get_make_cmd_name()` .')
|
||||
eprintln('Try running `${get_make_cmd_name()}` .')
|
||||
exit(1)
|
||||
}
|
||||
app.recompile_vup()
|
||||
@ -75,14 +75,14 @@ fn (app App) update_from_master() {
|
||||
fn (app App) recompile_v() bool {
|
||||
// Note: app.vexe is more reliable than just v (which may be a symlink)
|
||||
opts := if app.is_prod { '-prod' } else { '' }
|
||||
vself := '${os.quoted_path(app.vexe)} $opts self'
|
||||
app.vprintln('> recompiling v itself with `$vself` ...')
|
||||
vself := '${os.quoted_path(app.vexe)} ${opts} self'
|
||||
app.vprintln('> recompiling v itself with `${vself}` ...')
|
||||
self_result := os.execute(vself)
|
||||
if self_result.exit_code == 0 {
|
||||
println(self_result.output.trim_space())
|
||||
return true
|
||||
} else {
|
||||
app.vprintln('`$vself` failed, running `make`...')
|
||||
app.vprintln('`${vself}` failed, running `make`...')
|
||||
app.vprintln(self_result.output.trim_space())
|
||||
}
|
||||
return app.make(vself)
|
||||
@ -102,7 +102,7 @@ fn (app App) make(vself string) bool {
|
||||
make := get_make_cmd_name()
|
||||
make_result := os.execute(make)
|
||||
if make_result.exit_code != 0 {
|
||||
eprintln('> $make failed:')
|
||||
eprintln('> ${make} failed:')
|
||||
eprintln('> make output:')
|
||||
eprintln(make_result.output)
|
||||
return false
|
||||
@ -117,30 +117,30 @@ fn (app App) show_current_v_version() {
|
||||
mut vversion := vout.output.trim_space()
|
||||
if vout.exit_code == 0 {
|
||||
latest_v_commit := vversion.split(' ').last().all_after('.')
|
||||
latest_v_commit_time := os.execute('git show -s --format=%ci $latest_v_commit')
|
||||
latest_v_commit_time := os.execute('git show -s --format=%ci ${latest_v_commit}')
|
||||
if latest_v_commit_time.exit_code == 0 {
|
||||
vversion += ', timestamp: ' + latest_v_commit_time.output.trim_space()
|
||||
}
|
||||
}
|
||||
println('Current V version: $vversion')
|
||||
println('Current V version: ${vversion}')
|
||||
}
|
||||
}
|
||||
|
||||
fn (app App) backup(file string) {
|
||||
backup_file := '${file}_old.exe'
|
||||
if os.exists(backup_file) {
|
||||
os.rm(backup_file) or { eprintln('failed removing $backup_file: $err.msg()') }
|
||||
os.rm(backup_file) or { eprintln('failed removing ${backup_file}: ${err.msg()}') }
|
||||
}
|
||||
os.mv(file, backup_file) or { eprintln('failed moving $file: $err.msg()') }
|
||||
os.mv(file, backup_file) or { eprintln('failed moving ${file}: ${err.msg()}') }
|
||||
}
|
||||
|
||||
fn (app App) git_command(command string) {
|
||||
app.vprintln('git_command: git $command')
|
||||
git_result := os.execute('git $command')
|
||||
app.vprintln('git_command: git ${command}')
|
||||
git_result := os.execute('git ${command}')
|
||||
if git_result.exit_code < 0 {
|
||||
app.get_git()
|
||||
// Try it again with (maybe) git installed
|
||||
os.execute_or_exit('git $command')
|
||||
os.execute_or_exit('git ${command}')
|
||||
}
|
||||
if git_result.exit_code != 0 {
|
||||
eprintln(git_result.output)
|
||||
@ -153,7 +153,7 @@ fn (app App) get_git() {
|
||||
$if windows {
|
||||
println('Downloading git 32 bit for Windows, please wait.')
|
||||
// We'll use 32 bit because maybe someone out there is using 32-bit windows
|
||||
res_download := os.execute('bitsadmin.exe /transfer "vgit" https://github.com/git-for-windows/git/releases/download/v2.30.0.windows.2/Git-2.30.0.2-32-bit.exe "$os.getwd()/git32.exe"')
|
||||
res_download := os.execute('bitsadmin.exe /transfer "vgit" https://github.com/git-for-windows/git/releases/download/v2.30.0.windows.2/Git-2.30.0.2-32-bit.exe "${os.getwd()}/git32.exe"')
|
||||
if res_download.exit_code != 0 {
|
||||
eprintln('Unable to install git automatically: please install git manually')
|
||||
panic(res_download.output)
|
||||
|
@ -50,14 +50,14 @@ fn main() {
|
||||
}
|
||||
for path in paths {
|
||||
if !os.exists(path) {
|
||||
eprintln('File/folder $path does not exist')
|
||||
eprintln('File/folder ${path} does not exist')
|
||||
continue
|
||||
}
|
||||
if os.is_file(path) {
|
||||
vt.vet_file(path)
|
||||
}
|
||||
if os.is_dir(path) {
|
||||
vt.vprintln("vetting folder: '$path' ...")
|
||||
vt.vprintln("vetting folder: '${path}' ...")
|
||||
vfiles := os.walk_ext(path, '.v')
|
||||
vvfiles := os.walk_ext(path, '.vv')
|
||||
mut files := []string{}
|
||||
@ -91,7 +91,7 @@ fn (mut vt Vet) vet_file(path string) {
|
||||
// skip all /tests/ files, since usually their content is not
|
||||
// important enough to be documented/vetted, and they may even
|
||||
// contain intentionally invalid code.
|
||||
vt.vprintln("skipping test file: '$path' ...")
|
||||
vt.vprintln("skipping test file: '${path}' ...")
|
||||
return
|
||||
}
|
||||
vt.file = path
|
||||
@ -99,7 +99,7 @@ fn (mut vt Vet) vet_file(path string) {
|
||||
prefs.is_vet = true
|
||||
prefs.is_vsh = path.ends_with('.vsh')
|
||||
table := ast.new_table()
|
||||
vt.vprintln("vetting file '$path'...")
|
||||
vt.vprintln("vetting file '${path}'...")
|
||||
_, errors := parser.parse_vet_file(path, table, prefs)
|
||||
// Transfer errors from scanner and parser
|
||||
vt.errors << errors
|
||||
@ -175,7 +175,7 @@ fn (mut vt Vet) vet_fn_documentation(lines []string, line string, lnumber int) {
|
||||
}
|
||||
if grab {
|
||||
clean_line := line.all_before_last('{').trim(' ')
|
||||
vt.warn('Function documentation seems to be missing for "$clean_line".',
|
||||
vt.warn('Function documentation seems to be missing for "${clean_line}".',
|
||||
lnumber, .doc)
|
||||
}
|
||||
} else {
|
||||
@ -189,14 +189,15 @@ fn (mut vt Vet) vet_fn_documentation(lines []string, line string, lnumber int) {
|
||||
prev_line := lines[j]
|
||||
if prev_line.contains('}') { // We've looked back to the above scope, stop here
|
||||
break
|
||||
} else if prev_line.starts_with('// $fn_name ') {
|
||||
} else if prev_line.starts_with('// ${fn_name} ') {
|
||||
grab = false
|
||||
break
|
||||
} else if prev_line.starts_with('// $fn_name') && !prev_prev_line.starts_with('//') {
|
||||
} else if prev_line.starts_with('// ${fn_name}')
|
||||
&& !prev_prev_line.starts_with('//') {
|
||||
grab = false
|
||||
clean_line := line.all_before_last('{').trim(' ')
|
||||
vt.warn('The documentation for "$clean_line" seems incomplete.', lnumber,
|
||||
.doc)
|
||||
vt.warn('The documentation for "${clean_line}" seems incomplete.',
|
||||
lnumber, .doc)
|
||||
break
|
||||
} else if prev_line.starts_with('[') {
|
||||
tags << collect_tags(prev_line)
|
||||
@ -207,7 +208,7 @@ fn (mut vt Vet) vet_fn_documentation(lines []string, line string, lnumber int) {
|
||||
}
|
||||
if grab {
|
||||
clean_line := line.all_before_last('{').trim(' ')
|
||||
vt.warn('A function name is missing from the documentation of "$clean_line".',
|
||||
vt.warn('A function name is missing from the documentation of "${clean_line}".',
|
||||
lnumber, .doc)
|
||||
}
|
||||
}
|
||||
@ -222,8 +223,8 @@ fn (vt &Vet) vprintln(s string) {
|
||||
}
|
||||
|
||||
fn (vt &Vet) e2string(err vet.Error) string {
|
||||
mut kind := '$err.kind:'
|
||||
mut location := '$err.file_path:$err.pos.line_nr:'
|
||||
mut kind := '${err.kind}:'
|
||||
mut location := '${err.file_path}:${err.pos.line_nr}:'
|
||||
if vt.opt.use_color {
|
||||
kind = match err.kind {
|
||||
.warning { term.magenta(kind) }
|
||||
@ -232,7 +233,7 @@ fn (vt &Vet) e2string(err vet.Error) string {
|
||||
kind = term.bold(kind)
|
||||
location = term.bold(location)
|
||||
}
|
||||
return '$location $kind $err.message'
|
||||
return '${location} ${kind} ${err.message}'
|
||||
}
|
||||
|
||||
fn (mut vt Vet) error(msg string, line int, fix vet.FixKind) {
|
||||
|
@ -99,11 +99,11 @@ mut:
|
||||
|
||||
[if debug_vwatch ?]
|
||||
fn (mut context Context) elog(msg string) {
|
||||
eprintln('> vwatch $context.pid, $msg')
|
||||
eprintln('> vwatch ${context.pid}, ${msg}')
|
||||
}
|
||||
|
||||
fn (context &Context) str() string {
|
||||
return 'Context{ pid: $context.pid, is_worker: $context.is_worker, check_period_ms: $context.check_period_ms, vexe: $context.vexe, opts: $context.opts, is_exiting: $context.is_exiting, vfiles: $context.vfiles'
|
||||
return 'Context{ pid: ${context.pid}, is_worker: ${context.is_worker}, check_period_ms: ${context.check_period_ms}, vexe: ${context.vexe}, opts: ${context.opts}, is_exiting: ${context.is_exiting}, vfiles: ${context.vfiles}'
|
||||
}
|
||||
|
||||
fn (mut context Context) get_stats_for_affected_vfiles() []VFileStat {
|
||||
@ -112,7 +112,7 @@ fn (mut context Context) get_stats_for_affected_vfiles() []VFileStat {
|
||||
// The next command will make V parse the program, and print all .v files,
|
||||
// needed for its compilation, without actually compiling it.
|
||||
copts := context.opts.join(' ')
|
||||
cmd := '"$context.vexe" -silent -print-v-files $copts'
|
||||
cmd := '"${context.vexe}" -silent -print-v-files ${copts}'
|
||||
// context.elog('> cmd: $cmd')
|
||||
mut paths := []string{}
|
||||
if context.add_files.len > 0 && context.add_files[0] != '' {
|
||||
@ -168,7 +168,7 @@ fn (mut context Context) get_changed_vfiles() int {
|
||||
if existing_vfs.path == vfs.path {
|
||||
found = true
|
||||
if existing_vfs.mtime != vfs.mtime {
|
||||
context.elog('> new updates for file: $vfs')
|
||||
context.elog('> new updates for file: ${vfs}')
|
||||
changed++
|
||||
}
|
||||
break
|
||||
@ -181,7 +181,7 @@ fn (mut context Context) get_changed_vfiles() int {
|
||||
}
|
||||
context.vfiles = newfiles
|
||||
if changed > 0 {
|
||||
context.elog('> get_changed_vfiles: $changed')
|
||||
context.elog('> get_changed_vfiles: ${changed}')
|
||||
}
|
||||
return changed
|
||||
}
|
||||
@ -219,23 +219,23 @@ fn (mut context Context) kill_pgroup() {
|
||||
|
||||
fn (mut context Context) run_before_cmd() {
|
||||
if context.cmd_before_run != '' {
|
||||
context.elog('> run_before_cmd: "$context.cmd_before_run"')
|
||||
context.elog('> run_before_cmd: "${context.cmd_before_run}"')
|
||||
os.system(context.cmd_before_run)
|
||||
}
|
||||
}
|
||||
|
||||
fn (mut context Context) run_after_cmd() {
|
||||
if context.cmd_after_run != '' {
|
||||
context.elog('> run_after_cmd: "$context.cmd_after_run"')
|
||||
context.elog('> run_after_cmd: "${context.cmd_after_run}"')
|
||||
os.system(context.cmd_after_run)
|
||||
}
|
||||
}
|
||||
|
||||
fn (mut context Context) compilation_runner_loop() {
|
||||
cmd := '"$context.vexe" ${context.opts.join(' ')}'
|
||||
cmd := '"${context.vexe}" ${context.opts.join(' ')}'
|
||||
_ := <-context.rerun_channel
|
||||
for {
|
||||
context.elog('>> loop: v_cycles: $context.v_cycles')
|
||||
context.elog('>> loop: v_cycles: ${context.v_cycles}')
|
||||
if context.clear_terminal {
|
||||
term.clear()
|
||||
}
|
||||
@ -246,7 +246,7 @@ fn (mut context Context) compilation_runner_loop() {
|
||||
context.child_process.set_args(context.opts)
|
||||
context.child_process.run()
|
||||
if !context.silent {
|
||||
eprintln('$timestamp: $cmd | pid: ${context.child_process.pid:7d} | reload cycle: ${context.v_cycles:5d}')
|
||||
eprintln('${timestamp}: ${cmd} | pid: ${context.child_process.pid:7d} | reload cycle: ${context.v_cycles:5d}')
|
||||
}
|
||||
for {
|
||||
mut notalive_count := 0
|
||||
@ -286,7 +286,7 @@ fn (mut context Context) compilation_runner_loop() {
|
||||
}
|
||||
}
|
||||
if !context.child_process.is_alive() {
|
||||
context.elog('> child_process is no longer alive | notalive_count: $notalive_count')
|
||||
context.elog('> child_process is no longer alive | notalive_count: ${notalive_count}')
|
||||
context.child_process.wait()
|
||||
context.child_process.close()
|
||||
if notalive_count == 0 {
|
||||
@ -333,17 +333,17 @@ fn main() {
|
||||
exit(0)
|
||||
}
|
||||
remaining_options := fp.finalize() or {
|
||||
eprintln('Error: $err')
|
||||
eprintln('Error: ${err}')
|
||||
exit(1)
|
||||
}
|
||||
context.opts = remaining_options
|
||||
context.elog('>>> context.pid: $context.pid')
|
||||
context.elog('>>> context.vexe: $context.vexe')
|
||||
context.elog('>>> context.opts: $context.opts')
|
||||
context.elog('>>> context.is_worker: $context.is_worker')
|
||||
context.elog('>>> context.clear_terminal: $context.clear_terminal')
|
||||
context.elog('>>> context.add_files: $context.add_files')
|
||||
context.elog('>>> context.ignore_exts: $context.ignore_exts')
|
||||
context.elog('>>> context.pid: ${context.pid}')
|
||||
context.elog('>>> context.vexe: ${context.vexe}')
|
||||
context.elog('>>> context.opts: ${context.opts}')
|
||||
context.elog('>>> context.is_worker: ${context.is_worker}')
|
||||
context.elog('>>> context.clear_terminal: ${context.clear_terminal}')
|
||||
context.elog('>>> context.add_files: ${context.add_files}')
|
||||
context.elog('>>> context.ignore_exts: ${context.ignore_exts}')
|
||||
if context.is_worker {
|
||||
context.worker_main()
|
||||
} else {
|
||||
|
@ -41,14 +41,14 @@ fn (mut fdr Finder) configure_from_arguments(args []string) {
|
||||
}
|
||||
fdr.visib.set_from_str(cmdline.option(args, '-vis', '${Visibility.all}'))
|
||||
if fdr.symbol == .var && fdr.visib != .all {
|
||||
make_and_print_error('-vis $fdr.visib just can be setted with symbol_type:',
|
||||
make_and_print_error('-vis ${fdr.visib} just can be setted with symbol_type:',
|
||||
['fn', 'method', 'const', 'struct', 'enum', 'interface', 'regexp'],
|
||||
'$fdr.symbol')
|
||||
'${fdr.symbol}')
|
||||
}
|
||||
fdr.mutab.set_from_str(cmdline.option(args, '-mut', '${Mutability.any}'))
|
||||
if fdr.symbol != .var && fdr.mutab != .any {
|
||||
make_and_print_error('-mut $fdr.mutab just can be setted with symbol_type:',
['var'], '$fdr.symbol')
make_and_print_error('-mut ${fdr.mutab} just can be setted with symbol_type:',
['var'], '${fdr.symbol}')
}
fdr.modul = cmdline.option(args, '-mod', '')
fdr.dirs = cmdline.options(args, '-dir')
@ -95,28 +95,32 @@ fn (mut fdr Finder) search_for_matches() {
cp := r'\)'

// Build regex query
sy := '$fdr.symbol'
st := if fdr.receiver != '' { '$sp$op$sp[a-z].*$sp$fdr.receiver$cp$sp' } else { '.*' }
na := '$fdr.name'
sy := '${fdr.symbol}'
st := if fdr.receiver != '' {
'${sp}${op}${sp}[a-z].*${sp}${fdr.receiver}${cp}${sp}'
} else {
'.*'
}
na := '${fdr.name}'

query := match fdr.symbol {
.@fn {
'.*$sy$sp$na$sp${op}.*${cp}.*'
'.*${sy}${sp}${na}${sp}${op}.*${cp}.*'
}
.method {
'.*fn$st$na$sp${op}.*${cp}.*'
'.*fn${st}${na}${sp}${op}.*${cp}.*'
}
.var {
'.*$na$sp:=.*'
'.*${na}${sp}:=.*'
}
.@const {
'.*$na$sp = .*'
'.*${na}${sp} = .*'
}
.regexp {
'$na'
'${na}'
}
else {
'.*$sy$sp$na${sp}.*' // for struct, enum and interface
'.*${sy}${sp}${na}${sp}.*' // for struct, enum and interface
}
}
// println(query)
@ -191,7 +195,7 @@ fn (fdr Finder) show_results() {
println(maybe_color(term.bright_yellow, 'No Matches found'))
} else if verbose || header {
print(fdr)
println(maybe_color(term.bright_green, '$fdr.matches.len matches Found\n'))
println(maybe_color(term.bright_green, '${fdr.matches.len} matches Found\n'))
for result in fdr.matches {
result.show()
}
@ -203,13 +207,13 @@ fn (fdr Finder) show_results() {
}

fn (fdr Finder) str() string {
v := maybe_color(term.bright_red, '$fdr.visib')
m := maybe_color(term.bright_red, '$fdr.mutab')
st := if fdr.receiver != '' { ' ( _ $fdr.receiver)' } else { '' }
s := maybe_color(term.bright_magenta, '$fdr.symbol')
n := maybe_color(term.bright_cyan, '$fdr.name')
v := maybe_color(term.bright_red, '${fdr.visib}')
m := maybe_color(term.bright_red, '${fdr.mutab}')
st := if fdr.receiver != '' { ' ( _ ${fdr.receiver})' } else { '' }
s := maybe_color(term.bright_magenta, '${fdr.symbol}')
n := maybe_color(term.bright_cyan, '${fdr.name}')

mm := if fdr.modul != '' { maybe_color(term.blue, '$fdr.modul') } else { '' }
mm := if fdr.modul != '' { maybe_color(term.blue, '${fdr.modul}') } else { '' }
dd := if fdr.dirs.len != 0 {
fdr.dirs.map(maybe_color(term.blue, it))
} else {
@ -219,14 +223,14 @@ fn (fdr Finder) str() string {
dm := if fdr.dirs.len == 0 && fdr.modul == '' {
'all the project scope'
} else if fdr.dirs.len == 0 && fdr.modul != '' {
'module $mm'
'module ${mm}'
} else if fdr.dirs.len != 0 && fdr.modul == '' {
'directories: $dd'
'directories: ${dd}'
} else {
'module $mm searching within directories: $dd'
'module ${mm} searching within directories: ${dd}'
}

return '\nFind: $s$st $n | visibility: $v mutability: $m\nwithin $dm '
return '\nFind: ${s}${st} ${n} | visibility: ${v} mutability: ${m}\nwithin ${dm} '
}

// Match is one result of the search_for_matches() process
@ -238,11 +242,11 @@ struct Match {

fn (mtc Match) show() {
path := maybe_color(term.bright_magenta, mtc.path)
line := maybe_color(term.bright_yellow, '$mtc.line')
text := maybe_color(term.bright_green, '$mtc.text')
line := maybe_color(term.bright_yellow, '${mtc.line}')
text := maybe_color(term.bright_green, '${mtc.text}')
if verbose || format {
println('$path\n$line : [ $text ]\n')
println('${path}\n${line} : [ ${text} ]\n')
} else {
println('$path:$line: $text')
println('${path}:${line}: ${text}')
}
}

@ -177,6 +177,6 @@ fn resolve_module(path string) !string {
} else if os.is_dir(os.join_path(vlib_dir, path)) {
return os.join_path(vlib_dir, path)
} else {
return error('Path: $path not found')
return error('Path: ${path} not found')
}
}

@ -13,5 +13,5 @@ fn wipe_path(cpath string, label string) {
os.rmdir_all(cpath) or {}
}
os.mkdir_all(cpath) or {}
println('$label folder $cpath was wiped.')
println('${label} folder ${cpath} was wiped.')
}

@ -149,7 +149,7 @@ fn main() {
all_commands << external_tools
all_commands << other_commands
all_commands.sort()
eprintln(util.new_suggestion(command, all_commands).say('v: unknown command `$command`'))
eprintln(util.new_suggestion(command, all_commands).say('v: unknown command `${command}`'))
eprintln('Run ${term.highlight_command('v help')} for usage.')
exit(1)
}

doc/docs.md
@ -416,9 +416,9 @@ In this way, their values can be swapped without an intermediary variable.
|
||||
```v
|
||||
mut a := 0
|
||||
mut b := 1
|
||||
println('$a, $b') // 0, 1
|
||||
println('${a}, ${b}') // 0, 1
|
||||
a, b = b, a
|
||||
println('$a, $b') // 1, 0
|
||||
println('${a}, ${b}') // 1, 0
|
||||
```
|
||||
|
||||
### Declaration errors
|
||||
@ -605,7 +605,7 @@ converted to a string and embedded into the literal:
|
||||
|
||||
```v
|
||||
name := 'Bob'
|
||||
println('Hello, $name!') // Hello, Bob!
|
||||
println('Hello, ${name}!') // Hello, Bob!
|
||||
```
|
||||
|
||||
It also works with fields: `'age = $user.age'`. If you need more complex expressions, use `${}`:
|
||||
@ -697,7 +697,7 @@ or use string interpolation (preferred):
|
||||
|
||||
```v
|
||||
age := 12
|
||||
println('age = $age')
|
||||
println('age = ${age}')
|
||||
```
|
||||
|
||||
See all methods of [string](https://modules.vlang.io/index.html#string)
|
||||
@ -1334,7 +1334,7 @@ m := {
|
||||
'abc': 'def'
|
||||
}
|
||||
if v := m['abc'] {
|
||||
println('the map value for that key is: $v')
|
||||
println('the map value for that key is: ${v}')
|
||||
}
|
||||
```
|
||||
|
||||
@ -1380,7 +1380,7 @@ import os
|
||||
fn main() {
|
||||
// read text from stdin
|
||||
name := os.input('Enter your name: ')
|
||||
println('Hello, $name!')
|
||||
println('Hello, ${name}!')
|
||||
}
|
||||
```
|
||||
This program can use any public definitions from the `os` module, such
|
||||
@ -1404,7 +1404,7 @@ import os { input }
|
||||
fn main() {
|
||||
// read text from stdin
|
||||
name := input('Enter your name: ')
|
||||
println('Hello, $name!')
|
||||
println('Hello, ${name}!')
|
||||
}
|
||||
```
|
||||
Note: This will import the module as well. Also, this is not allowed for
|
||||
@ -1416,7 +1416,7 @@ You can import several specific symbols at once:
|
||||
import os { input, user_os }
|
||||
|
||||
name := input('Enter your name: ')
|
||||
println('Name: $name')
|
||||
println('Name: ${name}')
|
||||
os := user_os()
|
||||
println('Your OS is ${os}.')
|
||||
```
|
||||
@ -1457,7 +1457,7 @@ fn main() {
|
||||
day: 25
|
||||
}
|
||||
println(time.new_time(my_time).utc_string())
|
||||
println('Century: $my_time.century()')
|
||||
println('Century: ${my_time.century()}')
|
||||
}
|
||||
```
|
||||
|
||||
@ -1469,11 +1469,11 @@ fn main() {
|
||||
a := 10
|
||||
b := 20
|
||||
if a < b {
|
||||
println('$a < $b')
|
||||
println('${a} < ${b}')
|
||||
} else if a > b {
|
||||
println('$a > $b')
|
||||
println('${a} > ${b}')
|
||||
} else {
|
||||
println('$a == $b')
|
||||
println('${a} == ${b}')
|
||||
}
|
||||
```
|
||||
|
||||
@ -1739,7 +1739,7 @@ for num in numbers {
|
||||
}
|
||||
names := ['Sam', 'Peter']
|
||||
for i, name in names {
|
||||
println('$i) $name')
|
||||
println('${i}) ${name}')
|
||||
// Output: 0) Sam
|
||||
// 1) Peter
|
||||
}
|
||||
@ -1807,7 +1807,7 @@ m := {
|
||||
'two': 2
|
||||
}
|
||||
for key, value in m {
|
||||
println('$key -> $value')
|
||||
println('${key} -> ${value}')
|
||||
// Output: one -> 1
|
||||
// two -> 2
|
||||
}
|
||||
@ -1971,7 +1971,7 @@ fn write_log(s State) !int {
|
||||
// the file will be closed after the `error()` function
|
||||
// has returned - so the error message will still report
|
||||
// it as open
|
||||
return error('nothing written; file open: $f.is_opened')
|
||||
return error('nothing written; file open: ${f.is_opened}')
|
||||
}
|
||||
// the file will be closed here, too
|
||||
return 0
|
||||
@ -1979,10 +1979,10 @@ fn write_log(s State) !int {
|
||||
|
||||
fn main() {
|
||||
n := write_log(.return_error) or {
|
||||
println('Error: $err')
|
||||
println('Error: ${err}')
|
||||
0
|
||||
}
|
||||
println('$n bytes written')
|
||||
println('${n} bytes written')
|
||||
}
|
||||
```
|
||||
|
||||
@ -2444,7 +2444,7 @@ clr2 := Rgba32{
|
||||
|
||||
sz := sizeof(Rgba32)
|
||||
unsafe {
|
||||
println('Size: ${sz}B,clr1.b: $clr1.b,clr2.b: $clr2.b')
|
||||
println('Size: ${sz}B,clr1.b: ${clr1.b},clr2.b: ${clr2.b}')
|
||||
}
|
||||
```
|
||||
|
||||
@ -2859,7 +2859,7 @@ struct Color {
|
||||
}
|
||||
|
||||
pub fn (c Color) str() string {
|
||||
return '{$c.r, $c.g, $c.b}'
|
||||
return '{${c.r}, ${c.g}, ${c.b}}'
|
||||
}
|
||||
|
||||
red := Color{
|
||||
@ -3024,7 +3024,7 @@ enum Grocery {
|
||||
g1 := int(Grocery.apple)
|
||||
g2 := int(Grocery.orange)
|
||||
g3 := int(Grocery.pear)
|
||||
println('Grocery IDs: $g1, $g2, $g3')
|
||||
println('Grocery IDs: ${g1}, ${g2}, ${g3}')
|
||||
```
|
||||
|
||||
Output: `Grocery IDs: 0, 5, 6`.
|
||||
@ -3177,7 +3177,7 @@ fn main() {
|
||||
arr << dog
|
||||
arr << cat
|
||||
for item in arr {
|
||||
println('a $item.breed says: $item.speak()')
|
||||
println('a ${item.breed} says: ${item.speak()}')
|
||||
}
|
||||
}
|
||||
```
|
||||
@ -3240,9 +3240,9 @@ interface Something {}
|
||||
|
||||
fn announce(s Something) {
|
||||
if s is Dog {
|
||||
println('a $s.breed dog') // `s` is automatically cast to `Dog` (smart cast)
|
||||
println('a ${s.breed} dog') // `s` is automatically cast to `Dog` (smart cast)
|
||||
} else if s is Cat {
|
||||
println('a cat speaks $s.speak()')
|
||||
println('a cat speaks ${s.speak()}')
|
||||
} else {
|
||||
println('something else')
|
||||
}
|
||||
@ -3566,7 +3566,7 @@ fn (r Repo) find_user_by_id(id int) !User {
|
||||
return user
|
||||
}
|
||||
}
|
||||
return error('User $id not found')
|
||||
return error('User ${id} not found')
|
||||
}
|
||||
|
||||
// A version of the function using an optional
|
||||
@ -3709,7 +3709,7 @@ struct PathError {
|
||||
}
|
||||
|
||||
fn (err PathError) msg() string {
|
||||
return 'Failed to open path: $err.path'
|
||||
return 'Failed to open path: ${err.path}'
|
||||
}
|
||||
|
||||
fn try_open(path string) ? {
|
||||
@ -3750,7 +3750,7 @@ fn new_repo<T>(db DB) Repo<T> {
|
||||
// This is a generic function. V will generate it for every type it's used with.
|
||||
fn (r Repo<T>) find_by_id(id int) ?T {
|
||||
table_name := T.name // in this example getting the name of the type gives us the table name
|
||||
return r.db.query_one<T>('select * from $table_name where id = ?', id)
|
||||
return r.db.query_one<T>('select * from ${table_name} where id = ?', id)
|
||||
}
|
||||
|
||||
db := new_db()
|
||||
@ -3856,7 +3856,7 @@ fn main() {
|
||||
g := spawn get_hypot(54.06, 2.08) // spawn thread and get handle to it
|
||||
h1 := get_hypot(2.32, 16.74) // do some other calculation here
|
||||
h2 := g.wait() // get result from spawned thread
|
||||
println('Results: $h1, $h2') // prints `Results: 16.9, 54.1`
|
||||
println('Results: ${h1}, ${h2}') // prints `Results: 16.9, 54.1`
|
||||
}
|
||||
```
|
||||
|
||||
@ -3867,9 +3867,9 @@ using an array of threads.
|
||||
import time
|
||||
|
||||
fn task(id int, duration int) {
|
||||
println('task $id begin')
|
||||
println('task ${id} begin')
|
||||
time.sleep(duration * time.millisecond)
|
||||
println('task $id end')
|
||||
println('task ${id} end')
|
||||
}
|
||||
|
||||
fn main() {
|
||||
@ -3906,7 +3906,7 @@ fn main() {
|
||||
}
|
||||
// Join all tasks
|
||||
r := threads.wait()
|
||||
println('All jobs finished: $r')
|
||||
println('All jobs finished: ${r}')
|
||||
}
|
||||
|
||||
// Output: All jobs finished: [1, 4, 9, 16, 25, 36, 49, 64, 81]
|
||||
@ -4008,16 +4008,16 @@ fn main() {
|
||||
select {
|
||||
a := <-ch {
|
||||
// do something with `a`
|
||||
eprintln('> a: $a')
|
||||
eprintln('> a: ${a}')
|
||||
}
|
||||
b = <-ch2 {
|
||||
// do something with predeclared variable `b`
|
||||
eprintln('> b: $b')
|
||||
eprintln('> b: ${b}')
|
||||
}
|
||||
ch3 <- c {
|
||||
// do something if `c` was sent
|
||||
time.sleep(5 * time.millisecond)
|
||||
eprintln('> c: $c was send on channel ch3')
|
||||
eprintln('> c: ${c} was send on channel ch3')
|
||||
}
|
||||
500 * time.millisecond {
|
||||
// do something if no channel has become ready within 0.5s
|
||||
@ -4141,7 +4141,7 @@ struct User {
|
||||
|
||||
data := '{ "name": "Frodo", "lastName": "Baggins", "age": 25 }'
|
||||
user := json.decode(User, data) or {
|
||||
eprintln('Failed to decode json, error: $err')
|
||||
eprintln('Failed to decode json, error: ${err}')
|
||||
return
|
||||
}
|
||||
println(user.name)
|
||||
@ -4212,7 +4212,7 @@ strings that interpolate variables, etc.
|
||||
```v
|
||||
fn test_assertion_with_extra_message_failure() {
|
||||
for i in 0 .. 100 {
|
||||
assert i * 2 - 45 < 75 + 10, 'assertion failed for i: $i'
|
||||
assert i * 2 - 45 < 75 + 10, 'assertion failed for i: ${i}'
|
||||
}
|
||||
}
|
||||
```
|
||||
@ -4388,8 +4388,8 @@ fn draw_scene() {
|
||||
// ...
|
||||
name1 := 'abc'
|
||||
name2 := 'def ghi'
|
||||
draw_text('hello $name1', 10, 10)
|
||||
draw_text('hello $name2', 100, 10)
|
||||
draw_text('hello ${name1}', 10, 10)
|
||||
draw_text('hello ${name2}', 100, 10)
|
||||
draw_text(strings.repeat(`X`, 10000), 10, 50)
|
||||
// ...
|
||||
}
|
||||
@ -4452,7 +4452,7 @@ struct RefStruct {
|
||||
|
||||
fn main() {
|
||||
q, w := f()
|
||||
println('q: $q.r.n, w: $w.n')
|
||||
println('q: ${q.r.n}, w: ${w.n}')
|
||||
}
|
||||
|
||||
fn f() (RefStruct, &MyStruct) {
|
||||
@ -4469,7 +4469,7 @@ fn f() (RefStruct, &MyStruct) {
|
||||
r: &b
|
||||
}
|
||||
x := a.n + c.n
|
||||
println('x: $x')
|
||||
println('x: ${x}')
|
||||
return e, &c
|
||||
}
|
||||
```
|
||||
@ -4494,7 +4494,7 @@ fn main() {
|
||||
n: 13
|
||||
}
|
||||
x := q.f(&w) // references of `q` and `w` are passed
|
||||
println('q: $q\nx: $x')
|
||||
println('q: ${q}\nx: ${x}')
|
||||
}
|
||||
|
||||
fn (mut a MyStruct) f(b &MyStruct) int {
|
||||
@ -4537,7 +4537,7 @@ fn main() {
|
||||
r: &m
|
||||
}
|
||||
r.g()
|
||||
println('r: $r')
|
||||
println('r: ${r}')
|
||||
}
|
||||
|
||||
fn (mut r RefStruct) g() {
|
||||
@ -4630,7 +4630,7 @@ fn use_stack() {
|
||||
x := 7.5
|
||||
y := 3.25
|
||||
z := x + y
|
||||
println('$x $y $z')
|
||||
println('${x} ${y} ${z}')
|
||||
}
|
||||
|
||||
fn main() {
|
||||
@ -4640,7 +4640,7 @@ fn main() {
|
||||
}
|
||||
r.g()
|
||||
use_stack() // to erase invalid stack contents
|
||||
println('r: $r')
|
||||
println('r: ${r}')
|
||||
}
|
||||
|
||||
fn (mut r RefStruct) g() {
|
||||
@ -4711,20 +4711,20 @@ sql db {
|
||||
nr_customers := sql db {
|
||||
select count from Customer
|
||||
}
|
||||
println('number of all customers: $nr_customers')
|
||||
println('number of all customers: ${nr_customers}')
|
||||
// V syntax can be used to build queries
|
||||
uk_customers := sql db {
|
||||
select from Customer where country == 'uk' && nr_orders > 0
|
||||
}
|
||||
println(uk_customers.len)
|
||||
for customer in uk_customers {
|
||||
println('$customer.id - $customer.name')
|
||||
println('${customer.id} - ${customer.name}')
|
||||
}
|
||||
// by adding `limit 1` we tell V that there will be only one object
|
||||
customer := sql db {
|
||||
select from Customer where id == 1 limit 1
|
||||
}
|
||||
println('$customer.id - $customer.name')
|
||||
println('${customer.id} - ${customer.name}')
|
||||
// insert a new customer
|
||||
new_customer := Customer{
|
||||
name: 'Bob'
|
||||
@ -5198,7 +5198,7 @@ Another example, is if you want to embed the version/name from v.mod *inside* yo
|
||||
```v ignore
|
||||
import v.vmod
|
||||
vm := vmod.decode( @VMOD_FILE ) or { panic(err) }
|
||||
eprintln('$vm.name $vm.version\n $vm.description')
|
||||
eprintln('${vm.name} ${vm.version}\n ${vm.description}')
|
||||
```
|
||||
|
||||
### Compile-time reflection
|
||||
@ -5215,7 +5215,7 @@ struct User {
|
||||
fn main() {
|
||||
$for field in User.fields {
|
||||
$if field.typ is string {
|
||||
println('$field.name is of type string')
|
||||
println('${field.name} is of type string')
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -5243,7 +5243,7 @@ fn main() {
|
||||
}
|
||||
// Usage as expression
|
||||
os := $if windows { 'Windows' } $else { 'UNIX' }
|
||||
println('Using $os')
|
||||
println('Using ${os}')
|
||||
// $else-$if branches
|
||||
$if tinyc {
|
||||
println('tinyc')
|
||||
@ -5578,7 +5578,7 @@ struct Vec {
|
||||
}
|
||||
|
||||
fn (a Vec) str() string {
|
||||
return '{$a.x, $a.y}'
|
||||
return '{${a.x}, ${a.y}}'
|
||||
}
|
||||
|
||||
fn (a Vec) + (b Vec) Vec {
|
||||
@ -5724,9 +5724,9 @@ fn main() {
|
||||
}
|
||||
races_won_by_change := t.wait()
|
||||
atom_new := C.atomic_load_u32(&atom)
|
||||
println('atom: $atom_new, #exchanges: ${races_won_by_main + races_won_by_change}')
|
||||
println('atom: ${atom_new}, #exchanges: ${races_won_by_main + races_won_by_change}')
|
||||
// prints `atom: 31, #exchanges: 10000000`)
|
||||
println('races won by\n- `main()`: $races_won_by_main\n- `change()`: $races_won_by_change')
|
||||
println('races won by\n- `main()`: ${races_won_by_main}\n- `change()`: ${races_won_by_change}')
|
||||
}
|
||||
```
|
||||
|
||||
@ -5932,7 +5932,7 @@ fn main() {
|
||||
C.sqlite3_step(stmt)
|
||||
nr_users := C.sqlite3_column_int(stmt, 0)
|
||||
C.sqlite3_finalize(stmt)
|
||||
println('There are $nr_users users in the database.')
|
||||
println('There are ${nr_users} users in the database.')
|
||||
//
|
||||
error_msg := &char(0)
|
||||
query_all_users := 'select * from users'
|
||||
@ -6219,9 +6219,9 @@ asm amd64 {
|
||||
; r (a) as a // input
|
||||
r (b) as b
|
||||
}
|
||||
println('a: $a') // 100
|
||||
println('b: $b') // 20
|
||||
println('c: $c') // 120
|
||||
println('a: ${a}') // 100
|
||||
println('b: ${b}') // 20
|
||||
println('c: ${c}') // 120
|
||||
```
|
||||
|
||||
For more examples, see [github.com/vlang/v/tree/master/vlib/v/tests/assembly/asm_test.amd64.v](https://github.com/vlang/v/tree/master/vlib/v/tests/assembly/asm_test.amd64.v)
|
||||
@ -6289,7 +6289,7 @@ An example `deploy.vsh`:
|
||||
|
||||
// print command then execute it
|
||||
fn sh(cmd string) {
|
||||
println('❯ $cmd')
|
||||
println('❯ ${cmd}')
|
||||
print(execute_or_exit(cmd).output)
|
||||
}
|
||||
|
||||
@ -6314,7 +6314,7 @@ sh('ls')
|
||||
// for file in files {
|
||||
// if file.ends_with('.v') {
|
||||
// mv(file, 'build/') or {
|
||||
// println('err: $err')
|
||||
// println('err: ${err}')
|
||||
// return
|
||||
// }
|
||||
// }
|
||||
|
@ -492,7 +492,7 @@ fn (mut app App) ai_move() {
|
||||
bestprediction = predictions[move_idx]
|
||||
}
|
||||
}
|
||||
eprintln('Simulation time: ${think_time:4}ms | best $bestprediction')
|
||||
eprintln('Simulation time: ${think_time:4}ms | best ${bestprediction}')
|
||||
app.move(bestprediction.move)
|
||||
}
|
||||
|
||||
@ -613,8 +613,8 @@ fn (app &App) draw() {
|
||||
app.gg.draw_text(ww / 2, (m * 8 / 10) + ypad, msg2, app.label_format(.score_end))
|
||||
}
|
||||
// Draw at the end, so that it's on top of the victory / game over overlays
|
||||
app.gg.draw_text(labelx, labely, 'Points: $app.board.points', app.label_format(.points))
|
||||
app.gg.draw_text(ww - labelx, labely, 'Moves: $app.moves', app.label_format(.moves))
|
||||
app.gg.draw_text(labelx, labely, 'Points: ${app.board.points}', app.label_format(.points))
|
||||
app.gg.draw_text(ww - labelx, labely, 'Moves: ${app.moves}', app.label_format(.moves))
|
||||
}
|
||||
|
||||
fn (app &App) draw_tiles() {
|
||||
@ -654,13 +654,13 @@ fn (app &App) draw_tiles() {
|
||||
app.gg.draw_text(xpos, ypos, '${1 << tidx}', fmt)
|
||||
}
|
||||
.log {
|
||||
app.gg.draw_text(xpos, ypos, '$tidx', fmt)
|
||||
app.gg.draw_text(xpos, ypos, '${tidx}', fmt)
|
||||
}
|
||||
.exponent {
|
||||
app.gg.draw_text(xpos, ypos, '2', fmt)
|
||||
fs2 := int(f32(fmt.size) * 0.67)
|
||||
app.gg.draw_text(xpos + app.ui.tile_size / 10, ypos - app.ui.tile_size / 8,
|
||||
'$tidx', gx.TextCfg{
|
||||
'${tidx}', gx.TextCfg{
|
||||
...fmt
|
||||
size: fs2
|
||||
align: gx.HorizontalAlign.left
|
||||
|
@ -12,7 +12,7 @@ fn main() {
|
||||
r (b)
|
||||
}
|
||||
}
|
||||
println('a: $a') // 100
|
||||
println('b: $b') // 20
|
||||
println('c: $c') // 120
|
||||
println('a: ${a}') // 100
|
||||
println('b: ${b}') // 20
|
||||
println('c: ${c}') // 120
|
||||
}
|
||||
|
@ -130,16 +130,16 @@ fn main() {
|
||||
for i in vals {
|
||||
tree = tree.insert(i)
|
||||
}
|
||||
println('[1] after insertion tree size is $tree.size()') // 11
|
||||
println('[1] after insertion tree size is ${tree.size()}') // 11
|
||||
del_vals := [-0.3, 0.0, 0.3, 0.6, 1.0, 1.5]
|
||||
for i in del_vals {
|
||||
tree = tree.delete(i)
|
||||
}
|
||||
print('[2] after deletion tree size is $tree.size(), ') // 7
|
||||
print('[2] after deletion tree size is ${tree.size()}, ') // 7
|
||||
print('and these elements were deleted: ') // 0.0 0.3 0.6 1.0
|
||||
for i in vals {
|
||||
if !tree.search(i) {
|
||||
print('$i ')
|
||||
print('${i} ')
|
||||
}
|
||||
}
|
||||
println('')
|
||||
|
@ -61,6 +61,6 @@ fn main() {
|
||||
}
|
||||
|
||||
// print the state of the interpreter at the end
|
||||
println('Address: $address')
|
||||
println('Memory: $memory')
|
||||
println('Address: ${address}')
|
||||
println('Memory: ${memory}')
|
||||
}
|
||||
|
@ -12,7 +12,7 @@ fn main() {
|
||||
mut r := io.new_buffered_reader(reader: conn)
|
||||
for {
|
||||
l := r.read_line() or { break }
|
||||
println('$l')
|
||||
println('${l}')
|
||||
// Make it nice and obvious that we are doing this line by line
|
||||
time.sleep(100 * time.millisecond)
|
||||
}
|
||||
|
@ -52,9 +52,9 @@ fn C.wkhtmltopdf_get_output(converter &C.wkhtmltopdf_converter, data &&char) int
|
||||
fn main() {
|
||||
// init
|
||||
init := C.wkhtmltopdf_init(0)
|
||||
println('wkhtmltopdf_init: $init')
|
||||
println('wkhtmltopdf_init: ${init}')
|
||||
version := unsafe { cstring_to_vstring(&char(C.wkhtmltopdf_version())) }
|
||||
println('wkhtmltopdf_version: $version')
|
||||
println('wkhtmltopdf_version: ${version}')
|
||||
global_settings := C.wkhtmltopdf_create_global_settings()
|
||||
println('wkhtmltopdf_create_global_settings: ${voidptr(global_settings)}')
|
||||
object_settings := C.wkhtmltopdf_create_object_settings()
|
||||
@ -63,24 +63,24 @@ fn main() {
|
||||
println('wkhtmltopdf_create_converter: ${voidptr(converter)}')
|
||||
// convert
|
||||
mut result := C.wkhtmltopdf_set_object_setting(object_settings, c'page', c'http://www.google.com.br')
|
||||
println('wkhtmltopdf_set_object_setting: $result [page = http://www.google.com.br]')
|
||||
println('wkhtmltopdf_set_object_setting: ${result} [page = http://www.google.com.br]')
|
||||
C.wkhtmltopdf_add_object(converter, object_settings, 0)
|
||||
println('wkhtmltopdf_add_object')
|
||||
result = C.wkhtmltopdf_convert(converter)
|
||||
println('wkhtmltopdf_convert: $result')
|
||||
println('wkhtmltopdf_convert: ${result}')
|
||||
error_code := C.wkhtmltopdf_http_error_code(converter)
|
||||
println('wkhtmltopdf_http_error_code: $error_code')
|
||||
println('wkhtmltopdf_http_error_code: ${error_code}')
|
||||
if result {
|
||||
pdata := &char(0)
|
||||
ppdata := &pdata
|
||||
size := C.wkhtmltopdf_get_output(converter, voidptr(ppdata))
|
||||
println('wkhtmltopdf_get_output: $size bytes')
|
||||
println('wkhtmltopdf_get_output: ${size} bytes')
|
||||
mut file := os.open_file('./google.pdf', 'w+', 0o666) or {
|
||||
println('ERR: $err')
|
||||
println('ERR: ${err}')
|
||||
return
|
||||
}
|
||||
wrote := unsafe { file.write_ptr(pdata, size) }
|
||||
println('write_bytes: $wrote [./google.pdf]')
|
||||
println('write_bytes: ${wrote} [./google.pdf]')
|
||||
file.flush()
|
||||
file.close()
|
||||
}
|
||||
@ -92,5 +92,5 @@ fn main() {
|
||||
C.wkhtmltopdf_destroy_global_settings(global_settings)
|
||||
println('wkhtmltopdf_destroy_global_settings')
|
||||
deinit := C.wkhtmltopdf_deinit()
|
||||
println('wkhtmltopdf_deinit: $deinit')
|
||||
println('wkhtmltopdf_deinit: ${deinit}')
|
||||
}
|
||||
|
@ -42,19 +42,21 @@ fn main() {
|
||||
}
|
||||
|
||||
fn greet_func(cmd Command) ! {
|
||||
language := cmd.flags.get_string('language') or { panic('Failed to get `language` flag: $err') }
|
||||
times := cmd.flags.get_int('times') or { panic('Failed to get `times` flag: $err') }
|
||||
language := cmd.flags.get_string('language') or {
|
||||
panic('Failed to get `language` flag: ${err}')
|
||||
}
|
||||
times := cmd.flags.get_int('times') or { panic('Failed to get `times` flag: ${err}') }
|
||||
name := cmd.args[0]
|
||||
for _ in 0 .. times {
|
||||
match language {
|
||||
'english', 'en' {
|
||||
println('Welcome $name')
|
||||
println('Welcome ${name}')
|
||||
}
|
||||
'german', 'de' {
|
||||
println('Willkommen $name')
|
||||
println('Willkommen ${name}')
|
||||
}
|
||||
'dutch', 'nl' {
|
||||
println('Welkom $name')
|
||||
println('Welkom ${name}')
|
||||
}
|
||||
else {
|
||||
println('Unsupported language')
|
||||
@ -63,9 +65,9 @@ fn greet_func(cmd Command) ! {
|
||||
}
|
||||
}
|
||||
}
|
||||
fun := cmd.flags.get_strings('fun') or { panic('Failed to get `fun` flag: $err') }
|
||||
fun := cmd.flags.get_strings('fun') or { panic('Failed to get `fun` flag: ${err}') }
|
||||
for f in fun {
|
||||
println('fun: $f')
|
||||
println('fun: ${f}')
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -13,25 +13,25 @@ fn (mut app App) method_three(s string) string {
|
||||
fn main() {
|
||||
$for method in App.methods {
|
||||
$if method.typ is fn (string) string {
|
||||
println('$method.name IS `fn(string) string`')
|
||||
println('${method.name} IS `fn(string) string`')
|
||||
} $else {
|
||||
println('$method.name is NOT `fn(string) string`')
|
||||
println('${method.name} is NOT `fn(string) string`')
|
||||
}
|
||||
$if method.return_type !is int {
|
||||
println('$method.name does NOT return `int`')
|
||||
println('${method.name} does NOT return `int`')
|
||||
} $else {
|
||||
println('$method.name DOES return `int`')
|
||||
println('${method.name} DOES return `int`')
|
||||
}
|
||||
$if method.args[0].typ !is string {
|
||||
println("$method.name's first arg is NOT `string`")
|
||||
println("${method.name}'s first arg is NOT `string`")
|
||||
} $else {
|
||||
println("$method.name's first arg IS `string`")
|
||||
println("${method.name}'s first arg IS `string`")
|
||||
}
|
||||
// TODO: Double inversion, should this even be allowed?
|
||||
$if method.typ is fn () {
|
||||
println('$method.name IS a void method')
|
||||
println('${method.name} IS a void method')
|
||||
} $else {
|
||||
println('$method.name is NOT a void method')
|
||||
println('${method.name} is NOT a void method')
|
||||
}
|
||||
println('')
|
||||
}
|
||||
|
@ -2,9 +2,9 @@ import time
|
||||
|
||||
// Simulate expensive computing using sleep function
|
||||
fn expensive_computing(id int, duration int) {
|
||||
println('Executing expensive computing task ($id)...')
|
||||
println('Executing expensive computing task (${id})...')
|
||||
time.sleep(duration * time.millisecond)
|
||||
println('Finish task $id on $duration ms')
|
||||
println('Finish task ${id} on ${duration} ms')
|
||||
}
|
||||
|
||||
fn main() {
|
||||
|
@ -9,5 +9,5 @@ fn main() {
|
||||
}
|
||||
// Join all tasks
|
||||
r := threads.wait()
|
||||
println('All jobs finished: $r')
|
||||
println('All jobs finished: ${r}')
|
||||
}
|
||||
|
@ -9,13 +9,13 @@ fn main() {
|
||||
db.exec("insert into users (name) values ('Kate')")
|
||||
|
||||
nr_users := db.q_int('select count(*) from users')
|
||||
println('nr users = $nr_users')
|
||||
println('nr users = ${nr_users}')
|
||||
|
||||
name := db.q_string('select name from users where id = 1')
|
||||
assert name == 'Sam'
|
||||
|
||||
users, code := db.exec('select * from users')
|
||||
println('SQL Result code: $code')
|
||||
println('SQL Result code: ${code}')
|
||||
for row in users {
|
||||
println(row.vals)
|
||||
}
|
||||
|
@ -15,5 +15,5 @@ fn main() {
|
||||
f := FNAdder(dl.sym_opt(handle, 'add_1')!)
|
||||
eprintln('f: ${ptr_str(f)}')
|
||||
res := f(1, 2)
|
||||
eprintln('res: $res')
|
||||
eprintln('res: ${res}')
|
||||
}
|
||||
|
@ -47,7 +47,7 @@ fn test_can_compile_and_use_library_with_skip_unused() {
|
||||
}
|
||||
|
||||
fn v_compile(vopts string) os.Result {
|
||||
cmd := '${os.quoted_path(vexe)} -showcc $vopts'
|
||||
cmd := '${os.quoted_path(vexe)} -showcc ${vopts}'
|
||||
// dump(cmd)
|
||||
res := os.execute_or_exit(cmd)
|
||||
// dump(res)
|
||||
|
@ -3,7 +3,7 @@ import net.http
|
||||
|
||||
fn main() {
|
||||
resp := http.get('https://vlang.io/utc_now') or {
|
||||
eprintln('Failed to fetch data from the server. Error: $err')
|
||||
eprintln('Failed to fetch data from the server. Error: ${err}')
|
||||
return
|
||||
}
|
||||
|
||||
|
@ -254,10 +254,10 @@ fn (app &App) display() {
|
||||
app.bird)
|
||||
}
|
||||
}
|
||||
app.gg.draw_text_def(10, 25, 'Score: $app.score')
|
||||
app.gg.draw_text_def(10, 50, 'Max Score: $app.max_score')
|
||||
app.gg.draw_text_def(10, 75, 'Generation: $app.generation')
|
||||
app.gg.draw_text_def(10, 100, 'Alive: $app.alives / $app.nv.population')
|
||||
app.gg.draw_text_def(10, 25, 'Score: ${app.score}')
|
||||
app.gg.draw_text_def(10, 50, 'Max Score: ${app.max_score}')
|
||||
app.gg.draw_text_def(10, 75, 'Generation: ${app.generation}')
|
||||
app.gg.draw_text_def(10, 100, 'Alive: ${app.alives} / ${app.nv.population}')
|
||||
}
|
||||
|
||||
fn (app &App) draw() {
|
||||
|
@ -38,7 +38,7 @@ fn main() {
|
||||
}
|
||||
|
||||
rnd := rand.f32()
|
||||
url := 'https://api.caiyunapp.com/v2.5/96Ly7wgKGq6FhllM/116.391912,40.010711/weather.jsonp?hourlysteps=120&random=$rnd'
|
||||
url := 'https://api.caiyunapp.com/v2.5/96Ly7wgKGq6FhllM/116.391912,40.010711/weather.jsonp?hourlysteps=120&random=${rnd}'
|
||||
// println(url)
|
||||
|
||||
resp := http.fetch(http.FetchConfig{ ...config, url: url }) or {
|
||||
|
@ -52,11 +52,11 @@ fn on_frame(mut app App) {
|
||||
start := math.tau * app.mouse.y / (win_width * app.gg.scale)
|
||||
end := math.tau * app.mouse.x / (win_width * app.gg.scale)
|
||||
|
||||
segs := if app.sel == .segs { '[$app.segs]' } else { '$app.segs' }
|
||||
app.gg.draw_text_def(10, 10, 'Segments: $segs')
|
||||
segs := if app.sel == .segs { '[${app.segs}]' } else { '${app.segs}' }
|
||||
app.gg.draw_text_def(10, 10, 'Segments: ${segs}')
|
||||
app.gg.draw_text_def(250, 10, 'Drawing Angles (radians)')
|
||||
app.gg.draw_text_def(200, 26, 'Start: $start°')
|
||||
app.gg.draw_text_def(350, 26, 'End: $end°')
|
||||
app.gg.draw_text_def(200, 26, 'Start: ${start}°')
|
||||
app.gg.draw_text_def(350, 26, 'End: ${end}°')
|
||||
mut x, mut y := 0, -80
|
||||
|
||||
y += 150
|
||||
|
@ -7,7 +7,7 @@ import sokol.sapp
|
||||
|
||||
const (
|
||||
max_files = 12
|
||||
text = 'Drag&Drop here max $max_files files.'
|
||||
text = 'Drag&Drop here max ${max_files} files.'
|
||||
text_size = 16
|
||||
)
|
||||
|
||||
@ -62,7 +62,7 @@ fn frame(mut app App) {
|
||||
|
||||
mut y := 40
|
||||
for c, f in app.dropped_file_list {
|
||||
app.gg.draw_text(12, y, '[$c] $f', txt_conf)
|
||||
app.gg.draw_text(12, y, '[${c}] ${f}', txt_conf)
|
||||
y += text_size
|
||||
}
|
||||
|
||||
|
@ -191,7 +191,7 @@ fn graphics_keydown(code gg.KeyCode, mod gg.Modifier, mut state AppState) {
|
||||
// movement
|
||||
mut d_x, mut d_y := 0.0, 0.0
|
||||
if code == .enter {
|
||||
println('> ViewRect{$state.view.x_min, $state.view.x_max, $state.view.y_min, $state.view.y_max}')
|
||||
println('> ViewRect{${state.view.x_min}, ${state.view.x_max}, ${state.view.y_min}, ${state.view.y_max}}')
|
||||
}
|
||||
if state.gg.pressed_keys[int(gg.KeyCode.left)] {
|
||||
d_x -= s_x
|
||||
|
@ -77,7 +77,7 @@ fn frame(mut app App) {
|
||||
app.counter += i64(f64(count) / time.second)
|
||||
}
|
||||
|
||||
label := '$app.counter'
|
||||
label := '${app.counter}'
|
||||
label_width := (f64(label.len * text_cfg.size) / 4.0)
|
||||
label_height := (f64(1 * text_cfg.size) / 2.0)
|
||||
mut x := f32(size.width) * 0.5 - label_width
|
||||
|
@ -45,7 +45,7 @@ fn print_sol(dist []int) {
|
||||
n_vertex := dist.len
|
||||
print('\n Vertex Distance from Source')
|
||||
for i in 0 .. n_vertex {
|
||||
print('\n $i --> ${dist[i]}')
|
||||
print('\n ${i} --> ${dist[i]}')
|
||||
}
|
||||
}
|
||||
|
||||
@ -154,7 +154,7 @@ fn main() {
|
||||
graph = g_value.clone() // graphs_sample[g].clone() // choice your SAMPLE
|
||||
// allways starting by node 0
|
||||
start_node := 0
|
||||
println('\n\n Graph ${index + 1} using Bellman-Ford algorithm (source node: $start_node)')
|
||||
println('\n\n Graph ${index + 1} using Bellman-Ford algorithm (source node: ${start_node})')
|
||||
bellman_ford(graph, start_node)
|
||||
}
|
||||
println('\n BYE -- OK')
|
||||
|
@ -7,9 +7,9 @@ fn main() {
|
||||
'E': ['B', 'F']
|
||||
'F': ['C', 'E']
|
||||
}
|
||||
println('Graph: $graph')
|
||||
println('Graph: ${graph}')
|
||||
path := breadth_first_search_path(graph, 'A', 'F')
|
||||
println('The shortest path from node A to node F is: $path')
|
||||
println('The shortest path from node A to node F is: ${path}')
|
||||
assert path == ['A', 'C', 'F']
|
||||
}
|
||||
|
||||
|
@ -10,9 +10,9 @@ fn main() {
|
||||
'E': ['B', 'F']
|
||||
'F': ['C', 'E']
|
||||
}
|
||||
println('Graph: $graph')
|
||||
println('Graph: ${graph}')
|
||||
path := breadth_first_search_path(graph, 'A', 'F')
|
||||
println('\n The shortest path from node A to node F is: $path.reverse()')
|
||||
println('\n The shortest path from node A to node F is: ${path.reverse()}')
|
||||
}
|
||||
|
||||
// Breadth-First Search (BFS) allows you to find the shortest distance between two nodes in the graph.
|
||||
@ -35,7 +35,7 @@ fn breadth_first_search_path(graph map[string][]string, start string, target str
|
||||
return path
|
||||
}
|
||||
// Expansion of node removed from queue
|
||||
print('\n Expansion of node $node (true/false): ${graph[node]}')
|
||||
print('\n Expansion of node ${node} (true/false): ${graph[node]}')
|
||||
// take all nodes from the node
|
||||
for vertex in graph[node] { // println("\n ...${vertex}")
|
||||
// not explored yet
|
||||
@ -43,7 +43,7 @@ fn breadth_first_search_path(graph map[string][]string, start string, target str
|
||||
queue << vertex
|
||||
}
|
||||
}
|
||||
print('\n QUEUE: $queue (only not visited) \n Visited: $visited')
|
||||
print('\n QUEUE: ${queue} (only not visited) \n Visited: ${visited}')
|
||||
}
|
||||
}
|
||||
path = ['Path not found, problem in the Graph, start or end nodes! ']
|
||||
@ -70,7 +70,7 @@ fn visited_init(a_graph map[string][]string) map[string]bool {
|
||||
|
||||
// Based in the current node that is final, search for its parent, already visited, up to the root or start node
|
||||
fn build_path_reverse(graph map[string][]string, start string, final string, visited map[string]bool) []string {
|
||||
print('\n\n Nodes visited (true) or no (false): $visited')
|
||||
print('\n\n Nodes visited (true) or no (false): ${visited}')
|
||||
array_of_nodes := graph.keys()
|
||||
mut current := final
|
||||
mut path := []string{}
|
||||
|
@ -25,9 +25,9 @@ fn main() {
|
||||
}
|
||||
// println('Graph: $graph')
|
||||
path_01 := depth_first_search_path(graph_01, 'A', 'F')
|
||||
println('\n Graph_01: a first path from node A to node F is: $path_01.reverse()')
|
||||
println('\n Graph_01: a first path from node A to node F is: ${path_01.reverse()}')
|
||||
path_02 := depth_first_search_path(graph_02, 'A', 'H')
|
||||
println('\n Graph_02: a first path from node A to node F is: $path_02.reverse()')
|
||||
println('\n Graph_02: a first path from node A to node F is: ${path_02.reverse()}')
|
||||
}
|
||||
|
||||
// Depth-First Search (BFS) allows you to find a path between two nodes in the graph.
|
||||
@ -51,7 +51,7 @@ fn depth_first_search_path(graph map[string][]string, start string, target strin
|
||||
return path
|
||||
}
|
||||
// Exploring of node removed from stack and add its relatives
|
||||
print('\n Exploring of node $node (true/false): ${graph[node]}')
|
||||
print('\n Exploring of node ${node} (true/false): ${graph[node]}')
|
||||
// graph[node].reverse() take a classical choice for DFS
|
||||
// at most os left in this case.
|
||||
// use vertex in graph[node] the choice is right
|
||||
@ -64,7 +64,7 @@ fn depth_first_search_path(graph map[string][]string, start string, target strin
|
||||
stack << vertex
|
||||
}
|
||||
}
|
||||
print('\n Stack: $stack (only not visited) \n Visited: $visited')
|
||||
print('\n Stack: ${stack} (only not visited) \n Visited: ${visited}')
|
||||
}
|
||||
}
|
||||
path = ['Path not found, problem in the Graph, start or end nodes! ']
|
||||
@ -84,7 +84,7 @@ fn visited_init(a_graph map[string][]string) map[string]bool {
|
||||
|
||||
// Based in the current node that is final, search for his parent, that is already visited, up to the root or start node
|
||||
fn build_path_reverse(graph map[string][]string, start string, final string, visited map[string]bool) []string {
|
||||
print('\n\n Nodes visited (true) or no (false): $visited')
|
||||
print('\n\n Nodes visited (true) or no (false): ${visited}')
|
||||
array_of_nodes := graph.keys()
|
||||
mut current := final
|
||||
mut path := []string{}
|
||||
|
@ -69,7 +69,7 @@ fn updating_priority<T>(mut prior_queue []T, search_data int, new_priority int)
|
||||
i++
|
||||
// all the list was examined
|
||||
if i >= lenght_pq {
|
||||
print('\n This data $search_data does exist ... PRIORITY QUEUE problem\n')
|
||||
print('\n This data ${search_data} does exist ... PRIORITY QUEUE problem\n')
|
||||
exit(1) // panic(s string)
|
||||
}
|
||||
} // end for
|
||||
@ -98,7 +98,7 @@ fn all_adjacents<T>(g [][]T, v int) []int {
|
||||
fn print_solution<T>(dist []T) {
|
||||
print('Vertex \tDistance from Source')
|
||||
for node in 0 .. (dist.len) {
|
||||
print('\n $node ==> \t ${dist[node]}')
|
||||
print('\n ${node} ==> \t ${dist[node]}')
|
||||
}
|
||||
}
|
||||
|
||||
@ -107,7 +107,7 @@ fn print_paths_dist<T>(path []T, dist []T) {
|
||||
print('\n Read the nodes from right to left (a path): \n')
|
||||
|
||||
for node in 1 .. (path.len) {
|
||||
print('\n $node ')
|
||||
print('\n ${node} ')
|
||||
mut i := node
|
||||
for path[i] != -1 {
|
||||
print(' <= ${path[i]} ')
|
||||
@ -231,7 +231,7 @@ fn main() {
|
||||
graph = g_value.clone() // graphs_sample[g].clone() // choice your SAMPLE
|
||||
// allways starting by node 0
|
||||
start_node := 0
|
||||
println('\n\n Graph ${index + 1} using Dijkstra algorithm (source node: $start_node)')
|
||||
println('\n\n Graph ${index + 1} using Dijkstra algorithm (source node: ${start_node})')
|
||||
dijkstra(graph, start_node)
|
||||
}
|
||||
|
||||
|
@ -100,13 +100,13 @@ fn print_solution(path []int, g [][]int) {
|
||||
mut sum := 0
|
||||
for node in 0 .. (path.len) {
|
||||
if path[node] == -1 {
|
||||
print('\n $node <== reference or start node')
|
||||
print('\n ${node} <== reference or start node')
|
||||
} else {
|
||||
print('\n $node <--> ${path[node]} \t${g[node][path[node]]}')
|
||||
print('\n ${node} <--> ${path[node]} \t${g[node][path[node]]}')
|
||||
sum += g[node][path[node]]
|
||||
}
|
||||
}
|
||||
print('\n Minimum Cost Spanning Tree: $sum\n\n')
|
||||
print('\n Minimum Cost Spanning Tree: ${sum}\n\n')
|
||||
}
|
||||
|
||||
// check structure from: https://www.geeksforgeeks.org/dijkstras-shortest-path-algorithm-greedy-algo-7/
|
||||
|
@ -9,7 +9,7 @@
|
||||
// THE DFS RECURSIVE .... classical searchig for leaves nodes
|
||||
// the arguments are used in the function to avoid global variables....
|
||||
fn dfs_recursive(u string, mut visited map[string]bool, graph map[string][]string, mut top_sorting []string) {
|
||||
print(' Visiting: $u -> ')
|
||||
print(' Visiting: ${u} -> ')
|
||||
visited[u] = true
|
||||
|
||||
for v in graph[u] {
|
||||
@ -67,7 +67,7 @@ fn main() {
|
||||
|
||||
mut graph := map[string][]string{} // the graph: adjacency matrix
|
||||
for index, g_value in [graph_01, graph_02, graph_03] {
|
||||
println('Topological sorting for the graph $index using a DFS recursive')
|
||||
println('Topological sorting for the graph ${index} using a DFS recursive')
|
||||
graph = g_value.clone() // graphs_sample[g].clone() // choice your SAMPLE
|
||||
|
||||
// mut n_nodes := graph.len
|
||||
@ -82,7 +82,7 @@ fn main() {
|
||||
}
|
||||
}
|
||||
|
||||
print('\n A topological sorting of graph $index : ')
|
||||
print('\n A topological sorting of graph ${index} : ')
|
||||
// println(g_value)
|
||||
println(top_sorting.reverse())
|
||||
println('')
|
||||
|
@ -25,7 +25,7 @@ fn topog_sort_greedy(graph map[string][]string) []string {
|
||||
Maybe it seems the Kahn's algorithm
|
||||
*/
|
||||
mut v_degree := in_degree(graph) // return: map [string] int
|
||||
print('V Degree $v_degree')
|
||||
print('V Degree ${v_degree}')
|
||||
mut small_degree := min_degree(v_degree)
|
||||
mut new_graph := remove_node_from_graph(small_degree, graph)
|
||||
top_order << small_degree
|
||||
@ -33,7 +33,7 @@ fn topog_sort_greedy(graph map[string][]string) []string {
|
||||
|
||||
for (count < n_nodes) {
|
||||
v_degree = in_degree(new_graph) // return: map [string] int
|
||||
print('\nV Degree $v_degree')
|
||||
print('\nV Degree ${v_degree}')
|
||||
small_degree = min_degree(v_degree)
|
||||
new_graph = remove_node_from_graph(small_degree, new_graph)
|
||||
|
||||
|
@ -8,7 +8,7 @@ fn main() {
|
||||
}
|
||||
|
||||
fn move(n int, a string, b string) {
|
||||
println('Disc $n from $a to ${b}...')
|
||||
println('Disc ${n} from ${a} to ${b}...')
|
||||
}
|
||||
|
||||
fn hanoi(n int, a string, b string, c string) {
|
||||
|
@ -1,5 +1,5 @@
|
||||
fn main() {
|
||||
for i in 0 .. 3 {
|
||||
println('Hello from V.js ($i)')
|
||||
println('Hello from V.js (${i})')
|
||||
}
|
||||
}
|
||||
|
@ -77,7 +77,7 @@ Create a service to request the benchmarks data by http
|
||||
Decode the info to `FrameworkBenchmarkResponse`
|
||||
```v ignore
|
||||
fn typescript_sqlite_memory() ?FrameworkBenchmarkResponse {
|
||||
url := 'http://localhost:3000/sqlite-memory/$benchmark_loop_length'
|
||||
url := 'http://localhost:3000/sqlite-memory/${benchmark_loop_length}'
|
||||
res := http.get(url) or { panic(err) }
|
||||
framework_benchmark_response := json.decode(FrameworkBenchmarkResponse, res.body)!
|
||||
return framework_benchmark_response
|
||||
@ -89,7 +89,7 @@ Create a service to request the benchmarks data by http
|
||||
Decode the info to `FrameworkBenchmarkResponse`
|
||||
```v ignore
|
||||
fn typescript_sqlite_memory() ?FrameworkBenchmarkResponse {
|
||||
url := 'http://localhost:3000/sqlite-memory/$benchmark_loop_length'
|
||||
url := 'http://localhost:3000/sqlite-memory/${benchmark_loop_length}'
|
||||
res := http.get(url) or { panic(err) }
|
||||
framework_benchmark_response := json.decode(FrameworkBenchmarkResponse, res.body)!
|
||||
return framework_benchmark_response
|
||||
|
@ -146,14 +146,14 @@ fn update_framework_benchmark_times() !FrameworkPlatform {
|
||||
}
|
||||
|
||||
fn typescript_sqlite_memory() !FrameworkBenchmarkResponse {
|
||||
url := 'http://localhost:3000/sqlite-memory/$benchmark_loop_length'
|
||||
url := 'http://localhost:3000/sqlite-memory/${benchmark_loop_length}'
|
||||
res := http.get(url) or { panic(err) }
|
||||
framework_benchmark_response := json.decode(FrameworkBenchmarkResponse, res.body)!
|
||||
return framework_benchmark_response
|
||||
}
|
||||
|
||||
fn v_sqlite_memory() !FrameworkBenchmarkResponse {
|
||||
url := 'http://localhost:4000/sqlite-memory/$benchmark_loop_length'
|
||||
url := 'http://localhost:4000/sqlite-memory/${benchmark_loop_length}'
|
||||
res := http.get(url) or { panic(err) }
|
||||
framework_benchmark_response := json.decode(FrameworkBenchmarkResponse, res.body)!
|
||||
return framework_benchmark_response
|
||||
|
@ -14,16 +14,16 @@ fn main() {
|
||||
return
|
||||
}
|
||||
for user in users {
|
||||
println('$user.name: $user.age')
|
||||
println('${user.name}: ${user.age}')
|
||||
}
|
||||
println('')
|
||||
for i, user in users {
|
||||
println('$i) $user.name')
|
||||
println('${i}) ${user.name}')
|
||||
if !user.can_register() {
|
||||
println('Cannot register $user.name, they are too young')
|
||||
println('Cannot register ${user.name}, they are too young')
|
||||
} else {
|
||||
users[i].register()
|
||||
println('$user.name is registered')
|
||||
println('${user.name} is registered')
|
||||
}
|
||||
}
|
||||
// Let's encode users again just for fun
|
||||
|
@ -26,7 +26,7 @@ fn (l Lander) deorbit() {
|
||||
}
|
||||
|
||||
fn (l Lander) open_parachutes(n int) {
|
||||
println('opening $n parachutes')
|
||||
println('opening ${n} parachutes')
|
||||
}
|
||||
|
||||
fn wait() {
|
||||
|
@ -6,7 +6,7 @@ fn main() {
|
||||
// Make a new file called info.log in the current folder
|
||||
l.set_full_logpath('./info.log')
|
||||
l.log_to_console_too()
|
||||
println('Please check the file: $l.output_file_name after this example crashes.')
|
||||
println('Please check the file: ${l.output_file_name} after this example crashes.')
|
||||
|
||||
l.info('info')
|
||||
l.warn('warn')
|
||||
|
@ -39,7 +39,7 @@ mut:
|
||||
}
|
||||
|
||||
fn (app &MyApp) on_menu_item_click(item TrayMenuItem) {
|
||||
println('click $item.id')
|
||||
println('click ${item.id}')
|
||||
if item.id == 'quit' {
|
||||
C.tray_app_exit(app.tray_info)
|
||||
}
|
||||
|
@ -48,7 +48,7 @@ fn expr_to_rev_pol(expr string) ![]string {
stack.delete(stack.len - 1)
}
else {
return error('err: invalid character `$op`')
return error('err: invalid character `${op}`')
}
}
pos++
@ -115,7 +115,7 @@ fn main() {
mut expr_count := 0
for {
expr_count++
expr := os.input_opt('[$expr_count] ') or {
expr := os.input_opt('[${expr_count}] ') or {
println('')
break
}.trim_space()