mirror of https://github.com/vlang/v.git synced 2023-08-10 21:13:21 +03:00

v.gen.js: add source map creation support (inline only, no src) (#10384)

This commit is contained in:
Andreas Heissenberger 2021-06-10 07:33:46 +02:00 committed by GitHub
parent 14519bbf5c
commit 96c8d147b2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 1275 additions and 39 deletions

cmd/v/help/build-js.txt Normal file

@ -0,0 +1,22 @@
Usage: v -b js [-options] ['run'] <target.v|target_directory> [run options]
This command compiles the given target, along with its dependencies, into a JavaScript source file.
For more general build help, see also `v help build`.
# Interfacing the JavaScript backend code generation, passing options to it:
-prod
Do not create any JSDoc comments
-sourcemap
Create a source map for debugging
-sourcemap-inline
Embed the source map directly into the JavaScript source file
(currently the default; external source map files are not implemented)
-sourcemap-src-included
Include the original V source files in the generated source map
(default false; currently all files in the source map are referenced by their absolute system file path)
The supported targets for the JS backend are: ES5 strict
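As a quick illustration of these options (not part of the help file itself; `hello.v` is only a placeholder file name), a typical invocation could look like:
    v -b js -sourcemap -o hello.js hello.v
    node --enable-source-maps hello.js
node's --enable-source-maps flag needs node >= 12.12.0.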


@ -21,6 +21,15 @@ pub struct JS.Array {
pub struct JS.Map {}
// browser: https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Error
// node: https://nodejs.org/api/errors.html#errors_class_error
pub struct JS.Error {
pub:
name string
message string
stack string
}
// Type prototype functions
fn (v JS.String) toString() JS.String
fn (v JS.Number) toString() JS.String
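For reference, the new JS.Error interop struct can be used from V code targeting the JS backend roughly like this (a minimal sketch; the field values are filled in by the JavaScript runtime):
    e := JS.Error{}
    println(e.name)
    println(e.message)
    s := e.stack // runtime-provided stack trace, used later by the source map test in this commit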


@ -7,6 +7,23 @@
module builtin
pub struct JS.node_process {
pub:
arch string
argsv []string
env []string
platform string
version string
// TODO: add all properties
}
// hack to access process properties
pub fn js_node_process() JS.node_process {
#return process
return JS.node_process{}
}
fn JS.process.exit(int)
fn JS.process.stdout.write(string) bool
fn JS.process.stdout.writeln(string) bool
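The helper above simply exposes node's global `process` object to V code. A small sketch of how it can be consumed (the version parsing mirrors the simple_sourcemap.v test added later in this commit):
    p := js_node_process()
    println(p.platform)
    major := p.version.slice(1, int(p.version.len)).split('.')[0].int() // 'v16.14.0' -> 16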


@ -166,6 +166,15 @@ pub fn stderr() File {
}
}
// read implements the Reader interface.
pub fn (f &File) read(mut buf []byte) ?int {
if buf.len == 0 {
return 0
}
nbytes := fread(buf.data, 1, buf.len, f.cfile) ?
return nbytes
}
// **************************** Write ops ***************************
// write implements the Writer interface.
// It returns how many bytes were actually written.
@ -439,15 +448,6 @@ pub fn (f &File) read_bytes_into(pos u64, mut buf []byte) ?int {
return error('Could not read file')
}
// read implements the Reader interface.
pub fn (f &File) read(mut buf []byte) ?int {
if buf.len == 0 {
return 0
}
nbytes := fread(buf.data, 1, buf.len, f.cfile) ?
return nbytes
}
// read_at reads `buf.len` bytes starting at file byte offset `pos`, in `buf`.
[deprecated: 'use File.read_from() instead']
pub fn (f &File) read_at(pos u64, mut buf []byte) ?int {
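With `read` in place, an os.File can be passed anywhere an io.Reader is expected. A minimal sketch of direct use (the file name is hypothetical):
    mut f := os.open('input.bin') ?
    mut buf := []byte{len: 128}
    n := f.read(mut buf) ?
    println('read $n bytes')
    f.close()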


@ -6,6 +6,8 @@ import v.token
import v.pref
import v.util
import v.depgraph
import encoding.base64
import v.gen.js.sourcemap
const (
// https://ecma-international.org/ecma-262/#sec-reserved-words
@ -22,14 +24,21 @@ const (
.int_literal, .float_literal, .size_t, .bool, .string]
)
struct SourcemapHelper {
src_path string
src_line u32
ns_pos u32
}
struct Namespace {
name string
mut:
out strings.Builder = strings.new_builder(128)
pub_vars []string
imports map[string]string
indent int
methods map[string][]ast.FnDecl
sourcemap_helper []SourcemapHelper
}
[heap]
@ -60,6 +69,8 @@ mut:
empty_line bool
cast_stack []ast.Type
call_stack []ast.CallExpr
is_vlines_enabled bool // is it safe to generate #line directives when -g is passed
sourcemap sourcemap.SourceMap // maps lines in the generated JavaScript file to the original source files and lines
}
pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) string {
@ -78,9 +89,14 @@ pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) string {
// TODO: Add '[-no]-jsdoc' flag
if pref.is_prod {
g.enable_doc = false
g.is_vlines_enabled = false
}
g.init()
mut graph := depgraph.new_dep_graph()
if g.pref.sourcemap {
mut sg := sourcemap.generate_empty_map()
g.sourcemap = sg.add_map('', '', g.pref.sourcemap_src_included, 0, 0)
}
// Get class methods
for file in files {
g.file = file
@ -135,8 +151,29 @@ pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) string {
out += val
}
out += ') {\n\t'
// private scope
out += namespace.out.str().trim_space()
namespace_code := namespace.out.str()
if g.pref.sourcemap {
// calculate current output start line
mut current_line := u32(out.count('\n') + 1)
mut sm_pos := u32(0)
for sourcemap_ns_entry in namespace.sourcemap_helper {
// calculate final generated location in output based on position
current_segment := namespace_code.substr(int(sm_pos), int(sourcemap_ns_entry.ns_pos))
current_line += u32(current_segment.count('\n'))
current_column := if last_nl_pos := current_segment.last_index('\n') {
u32(current_segment.len - last_nl_pos - 1)
} else {
u32(0)
}
g.sourcemap.add_mapping(sourcemap_ns_entry.src_path, sourcemap.SourcePosition{
source_line: sourcemap_ns_entry.src_line
source_column: 0 // sourcemap_ns_entry.src_column
}, current_line, current_column, '')
sm_pos = sourcemap_ns_entry.ns_pos
}
}
out += namespace_code
// public scope
out += '\n'
if g.enable_doc {
@ -195,6 +232,18 @@ pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) string {
out += 'if (typeof module === "object" && module.exports) module.exports = $export;\n'
}
out += '\n'
if g.pref.sourcemap {
out += g.create_sourcemap()
}
return out
}
fn (g JsGen) create_sourcemap() string {
mut sm := g.sourcemap
mut out := '\n//# sourceMappingURL=data:application/json;base64,'
out += base64.encode(sm.to_json().str().bytes())
out += '\n'
return out
}
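With this, the generated .js file ends in a single data-URI comment that node (with --enable-source-maps) and browser devtools pick up. The base64 payload below is shortened and made up, only to show the shape of the emitted line:
    //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLC4uLn0=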
@ -354,6 +403,21 @@ fn (mut g JsGen) stmts(stmts []ast.Stmt) {
g.dec_indent()
}
[inline]
fn (mut g JsGen) write_v_source_line_info(pos token.Position) {
// g.inside_ternary == 0 &&
if g.pref.sourcemap {
g.ns.sourcemap_helper << SourcemapHelper{
src_path: util.vlines_escape_path(g.file.path, g.pref.ccompiler)
src_line: u32(pos.line_nr + 1)
ns_pos: u32(g.ns.out.len)
}
}
if g.pref.is_vlines && g.is_vlines_enabled {
g.write(' /* ${pos.line_nr + 1} $g.ns.out.len */ ')
}
}
fn (mut g JsGen) stmt(node ast.Stmt) {
g.stmt_start_pos = g.ns.out.len
match node {
@ -362,45 +426,56 @@ fn (mut g JsGen) stmt(node ast.Stmt) {
panic('inline asm is not supported by js')
}
ast.AssertStmt {
g.write_v_source_line_info(node.pos)
g.gen_assert_stmt(node)
}
ast.AssignStmt {
g.write_v_source_line_info(node.pos)
g.gen_assign_stmt(node)
}
ast.Block {
g.write_v_source_line_info(node.pos)
g.gen_block(node)
g.writeln('')
}
ast.BranchStmt {
g.write_v_source_line_info(node.pos)
g.gen_branch_stmt(node)
}
ast.CompFor {}
ast.ConstDecl {
g.write_v_source_line_info(node.pos)
g.gen_const_decl(node)
}
ast.DeferStmt {
g.defer_stmts << node
}
ast.EnumDecl {
g.write_v_source_line_info(node.pos)
g.gen_enum_decl(node)
g.writeln('')
}
ast.ExprStmt {
g.write_v_source_line_info(node.pos)
g.gen_expr_stmt(node)
}
ast.FnDecl {
g.write_v_source_line_info(node.pos)
g.fn_decl = unsafe { &node }
g.gen_fn_decl(node)
}
ast.ForCStmt {
g.write_v_source_line_info(node.pos)
g.gen_for_c_stmt(node)
g.writeln('')
}
ast.ForInStmt {
g.write_v_source_line_info(node.pos)
g.gen_for_in_stmt(node)
g.writeln('')
}
ast.ForStmt {
g.write_v_source_line_info(node.pos)
g.gen_for_stmt(node)
g.writeln('')
}
@ -408,18 +483,21 @@ fn (mut g JsGen) stmt(node ast.Stmt) {
// TODO
}
ast.GotoLabel {
g.write_v_source_line_info(node.pos)
g.writeln('${g.js_name(node.name)}:')
}
ast.GotoStmt {
// skip: JS has no goto
}
ast.HashStmt {
g.write_v_source_line_info(node.pos)
g.gen_hash_stmt(node)
}
ast.Import {
g.ns.imports[node.mod] = node.alias
}
ast.InterfaceDecl {
g.write_v_source_line_info(node.pos)
g.gen_interface_decl(node)
}
ast.Module {
@ -434,6 +512,7 @@ fn (mut g JsGen) stmt(node ast.Stmt) {
}
ast.SqlStmt {}
ast.StructDecl {
g.write_v_source_line_info(node.pos)
g.gen_struct_decl(node)
}
ast.TypeDecl {


@ -1,9 +1,10 @@
import os
const (
test_dir = os.join_path('vlib', 'v', 'gen', 'js', 'tests')
output_dir = '_js_tests/'
v_options = '-b js -w'
node_options = ''
)
fn testsuite_end() {
@ -12,6 +13,8 @@ fn testsuite_end() {
const there_is_node_available = is_nodejs_working()
const there_is_grep_available = is_grep_working()
fn test_example_compilation() {
vexe := os.getenv('VEXE')
os.chdir(os.dir(vexe))
@ -20,7 +23,17 @@ fn test_example_compilation() {
for file in files {
path := os.join_path(test_dir, file)
println('Testing $file')
v_code := os.system('$vexe $v_options -o $output_dir${file}.js $path')
mut v_options_file := v_options
mut node_options_file := node_options
should_create_source_map := file.ends_with('_sourcemap.v')
if should_create_source_map {
println('activate -sourcemap creation')
v_options_file += ' -sourcemap' // activate sourcemap generation
println('add node option: --enable-source-maps') // requires node >=12.12.0
node_options_file += ' --enable-source-maps' // activate sourcemap support in node
}
v_code := os.system('$vexe $v_options_file -o $output_dir${file}.js $path')
if v_code != 0 {
assert false
}
@ -36,6 +49,15 @@ fn test_example_compilation() {
}
// Running failed
assert js_code == 0
if should_create_source_map {
if there_is_grep_available {
grep_code_sourcemap_found := os.system('grep -q -E "//#\\ssourceMappingURL=data:application/json;base64,[-A-Za-z0-9+/=]+$" $output_dir${file}.js')
assert grep_code_sourcemap_found == 0
println('file has a source map embedded')
} else {
println(' ... skipping testing for sourcemap $file, there is no grep present')
}
}
}
}
@ -54,3 +76,11 @@ fn is_nodejs_working() bool {
}
return true
}
fn is_grep_working() bool {
node_res := os.execute('grep --version')
if node_res.exit_code != 0 {
return false
}
return true
}


@ -0,0 +1,158 @@
module sourcemap
fn test_simple() {
mut sg := generate_empty_map()
mut sm := sg.add_map('hello.js', '/', true, 0, 0)
sm.set_source_content('hello.v', "fn main(){nprintln('Hello World! Helo \$a')\n}")
mlist := [
MappingInput{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
name: ''
source_position: SourcePosition{
source_line: 1
source_column: 0
}
},
MappingInput{
GenPosition: GenPosition{
gen_line: 2
gen_column: 0
}
name: ''
source_position: SourcePosition{
source_line: 1
source_column: 0
}
},
MappingInput{
GenPosition: GenPosition{
gen_line: 2
gen_column: 2
}
name: ''
source_position: SourcePosition{
source_line: 1
source_column: 0
}
},
MappingInput{
GenPosition: GenPosition{
gen_line: 2
gen_column: 9
}
name: ''
source_position: SourcePosition{
source_line: 1
source_column: 7
}
},
MappingInput{
GenPosition: GenPosition{
gen_line: 2
gen_column: 10
}
name: 'hello_name'
source_position: SourcePosition{
source_line: 1
source_column: 8
}
},
MappingInput{
GenPosition: GenPosition{
gen_line: 2
gen_column: 13
}
name: ''
source_position: SourcePosition{
source_line: 1
source_column: 0
}
},
MappingInput{
GenPosition: GenPosition{
gen_line: 2
gen_column: 14
}
name: ''
source_position: SourcePosition{
source_line: 1
source_column: 12
}
},
MappingInput{
GenPosition: GenPosition{
gen_line: 2
gen_column: 27
}
name: ''
source_position: SourcePosition{
source_line: 1
source_column: 0
}
},
MappingInput{
GenPosition: GenPosition{
gen_line: 2
gen_column: 28
}
name: ''
source_position: SourcePosition{
source_line: 1
source_column: 0
}
},
MappingInput{
GenPosition: GenPosition{
gen_line: 2
gen_column: 29
}
name: ''
source_position: SourcePosition{
source_line: 1
source_column: 0
}
},
MappingInput{
GenPosition: GenPosition{
gen_line: 3
gen_column: 0
}
name: ''
source_position: SourcePosition{
source_line: 1
source_column: 0
}
},
]
sm.add_mapping_list('hello.v', mlist) or { panic('x') }
json_data := sm.to_json()
expected := '{"version":3,"file":"hello.js","sourceRoot":"\\/","sources":["hello.v"],"sourcesContent":["fn main(){nprintln(\'Hello World! Helo \$a\')\\n}"],"names":["hello_name"],"mappings":"AAAA;AAAA,EAAA,OAAO,CAACA,GAAR,CAAY,aAAZ,CAAA,CAAA;AAAA"}'
assert json_data.str() == expected
}
fn test_source_null() {
mut sg := generate_empty_map()
mut sm := sg.add_map('hello.js', '/', true, 0, 0)
sm.add_mapping('hello.v', SourcePosition{
source_line: 0
source_column: 0
}, 1, 1, '')
sm.add_mapping('hello_lib1.v', SourcePosition{
source_line: 0
source_column: 0
}, 2, 1, '')
sm.add_mapping('hello_lib2.v', SourcePosition{
source_line: 0
source_column: 0
}, 3, 1, '')
json_data := sm.to_json()
expected := '{"version":3,"file":"hello.js","sourceRoot":"\\/","sources":["hello.v","hello_lib1.v","hello_lib2.v"],"sourcesContent":[null,null,null],"names":[],"mappings":"CA+\\/\\/\\/\\/\\/HA;CCAA;CCAA"}'
assert json_data.str() == expected
}


@ -0,0 +1,322 @@
module sourcemap
fn test_cmp_eq() {
a := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: IndexNumber(3)
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
b := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: IndexNumber(3)
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
assert !compare_by_generated_positions_inflated(a, b)
}
fn test_cmp_name() {
a := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: IndexNumber(3)
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
b := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: IndexNumber(4)
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
assert compare_by_generated_positions_inflated(a, b)
}
fn test_cmp_name_empty() {
a := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: IndexNumber(3)
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
b := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
assert compare_by_generated_positions_inflated(a, b)
}
fn test_cmp_name_empty_empty() {
a := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
b := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
assert !compare_by_generated_positions_inflated(a, b)
}
fn test_cmp_source_position_empty_eq() {
a := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: Empty{}
}
b := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: Empty{}
}
assert !compare_by_generated_positions_inflated(a, b)
}
fn test_cmp_source_position_empty_diff() {
a := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
b := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: Empty{}
}
assert compare_by_generated_positions_inflated(a, b)
}
fn test_cmp_source_position_column_diff() {
a := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
b := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 99
}
}
assert compare_by_generated_positions_inflated(a, b)
}
fn test_cmp_source_position_line_diff() {
a := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
b := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 88
source_column: 99
}
}
assert compare_by_generated_positions_inflated(a, b)
}
fn test_cmp_sources() {
a := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
b := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 99
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
assert compare_by_generated_positions_inflated(a, b)
}
fn test_cmp_gen_column() {
a := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
b := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 99
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
assert compare_by_generated_positions_inflated(a, b)
}
fn test_cmp_gen_line() {
a := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 0
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
b := Mapping{
GenPosition: GenPosition{
gen_line: 1
gen_column: 99
}
sources_ind: 2
names_ind: Empty{}
source_position: SourcePosition{
source_line: 4
source_column: 5
}
}
assert compare_by_generated_positions_inflated(a, b)
}


@ -0,0 +1,169 @@
module sourcemap
import v.gen.js.sourcemap.vlq
import io
struct Empty {}
pub struct SourcePosition {
source_line u32
source_column u32
}
type IndexNumber = u32
type SourcePositionType = Empty | SourcePosition
type NameIndexType = Empty | IndexNumber
struct GenPosition {
gen_line u32
gen_column u32
}
struct MappingInput {
GenPosition
name string
source_position SourcePositionType
}
struct Mapping {
GenPosition
sources_ind u32
names_ind NameIndexType
source_position SourcePositionType
}
struct Mappings {
mut:
sorted bool
last Mapping
values []Mapping
}
fn new_mappings() Mappings {
return Mappings{
last: Mapping{
GenPosition: {
gen_column: 0
gen_line: 0
}
}
sorted: true
}
}
// Add the given source mapping
fn (mut m Mappings) add_mapping(gen_line u32, gen_column u32, sources_ind u32, source_position SourcePositionType, names_ind NameIndexType) {
if !(gen_line > m.last.gen_line
|| (gen_line == m.last.gen_line && gen_column >= m.last.gen_column)) {
m.sorted = false
}
m.values << Mapping{
GenPosition: GenPosition{
gen_line: gen_line
gen_column: gen_column
}
sources_ind: sources_ind
names_ind: names_ind
source_position: source_position
}
}
// Returns the flat, sorted array of mappings. The mappings are sorted by generated position.
fn (mut m Mappings) get_sorted_array() []Mapping {
if !m.sorted {
panic('not implemented')
}
return m.values
}
fn (mut m Mappings) export_mappings(mut output io.Writer) ? {
mut previous_generated_line := u32(1)
mut previous_generated_column := u32(0)
mut previous_source_index := i64(0)
mut previous_source_line := i64(0)
mut previous_source_column := i64(0)
mut previous_name_index := i64(0)
line_mappings := m.get_sorted_array()
len := line_mappings.len
for i := 0; i < len; i++ {
mapping := line_mappings[i]
cloned_generated_line := mapping.gen_line
if cloned_generated_line > 0 {
// Write a ';' for each line between this and last line, way more efficient than storing empty lines or looping...
output.write(';'.repeat(int(cloned_generated_line - previous_generated_line)).bytes()) or {
panic('Writing vlq failed!')
}
}
if cloned_generated_line != previous_generated_line {
previous_generated_column = 0
previous_generated_line = cloned_generated_line
} else {
if i > 0 {
if !compare_by_generated_positions_inflated(mapping, line_mappings[i - 1]) {
continue
}
output.write(','.bytes()) or { panic('Writing vlq failed!') }
}
}
vlq.encode(i64(mapping.gen_column - previous_generated_column), mut &output) ?
previous_generated_column = mapping.gen_column
match mapping.source_position {
Empty {}
SourcePosition {
vlq.encode(i64(mapping.sources_ind - previous_source_index), mut &output) ?
previous_source_index = mapping.sources_ind
// lines are stored 0-based in SourceMap spec version 3
vlq.encode(i64(mapping.source_position.source_line - 1 - previous_source_line), mut
output) ?
previous_source_line = mapping.source_position.source_line - 1
vlq.encode(i64(mapping.source_position.source_column - previous_source_column), mut
output) ?
previous_source_column = mapping.source_position.source_column
match mapping.names_ind {
Empty {}
IndexNumber {
vlq.encode(i64(mapping.names_ind - previous_name_index), mut &output) ?
previous_name_index = mapping.names_ind
}
}
}
}
}
}
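In the exported `mappings` string (see to_json below and the expected strings in the tests), each generated line is terminated by ';' and segments within one line are separated by ','. Every segment is a group of base64 VLQ numbers holding deltas against the previous mapping: generated column, source index, source line, source column, and an optional name index, matching the Source Map v3 spec linked from sourcemap.v. As a concrete reading aid: with this encoder 'A' is a delta of 0 and 'E' is a delta of 2, so a segment "EAAA" means generated column +2, same source file, same source line, same source column.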
fn compare_by_generated_positions_inflated(mapping_a Mapping, mapping_b Mapping) bool {
if mapping_a.gen_line != mapping_b.gen_line {
return true
}
if mapping_a.gen_column != mapping_b.gen_column {
return true
}
if mapping_a.sources_ind != mapping_b.sources_ind {
return true
}
if mapping_a.source_position.type_name() == mapping_b.source_position.type_name()
&& mapping_b.source_position is SourcePosition {
if
(mapping_a.source_position as SourcePosition).source_line != (mapping_b.source_position as SourcePosition).source_line || (mapping_a.source_position as SourcePosition).source_column != (mapping_b.source_position as SourcePosition).source_column {
return true
}
} else {
if mapping_a.source_position.type_name() != mapping_b.source_position.type_name() {
return true
}
}
if mapping_a.names_ind.type_name() == mapping_b.names_ind.type_name()
&& mapping_a.names_ind is IndexNumber {
return (mapping_a.names_ind as IndexNumber) != (mapping_b.names_ind as IndexNumber)
} else {
return mapping_a.names_ind.type_name() != mapping_b.names_ind.type_name()
}
}


@ -0,0 +1,16 @@
module sourcemap
struct Sets {
mut:
value map[string]u32
}
// adds a new element to a Set if new and returns index position of new or existing element
fn (mut s Sets) add(element string) u32 {
index := s.value[element] or {
index := u32(s.value.len)
s.value[element] = index
return index
}
return index
}
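A quick behavioural sketch (this is how SourceMap.add_mapping uses it for the sources and names tables):
    mut s := Sets{}
    assert s.add('hello.v') == 0
    assert s.add('other.v') == 1
    assert s.add('hello.v') == 0 // an existing element keeps its original index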


@ -0,0 +1,131 @@
module sourcemap
import io
import os
import x.json2
const (
source_map_version = 3
)
type SourceMapJson = map[string]json2.Any
struct SourceMap {
pub mut:
version int [json: version]
file string [json: file]
source_root string [json: source_root]
sources Sets [json: sources]
sources_content map[string]string
names Sets
mappings Mappings
sources_content_inline bool
}
struct StringWriter {
pub mut:
bytes []byte
}
pub fn new_sourcemap(file string, source_root string, sources_content_inline bool) SourceMap {
return SourceMap{
version: sourcemap.source_map_version
file: file
source_root: source_root
mappings: new_mappings()
sources_content_inline: sources_content_inline
}
}
// Add a single mapping from original source line and column to the generated source's line and column for this source map being created.
pub fn (mut sm SourceMap) add_mapping(source_name string, source_position SourcePositionType, gen_line u32, gen_column u32, name string) {
assert source_name.len != 0
sources_ind := sm.sources.add(source_name)
names_ind := if name.len != 0 {
NameIndexType(IndexNumber(sm.names.add(name)))
} else {
NameIndexType(Empty{})
}
sm.mappings.add_mapping(gen_line, gen_column, sources_ind, source_position, names_ind)
}
// Add multiple mappings from the same source
pub fn (mut sm SourceMap) add_mapping_list(source_name string, mapping_list []MappingInput) ? {
assert source_name.len != 0
sources_ind := sm.sources.add(source_name)
for mapping in mapping_list {
names_ind := if mapping.name.len != 0 {
NameIndexType(IndexNumber(sm.names.add(mapping.name)))
} else {
NameIndexType(Empty{})
}
sm.mappings.add_mapping(mapping.gen_line, mapping.gen_column, sources_ind, mapping.source_position,
names_ind)
}
}
// Set the source content for a source file.
pub fn (mut sm SourceMap) set_source_content(source_name string, source_content string) {
sm.sources_content[source_name] = source_content
}
fn (mut sm SourceMap) export_mappings(mut writer io.Writer) {
sm.mappings.export_mappings(mut writer) or { panic('export failed') }
}
fn (mut sm SourceMap) export_mappings_string() string {
mut output := StringWriter{}
sm.mappings.export_mappings(mut output) or { panic('export failed') }
return output.bytes.bytestr()
}
// create a JSON representing the sourcemap
// Sourcemap Specs http://sourcemaps.info/spec.html
pub fn (mut sm SourceMap) to_json() SourceMapJson {
mut source_map_json := map[string]json2.Any{}
source_map_json['version'] = sm.version
if sm.file != '' {
source_map_json['file'] = json2.Any(sm.file)
}
if sm.source_root != '' {
source_map_json['sourceRoot'] = json2.Any(sm.source_root)
}
mut sources_json := []json2.Any{}
mut sources_content_json := []json2.Any{}
for source_file, _ in sm.sources.value {
sources_json << source_file
if source_file in sm.sources_content {
sources_content_json << sm.sources_content[source_file]
} else {
if sm.sources_content_inline {
if source_file_content := os.read_file(source_file) {
sources_content_json << source_file_content
} else {
sources_content_json << json2.null
}
} else {
sources_content_json << json2.null
}
}
}
source_map_json['sources'] = json2.Any(sources_json)
source_map_json['sourcesContent'] = json2.Any(sources_content_json)
mut names_json := []json2.Any{}
for name, _ in sm.names.value {
names_json << name
}
source_map_json['names'] = json2.Any(names_json)
source_map_json['mappings'] = sm.export_mappings_string()
return source_map_json
}
fn (mut w StringWriter) write(buf []byte) ?int {
w.bytes << buf
return buf.len
}


@ -0,0 +1,46 @@
module sourcemap
struct V3 {
SourceMap
pub:
sections []Section [json: sections]
}
struct Offset {
pub mut:
line int [json: line]
column int [json: column]
}
struct Section {
pub mut:
offset Offset [json: offset]
source_map SourceMap [json: map]
}
struct Generator {
mut:
file string
// source_root string
sections []Section
}
pub fn generate_empty_map() &Generator {
return &Generator{}
}
pub fn (mut g Generator) add_map(file string, source_root string, sources_content_inline bool, line_offset int, column_offset int) &SourceMap {
source_map := new_sourcemap(file, source_root, sources_content_inline)
offset := Offset{
line: line_offset
column: column_offset
}
g.sections << Section{
offset: offset
source_map: source_map
}
return &source_map
}
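Putting the API together, a caller builds a generator, registers a map and then adds mappings, which is essentially what gen() and the tests in this commit do (the file names and positions below are placeholders):
    mut sg := generate_empty_map()
    mut sm := sg.add_map('hello.js', '/', true, 0, 0)
    sm.add_mapping('hello.v', SourcePosition{
        source_line: 1
        source_column: 0
    }, 1, 0, '')
    println(sm.to_json())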


@ -0,0 +1,115 @@
module vlq
import io
const (
shift = byte(5)
mask = byte((1 << shift) - 1)
continued = byte(1 << shift)
max_i64 = u64(9223372036854775807)
// index start is: byte - vlq.enc_char_special_plus
enc_index = [62, 0, 0, 0, 63, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 0, 0, 0,
0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
22, 23, 24, 25, 0, 0, 0, 0, 0, 0, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51]!
enc_table = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
enc_char_start_au = 65
enc_char_end_zu = 90
enc_char_start_al = 97
enc_char_end_zl = 122
enc_char_start_zero = 48
enc_char_end_nine = 57
enc_char_special_plus = 43
enc_char_special_slash = 47
)
[inline]
fn abs64(x i64) u64 {
return if x < 0 { u64(-x) } else { u64(x) }
}
// Decode a single base64 digit.
[inline]
fn decode64(input byte) byte {
$if debug {
assert input >= vlq.enc_char_special_plus
assert input <= vlq.enc_char_end_zl
}
return byte(vlq.enc_index[input - vlq.enc_char_special_plus])
}
// Decode a single VLQ value from the input stream, returning the value.
//
// # Range
//
// Supports all numbers that can be represented by a sign bit and a 63 bit
// absolute value: `[-(2^63 - 1), 2^63 - 1]`.
//
// Note that `i64::MIN = -(2^63)` cannot be represented in that form; handling it is
// NOT IMPLEMENTED: this function would need to return `Error::Overflowed` when attempting to decode it.
pub fn decode(mut input io.Reader) ?i64 {
mut buf := []byte{len: 1}
mut accum := u64(0)
mut shifter := 0
mut digit := byte(0)
mut keep_going := true
for keep_going {
len := input.read(mut buf) or { return error('Unexpected EOF') }
if len == 0 {
return error('no content')
}
digit = decode64(buf[0])
keep_going = (digit & vlq.continued) != 0
digit_value := u64(digit & vlq.mask) << u32(shifter) // TODO: check Overflow
accum += digit_value
shifter += vlq.shift
}
abs_value := accum / 2
if abs_value > vlq.max_i64 {
return error('Overflow')
}
// The low bit holds the sign.
return if (accum & 1) != 0 { (-i64(abs_value)) } else { i64(abs_value) }
}
[inline]
fn encode64(input byte) byte {
$if debug {
assert input < 64
}
return vlq.enc_table[input]
}
// Encode a value as Base64 VLQ, sending it to the writer
pub fn encode(value i64, mut output io.Writer) ? {
signed := value < 0
mut value_u64 := abs64(value) << 1
if signed {
if value_u64 == 0 {
// Wrapped
value_u64 = vlq.max_i64 + 1
}
value_u64 |= 1
}
for {
mut digit := byte(value_u64) & vlq.mask
value_u64 >>= vlq.shift
if value_u64 > 0 {
digit |= vlq.continued
}
bytes := [encode64(digit)]
output.write(bytes) or { return error('Write failed') }
if value_u64 == 0 {
break
}
}
}
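To make the scheme concrete, here is how 123 (one of the values in the tests below) becomes '2H'; each base64 digit carries 5 value bits plus a continuation bit:
    // 123 << 1 = 246 (bit 0 is the sign bit, 0 = positive)
    // digit 1: 246 & 0x1f = 22, remainder 246 >> 5 = 7; remainder is non-zero, so set the continuation bit: 22 | 32 = 54 -> '2'
    // digit 2: 7 & 0x1f = 7, remainder 0: 7 -> 'H'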


@ -0,0 +1,52 @@
module vlq
import io
struct TestReader {
pub:
bytes []byte
mut:
i int
}
struct TestData {
decode_val string
expected i64
}
type TestDataList = []TestData
fn test_decode_a() ? {
decode_values := [
TestData{'A', 0},
TestData{'C', 1},
TestData{'D', -1},
TestData{'2H', 123},
TestData{'qxmvrH', 123456789},
TestData{'+/////B', 1073741823} /* 2^30-1 */,
// TestData{'hgggggggggggI', 9_223_372_036_854_775_808} /* 2^63 */,
]
for _, test_data in decode_values {
mut input := make_test_reader(test_data.decode_val)
res := decode(mut &input) ?
assert res == test_data.expected
}
}
fn (mut b TestReader) read(mut buf []byte) ?int {
if !(b.i < b.bytes.len) {
return none
}
n := copy(buf, b.bytes[b.i..])
b.i += n
return n
}
fn make_test_reader(data string) io.Reader {
buf := &TestReader{
bytes: data.bytes()
}
return io.new_buffered_reader(reader: buf)
}


@ -0,0 +1,35 @@
module vlq
struct TestData {
expected string
data_val i64
}
struct TestWriter {
pub mut:
bytes []byte
}
fn test_encode_a() ? {
decode_values := [
TestData{'A', 0},
TestData{'C', 1},
TestData{'D', -1},
TestData{'2H', 123},
TestData{'qxmvrH', 123456789},
TestData{'+/////B', 1073741823} /* 2^30-1 */,
// TestData{'hgggggggggggI', 9_223_372_036_854_775_808} /* 2^63 */,
]
for _, test_data in decode_values {
mut output := TestWriter{}
encode(test_data.data_val, mut &output) ?
// dump(output.bytes)
assert output.bytes == test_data.expected.bytes()
}
}
fn (mut w TestWriter) write(buf []byte) ?int {
w.bytes << buf
return buf.len
}


@ -0,0 +1,23 @@
module main
fn main() {
e := JS.Error{}
s := e.stack
node_version := js_node_process().version
node_main := get_node_main_version(node_version)
if node_main >= 12 {
if s.contains('simple_sourcemap.v:') {
panic('node found no source map!')
} else {
println('source map is working')
}
} else {
println('skipping test! node version >=12.12.0 required. Current version is $node_version')
}
}
fn get_node_main_version(str string) int {
a := str.slice(1, int(str.len))
b := a.split('.')
return b[0].int()
}


@ -85,25 +85,28 @@ pub mut:
// verbosity VerboseLevel
is_verbose bool
// nofmt bool // disable vfmt
is_test bool // `v test string_test.v`
is_script bool // single file mode (`v program.v`), main function can be skipped
is_vsh bool // v script (`file.vsh`) file, the `os` module should be made global
is_livemain bool // main program that contains live/hot code
is_liveshared bool // a shared library, that will be used in a -live main program
is_shared bool // an ordinary shared library, -shared, no matter if it is live or not
is_prof bool // benchmark every function
profile_file string // the profile results will be stored inside profile_file
profile_no_inline bool // when true, [inline] functions would not be profiled
translated bool // `v translate doom.v` are we running V code translated from C? allow globals, ++ expressions, etc
is_prod bool // use "-O2"
obfuscate bool // `v -obf program.v`, renames functions to "f_XXX"
is_repl bool
is_run bool
sanitize bool // use Clang's new "-fsanitize" option
is_debug bool // false by default, turned on by -g or -cg, it tells v to pass -g to the C backend compiler.
sourcemap bool // JS Backend: -sourcemap will create a source map - default false
sourcemap_inline bool = true // JS Backend: -sourcemap-inline will embed the source map in the generated JavaScript file - currently only the (default) inline variant is implemented
sourcemap_src_included bool // JS Backend: -sourcemap-src-included includes V source code in the source map - default false
is_vlines bool // turned on by -g, false by default (it slows down .tmp.c generation slightly).
show_cc bool // -showcc, print cc command
show_c_output bool // -show-c-output, print all cc output even if the code was compiled correctly
// NB: passing -cg instead of -g will set is_vlines to false and is_debug to true, thus making v generate cleaner C files,
// which are sometimes easier to debug / inspect manually than the .tmp.c files by plain -g (when/if v line number generation breaks).
// use cached modules to speed up compilation.
@ -322,6 +325,15 @@ pub fn parse_args(known_external_commands []string, args []string) (&Preferences
res.show_cc = true
res.show_c_output = true
}
'-sourcemap' {
res.sourcemap = true
}
'-sourcemap-src-included' {
res.sourcemap_src_included = true
}
'-sourcemap-inline' {
res.sourcemap_inline = true
}
'-repl' {
res.is_repl = true
}