Skip to content

Feature/preprocessor sourcemaps #5428

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 26 commits into from
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
e223c35
add source map support for preprocessors
halfnelson Jun 6, 2020
43c5d5e
preprocessor sourcemaps: prettify code
milahu Sep 5, 2020
b8e9b68
preprocessor sourcemaps: prettify code 2
milahu Sep 5, 2020
4a03b10
preprocessor sourcemaps: prettify code 3
milahu Sep 5, 2020
307276a
handle empty attributes and content
milahu Sep 5, 2020
dee3aab
move fn replace_async, etc
milahu Sep 19, 2020
e753489
fix test/preprocess
milahu Sep 19, 2020
6d06b7b
refactor test/sourcemaps
milahu Sep 19, 2020
433213a
lint commas
milahu Sep 19, 2020
e76f37d
move fn get_replacement
milahu Sep 20, 2020
b7d5974
bugfix in fn merge_tables
milahu Sep 20, 2020
7cccff1
remove hack
milahu Sep 20, 2020
6668f12
trigger test on travis ci
milahu Sep 20, 2020
459dd88
refactor
milahu Sep 21, 2020
880f556
ignore names in sourcemap
milahu Sep 21, 2020
2cf1ae6
handle sourcemap.names
milahu Sep 22, 2020
1073120
remove unnecessary sourcemap encode
milahu Sep 23, 2020
cf600f7
add tests, fix empty map.sources, cleanup gitignore
milahu Sep 23, 2020
38f4ce4
fix decode, don't fix missing map.sources
milahu Sep 24, 2020
47ffc05
optimize concat
milahu Sep 24, 2020
b739bdb
optimize merge_tables, verbose remapper error
milahu Sep 25, 2020
e7abdfa
Merge branch 'master' into feature/preprocessor-sourcemaps
milahu Sep 25, 2020
422cc0d
optimize: use mutable data, unswitch loops
milahu Sep 25, 2020
3d053d9
support default + named import
milahu Sep 26, 2020
a0eb41f
support multiple source files, fix types
milahu Sep 29, 2020
18003d6
fix tests, use decoded mappings, show warnings
milahu Oct 4, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
refactor
  • Loading branch information
milahu committed Sep 21, 2020
commit 459dd88b71813d4dcea58f88f81c7cdaeeaf2570
23 changes: 11 additions & 12 deletions src/compiler/preprocess/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -95,22 +95,21 @@ function get_replacement(
prefix: string,
suffix: string
): StringWithSourcemap {
const generated_prefix = StringWithSourcemap.from_source(
const prefix_with_map = StringWithSourcemap.from_source(
filename, prefix, get_location(offset));
const generated_suffix = StringWithSourcemap.from_source(
const suffix_with_map = StringWithSourcemap.from_source(
filename, suffix, get_location(offset + prefix.length + original.length));

let generated;
let processed_map_shifted;
if (processed.map) {
const full_map = typeof processed.map === "string" ? JSON.parse(processed.map) : processed.map;
const decoded_map = { ...full_map, mappings: sourcemap_decode(full_map.mappings) };
const decoded_map = typeof processed.map === "string" ? JSON.parse(processed.map) : processed.map;
decoded_map.mappings = sourcemap_decode(decoded_map.mappings);
const processed_offset = get_location(offset + prefix.length);
generated = StringWithSourcemap.from_generated(processed.code, sourcemap_add_offset(processed_offset, decoded_map));
} else {
generated = StringWithSourcemap.from_generated(processed.code);
processed_map_shifted = sourcemap_add_offset(decoded_map, processed_offset);
}
const map = generated_prefix.concat(generated).concat(generated_suffix);
return map;
const processed_with_map = StringWithSourcemap.from_processed(processed.code, processed_map_shifted);

return prefix_with_map.concat(processed_with_map).concat(suffix_with_map);
}

export default async function preprocess(
Expand Down Expand Up @@ -177,7 +176,7 @@ export default async function preprocess(
: no_change();
}
);
source = res.generated;
source = res.string;
sourcemap_list.unshift(res.get_sourcemap());
}

Expand Down Expand Up @@ -210,7 +209,7 @@ export default async function preprocess(
: no_change();
}
);
source = res.generated;
source = res.string;
sourcemap_list.unshift(res.get_sourcemap());
}

Expand Down
230 changes: 74 additions & 156 deletions src/compiler/utils/string_with_sourcemap.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,234 +16,152 @@ type SourceLocation = {
column: number;
};

function get_end_location(s: string): SourceLocation {
const parts = s.split("\n");
return {
line: parts.length - 1,
column: parts[parts.length - 1].length - 1
};
function last_line_length(s: string) {
return s.length - s.lastIndexOf('\n') - 1;
}

export function sourcemap_add_offset(
offset: SourceLocation,
map: SourceMappings
map: SourceMappings, offset: SourceLocation
): SourceMappings {
const new_mappings = map.mappings.map((line) =>
line.map((seg) => {
if (seg.length < 3) return seg;
const new_seg = seg.slice() as MappingSegment;
new_seg[2] = new_seg[2] + offset.line;
return new_seg;
})
);

// column changed in first line
if (new_mappings.length > 0) {
new_mappings[0] = new_mappings[0].map((seg) => {
if (seg.length < 4) return seg;
const newSeg = seg.slice() as MappingSegment;
newSeg[3] = newSeg[3] + offset.column;
return newSeg;
});
}

return {
sources: map.sources,
mappings: new_mappings
sources: map.sources.slice(),
mappings: map.mappings.map((line, line_idx) =>
line.map(seg => {
const new_seg = seg.slice() as MappingSegment;
if (seg.length >= 4) {
new_seg[2] = new_seg[2] + offset.line;
if (line_idx == 0)
new_seg[3] = new_seg[3] + offset.column;
}
return new_seg;
})
)
} as SourceMappings;
}

function merge_tables<T>(
original: T[],
extended: T[]
): { table: T[]; new_idx: number[] } {
const table = original.slice();
const new_idx = [];
if (extended) {
for (let j = 0; j < extended.length; j++) {
const current = extended[j];
const existing = table.indexOf(current);
if (existing == -1) {
table.push(current);
new_idx[j] = table.length - 1;
} else {
new_idx[j] = existing;
}
function merge_tables<T>(this_table: T[], other_table): [T[], number[]] {
const new_table = this_table.slice();
const idx_map = [];
other_table = other_table || [];
for (const [other_idx, other_val] of other_table.entries()) {
const this_idx = this_table.indexOf(other_val);
if (this_idx >= 0) {
idx_map[other_idx] = this_idx;
} else {
const new_idx = new_table.length;
new_table[new_idx] = other_val;
idx_map[other_idx] = new_idx;
}
}
return { table, new_idx };
return [new_table, idx_map];
}

export class StringWithSourcemap {
readonly generated: string;
readonly string: string;
readonly map: SourceMappings;

constructor(generated: string, map: SourceMappings) {
this.generated = generated;
constructor(string: string, map: SourceMappings) {
this.string = string;
this.map = map;
}

get_sourcemap() {
return {
version: 3,
sources: this.map.sources,
sources: this.map.sources.slice(),
names: [],
mappings: sourcemap_encode(this.map.mappings as any)
};
}

concat(other: StringWithSourcemap): StringWithSourcemap {
// if one is empty, return the other
if (this.generated.length == 0) return other;
if (other.generated.length == 0) return this;

// combine sources
const {
table: new_sources,
new_idx: other_source_idx
} = merge_tables(
this.map.sources,
other.map.sources
);
// noop: if one is empty, return the other
if (this.string == '') return other;
if (other.string == '') return this;

// combine names
const {
table: new_names,
new_idx: other_name_idx
} = merge_tables(
this.map.names,
other.map.names
);
// combine sources and names
const [sources, new_source_idx] = merge_tables(this.map.sources, other.map.sources);
const [names, new_name_idx] = merge_tables(this.map.names, other.map.names);

// update source refs and name refs in segments
const other_mappings = other.map.mappings.map((line) =>
line.map((seg) => {
// to reduce allocations,
// we only return a new segment if a value has changed
if (
// new source idx
(seg.length > 1 && other_source_idx[seg[1]] != seg[1]) ||
// new name idx
(seg.length == 5 && other_name_idx[seg[4]] != seg[4])
) {
const new_seg = seg.slice() as MappingSegment;
new_seg[1] = other_source_idx[seg[1]];
if (seg.length == 5) {
new_seg[4] = other_name_idx[seg[4]];
}
return new_seg;
} else {
return seg;
}
line.map(seg => {
const new_seg = seg.slice() as MappingSegment;
if (seg[1]) new_seg[1] = new_source_idx[seg[1]];
if (seg[4]) new_seg[4] = new_name_idx[seg[4]];
return new_seg;
})
);

// combine the mappings

// this.map is read-only, so we copy
let new_mappings = this.map.mappings.slice();

// combine:
// combine
// 1. last line of first map
// 2. first line of second map
// columns of 2 must be shifted
const end = get_end_location(this.generated);
const col_offset = end.column + 1;

const first_line =
const col_offset = last_line_length(this.string);

const first_line: MappingSegment[] =
other_mappings.length == 0
? []
: col_offset == 0
? other_mappings[0]
? other_mappings[0].slice() as MappingSegment[]
: other_mappings[0].map((seg) => {
// shift columns
const new_seg = seg.slice() as MappingSegment;
new_seg[0] = seg[0] + col_offset;
return new_seg;
});

// append segments to last line of first map
new_mappings[new_mappings.length - 1] =
new_mappings[new_mappings.length - 1].concat(first_line);

// the other lines don't need modification and can just be appended
new_mappings = new_mappings.concat(
other_mappings.slice(1) as MappingSegment[][]
);
const mappings: MappingSegment[][] =
this.map.mappings.slice(0, -1)
.concat([
this.map.mappings.slice(-1)[0] // last line
.concat(first_line)
])
.concat(other_mappings.slice(1) as MappingSegment[][]);

return new StringWithSourcemap(
this.generated + other.generated, {
sources: new_sources,
names: new_names,
mappings: new_mappings
});
this.string + other.string,
{ sources, names, mappings }
);
}

static from_generated(
generated: string,
map?: SourceMappings
): StringWithSourcemap {
if (map) return new StringWithSourcemap(generated, map);

const replacement_map: SourceMappings = {
names: [],
sources: [],
mappings: []
};

if (generated.length == 0)
return new StringWithSourcemap(generated, replacement_map);

// we generate a mapping
// where the source was overwritten by the generated
const end = get_end_location(generated);
for (let i = 0; i <= end.line; i++) {
replacement_map.mappings.push([]); // unmapped line
}

return new StringWithSourcemap(generated, replacement_map);
static from_processed(string: string, map?: SourceMappings): StringWithSourcemap {
if (map) return new StringWithSourcemap(string, map);
map = { names: [], sources: [], mappings: [] };
if (string == '') return new StringWithSourcemap(string, map);
// add empty MappingSegment[] for every line
const lineCount = string.split('\n').length;
map.mappings = Array.from({length: lineCount}).map(_ => []);
return new StringWithSourcemap(string, map);
}

static from_source(
source_file: string,
source: string,
offset_in_source?: SourceLocation
source_file: string, source: string, offset_in_source?: SourceLocation
): StringWithSourcemap {
const offset = offset_in_source || { line: 0, column: 0 };
const map: SourceMappings = {
names: [],
sources: [source_file],
mappings: []
};

const map: SourceMappings = { names: [], sources: [source_file], mappings: [] };
if (source.length == 0) return new StringWithSourcemap(source, map);

// we create a high resolution identity map here,
// we know that it will eventually be merged with svelte's map,
// at which stage the resolution will decrease.
const lines = source.split("\n");
let pos = 0;
const identity_map = lines.map((line, line_idx) => {
const segs = line
.split(/([^\d\w\s]|\s+)/g)
.filter((s) => s !== "")
.map((s) => {
map.mappings = source.split("\n").map((line, line_idx) => {
let pos = 0;
const segs = line.split(/([^\d\w\s]|\s+)/g)
.filter(s => s !== "").map(s => {
const seg: MappingSegment = [
pos,
0,
pos, 0,
line_idx + offset.line,
// shift first line
pos + (line_idx == 0 ? offset.column : 0)
pos + (line_idx == 0 ? offset.column : 0) // shift first line
];
pos = pos + s.length;
return seg;
});
pos = 0;
return segs;
});

map.mappings = identity_map;

return new StringWithSourcemap(source, map);
}
}
2 changes: 1 addition & 1 deletion test/sourcemaps/samples/binding-shorthand.skip/test.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ export function test({ assert, input, js }) {
start = js.locate('potato');
start = js.locate('potato', start.character + 1);
start = js.locate('potato', start.character + 1);
// we need the third instance of 'potato'
// we need the third instance of 'potato'

const actual = js.mapConsumer.originalPositionFor({
line: start.line + 1,
Expand Down