Skip to content

stage2: Add "tilde pointing" to errors for multi-character offenses #9201

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
56 changes: 55 additions & 1 deletion src/Compilation.zig
Original file line number Diff line number Diff line change
Expand Up @@ -317,6 +317,7 @@ pub const AllErrors = struct {
byte_offset: u32,
/// Does not include the trailing newline.
source_line: ?[]const u8,
lexeme_length: u32,
notes: []Message = &.{},
},
plain: struct {
Expand Down Expand Up @@ -364,7 +365,12 @@ pub const AllErrors = struct {
try stderr.writeByte('\n');
try stderr.writeByteNTimes(' ', src.column);
ttyconf.setColor(stderr, .Green);
try stderr.writeAll("^\n");
// Point at the offending source span: a run of '~' covering the whole
// lexeme when it is multi-character, otherwise the traditional single '^'.
// NOTE(review): the multi-char branch prints only tildes (no leading '^');
// confirm that matches the intended "tilde pointing" style.
if (src.lexeme_length > 1) {
try stderr.writeByteNTimes('~', src.lexeme_length);
try stderr.writeByte('\n');
} else {
try stderr.writeAll("^\n");
}
ttyconf.setColor(stderr, .Reset);
}
}
Expand Down Expand Up @@ -404,6 +410,15 @@ pub const AllErrors = struct {
const byte_offset = try module_note.src_loc.byteOffset(module.gpa);
const loc = std.zig.findLineColumn(source, byte_offset);
const sub_file_path = module_note.src_loc.file_scope.sub_file_path;
// Length of the lexeme this note points at, used by the error renderer to
// draw a tilde underline. Re-tokenizes the note's source line starting at
// the error column and measures the first token produced.
// NOTE(review): assumes `loc.column` is a byte offset into
// `loc.source_line` rather than a display column -- confirm against
// std.zig.findLineColumn.
const lexeme_length = blk: {
var tokenizer = std.zig.Tokenizer{
.buffer = loc.source_line,
.index = loc.column,
.pending_invalid_token = null,
};
const token = tokenizer.next();
break :blk @intCast(u32, token.loc.end - token.loc.start);
};
note.* = .{
.src = .{
.src_path = try arena.allocator.dupe(u8, sub_file_path),
Expand All @@ -412,6 +427,7 @@ pub const AllErrors = struct {
.line = @intCast(u32, loc.line),
.column = @intCast(u32, loc.column),
.source_line = try arena.allocator.dupe(u8, loc.source_line),
.lexeme_length = lexeme_length,
},
};
}
Expand All @@ -427,6 +443,15 @@ pub const AllErrors = struct {
const byte_offset = try module_err_msg.src_loc.byteOffset(module.gpa);
const loc = std.zig.findLineColumn(source, byte_offset);
const sub_file_path = module_err_msg.src_loc.file_scope.sub_file_path;
// Length of the lexeme the error message points at, used to draw a tilde
// underline. Runs a fresh tokenizer over just the offending source line,
// starting at the error column, and measures the first token it yields.
// NOTE(review): presumes `loc.column` indexes bytes of `loc.source_line`;
// a tab or multi-byte character before the error would need verifying.
const lexeme_length = blk: {
var tokenizer = std.zig.Tokenizer{
.buffer = loc.source_line,
.index = loc.column,
.pending_invalid_token = null,
};
const token = tokenizer.next();
break :blk @intCast(u32, token.loc.end - token.loc.start);
};
try errors.append(.{
.src = .{
.src_path = try arena.allocator.dupe(u8, sub_file_path),
Expand All @@ -436,6 +461,7 @@ pub const AllErrors = struct {
.column = @intCast(u32, loc.column),
.notes = notes,
.source_line = try arena.allocator.dupe(u8, loc.source_line),
.lexeme_length = lexeme_length,
},
});
}
Expand Down Expand Up @@ -476,6 +502,18 @@ pub const AllErrors = struct {
}
break :blk token_starts[note_item.data.token] + note_item.data.byte_offset;
};
// Length of the lexeme this note points at, used to underline it with
// tildes. When the note is attached to an AST node, measure that node's
// main token; otherwise measure the note's own token.
const lexeme_length = blk: {
    const token = tok: {
        if (note_item.data.node != 0) {
            const main_tokens = file.tree.nodes.items(.main_token);
            // Bug fix: index with the *note's* node. The original indexed
            // `main_tokens[item.data.node]` (the parent error's node) even
            // though the branch condition tested `note_item.data.node`,
            // so every note was measured against the wrong token.
            const main_token = main_tokens[note_item.data.node];
            break :tok main_token;
        } else {
            break :tok note_item.data.token;
        }
    };
    break :blk @intCast(u32, file.tree.tokenSlice(token).len);
};
const loc = std.zig.findLineColumn(file.source, byte_offset);

note.* = .{
Expand All @@ -487,6 +525,7 @@ pub const AllErrors = struct {
.column = @intCast(u32, loc.column),
.notes = &.{}, // TODO rework this function to be recursive
.source_line = try arena.dupe(u8, loc.source_line),
.lexeme_length = lexeme_length,
},
};
}
Expand All @@ -502,6 +541,18 @@ pub const AllErrors = struct {
}
break :blk token_starts[item.data.token] + item.data.byte_offset;
};
// Length of the offending lexeme, used to draw a tilde underline under
// the error location. Prefer the main token of the item's AST node when
// one is present; otherwise fall back to the item's own token.
const lexeme_length = blk: {
    const token = if (item.data.node != 0)
        file.tree.nodes.items(.main_token)[item.data.node]
    else
        item.data.token;
    break :blk @intCast(u32, file.tree.tokenSlice(token).len);
};
const loc = std.zig.findLineColumn(file.source, byte_offset);

try errors.append(.{
Expand All @@ -513,6 +564,7 @@ pub const AllErrors = struct {
.column = @intCast(u32, loc.column),
.notes = notes,
.source_line = try arena.dupe(u8, loc.source_line),
.lexeme_length = lexeme_length,
},
});
}
Expand Down Expand Up @@ -555,6 +607,7 @@ pub const AllErrors = struct {
.column = src.column,
.byte_offset = src.byte_offset,
.source_line = if (src.source_line) |s| try arena.dupe(u8, s) else null,
.lexeme_length = src.lexeme_length,
.notes = try dupeList(src.notes, arena),
} },
.plain => |plain| .{ .plain = .{
Expand Down Expand Up @@ -1801,6 +1854,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
.line = err_msg.line,
.column = err_msg.column,
.source_line = null, // TODO
.lexeme_length = 1, // TODO: extract from `source_line` using `std.zig.Tokenizer`
},
});
}
Expand Down
2 changes: 2 additions & 0 deletions src/main.zig
Original file line number Diff line number Diff line change
Expand Up @@ -3292,6 +3292,7 @@ fn printErrMsgToStdErr(
const lok_token = parse_error.token;
const start_loc = tree.tokenLocation(0, lok_token);
const source_line = tree.source[start_loc.line_start..start_loc.line_end];
const lexeme_length = @intCast(u32, tree.tokenSlice(lok_token).len);

var text_buf = std.ArrayList(u8).init(gpa);
defer text_buf.deinit();
Expand All @@ -3307,6 +3308,7 @@ fn printErrMsgToStdErr(
.line = @intCast(u32, start_loc.line),
.column = @intCast(u32, start_loc.column),
.source_line = source_line,
.lexeme_length = lexeme_length,
},
};

Expand Down