
Commit 69f0a08

Update all std.mem.tokenize calls to their appropriate function
Everywhere that can now use `tokenizeScalar` should get a nice little performance boost.
1 parent e96c07c commit 69f0a08

24 files changed: +79 -79 lines
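For context, `std.mem.tokenize` was split into delimiter-specific variants: `tokenizeScalar` takes a single delimiter element, while `tokenizeAny` keeps the old behavior of treating every element of the delimiter slice as a delimiter. Below is a minimal sketch of the difference; the test names and inputs are illustrative and not part of this commit.

    const std = @import("std");
    const mem = std.mem;

    test "tokenizeScalar splits on one delimiter byte" {
        // Single delimiter value: each position is checked with a plain equality
        // comparison, presumably the source of the "nice little performance boost".
        var it = mem.tokenizeScalar(u8, "a;b;;c", ';');
        try std.testing.expectEqualStrings("a", it.next().?);
        try std.testing.expectEqualStrings("b", it.next().?);
        try std.testing.expectEqualStrings("c", it.next().?); // empty tokens are skipped
        try std.testing.expect(it.next() == null);
    }

    test "tokenizeAny splits on any byte of a delimiter set" {
        // Matches the old mem.tokenize behavior: '\r' and '\n' are both delimiters.
        var it = mem.tokenizeAny(u8, "one\r\ntwo\nthree", "\r\n");
        try std.testing.expectEqualStrings("one", it.next().?);
        try std.testing.expectEqualStrings("two", it.next().?);
        try std.testing.expectEqualStrings("three", it.next().?);
        try std.testing.expect(it.next() == null);
    }

The rule of thumb applied throughout the diff below: call sites that passed a one-character delimiter string (";", "\n", " ", "/") become `tokenizeScalar` with a character literal, and call sites that relied on a set of delimiters ("\r\n", " \t", "/\\") become `tokenizeAny`.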

build.zig

Lines changed: 4 additions & 4 deletions
@@ -284,7 +284,7 @@ pub fn build(b: *std.Build) !void {
     // That means we also have to rely on stage1 compiled c++ files. We parse config.h to find
     // the information passed on to us from cmake.
     if (cfg.cmake_prefix_path.len > 0) {
-        var it = mem.tokenize(u8, cfg.cmake_prefix_path, ";");
+        var it = mem.tokenizeScalar(u8, cfg.cmake_prefix_path, ';');
         while (it.next()) |path| {
             b.addSearchPrefix(path);
         }
@@ -687,7 +687,7 @@ fn addCxxKnownPath(
     if (!std.process.can_spawn)
         return error.RequiredLibraryNotFound;
     const path_padded = b.exec(&.{ ctx.cxx_compiler, b.fmt("-print-file-name={s}", .{objname}) });
-    var tokenizer = mem.tokenize(u8, path_padded, "\r\n");
+    var tokenizer = mem.tokenizeAny(u8, path_padded, "\r\n");
     const path_unpadded = tokenizer.next().?;
     if (mem.eql(u8, path_unpadded, objname)) {
         if (errtxt) |msg| {
@@ -710,7 +710,7 @@ fn addCxxKnownPath(
 }
 
 fn addCMakeLibraryList(exe: *std.Build.Step.Compile, list: []const u8) void {
-    var it = mem.tokenize(u8, list, ";");
+    var it = mem.tokenizeScalar(u8, list, ';');
     while (it.next()) |lib| {
         if (mem.startsWith(u8, lib, "-l")) {
             exe.linkSystemLibrary(lib["-l".len..]);
@@ -855,7 +855,7 @@ fn parseConfigH(b: *std.Build, config_h_text: []const u8) ?CMakeConfig {
         // .prefix = ZIG_LLVM_LINK_MODE parsed manually below
     };
 
-    var lines_it = mem.tokenize(u8, config_h_text, "\r\n");
+    var lines_it = mem.tokenizeAny(u8, config_h_text, "\r\n");
     while (lines_it.next()) |line| {
         inline for (mappings) |mapping| {
             if (mem.startsWith(u8, line, mapping.prefix)) {

lib/std/Build.zig

Lines changed: 1 addition & 1 deletion
@@ -1358,7 +1358,7 @@ pub fn findProgram(self: *Build, names: []const []const u8, paths: []const []con
     if (fs.path.isAbsolute(name)) {
         return name;
     }
-    var it = mem.tokenize(u8, PATH, &[_]u8{fs.path.delimiter});
+    var it = mem.tokenizeScalar(u8, PATH, fs.path.delimiter);
     while (it.next()) |path| {
         const full_path = self.pathJoin(&.{
             path,

lib/std/Build/Cache.zig

Lines changed: 2 additions & 2 deletions
@@ -467,7 +467,7 @@ pub const Manifest = struct {
 
     const input_file_count = self.files.items.len;
     var any_file_changed = false;
-    var line_iter = mem.tokenize(u8, file_contents, "\n");
+    var line_iter = mem.tokenizeScalar(u8, file_contents, '\n');
     var idx: usize = 0;
     while (line_iter.next()) |line| {
         defer idx += 1;
@@ -484,7 +484,7 @@ pub const Manifest = struct {
         break :blk new;
     };
 
-    var iter = mem.tokenize(u8, line, " ");
+    var iter = mem.tokenizeScalar(u8, line, ' ');
     const size = iter.next() orelse return error.InvalidFormat;
     const inode = iter.next() orelse return error.InvalidFormat;
     const mtime_nsec_str = iter.next() orelse return error.InvalidFormat;

lib/std/Build/Step/CheckObject.zig

Lines changed: 4 additions & 4 deletions
@@ -103,8 +103,8 @@ const Action = struct {
     assert(act.tag == .match or act.tag == .not_present);
     const phrase = act.phrase.resolve(b, step);
     var candidate_var: ?struct { name: []const u8, value: u64 } = null;
-    var hay_it = mem.tokenize(u8, mem.trim(u8, haystack, " "), " ");
-    var needle_it = mem.tokenize(u8, mem.trim(u8, phrase, " "), " ");
+    var hay_it = mem.tokenizeScalar(u8, mem.trim(u8, haystack, " "), ' ');
+    var needle_it = mem.tokenizeScalar(u8, mem.trim(u8, phrase, " "), ' ');
 
     while (needle_it.next()) |needle_tok| {
         const hay_tok = hay_it.next() orelse return false;
@@ -155,7 +155,7 @@ const Action = struct {
     var op_stack = std.ArrayList(enum { add, sub, mod, mul }).init(gpa);
     var values = std.ArrayList(u64).init(gpa);
 
-    var it = mem.tokenize(u8, phrase, " ");
+    var it = mem.tokenizeScalar(u8, phrase, ' ');
     while (it.next()) |next| {
         if (mem.eql(u8, next, "+")) {
             try op_stack.append(.add);
@@ -365,7 +365,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
     var vars = std.StringHashMap(u64).init(gpa);
 
     for (self.checks.items) |chk| {
-        var it = mem.tokenize(u8, output, "\r\n");
+        var it = mem.tokenizeAny(u8, output, "\r\n");
         for (chk.actions.items) |act| {
             switch (act.tag) {
                 .match => {

lib/std/Build/Step/Compile.zig

Lines changed: 3 additions & 3 deletions
@@ -777,7 +777,7 @@ fn runPkgConfig(self: *Compile, lib_name: []const u8) ![]const []const u8 {
     var zig_args = ArrayList([]const u8).init(b.allocator);
     defer zig_args.deinit();
 
-    var it = mem.tokenize(u8, stdout, " \r\n\t");
+    var it = mem.tokenizeAny(u8, stdout, " \r\n\t");
     while (it.next()) |tok| {
         if (mem.eql(u8, tok, "-I")) {
             const dir = it.next() orelse return error.PkgConfigInvalidOutput;
@@ -2017,10 +2017,10 @@ fn execPkgConfigList(self: *std.Build, out_code: *u8) (PkgConfigError || ExecErr
     const stdout = try self.execAllowFail(&[_][]const u8{ "pkg-config", "--list-all" }, out_code, .Ignore);
     var list = ArrayList(PkgConfigPkg).init(self.allocator);
     errdefer list.deinit();
-    var line_it = mem.tokenize(u8, stdout, "\r\n");
+    var line_it = mem.tokenizeAny(u8, stdout, "\r\n");
     while (line_it.next()) |line| {
         if (mem.trim(u8, line, " \t").len == 0) continue;
-        var tok_it = mem.tokenize(u8, line, " \t");
+        var tok_it = mem.tokenizeAny(u8, line, " \t");
         try list.append(PkgConfigPkg{
             .name = tok_it.next() orelse return error.PkgConfigInvalidOutput,
             .desc = tok_it.rest(),

lib/std/Build/Step/ConfigHeader.zig

Lines changed: 2 additions & 2 deletions
@@ -257,7 +257,7 @@ fn render_autoconf(
         try output.appendSlice("\n");
         continue;
     }
-    var it = std.mem.tokenize(u8, line[1..], " \t\r");
+    var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
     const undef = it.next().?;
     if (!std.mem.eql(u8, undef, "undef")) {
         try output.appendSlice(line);
@@ -304,7 +304,7 @@ fn render_cmake(
         try output.appendSlice("\n");
         continue;
     }
-    var it = std.mem.tokenize(u8, line[1..], " \t\r");
+    var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
     const cmakedefine = it.next().?;
     if (!std.mem.eql(u8, cmakedefine, "cmakedefine")) {
         try output.appendSlice(line);

lib/std/child_process.zig

Lines changed: 2 additions & 2 deletions
@@ -850,7 +850,7 @@ pub const ChildProcess = struct {
         return original_err;
     }
 
-    var it = mem.tokenize(u16, PATH, &[_]u16{';'});
+    var it = mem.tokenizeScalar(u16, PATH, ';');
     while (it.next()) |search_path| {
         dir_buf.clearRetainingCapacity();
         try dir_buf.appendSlice(self.allocator, search_path);
@@ -1067,7 +1067,7 @@ fn windowsCreateProcessPathExt(
     // Now we know that at least *a* file matching the wildcard exists, we can loop
     // through PATHEXT in order and exec any that exist
 
-    var ext_it = mem.tokenize(u16, pathext, &[_]u16{';'});
+    var ext_it = mem.tokenizeScalar(u16, pathext, ';');
     while (ext_it.next()) |ext| {
         if (!windowsCreateProcessSupportsExtension(ext)) continue;
 

lib/std/fs.zig

Lines changed: 1 addition & 1 deletion
@@ -3022,7 +3022,7 @@ pub fn selfExePath(out_buffer: []u8) SelfExePathError![]u8 {
     } else if (argv0.len != 0) {
         // argv[0] is not empty (and not a path): search it inside PATH
         const PATH = std.os.getenvZ("PATH") orelse return error.FileNotFound;
-        var path_it = mem.tokenize(u8, PATH, &[_]u8{path.delimiter});
+        var path_it = mem.tokenizeScalar(u8, PATH, path.delimiter);
         while (path_it.next()) |a_path| {
             var resolved_path_buf: [MAX_PATH_BYTES - 1:0]u8 = undefined;
             const resolved_path = std.fmt.bufPrintZ(&resolved_path_buf, "{s}/{s}", .{

lib/std/fs/path.zig

Lines changed: 13 additions & 13 deletions
@@ -358,7 +358,7 @@ pub fn windowsParsePath(path: []const u8) WindowsPath {
         return relative_path;
     }
 
-    var it = mem.tokenize(u8, path, &[_]u8{this_sep});
+    var it = mem.tokenizeScalar(u8, path, this_sep);
     _ = (it.next() orelse return relative_path);
     _ = (it.next() orelse return relative_path);
     return WindowsPath{
@@ -420,8 +420,8 @@ fn networkShareServersEql(ns1: []const u8, ns2: []const u8) bool {
     const sep1 = ns1[0];
     const sep2 = ns2[0];
 
-    var it1 = mem.tokenize(u8, ns1, &[_]u8{sep1});
-    var it2 = mem.tokenize(u8, ns2, &[_]u8{sep2});
+    var it1 = mem.tokenizeScalar(u8, ns1, sep1);
+    var it2 = mem.tokenizeScalar(u8, ns2, sep2);
 
     // TODO ASCII is wrong, we actually need full unicode support to compare paths.
     return ascii.eqlIgnoreCase(it1.next().?, it2.next().?);
@@ -441,8 +441,8 @@ fn compareDiskDesignators(kind: WindowsPath.Kind, p1: []const u8, p2: []const u8
     const sep1 = p1[0];
     const sep2 = p2[0];
 
-    var it1 = mem.tokenize(u8, p1, &[_]u8{sep1});
-    var it2 = mem.tokenize(u8, p2, &[_]u8{sep2});
+    var it1 = mem.tokenizeScalar(u8, p1, sep1);
+    var it2 = mem.tokenizeScalar(u8, p2, sep2);
 
     // TODO ASCII is wrong, we actually need full unicode support to compare paths.
     return ascii.eqlIgnoreCase(it1.next().?, it2.next().?) and ascii.eqlIgnoreCase(it1.next().?, it2.next().?);
@@ -535,7 +535,7 @@ pub fn resolveWindows(allocator: Allocator, paths: []const []const u8) ![]u8 {
             break :l disk_designator.len;
         },
         .NetworkShare => {
-            var it = mem.tokenize(u8, paths[first_index], "/\\");
+            var it = mem.tokenizeAny(u8, paths[first_index], "/\\");
            const server_name = it.next().?;
            const other_name = it.next().?;
 
@@ -570,7 +570,7 @@ pub fn resolveWindows(allocator: Allocator, paths: []const []const u8) ![]u8 {
        if (!correct_disk_designator) {
            continue;
        }
-       var it = mem.tokenize(u8, p[parsed.disk_designator.len..], "/\\");
+       var it = mem.tokenizeAny(u8, p[parsed.disk_designator.len..], "/\\");
        while (it.next()) |component| {
            if (mem.eql(u8, component, ".")) {
                continue;
@@ -657,7 +657,7 @@ pub fn resolvePosix(allocator: Allocator, paths: []const []const u8) Allocator.E
            negative_count = 0;
            result.clearRetainingCapacity();
        }
-       var it = mem.tokenize(u8, p, "/");
+       var it = mem.tokenizeScalar(u8, p, '/');
        while (it.next()) |component| {
            if (mem.eql(u8, component, ".")) {
                continue;
@@ -1078,8 +1078,8 @@ pub fn relativeWindows(allocator: Allocator, from: []const u8, to: []const u8) !
        return resolved_to;
    }
 
-   var from_it = mem.tokenize(u8, resolved_from, "/\\");
-   var to_it = mem.tokenize(u8, resolved_to, "/\\");
+   var from_it = mem.tokenizeAny(u8, resolved_from, "/\\");
+   var to_it = mem.tokenizeAny(u8, resolved_to, "/\\");
    while (true) {
        const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest());
        const to_rest = to_it.rest();
@@ -1102,7 +1102,7 @@ pub fn relativeWindows(allocator: Allocator, from: []const u8, to: []const u8) !
        result_index += 3;
    }
 
-   var rest_it = mem.tokenize(u8, to_rest, "/\\");
+   var rest_it = mem.tokenizeAny(u8, to_rest, "/\\");
    while (rest_it.next()) |to_component| {
        result[result_index] = '\\';
        result_index += 1;
@@ -1124,8 +1124,8 @@ pub fn relativePosix(allocator: Allocator, from: []const u8, to: []const u8) ![]
    const resolved_to = try resolvePosix(allocator, &[_][]const u8{ cwd, to });
    defer allocator.free(resolved_to);
 
-   var from_it = mem.tokenize(u8, resolved_from, "/");
-   var to_it = mem.tokenize(u8, resolved_to, "/");
+   var from_it = mem.tokenizeScalar(u8, resolved_from, '/');
+   var to_it = mem.tokenizeScalar(u8, resolved_to, '/');
    while (true) {
        const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest());
        const to_rest = to_it.rest();

lib/std/http/Client.zig

Lines changed: 2 additions & 2 deletions
@@ -366,7 +366,7 @@ pub const Response = struct {
    };
 
    pub fn parse(res: *Response, bytes: []const u8, trailing: bool) ParseError!void {
-       var it = mem.tokenize(u8, bytes[0 .. bytes.len - 4], "\r\n");
+       var it = mem.tokenizeAny(u8, bytes[0 .. bytes.len - 4], "\r\n");
 
        const first_line = it.next() orelse return error.HttpHeadersInvalid;
        if (first_line.len < 12)
@@ -392,7 +392,7 @@ pub const Response = struct {
        else => {},
    }
 
-   var line_it = mem.tokenize(u8, line, ": ");
+   var line_it = mem.tokenizeAny(u8, line, ": ");
    const header_name = line_it.next() orelse return error.HttpHeadersInvalid;
    const header_value = line_it.rest();
 