Update all std.mem.tokenize calls to their appropriate function
Everywhere that can now use `tokenizeScalar` should get a nice little performance boost.
squeek502 committed May 5, 2023
1 parent 47dd94f commit 41a99fd
Showing 24 changed files with 79 additions and 79 deletions.
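
The rule applied throughout the diff: call sites whose delimiter "set" was really a single byte (`";"`, `":"`, `"/"`, `" "`) switch to `tokenizeScalar` and pass the delimiter as a scalar (`';'`, `':'`, `'/'`, `' '`), while call sites that genuinely split on any of several delimiters (`"\r\n"`, `" \t"`, `"/\\"`) switch to `tokenizeAny`, which keeps the old `mem.tokenize` behavior. The sketch below illustrates the two iterators; it is a minimal example written against the renamed `std.mem` tokenizer API this commit targets (the inputs and test name are illustrative, and exact iterator details may differ between Zig versions).

```zig
const std = @import("std");
const mem = std.mem;

test "tokenizeScalar vs tokenizeAny" {
    // tokenizeScalar: a single delimiter passed as a scalar value, not a slice.
    // Runs of consecutive delimiters still never produce empty tokens.
    var paths = mem.tokenizeScalar(u8, "/usr/lib;;/usr/local/lib", ';');
    try std.testing.expectEqualStrings("/usr/lib", paths.next().?);
    try std.testing.expectEqualStrings("/usr/local/lib", paths.next().?);
    try std.testing.expect(paths.next() == null);

    // tokenizeAny: a set of delimiters passed as a slice; a token ends at any
    // element of the set. This matches what the old mem.tokenize did.
    var lines = mem.tokenizeAny(u8, "HTTP/1.1 200 OK\r\nContent-Length: 0\r\n", "\r\n");
    try std.testing.expectEqualStrings("HTTP/1.1 200 OK", lines.next().?);
    try std.testing.expectEqualStrings("Content-Length: 0", lines.next().?);
    try std.testing.expect(lines.next() == null);
}
```

The expected win for the `tokenizeScalar` sites is that the iterator compares each element against one value instead of checking membership in a delimiter slice, which is where the "nice little performance boost" mentioned above would come from.
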
8 changes: 4 additions & 4 deletions build.zig
@@ -284,7 +284,7 @@ pub fn build(b: *std.Build) !void {
// That means we also have to rely on stage1 compiled c++ files. We parse config.h to find
// the information passed on to us from cmake.
if (cfg.cmake_prefix_path.len > 0) {
-var it = mem.tokenize(u8, cfg.cmake_prefix_path, ";");
+var it = mem.tokenizeScalar(u8, cfg.cmake_prefix_path, ';');
while (it.next()) |path| {
b.addSearchPrefix(path);
}
@@ -687,7 +687,7 @@ fn addCxxKnownPath(
if (!std.process.can_spawn)
return error.RequiredLibraryNotFound;
const path_padded = b.exec(&.{ ctx.cxx_compiler, b.fmt("-print-file-name={s}", .{objname}) });
-var tokenizer = mem.tokenize(u8, path_padded, "\r\n");
+var tokenizer = mem.tokenizeAny(u8, path_padded, "\r\n");
const path_unpadded = tokenizer.next().?;
if (mem.eql(u8, path_unpadded, objname)) {
if (errtxt) |msg| {
@@ -710,7 +710,7 @@ fn addCxxKnownPath(
}

fn addCMakeLibraryList(exe: *std.Build.Step.Compile, list: []const u8) void {
-var it = mem.tokenize(u8, list, ";");
+var it = mem.tokenizeScalar(u8, list, ';');
while (it.next()) |lib| {
if (mem.startsWith(u8, lib, "-l")) {
exe.linkSystemLibrary(lib["-l".len..]);
@@ -855,7 +855,7 @@ fn parseConfigH(b: *std.Build, config_h_text: []const u8) ?CMakeConfig {
// .prefix = ZIG_LLVM_LINK_MODE parsed manually below
};

-var lines_it = mem.tokenize(u8, config_h_text, "\r\n");
+var lines_it = mem.tokenizeAny(u8, config_h_text, "\r\n");
while (lines_it.next()) |line| {
inline for (mappings) |mapping| {
if (mem.startsWith(u8, line, mapping.prefix)) {
2 changes: 1 addition & 1 deletion lib/std/Build.zig
@@ -1358,7 +1358,7 @@ pub fn findProgram(self: *Build, names: []const []const u8, paths: []const []con
if (fs.path.isAbsolute(name)) {
return name;
}
-var it = mem.tokenize(u8, PATH, &[_]u8{fs.path.delimiter});
+var it = mem.tokenizeScalar(u8, PATH, fs.path.delimiter);
while (it.next()) |path| {
const full_path = self.pathJoin(&.{
path,
4 changes: 2 additions & 2 deletions lib/std/Build/Cache.zig
@@ -467,7 +467,7 @@ pub const Manifest = struct {

const input_file_count = self.files.items.len;
var any_file_changed = false;
-var line_iter = mem.tokenize(u8, file_contents, "\n");
+var line_iter = mem.tokenizeScalar(u8, file_contents, '\n');
var idx: usize = 0;
while (line_iter.next()) |line| {
defer idx += 1;
@@ -484,7 +484,7 @@ pub const Manifest = struct {
break :blk new;
};

-var iter = mem.tokenize(u8, line, " ");
+var iter = mem.tokenizeScalar(u8, line, ' ');
const size = iter.next() orelse return error.InvalidFormat;
const inode = iter.next() orelse return error.InvalidFormat;
const mtime_nsec_str = iter.next() orelse return error.InvalidFormat;
8 changes: 4 additions & 4 deletions lib/std/Build/Step/CheckObject.zig
@@ -103,8 +103,8 @@ const Action = struct {
assert(act.tag == .match or act.tag == .not_present);
const phrase = act.phrase.resolve(b, step);
var candidate_var: ?struct { name: []const u8, value: u64 } = null;
-var hay_it = mem.tokenize(u8, mem.trim(u8, haystack, " "), " ");
-var needle_it = mem.tokenize(u8, mem.trim(u8, phrase, " "), " ");
+var hay_it = mem.tokenizeScalar(u8, mem.trim(u8, haystack, " "), ' ');
+var needle_it = mem.tokenizeScalar(u8, mem.trim(u8, phrase, " "), ' ');

while (needle_it.next()) |needle_tok| {
const hay_tok = hay_it.next() orelse return false;
@@ -155,7 +155,7 @@ const Action = struct {
var op_stack = std.ArrayList(enum { add, sub, mod, mul }).init(gpa);
var values = std.ArrayList(u64).init(gpa);

-var it = mem.tokenize(u8, phrase, " ");
+var it = mem.tokenizeScalar(u8, phrase, ' ');
while (it.next()) |next| {
if (mem.eql(u8, next, "+")) {
try op_stack.append(.add);
@@ -365,7 +365,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
var vars = std.StringHashMap(u64).init(gpa);

for (self.checks.items) |chk| {
-var it = mem.tokenize(u8, output, "\r\n");
+var it = mem.tokenizeAny(u8, output, "\r\n");
for (chk.actions.items) |act| {
switch (act.tag) {
.match => {
6 changes: 3 additions & 3 deletions lib/std/Build/Step/Compile.zig
@@ -777,7 +777,7 @@ fn runPkgConfig(self: *Compile, lib_name: []const u8) ![]const []const u8 {
var zig_args = ArrayList([]const u8).init(b.allocator);
defer zig_args.deinit();

-var it = mem.tokenize(u8, stdout, " \r\n\t");
+var it = mem.tokenizeAny(u8, stdout, " \r\n\t");
while (it.next()) |tok| {
if (mem.eql(u8, tok, "-I")) {
const dir = it.next() orelse return error.PkgConfigInvalidOutput;
@@ -2017,10 +2017,10 @@ fn execPkgConfigList(self: *std.Build, out_code: *u8) (PkgConfigError || ExecErr
const stdout = try self.execAllowFail(&[_][]const u8{ "pkg-config", "--list-all" }, out_code, .Ignore);
var list = ArrayList(PkgConfigPkg).init(self.allocator);
errdefer list.deinit();
-var line_it = mem.tokenize(u8, stdout, "\r\n");
+var line_it = mem.tokenizeAny(u8, stdout, "\r\n");
while (line_it.next()) |line| {
if (mem.trim(u8, line, " \t").len == 0) continue;
-var tok_it = mem.tokenize(u8, line, " \t");
+var tok_it = mem.tokenizeAny(u8, line, " \t");
try list.append(PkgConfigPkg{
.name = tok_it.next() orelse return error.PkgConfigInvalidOutput,
.desc = tok_it.rest(),
4 changes: 2 additions & 2 deletions lib/std/Build/Step/ConfigHeader.zig
@@ -257,7 +257,7 @@ fn render_autoconf(
try output.appendSlice("\n");
continue;
}
-var it = std.mem.tokenize(u8, line[1..], " \t\r");
+var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
const undef = it.next().?;
if (!std.mem.eql(u8, undef, "undef")) {
try output.appendSlice(line);
@@ -304,7 +304,7 @@ fn render_cmake(
try output.appendSlice("\n");
continue;
}
-var it = std.mem.tokenize(u8, line[1..], " \t\r");
+var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
const cmakedefine = it.next().?;
if (!std.mem.eql(u8, cmakedefine, "cmakedefine")) {
try output.appendSlice(line);
4 changes: 2 additions & 2 deletions lib/std/child_process.zig
@@ -850,7 +850,7 @@ pub const ChildProcess = struct {
return original_err;
}

-var it = mem.tokenize(u16, PATH, &[_]u16{';'});
+var it = mem.tokenizeScalar(u16, PATH, ';');
while (it.next()) |search_path| {
dir_buf.clearRetainingCapacity();
try dir_buf.appendSlice(self.allocator, search_path);
@@ -1067,7 +1067,7 @@ fn windowsCreateProcessPathExt(
// Now we know that at least *a* file matching the wildcard exists, we can loop
// through PATHEXT in order and exec any that exist

-var ext_it = mem.tokenize(u16, pathext, &[_]u16{';'});
+var ext_it = mem.tokenizeScalar(u16, pathext, ';');
while (ext_it.next()) |ext| {
if (!windowsCreateProcessSupportsExtension(ext)) continue;

2 changes: 1 addition & 1 deletion lib/std/fs.zig
@@ -3022,7 +3022,7 @@ pub fn selfExePath(out_buffer: []u8) SelfExePathError![]u8 {
} else if (argv0.len != 0) {
// argv[0] is not empty (and not a path): search it inside PATH
const PATH = std.os.getenvZ("PATH") orelse return error.FileNotFound;
-var path_it = mem.tokenize(u8, PATH, &[_]u8{path.delimiter});
+var path_it = mem.tokenizeScalar(u8, PATH, path.delimiter);
while (path_it.next()) |a_path| {
var resolved_path_buf: [MAX_PATH_BYTES - 1:0]u8 = undefined;
const resolved_path = std.fmt.bufPrintZ(&resolved_path_buf, "{s}/{s}", .{
26 changes: 13 additions & 13 deletions lib/std/fs/path.zig
@@ -358,7 +358,7 @@ pub fn windowsParsePath(path: []const u8) WindowsPath {
return relative_path;
}

-var it = mem.tokenize(u8, path, &[_]u8{this_sep});
+var it = mem.tokenizeScalar(u8, path, this_sep);
_ = (it.next() orelse return relative_path);
_ = (it.next() orelse return relative_path);
return WindowsPath{
@@ -420,8 +420,8 @@ fn networkShareServersEql(ns1: []const u8, ns2: []const u8) bool {
const sep1 = ns1[0];
const sep2 = ns2[0];

-var it1 = mem.tokenize(u8, ns1, &[_]u8{sep1});
-var it2 = mem.tokenize(u8, ns2, &[_]u8{sep2});
+var it1 = mem.tokenizeScalar(u8, ns1, sep1);
+var it2 = mem.tokenizeScalar(u8, ns2, sep2);

// TODO ASCII is wrong, we actually need full unicode support to compare paths.
return ascii.eqlIgnoreCase(it1.next().?, it2.next().?);
@@ -441,8 +441,8 @@ fn compareDiskDesignators(kind: WindowsPath.Kind, p1: []const u8, p2: []const u8
const sep1 = p1[0];
const sep2 = p2[0];

-var it1 = mem.tokenize(u8, p1, &[_]u8{sep1});
-var it2 = mem.tokenize(u8, p2, &[_]u8{sep2});
+var it1 = mem.tokenizeScalar(u8, p1, sep1);
+var it2 = mem.tokenizeScalar(u8, p2, sep2);

// TODO ASCII is wrong, we actually need full unicode support to compare paths.
return ascii.eqlIgnoreCase(it1.next().?, it2.next().?) and ascii.eqlIgnoreCase(it1.next().?, it2.next().?);
@@ -535,7 +535,7 @@ pub fn resolveWindows(allocator: Allocator, paths: []const []const u8) ![]u8 {
break :l disk_designator.len;
},
.NetworkShare => {
-var it = mem.tokenize(u8, paths[first_index], "/\\");
+var it = mem.tokenizeAny(u8, paths[first_index], "/\\");
const server_name = it.next().?;
const other_name = it.next().?;

@@ -570,7 +570,7 @@ pub fn resolveWindows(allocator: Allocator, paths: []const []const u8) ![]u8 {
if (!correct_disk_designator) {
continue;
}
-var it = mem.tokenize(u8, p[parsed.disk_designator.len..], "/\\");
+var it = mem.tokenizeAny(u8, p[parsed.disk_designator.len..], "/\\");
while (it.next()) |component| {
if (mem.eql(u8, component, ".")) {
continue;
@@ -657,7 +657,7 @@ pub fn resolvePosix(allocator: Allocator, paths: []const []const u8) Allocator.E
negative_count = 0;
result.clearRetainingCapacity();
}
-var it = mem.tokenize(u8, p, "/");
+var it = mem.tokenizeScalar(u8, p, '/');
while (it.next()) |component| {
if (mem.eql(u8, component, ".")) {
continue;
@@ -1078,8 +1078,8 @@ pub fn relativeWindows(allocator: Allocator, from: []const u8, to: []const u8) !
return resolved_to;
}

-var from_it = mem.tokenize(u8, resolved_from, "/\\");
-var to_it = mem.tokenize(u8, resolved_to, "/\\");
+var from_it = mem.tokenizeAny(u8, resolved_from, "/\\");
+var to_it = mem.tokenizeAny(u8, resolved_to, "/\\");
while (true) {
const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest());
const to_rest = to_it.rest();
@@ -1102,7 +1102,7 @@ pub fn relativeWindows(allocator: Allocator, from: []const u8, to: []const u8) !
result_index += 3;
}

-var rest_it = mem.tokenize(u8, to_rest, "/\\");
+var rest_it = mem.tokenizeAny(u8, to_rest, "/\\");
while (rest_it.next()) |to_component| {
result[result_index] = '\\';
result_index += 1;
@@ -1124,8 +1124,8 @@ pub fn relativePosix(allocator: Allocator, from: []const u8, to: []const u8) ![]
const resolved_to = try resolvePosix(allocator, &[_][]const u8{ cwd, to });
defer allocator.free(resolved_to);

-var from_it = mem.tokenize(u8, resolved_from, "/");
-var to_it = mem.tokenize(u8, resolved_to, "/");
+var from_it = mem.tokenizeScalar(u8, resolved_from, '/');
+var to_it = mem.tokenizeScalar(u8, resolved_to, '/');
while (true) {
const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest());
const to_rest = to_it.rest();
4 changes: 2 additions & 2 deletions lib/std/http/Client.zig
@@ -366,7 +366,7 @@ pub const Response = struct {
};

pub fn parse(res: *Response, bytes: []const u8, trailing: bool) ParseError!void {
-var it = mem.tokenize(u8, bytes[0 .. bytes.len - 4], "\r\n");
+var it = mem.tokenizeAny(u8, bytes[0 .. bytes.len - 4], "\r\n");

const first_line = it.next() orelse return error.HttpHeadersInvalid;
if (first_line.len < 12)
@@ -392,7 +392,7 @@ pub const Response = struct {
else => {},
}

-var line_it = mem.tokenize(u8, line, ": ");
+var line_it = mem.tokenizeAny(u8, line, ": ");
const header_name = line_it.next() orelse return error.HttpHeadersInvalid;
const header_value = line_it.rest();

4 changes: 2 additions & 2 deletions lib/std/http/Server.zig
@@ -211,7 +211,7 @@ pub const Request = struct {
};

pub fn parse(req: *Request, bytes: []const u8) ParseError!void {
-var it = mem.tokenize(u8, bytes[0 .. bytes.len - 4], "\r\n");
+var it = mem.tokenizeAny(u8, bytes[0 .. bytes.len - 4], "\r\n");

const first_line = it.next() orelse return error.HttpHeadersInvalid;
if (first_line.len < 10)
@@ -245,7 +245,7 @@ pub const Request = struct {
else => {},
}

-var line_it = mem.tokenize(u8, line, ": ");
+var line_it = mem.tokenizeAny(u8, line, ": ");
const header_name = line_it.next() orelse return error.HttpHeadersInvalid;
const header_value = line_it.rest();

6 changes: 3 additions & 3 deletions lib/std/net.zig
@@ -1266,7 +1266,7 @@ fn linuxLookupNameFromHosts(
var split_it = mem.split(u8, line, "#");
const no_comment_line = split_it.first();

-var line_it = mem.tokenize(u8, no_comment_line, " \t");
+var line_it = mem.tokenizeAny(u8, no_comment_line, " \t");
const ip_text = line_it.next() orelse continue;
var first_name_text: ?[]const u8 = null;
while (line_it.next()) |name_text| {
@@ -1346,7 +1346,7 @@ fn linuxLookupNameFromDnsSearch(
@memcpy(canon.items, canon_name);
try canon.append('.');

-var tok_it = mem.tokenize(u8, search, " \t");
+var tok_it = mem.tokenizeAny(u8, search, " \t");
while (tok_it.next()) |tok| {
canon.shrinkRetainingCapacity(canon_name.len + 1);
try canon.appendSlice(tok);
@@ -1468,7 +1468,7 @@ fn getResolvConf(allocator: mem.Allocator, rc: *ResolvConf) !void {
var split = mem.split(u8, line, "#");
break :no_comment_line split.first();
};
-var line_it = mem.tokenize(u8, no_comment_line, " \t");
+var line_it = mem.tokenizeAny(u8, no_comment_line, " \t");

const token = line_it.next() orelse continue;
if (mem.eql(u8, token, "options")) {
2 changes: 1 addition & 1 deletion lib/std/os.zig
@@ -1867,7 +1867,7 @@ pub fn execvpeZ_expandArg0(
// Use of MAX_PATH_BYTES here is valid as the path_buf will be passed
// directly to the operating system in execveZ.
var path_buf: [MAX_PATH_BYTES]u8 = undefined;
-var it = mem.tokenize(u8, PATH, ":");
+var it = mem.tokenizeScalar(u8, PATH, ':');
var seen_eacces = false;
var err: ExecveError = error.FileNotFound;

2 changes: 1 addition & 1 deletion lib/std/process.zig
@@ -1200,7 +1200,7 @@ fn totalSystemMemoryLinux() !usize {
var buf: [50]u8 = undefined;
const amt = try file.read(&buf);
if (amt != 50) return error.Unexpected;
-var it = std.mem.tokenize(u8, buf[0..amt], " \n");
+var it = std.mem.tokenizeAny(u8, buf[0..amt], " \n");
const label = it.next().?;
if (!std.mem.eql(u8, label, "MemTotal:")) return error.Unexpected;
const int_text = it.next() orelse return error.Unexpected;
10 changes: 5 additions & 5 deletions lib/std/zig/system/NativePaths.zig
@@ -31,7 +31,7 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths
defer allocator.free(nix_cflags_compile);

is_nix = true;
-var it = mem.tokenize(u8, nix_cflags_compile, " ");
+var it = mem.tokenizeScalar(u8, nix_cflags_compile, ' ');
while (true) {
const word = it.next() orelse break;
if (mem.eql(u8, word, "-isystem")) {
@@ -62,7 +62,7 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths
defer allocator.free(nix_ldflags);

is_nix = true;
-var it = mem.tokenize(u8, nix_ldflags, " ");
+var it = mem.tokenizeScalar(u8, nix_ldflags, ' ');
while (true) {
const word = it.next() orelse break;
if (mem.eql(u8, word, "-rpath")) {
@@ -147,21 +147,21 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths
// We use os.getenv here since this part won't be executed on
// windows, to get rid of unnecessary error handling.
if (std.os.getenv("C_INCLUDE_PATH")) |c_include_path| {
-var it = mem.tokenize(u8, c_include_path, ":");
+var it = mem.tokenizeScalar(u8, c_include_path, ':');
while (it.next()) |dir| {
try self.addIncludeDir(dir);
}
}

if (std.os.getenv("CPLUS_INCLUDE_PATH")) |cplus_include_path| {
-var it = mem.tokenize(u8, cplus_include_path, ":");
+var it = mem.tokenizeScalar(u8, cplus_include_path, ':');
while (it.next()) |dir| {
try self.addIncludeDir(dir);
}
}

if (std.os.getenv("LIBRARY_PATH")) |library_path| {
-var it = mem.tokenize(u8, library_path, ":");
+var it = mem.tokenizeScalar(u8, library_path, ':');
while (it.next()) |dir| {
try self.addLibDir(dir);
}
4 changes: 2 additions & 2 deletions lib/std/zig/system/NativeTargetInfo.zig
@@ -354,7 +354,7 @@ fn detectAbiAndDynamicLinker(
const newline = mem.indexOfScalar(u8, buffer[0..len], '\n') orelse break :blk file;
const line = buffer[0..newline];
if (!mem.startsWith(u8, line, "#!")) break :blk file;
-var it = mem.tokenize(u8, line[2..], " ");
+var it = mem.tokenizeScalar(u8, line[2..], ' ');
file_name = it.next() orelse return defaultAbiAndDynamicLinker(cpu, os, cross_target);
file.close();
}
@@ -811,7 +811,7 @@ pub fn abiAndDynamicLinkerFromFile(
const strtab = strtab_buf[0..strtab_read_len];

const rpath_list = mem.sliceTo(strtab, 0);
-var it = mem.tokenize(u8, rpath_list, ":");
+var it = mem.tokenizeScalar(u8, rpath_list, ':');
while (it.next()) |rpath| {
if (glibcVerFromRPath(rpath)) |ver| {
result.target.os.version_range.linux.glibc = ver;
