Commit 815e53b147

Ryan Liptak <squeek502@hotmail.com>
2023-05-05 03:05:40
Update all std.mem.tokenize calls to tokenizeScalar or tokenizeAny as appropriate
Every call site that can now use `tokenizeScalar` should get a nice little performance boost, since the scalar variant avoids the per-byte any-of delimiter scan.
1 parent ce9f3ec
lib/std/Build/Step/CheckObject.zig
@@ -103,8 +103,8 @@ const Action = struct {
         assert(act.tag == .match or act.tag == .not_present);
         const phrase = act.phrase.resolve(b, step);
         var candidate_var: ?struct { name: []const u8, value: u64 } = null;
-        var hay_it = mem.tokenize(u8, mem.trim(u8, haystack, " "), " ");
-        var needle_it = mem.tokenize(u8, mem.trim(u8, phrase, " "), " ");
+        var hay_it = mem.tokenizeScalar(u8, mem.trim(u8, haystack, " "), ' ');
+        var needle_it = mem.tokenizeScalar(u8, mem.trim(u8, phrase, " "), ' ');
 
         while (needle_it.next()) |needle_tok| {
             const hay_tok = hay_it.next() orelse return false;
@@ -155,7 +155,7 @@ const Action = struct {
         var op_stack = std.ArrayList(enum { add, sub, mod, mul }).init(gpa);
         var values = std.ArrayList(u64).init(gpa);
 
-        var it = mem.tokenize(u8, phrase, " ");
+        var it = mem.tokenizeScalar(u8, phrase, ' ');
         while (it.next()) |next| {
             if (mem.eql(u8, next, "+")) {
                 try op_stack.append(.add);
@@ -365,7 +365,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
     var vars = std.StringHashMap(u64).init(gpa);
 
     for (self.checks.items) |chk| {
-        var it = mem.tokenize(u8, output, "\r\n");
+        var it = mem.tokenizeAny(u8, output, "\r\n");
         for (chk.actions.items) |act| {
             switch (act.tag) {
                 .match => {
lib/std/Build/Step/Compile.zig
@@ -777,7 +777,7 @@ fn runPkgConfig(self: *Compile, lib_name: []const u8) ![]const []const u8 {
     var zig_args = ArrayList([]const u8).init(b.allocator);
     defer zig_args.deinit();
 
-    var it = mem.tokenize(u8, stdout, " \r\n\t");
+    var it = mem.tokenizeAny(u8, stdout, " \r\n\t");
     while (it.next()) |tok| {
         if (mem.eql(u8, tok, "-I")) {
             const dir = it.next() orelse return error.PkgConfigInvalidOutput;
@@ -2017,10 +2017,10 @@ fn execPkgConfigList(self: *std.Build, out_code: *u8) (PkgConfigError || ExecErr
     const stdout = try self.execAllowFail(&[_][]const u8{ "pkg-config", "--list-all" }, out_code, .Ignore);
     var list = ArrayList(PkgConfigPkg).init(self.allocator);
     errdefer list.deinit();
-    var line_it = mem.tokenize(u8, stdout, "\r\n");
+    var line_it = mem.tokenizeAny(u8, stdout, "\r\n");
     while (line_it.next()) |line| {
         if (mem.trim(u8, line, " \t").len == 0) continue;
-        var tok_it = mem.tokenize(u8, line, " \t");
+        var tok_it = mem.tokenizeAny(u8, line, " \t");
         try list.append(PkgConfigPkg{
             .name = tok_it.next() orelse return error.PkgConfigInvalidOutput,
             .desc = tok_it.rest(),
lib/std/Build/Step/ConfigHeader.zig
@@ -257,7 +257,7 @@ fn render_autoconf(
             try output.appendSlice("\n");
             continue;
         }
-        var it = std.mem.tokenize(u8, line[1..], " \t\r");
+        var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
         const undef = it.next().?;
         if (!std.mem.eql(u8, undef, "undef")) {
             try output.appendSlice(line);
@@ -304,7 +304,7 @@ fn render_cmake(
             try output.appendSlice("\n");
             continue;
         }
-        var it = std.mem.tokenize(u8, line[1..], " \t\r");
+        var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
         const cmakedefine = it.next().?;
         if (!std.mem.eql(u8, cmakedefine, "cmakedefine") and
             !std.mem.eql(u8, cmakedefine, "cmakedefine01"))
lib/std/Build/Cache.zig
@@ -434,7 +434,7 @@ pub const Manifest = struct {
 
             const input_file_count = self.files.items.len;
             var any_file_changed = false;
-            var line_iter = mem.tokenize(u8, file_contents, "\n");
+            var line_iter = mem.tokenizeScalar(u8, file_contents, '\n');
             var idx: usize = 0;
             if (if (line_iter.next()) |line| !std.mem.eql(u8, line, manifest_header) else true) {
                 if (try self.upgradeToExclusiveLock()) continue;
@@ -463,7 +463,7 @@ pub const Manifest = struct {
                     break :blk new;
                 };
 
-                var iter = mem.tokenize(u8, line, " ");
+                var iter = mem.tokenizeScalar(u8, line, ' ');
                 const size = iter.next() orelse return error.InvalidFormat;
                 const inode = iter.next() orelse return error.InvalidFormat;
                 const mtime_nsec_str = iter.next() orelse return error.InvalidFormat;
lib/std/fs/path.zig
@@ -358,7 +358,7 @@ pub fn windowsParsePath(path: []const u8) WindowsPath {
                 return relative_path;
             }
 
-            var it = mem.tokenize(u8, path, &[_]u8{this_sep});
+            var it = mem.tokenizeScalar(u8, path, this_sep);
             _ = (it.next() orelse return relative_path);
             _ = (it.next() orelse return relative_path);
             return WindowsPath{
@@ -420,8 +420,8 @@ fn networkShareServersEql(ns1: []const u8, ns2: []const u8) bool {
     const sep1 = ns1[0];
     const sep2 = ns2[0];
 
-    var it1 = mem.tokenize(u8, ns1, &[_]u8{sep1});
-    var it2 = mem.tokenize(u8, ns2, &[_]u8{sep2});
+    var it1 = mem.tokenizeScalar(u8, ns1, sep1);
+    var it2 = mem.tokenizeScalar(u8, ns2, sep2);
 
     // TODO ASCII is wrong, we actually need full unicode support to compare paths.
     return ascii.eqlIgnoreCase(it1.next().?, it2.next().?);
@@ -441,8 +441,8 @@ fn compareDiskDesignators(kind: WindowsPath.Kind, p1: []const u8, p2: []const u8
             const sep1 = p1[0];
             const sep2 = p2[0];
 
-            var it1 = mem.tokenize(u8, p1, &[_]u8{sep1});
-            var it2 = mem.tokenize(u8, p2, &[_]u8{sep2});
+            var it1 = mem.tokenizeScalar(u8, p1, sep1);
+            var it2 = mem.tokenizeScalar(u8, p2, sep2);
 
             // TODO ASCII is wrong, we actually need full unicode support to compare paths.
             return ascii.eqlIgnoreCase(it1.next().?, it2.next().?) and ascii.eqlIgnoreCase(it1.next().?, it2.next().?);
@@ -535,7 +535,7 @@ pub fn resolveWindows(allocator: Allocator, paths: []const []const u8) ![]u8 {
                 break :l disk_designator.len;
             },
             .NetworkShare => {
-                var it = mem.tokenize(u8, paths[first_index], "/\\");
+                var it = mem.tokenizeAny(u8, paths[first_index], "/\\");
                 const server_name = it.next().?;
                 const other_name = it.next().?;
 
@@ -570,7 +570,7 @@ pub fn resolveWindows(allocator: Allocator, paths: []const []const u8) ![]u8 {
         if (!correct_disk_designator) {
             continue;
         }
-        var it = mem.tokenize(u8, p[parsed.disk_designator.len..], "/\\");
+        var it = mem.tokenizeAny(u8, p[parsed.disk_designator.len..], "/\\");
         while (it.next()) |component| {
             if (mem.eql(u8, component, ".")) {
                 continue;
@@ -657,7 +657,7 @@ pub fn resolvePosix(allocator: Allocator, paths: []const []const u8) Allocator.E
             negative_count = 0;
             result.clearRetainingCapacity();
         }
-        var it = mem.tokenize(u8, p, "/");
+        var it = mem.tokenizeScalar(u8, p, '/');
         while (it.next()) |component| {
             if (mem.eql(u8, component, ".")) {
                 continue;
@@ -1078,8 +1078,8 @@ pub fn relativeWindows(allocator: Allocator, from: []const u8, to: []const u8) !
         return resolved_to;
     }
 
-    var from_it = mem.tokenize(u8, resolved_from, "/\\");
-    var to_it = mem.tokenize(u8, resolved_to, "/\\");
+    var from_it = mem.tokenizeAny(u8, resolved_from, "/\\");
+    var to_it = mem.tokenizeAny(u8, resolved_to, "/\\");
     while (true) {
         const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest());
         const to_rest = to_it.rest();
@@ -1102,7 +1102,7 @@ pub fn relativeWindows(allocator: Allocator, from: []const u8, to: []const u8) !
             result_index += 3;
         }
 
-        var rest_it = mem.tokenize(u8, to_rest, "/\\");
+        var rest_it = mem.tokenizeAny(u8, to_rest, "/\\");
         while (rest_it.next()) |to_component| {
             result[result_index] = '\\';
             result_index += 1;
@@ -1124,8 +1124,8 @@ pub fn relativePosix(allocator: Allocator, from: []const u8, to: []const u8) ![]
     const resolved_to = try resolvePosix(allocator, &[_][]const u8{ cwd, to });
     defer allocator.free(resolved_to);
 
-    var from_it = mem.tokenize(u8, resolved_from, "/");
-    var to_it = mem.tokenize(u8, resolved_to, "/");
+    var from_it = mem.tokenizeScalar(u8, resolved_from, '/');
+    var to_it = mem.tokenizeScalar(u8, resolved_to, '/');
     while (true) {
         const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest());
         const to_rest = to_it.rest();
lib/std/http/Client.zig
@@ -386,7 +386,7 @@ pub const Response = struct {
     };
 
     pub fn parse(res: *Response, bytes: []const u8, trailing: bool) ParseError!void {
-        var it = mem.tokenize(u8, bytes[0 .. bytes.len - 4], "\r\n");
+        var it = mem.tokenizeAny(u8, bytes[0 .. bytes.len - 4], "\r\n");
 
         const first_line = it.next() orelse return error.HttpHeadersInvalid;
         if (first_line.len < 12)
@@ -412,7 +412,7 @@ pub const Response = struct {
                 else => {},
             }
 
-            var line_it = mem.tokenize(u8, line, ": ");
+            var line_it = mem.tokenizeAny(u8, line, ": ");
             const header_name = line_it.next() orelse return error.HttpHeadersInvalid;
             const header_value = line_it.rest();
 
lib/std/http/Server.zig
@@ -231,7 +231,7 @@ pub const Request = struct {
     };
 
     pub fn parse(req: *Request, bytes: []const u8) ParseError!void {
-        var it = mem.tokenize(u8, bytes[0 .. bytes.len - 4], "\r\n");
+        var it = mem.tokenizeAny(u8, bytes[0 .. bytes.len - 4], "\r\n");
 
         const first_line = it.next() orelse return error.HttpHeadersInvalid;
         if (first_line.len < 10)
@@ -265,7 +265,7 @@ pub const Request = struct {
                 else => {},
             }
 
-            var line_it = mem.tokenize(u8, line, ": ");
+            var line_it = mem.tokenizeAny(u8, line, ": ");
             const header_name = line_it.next() orelse return error.HttpHeadersInvalid;
             const header_value = line_it.rest();
 
lib/std/zig/system/NativePaths.zig
@@ -31,7 +31,7 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths
         defer allocator.free(nix_cflags_compile);
 
         is_nix = true;
-        var it = mem.tokenize(u8, nix_cflags_compile, " ");
+        var it = mem.tokenizeScalar(u8, nix_cflags_compile, ' ');
         while (true) {
             const word = it.next() orelse break;
             if (mem.eql(u8, word, "-isystem")) {
@@ -62,7 +62,7 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths
         defer allocator.free(nix_ldflags);
 
         is_nix = true;
-        var it = mem.tokenize(u8, nix_ldflags, " ");
+        var it = mem.tokenizeScalar(u8, nix_ldflags, ' ');
         while (true) {
             const word = it.next() orelse break;
             if (mem.eql(u8, word, "-rpath")) {
@@ -147,21 +147,21 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths
         // We use os.getenv here since this part won't be executed on
         // windows, to get rid of unnecessary error handling.
         if (std.os.getenv("C_INCLUDE_PATH")) |c_include_path| {
-            var it = mem.tokenize(u8, c_include_path, ":");
+            var it = mem.tokenizeScalar(u8, c_include_path, ':');
             while (it.next()) |dir| {
                 try self.addIncludeDir(dir);
             }
         }
 
         if (std.os.getenv("CPLUS_INCLUDE_PATH")) |cplus_include_path| {
-            var it = mem.tokenize(u8, cplus_include_path, ":");
+            var it = mem.tokenizeScalar(u8, cplus_include_path, ':');
             while (it.next()) |dir| {
                 try self.addIncludeDir(dir);
             }
         }
 
         if (std.os.getenv("LIBRARY_PATH")) |library_path| {
-            var it = mem.tokenize(u8, library_path, ":");
+            var it = mem.tokenizeScalar(u8, library_path, ':');
             while (it.next()) |dir| {
                 try self.addLibDir(dir);
             }
lib/std/zig/system/NativeTargetInfo.zig
@@ -354,7 +354,7 @@ fn detectAbiAndDynamicLinker(
             const newline = mem.indexOfScalar(u8, buffer[0..len], '\n') orelse break :blk file;
             const line = buffer[0..newline];
             if (!mem.startsWith(u8, line, "#!")) break :blk file;
-            var it = mem.tokenize(u8, line[2..], " ");
+            var it = mem.tokenizeScalar(u8, line[2..], ' ');
             file_name = it.next() orelse return defaultAbiAndDynamicLinker(cpu, os, cross_target);
             file.close();
         }
@@ -811,7 +811,7 @@ pub fn abiAndDynamicLinkerFromFile(
                 const strtab = strtab_buf[0..strtab_read_len];
 
                 const rpath_list = mem.sliceTo(strtab, 0);
-                var it = mem.tokenize(u8, rpath_list, ":");
+                var it = mem.tokenizeScalar(u8, rpath_list, ':');
                 while (it.next()) |rpath| {
                     if (glibcVerFromRPath(rpath)) |ver| {
                         result.target.os.version_range.linux.glibc = ver;
lib/std/Build.zig
@@ -1358,7 +1358,7 @@ pub fn findProgram(self: *Build, names: []const []const u8, paths: []const []con
             if (fs.path.isAbsolute(name)) {
                 return name;
             }
-            var it = mem.tokenize(u8, PATH, &[_]u8{fs.path.delimiter});
+            var it = mem.tokenizeScalar(u8, PATH, fs.path.delimiter);
             while (it.next()) |path| {
                 const full_path = self.pathJoin(&.{
                     path,
lib/std/child_process.zig
@@ -850,7 +850,7 @@ pub const ChildProcess = struct {
                     return original_err;
                 }
 
-                var it = mem.tokenize(u16, PATH, &[_]u16{';'});
+                var it = mem.tokenizeScalar(u16, PATH, ';');
                 while (it.next()) |search_path| {
                     dir_buf.clearRetainingCapacity();
                     try dir_buf.appendSlice(self.allocator, search_path);
@@ -1067,7 +1067,7 @@ fn windowsCreateProcessPathExt(
     // Now we know that at least *a* file matching the wildcard exists, we can loop
     // through PATHEXT in order and exec any that exist
 
-    var ext_it = mem.tokenize(u16, pathext, &[_]u16{';'});
+    var ext_it = mem.tokenizeScalar(u16, pathext, ';');
     while (ext_it.next()) |ext| {
         if (!windowsCreateProcessSupportsExtension(ext)) continue;
 
lib/std/fs.zig
@@ -3021,7 +3021,7 @@ pub fn selfExePath(out_buffer: []u8) SelfExePathError![]u8 {
             } else if (argv0.len != 0) {
                 // argv[0] is not empty (and not a path): search it inside PATH
                 const PATH = std.os.getenvZ("PATH") orelse return error.FileNotFound;
-                var path_it = mem.tokenize(u8, PATH, &[_]u8{path.delimiter});
+                var path_it = mem.tokenizeScalar(u8, PATH, path.delimiter);
                 while (path_it.next()) |a_path| {
                     var resolved_path_buf: [MAX_PATH_BYTES - 1:0]u8 = undefined;
                     const resolved_path = std.fmt.bufPrintZ(&resolved_path_buf, "{s}/{s}", .{
lib/std/net.zig
@@ -1266,7 +1266,7 @@ fn linuxLookupNameFromHosts(
         var split_it = mem.split(u8, line, "#");
         const no_comment_line = split_it.first();
 
-        var line_it = mem.tokenize(u8, no_comment_line, " \t");
+        var line_it = mem.tokenizeAny(u8, no_comment_line, " \t");
         const ip_text = line_it.next() orelse continue;
         var first_name_text: ?[]const u8 = null;
         while (line_it.next()) |name_text| {
@@ -1346,7 +1346,7 @@ fn linuxLookupNameFromDnsSearch(
     @memcpy(canon.items, canon_name);
     try canon.append('.');
 
-    var tok_it = mem.tokenize(u8, search, " \t");
+    var tok_it = mem.tokenizeAny(u8, search, " \t");
     while (tok_it.next()) |tok| {
         canon.shrinkRetainingCapacity(canon_name.len + 1);
         try canon.appendSlice(tok);
@@ -1468,7 +1468,7 @@ fn getResolvConf(allocator: mem.Allocator, rc: *ResolvConf) !void {
             var split = mem.split(u8, line, "#");
             break :no_comment_line split.first();
         };
-        var line_it = mem.tokenize(u8, no_comment_line, " \t");
+        var line_it = mem.tokenizeAny(u8, no_comment_line, " \t");
 
         const token = line_it.next() orelse continue;
         if (mem.eql(u8, token, "options")) {
lib/std/os.zig
@@ -1878,7 +1878,7 @@ pub fn execvpeZ_expandArg0(
     // Use of MAX_PATH_BYTES here is valid as the path_buf will be passed
     // directly to the operating system in execveZ.
     var path_buf: [MAX_PATH_BYTES]u8 = undefined;
-    var it = mem.tokenize(u8, PATH, ":");
+    var it = mem.tokenizeScalar(u8, PATH, ':');
     var seen_eacces = false;
     var err: ExecveError = error.FileNotFound;
 
lib/std/process.zig
@@ -1200,7 +1200,7 @@ fn totalSystemMemoryLinux() !usize {
     var buf: [50]u8 = undefined;
     const amt = try file.read(&buf);
     if (amt != 50) return error.Unexpected;
-    var it = std.mem.tokenize(u8, buf[0..amt], " \n");
+    var it = std.mem.tokenizeAny(u8, buf[0..amt], " \n");
     const label = it.next().?;
     if (!std.mem.eql(u8, label, "MemTotal:")) return error.Unexpected;
     const int_text = it.next() orelse return error.Unexpected;
src/arch/x86_64/CodeGen.zig
@@ -8409,9 +8409,9 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void {
     }
 
     const asm_source = mem.sliceAsBytes(self.air.extra[extra_i..])[0..extra.data.source_len];
-    var line_it = mem.tokenize(u8, asm_source, "\n\r;");
+    var line_it = mem.tokenizeAny(u8, asm_source, "\n\r;");
     while (line_it.next()) |line| {
-        var mnem_it = mem.tokenize(u8, line, " \t");
+        var mnem_it = mem.tokenizeAny(u8, line, " \t");
         const mnem_str = mnem_it.next() orelse continue;
         if (mem.startsWith(u8, mnem_str, "#")) continue;
 
@@ -8435,7 +8435,7 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void {
                 return self.fail("Invalid mnemonic: '{s}'", .{mnem_str});
         } };
 
-        var op_it = mem.tokenize(u8, mnem_it.rest(), ",");
+        var op_it = mem.tokenizeScalar(u8, mnem_it.rest(), ',');
         var ops = [1]encoder.Instruction.Operand{.none} ** 4;
         for (&ops) |*op| {
             const op_str = mem.trim(u8, op_it.next() orelse break, " \t");
src/link/Plan9.zig
@@ -264,7 +264,7 @@ fn putFn(self: *Plan9, decl_index: Module.Decl.Index, out: FnDeclOutput) !void {
 
 fn addPathComponents(self: *Plan9, path: []const u8, a: *std.ArrayList(u8)) !void {
     const sep = std.fs.path.sep;
-    var it = std.mem.tokenize(u8, path, &.{sep});
+    var it = std.mem.tokenizeScalar(u8, path, sep);
     while (it.next()) |component| {
         if (self.file_segments.get(component)) |num| {
             try a.writer().writeIntBig(u16, num);
src/glibc.zig
@@ -109,7 +109,7 @@ pub fn loadMetaData(gpa: Allocator, contents: []const u8) LoadMetaDataError!*ABI
             const target_name = mem.sliceTo(contents[index..], 0);
             index += target_name.len + 1;
 
-            var component_it = mem.tokenize(u8, target_name, "-");
+            var component_it = mem.tokenizeScalar(u8, target_name, '-');
             const arch_name = component_it.next() orelse {
                 log.err("abilists: expected arch name", .{});
                 return error.ZigInstallationCorrupt;
src/libc_installation.zig
@@ -60,7 +60,7 @@ pub const LibCInstallation = struct {
         const contents = try std.fs.cwd().readFileAlloc(allocator, libc_file, std.math.maxInt(usize));
         defer allocator.free(contents);
 
-        var it = std.mem.tokenize(u8, contents, "\n");
+        var it = std.mem.tokenizeScalar(u8, contents, '\n');
         while (it.next()) |line| {
             if (line.len == 0 or line[0] == '#') continue;
             var line_it = std.mem.split(u8, line, "=");
@@ -293,7 +293,7 @@ pub const LibCInstallation = struct {
             },
         }
 
-        var it = std.mem.tokenize(u8, exec_res.stderr, "\n\r");
+        var it = std.mem.tokenizeAny(u8, exec_res.stderr, "\n\r");
         var search_paths = std.ArrayList([]const u8).init(allocator);
         defer search_paths.deinit();
         while (it.next()) |line| {
@@ -613,7 +613,7 @@ fn ccPrintFileName(args: CCPrintFileNameOptions) ![:0]u8 {
         },
     }
 
-    var it = std.mem.tokenize(u8, exec_res.stdout, "\n\r");
+    var it = std.mem.tokenizeAny(u8, exec_res.stdout, "\n\r");
     const line = it.next() orelse return error.LibCRuntimeNotFound;
     // When this command fails, it returns exit code 0 and duplicates the input file name.
     // So we detect failure by checking if the output matches exactly the input.
@@ -692,7 +692,7 @@ fn appendCcExe(args: *std.ArrayList([]const u8), skip_cc_env_var: bool) !void {
         return;
     };
     // Respect space-separated flags to the C compiler.
-    var it = std.mem.tokenize(u8, cc_env_var, " ");
+    var it = std.mem.tokenizeScalar(u8, cc_env_var, ' ');
     while (it.next()) |arg| {
         try args.append(arg);
     }
src/print_zir.zig
@@ -2581,7 +2581,7 @@ const Writer = struct {
     fn writeDocComment(self: *Writer, stream: anytype, doc_comment_index: u32) !void {
         if (doc_comment_index != 0) {
             const doc_comment = self.code.nullTerminatedString(doc_comment_index);
-            var it = std.mem.tokenize(u8, doc_comment, "\n");
+            var it = std.mem.tokenizeScalar(u8, doc_comment, '\n');
             while (it.next()) |doc_line| {
                 try stream.writeByteNTimes(' ', self.indent);
                 try stream.print("///{s}\n", .{doc_line});
test/behavior/bugs/6456.zig
@@ -18,7 +18,7 @@ test "issue 6456" {
     comptime {
         var fields: []const StructField = &[0]StructField{};
 
-        var it = std.mem.tokenize(u8, text, "\n");
+        var it = std.mem.tokenizeScalar(u8, text, '\n');
         while (it.next()) |name| {
             fields = fields ++ &[_]StructField{StructField{
                 .alignment = 0,
test/src/Cases.zig
@@ -846,7 +846,7 @@ const TestManifest = struct {
         const actual_start = start orelse return error.MissingTestManifest;
         const manifest_bytes = bytes[actual_start..end];
 
-        var it = std.mem.tokenize(u8, manifest_bytes, "\r\n");
+        var it = std.mem.tokenizeAny(u8, manifest_bytes, "\r\n");
 
         // First line is the test type
         const tt: Type = blk: {
@@ -923,7 +923,7 @@ const TestManifest = struct {
 
     fn trailing(self: TestManifest) TrailingIterator {
         return .{
-            .inner = std.mem.tokenize(u8, self.trailing_bytes, "\r\n"),
+            .inner = std.mem.tokenizeAny(u8, self.trailing_bytes, "\r\n"),
         };
     }
 
tools/generate_linux_syscalls.zig
@@ -51,11 +51,11 @@ pub fn main() !void {
         try writer.writeAll("pub const X86 = enum(usize) {\n");
 
         const table = try linux_dir.readFile("arch/x86/entry/syscalls/syscall_32.tbl", buf);
-        var lines = mem.tokenize(u8, table, "\n");
+        var lines = mem.tokenizeScalar(u8, table, '\n');
         while (lines.next()) |line| {
             if (line[0] == '#') continue;
 
-            var fields = mem.tokenize(u8, line, " \t");
+            var fields = mem.tokenizeAny(u8, line, " \t");
             const number = fields.next() orelse return error.Incomplete;
             // abi is always i386
             _ = fields.next() orelse return error.Incomplete;
@@ -70,11 +70,11 @@ pub fn main() !void {
         try writer.writeAll("pub const X64 = enum(usize) {\n");
 
         const table = try linux_dir.readFile("arch/x86/entry/syscalls/syscall_64.tbl", buf);
-        var lines = mem.tokenize(u8, table, "\n");
+        var lines = mem.tokenizeScalar(u8, table, '\n');
         while (lines.next()) |line| {
             if (line[0] == '#') continue;
 
-            var fields = mem.tokenize(u8, line, " \t");
+            var fields = mem.tokenizeAny(u8, line, " \t");
             const number = fields.next() orelse return error.Incomplete;
             const abi = fields.next() orelse return error.Incomplete;
             // The x32 abi syscalls are always at the end.
@@ -96,11 +96,11 @@ pub fn main() !void {
         );
 
         const table = try linux_dir.readFile("arch/arm/tools/syscall.tbl", buf);
-        var lines = mem.tokenize(u8, table, "\n");
+        var lines = mem.tokenizeScalar(u8, table, '\n');
         while (lines.next()) |line| {
             if (line[0] == '#') continue;
 
-            var fields = mem.tokenize(u8, line, " \t");
+            var fields = mem.tokenizeAny(u8, line, " \t");
             const number = fields.next() orelse return error.Incomplete;
             const abi = fields.next() orelse return error.Incomplete;
             if (mem.eql(u8, abi, "oabi")) continue;
@@ -127,11 +127,11 @@ pub fn main() !void {
     {
         try writer.writeAll("pub const Sparc64 = enum(usize) {\n");
         const table = try linux_dir.readFile("arch/sparc/kernel/syscalls/syscall.tbl", buf);
-        var lines = mem.tokenize(u8, table, "\n");
+        var lines = mem.tokenizeScalar(u8, table, '\n');
         while (lines.next()) |line| {
             if (line[0] == '#') continue;
 
-            var fields = mem.tokenize(u8, line, " \t");
+            var fields = mem.tokenizeAny(u8, line, " \t");
             const number = fields.next() orelse return error.Incomplete;
             const abi = fields.next() orelse return error.Incomplete;
             if (mem.eql(u8, abi, "32")) continue;
@@ -151,11 +151,11 @@ pub fn main() !void {
         );
 
         const table = try linux_dir.readFile("arch/mips/kernel/syscalls/syscall_o32.tbl", buf);
-        var lines = mem.tokenize(u8, table, "\n");
+        var lines = mem.tokenizeScalar(u8, table, '\n');
         while (lines.next()) |line| {
             if (line[0] == '#') continue;
 
-            var fields = mem.tokenize(u8, line, " \t");
+            var fields = mem.tokenizeAny(u8, line, " \t");
             const number = fields.next() orelse return error.Incomplete;
             // abi is always o32
             _ = fields.next() orelse return error.Incomplete;
@@ -176,11 +176,11 @@ pub fn main() !void {
         );
 
         const table = try linux_dir.readFile("arch/mips/kernel/syscalls/syscall_n64.tbl", buf);
-        var lines = mem.tokenize(u8, table, "\n");
+        var lines = mem.tokenizeScalar(u8, table, '\n');
         while (lines.next()) |line| {
             if (line[0] == '#') continue;
 
-            var fields = mem.tokenize(u8, line, " \t");
+            var fields = mem.tokenizeAny(u8, line, " \t");
             const number = fields.next() orelse return error.Incomplete;
             // abi is always n64
             _ = fields.next() orelse return error.Incomplete;
@@ -197,11 +197,11 @@ pub fn main() !void {
 
         const table = try linux_dir.readFile("arch/powerpc/kernel/syscalls/syscall.tbl", buf);
         var list_64 = std.ArrayList(u8).init(allocator);
-        var lines = mem.tokenize(u8, table, "\n");
+        var lines = mem.tokenizeScalar(u8, table, '\n');
         while (lines.next()) |line| {
             if (line[0] == '#') continue;
 
-            var fields = mem.tokenize(u8, line, " \t");
+            var fields = mem.tokenizeAny(u8, line, " \t");
             const number = fields.next() orelse return error.Incomplete;
             const abi = fields.next() orelse return error.Incomplete;
             const name = fields.next() orelse return error.Incomplete;
@@ -277,9 +277,9 @@ pub fn main() !void {
             },
         };
 
-        var lines = mem.tokenize(u8, defines, "\n");
+        var lines = mem.tokenizeScalar(u8, defines, '\n');
         loop: while (lines.next()) |line| {
-            var fields = mem.tokenize(u8, line, " \t");
+            var fields = mem.tokenizeAny(u8, line, " \t");
             const cmd = fields.next() orelse return error.Incomplete;
             if (!mem.eql(u8, cmd, "#define")) continue;
             const define = fields.next() orelse return error.Incomplete;
@@ -339,9 +339,9 @@ pub fn main() !void {
             },
         };
 
-        var lines = mem.tokenize(u8, defines, "\n");
+        var lines = mem.tokenizeScalar(u8, defines, '\n');
         loop: while (lines.next()) |line| {
-            var fields = mem.tokenize(u8, line, " \t");
+            var fields = mem.tokenizeAny(u8, line, " \t");
             const cmd = fields.next() orelse return error.Incomplete;
             if (!mem.eql(u8, cmd, "#define")) continue;
             const define = fields.next() orelse return error.Incomplete;
build.zig
@@ -284,7 +284,7 @@ pub fn build(b: *std.Build) !void {
             // That means we also have to rely on stage1 compiled c++ files. We parse config.h to find
             // the information passed on to us from cmake.
             if (cfg.cmake_prefix_path.len > 0) {
-                var it = mem.tokenize(u8, cfg.cmake_prefix_path, ";");
+                var it = mem.tokenizeScalar(u8, cfg.cmake_prefix_path, ';');
                 while (it.next()) |path| {
                     b.addSearchPrefix(path);
                 }
@@ -687,7 +687,7 @@ fn addCxxKnownPath(
     if (!std.process.can_spawn)
         return error.RequiredLibraryNotFound;
     const path_padded = b.exec(&.{ ctx.cxx_compiler, b.fmt("-print-file-name={s}", .{objname}) });
-    var tokenizer = mem.tokenize(u8, path_padded, "\r\n");
+    var tokenizer = mem.tokenizeAny(u8, path_padded, "\r\n");
     const path_unpadded = tokenizer.next().?;
     if (mem.eql(u8, path_unpadded, objname)) {
         if (errtxt) |msg| {
@@ -710,7 +710,7 @@ fn addCxxKnownPath(
 }
 
 fn addCMakeLibraryList(exe: *std.Build.Step.Compile, list: []const u8) void {
-    var it = mem.tokenize(u8, list, ";");
+    var it = mem.tokenizeScalar(u8, list, ';');
     while (it.next()) |lib| {
         if (mem.startsWith(u8, lib, "-l")) {
             exe.linkSystemLibrary(lib["-l".len..]);
@@ -855,7 +855,7 @@ fn parseConfigH(b: *std.Build, config_h_text: []const u8) ?CMakeConfig {
         // .prefix = ZIG_LLVM_LINK_MODE parsed manually below
     };
 
-    var lines_it = mem.tokenize(u8, config_h_text, "\r\n");
+    var lines_it = mem.tokenizeAny(u8, config_h_text, "\r\n");
     while (lines_it.next()) |line| {
         inline for (mappings) |mapping| {
             if (mem.startsWith(u8, line, mapping.prefix)) {