parser: implement streaming parser
On my pathological 50 MiB, 10,000-line nested-list test, this is definitely slower than the one-shot parser, but its peak memory usage is 5 MiB compared to the 120 MiB of one-shot parsing. Not bad. Obviously this result depends heavily on the fact that this particular benchmark is 99% whitespace, which does not get copied into the resulting document. Files that are mostly data, with little indentation and few empty lines, will see a (significantly) smaller improvement. But a win is a win.
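For reference, the whole streaming surface exercised by the new example is four calls: init a nice.StreamParser, feed it chunks, finish to get the Document, and deinit. A minimal sketch of the core loop, mirroring the example below (here `reader` is a placeholder for any byte source, such as the opened file):

    var parser = try nice.StreamParser.init(allocator, .{});
    defer parser.deinit();
    var buf: [1024]u8 = undefined;
    while (true) {
        // Read the next chunk; zero bytes read means end of input.
        const len = try reader.read(&buf);
        if (len == 0) break;
        // Hand the chunk to the parser. Only document content is retained,
        // which is why whitespace-heavy input never accumulates in memory.
        try parser.feed(buf[0..len]);
    }
    const document = try parser.finish();
    defer document.deinit();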
 examples/stream.zig | 30 ++++++++++++++++++++++++++++++
 1 file changed, 30 insertions(+)
@@ -0,0 +1,30 @@
const std = @import("std");

const nice = @import("nice");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    const args = try std.process.argsAlloc(allocator);
    defer std.process.argsFree(allocator, args);
    if (args.len < 2) return;

    const document: nice.Document = doc: {
        const file = try std.fs.cwd().openFile(args[1], .{});
        defer file.close();
        var parser = try nice.StreamParser.init(allocator, .{});
        defer parser.deinit();
        while (true) {
            var buf = [_]u8{0} ** 1024;
            const len = try file.read(&buf);
            if (len == 0) break;
            try parser.feed(buf[0..len]);
        }
        break :doc try parser.finish();
    };
    defer document.deinit();

    document.printDebug();
}