rotint/deps/zg/codegen/hangul.zig
torque 7692cb4bc7
init
I will never get tired of vendoring dependencies. ha ha. It is possible
I am insane. I had to do a lot of pruning to get these not to be
ridiculous (especially the unicode data, which had nearly 1 million
lines of... stuff).
2024-08-09 19:26:32 -07:00

134 lines
4.1 KiB
Zig

const std = @import("std");
const builtin = @import("builtin");
/// Hangul syllable type, as listed in the UCD `HangulSyllableType.txt` file.
/// Variant names must match the literal field values in that file exactly —
/// they are resolved with `std.meta.stringToEnum` in `main`.
/// Field order is load-bearing: `@intFromEnum` values are what get written to
/// the generated data file, with `none` == 0 standing for "not listed".
const Syllable = enum {
    none,
    L,
    LV,
    LVT,
    V,
    T,
};
/// Number of code points per stage-2 block of the two-stage lookup table.
const block_size = 256;

/// One deduplicatable run of syllable-type values (one per code point).
const Block = [block_size]u3;

/// Hash context for `Block` keys: deep-hashes the array contents so two
/// blocks with equal element sequences collide into the same map slot.
const BlockContext = struct {
    pub fn hash(_: @This(), key: Block) u64 {
        var wyhash = std.hash.Wyhash.init(0);
        std.hash.autoHashStrat(&wyhash, key, .DeepRecursive);
        return wyhash.final();
    }

    pub fn eql(_: @This(), lhs: Block, rhs: Block) bool {
        return std.mem.eql(u3, &lhs, &rhs);
    }
};

/// Maps a deduplicated block to its starting offset within stage 2.
const BlockMap = std.HashMap(
    Block,
    u16,
    BlockContext,
    std.hash_map.default_max_load_percentage,
);
/// Generates a compressed two-stage lookup table mapping every Unicode code
/// point (0..0x10FFFF) to its Hangul `Syllable` type.
///
/// Reads `data/unicode/HangulSyllableType.txt` (UCD line format:
/// `CODE[..CODE] ; TYPE # comment`) and writes the table, raw-deflate
/// compressed, to the path given as the first CLI argument.
///
/// Output layout (native endian — presumably generated and consumed on the
/// same machine; confirm before shipping the artifact cross-platform):
///   u16 stage1 length, stage1 entries as u16 (offsets into stage2),
///   u16 stage2 length, stage2 entries as one u8 each (`@intFromEnum` values).
pub fn main() !void {
    // Short-lived codegen tool: arena-allocate everything, free in one shot.
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

    // code point -> syllable type (@intFromEnum); absent entries mean .none.
    var flat_map = std.AutoHashMap(u21, u3).init(allocator);
    defer flat_map.deinit();

    var line_buf: [4096]u8 = undefined;

    // Process HangulSyllableType.txt
    var in_file = try std.fs.cwd().openFile("data/unicode/HangulSyllableType.txt", .{});
    defer in_file.close();
    var in_buf = std.io.bufferedReader(in_file.reader());
    const in_reader = in_buf.reader();

    while (try in_reader.readUntilDelimiterOrEof(&line_buf, '\n')) |raw_line| {
        // Robustness fix: tolerate CRLF data files. readUntilDelimiterOrEof
        // keeps a trailing '\r', which would otherwise end up inside the last
        // token of any comment-free line and break parseInt/stringToEnum.
        const line = std.mem.trimRight(u8, raw_line, "\r");
        if (line.len == 0 or line[0] == '#') continue;
        const no_comment = if (std.mem.indexOfScalar(u8, line, '#')) |octo| line[0..octo] else line;

        // Fields look like "1100..115F ; L" or "A960 ; L".
        var field_iter = std.mem.tokenizeAny(u8, no_comment, "; ");
        var current_code: [2]u21 = undefined;

        var i: usize = 0;
        while (field_iter.next()) |field| : (i += 1) {
            switch (i) {
                0 => {
                    // Code point(s): single value or inclusive "lo..hi" range.
                    if (std.mem.indexOf(u8, field, "..")) |dots| {
                        current_code = .{
                            try std.fmt.parseInt(u21, field[0..dots], 16),
                            try std.fmt.parseInt(u21, field[dots + 2 ..], 16),
                        };
                    } else {
                        const code = try std.fmt.parseInt(u21, field, 16);
                        current_code = .{ code, code };
                    }
                },
                1 => {
                    // Syllable type; unrecognized names fall back to .none.
                    const st: Syllable = std.meta.stringToEnum(Syllable, field) orelse .none;
                    for (current_code[0]..current_code[1] + 1) |cp| try flat_map.put(@intCast(cp), @intFromEnum(st));
                },
                else => {},
            }
        }
    }

    // Build the two-stage table: stage1 holds one offset per 256-code-point
    // block; identical blocks are deduplicated so stage2 stays small.
    var blocks_map = BlockMap.init(allocator);
    defer blocks_map.deinit();
    var stage1 = std.ArrayList(u16).init(allocator);
    defer stage1.deinit();
    var stage2 = std.ArrayList(u3).init(allocator);
    defer stage2.deinit();

    var block: Block = [_]u3{0} ** block_size;
    var block_len: u16 = 0;

    for (0..0x110000) |i| {
        const cp: u21 = @intCast(i);
        const st = flat_map.get(cp) orelse 0;

        // Accumulate into the current block; flush when it fills. (0x110000
        // is a multiple of block_size, so the final-cp check is belt and
        // braces for the last block.)
        block[block_len] = st;
        block_len += 1;
        if (block_len < block_size and cp != 0x10ffff) continue;

        const gop = try blocks_map.getOrPut(block);
        if (!gop.found_existing) {
            gop.value_ptr.* = @intCast(stage2.items.len);
            try stage2.appendSlice(&block);
        }
        try stage1.append(gop.value_ptr.*);
        block_len = 0;
    }

    // Output path is the first CLI argument (argv[0] is skipped).
    var args_iter = try std.process.argsWithAllocator(allocator);
    defer args_iter.deinit();
    _ = args_iter.skip();
    const output_path = args_iter.next() orelse @panic("No output file arg!");

    const compressor = std.compress.flate.deflate.compressor;
    var out_file = try std.fs.cwd().createFile(output_path, .{});
    defer out_file.close();
    var out_comp = try compressor(.raw, out_file.writer(), .{ .level = .best });
    const writer = out_comp.writer();

    // NOTE(review): lengths are truncated to u16. Safe today — stage1 has
    // 0x110000 / 256 = 4352 entries and the Hangul stage2 stays tiny — but
    // the @intCast would trap if stage2 ever exceeded 65535 entries.
    const endian = builtin.cpu.arch.endian();
    try writer.writeInt(u16, @intCast(stage1.items.len), endian);
    for (stage1.items) |i| try writer.writeInt(u16, i, endian);
    try writer.writeInt(u16, @intCast(stage2.items.len), endian);
    for (stage2.items) |i| try writer.writeInt(u8, i, endian);
    try out_comp.flush();
}