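//! Generates a compressed, multi-stage lookup table mapping Unicode code
//! points to their Script property. It reads the UCD's Scripts.txt from
//! data/unicode/Scripts.txt and writes the table, wrapped in a raw deflate
//! stream, to the output path given as the first command line argument.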
const std = @import("std");
const builtin = @import("builtin");

const Script = enum {
    none,
    Adlam,
    Ahom,
    Anatolian_Hieroglyphs,
    Arabic,
    Armenian,
    Avestan,
    Balinese,
    Bamum,
    Bassa_Vah,
    Batak,
    Bengali,
    Bhaiksuki,
    Bopomofo,
    Brahmi,
    Braille,
    Buginese,
    Buhid,
    Canadian_Aboriginal,
    Carian,
    Caucasian_Albanian,
    Chakma,
    Cham,
    Cherokee,
    Chorasmian,
    Common,
    Coptic,
    Cuneiform,
    Cypriot,
    Cypro_Minoan,
    Cyrillic,
    Deseret,
    Devanagari,
    Dives_Akuru,
    Dogra,
    Duployan,
    Egyptian_Hieroglyphs,
    Elbasan,
    Elymaic,
    Ethiopic,
    Georgian,
    Glagolitic,
    Gothic,
    Grantha,
    Greek,
    Gujarati,
    Gunjala_Gondi,
    Gurmukhi,
    Han,
    Hangul,
    Hanifi_Rohingya,
    Hanunoo,
    Hatran,
    Hebrew,
    Hiragana,
    Imperial_Aramaic,
    Inherited,
    Inscriptional_Pahlavi,
    Inscriptional_Parthian,
    Javanese,
    Kaithi,
    Kannada,
    Katakana,
    Kawi,
    Kayah_Li,
    Kharoshthi,
    Khitan_Small_Script,
    Khmer,
    Khojki,
    Khudawadi,
    Lao,
    Latin,
    Lepcha,
    Limbu,
    Linear_A,
    Linear_B,
    Lisu,
    Lycian,
    Lydian,
    Mahajani,
    Makasar,
    Malayalam,
    Mandaic,
    Manichaean,
    Marchen,
    Masaram_Gondi,
    Medefaidrin,
    Meetei_Mayek,
    Mende_Kikakui,
    Meroitic_Cursive,
    Meroitic_Hieroglyphs,
    Miao,
    Modi,
    Mongolian,
    Mro,
    Multani,
    Myanmar,
    Nabataean,
    Nag_Mundari,
    Nandinagari,
    New_Tai_Lue,
    Newa,
    Nko,
    Nushu,
    Nyiakeng_Puachue_Hmong,
    Ogham,
    Ol_Chiki,
    Old_Hungarian,
    Old_Italic,
    Old_North_Arabian,
    Old_Permic,
    Old_Persian,
    Old_Sogdian,
    Old_South_Arabian,
    Old_Turkic,
    Old_Uyghur,
    Oriya,
    Osage,
    Osmanya,
    Pahawh_Hmong,
    Palmyrene,
    Pau_Cin_Hau,
    Phags_Pa,
    Phoenician,
    Psalter_Pahlavi,
    Rejang,
    Runic,
    Samaritan,
    Saurashtra,
    Sharada,
    Shavian,
    Siddham,
    SignWriting,
    Sinhala,
    Sogdian,
    Sora_Sompeng,
    Soyombo,
    Sundanese,
    Syloti_Nagri,
    Syriac,
    Tagalog,
    Tagbanwa,
    Tai_Le,
    Tai_Tham,
    Tai_Viet,
    Takri,
    Tamil,
    Tangsa,
    Tangut,
    Telugu,
    Thaana,
    Thai,
    Tibetan,
    Tifinagh,
    Tirhuta,
    Toto,
    Ugaritic,
    Vai,
    Vithkuqi,
    Wancho,
    Warang_Citi,
    Yezidi,
    Yi,
    Zanabazar_Square,
};

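// The script values the generated table ultimately resolves to are these enum
// ordinals; `none` (0) covers code points that Scripts.txt does not list.

// Code points are grouped into blocks of 256 consecutive values (i.e. by
// cp >> 8), so that identical blocks can be deduplicated and stored once.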
const block_size = 256;
const Block = [block_size]u8;

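// Maps a block's contents to its offset within stage2, so duplicate blocks
// are emitted only once.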
const BlockMap = std.HashMap(
    Block,
    u16,
    struct {
        pub fn hash(_: @This(), k: Block) u64 {
            var hasher = std.hash.Wyhash.init(0);
            std.hash.autoHashStrat(&hasher, k, .DeepRecursive);
            return hasher.final();
        }

        pub fn eql(_: @This(), a: Block, b: Block) bool {
            return std.mem.eql(u8, &a, &b);
        }
    },
    std.hash_map.default_max_load_percentage,
);

pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

    // Maps each assigned code point to its script's enum value.
    var flat_map = std.AutoHashMap(u21, u8).init(allocator);
    defer flat_map.deinit();

    var line_buf: [4096]u8 = undefined;

    // Process Scripts.txt
    var in_file = try std.fs.cwd().openFile("data/unicode/Scripts.txt", .{});
    defer in_file.close();
    var in_buf = std.io.bufferedReader(in_file.reader());
    const in_reader = in_buf.reader();

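    // Data lines in Scripts.txt look roughly like:
    //     0041..005A    ; Latin # L& [26] LATIN CAPITAL LETTER A..
    // i.e. a code point or range, then the script name; everything after '#'
    // is a comment. Only the first two fields are used below.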
    while (try in_reader.readUntilDelimiterOrEof(&line_buf, '\n')) |line| {
        if (line.len == 0 or line[0] == '#') continue;

        const no_comment = if (std.mem.indexOfScalar(u8, line, '#')) |octo| line[0..octo] else line;

        var field_iter = std.mem.tokenizeAny(u8, no_comment, "; ");
        var current_code: [2]u21 = undefined;

        var i: usize = 0;
        while (field_iter.next()) |field| : (i += 1) {
            switch (i) {
                0 => {
                    // Code point(s)
                    if (std.mem.indexOf(u8, field, "..")) |dots| {
                        current_code = .{
                            try std.fmt.parseInt(u21, field[0..dots], 16),
                            try std.fmt.parseInt(u21, field[dots + 2 ..], 16),
                        };
                    } else {
                        const code = try std.fmt.parseInt(u21, field, 16);
                        current_code = .{ code, code };
                    }
                },
                1 => {
                    // Script
                    const script = std.meta.stringToEnum(Script, field) orelse {
                        std.debug.print("Unknown script: {s}\n", .{field});
                        return error.UnknownScript;
                    };
                    for (current_code[0]..current_code[1] + 1) |cp| try flat_map.put(@intCast(cp), @intFromEnum(script));
                },
                else => {},
            }
        }
    }

    var blocks_map = BlockMap.init(allocator);
    defer blocks_map.deinit();

    var stage1 = std.ArrayList(u16).init(allocator);
    defer stage1.deinit();

    var stage2 = std.ArrayList(u8).init(allocator);
    defer stage2.deinit();

    var stage3 = std.ArrayList(u8).init(allocator);
    defer stage3.deinit();

    var block: Block = [_]u8{0} ** block_size;
    var block_len: u16 = 0;

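    // Build the three stages over the whole code point space:
    //   stage3: the distinct script values seen, deduplicated,
    //   stage2: deduplicated 256-entry blocks of stage3 indices, concatenated,
    //   stage1: for each block of 256 code points, that block's offset in stage2.
    // A consumer of this table would presumably resolve a code point as
    // stage3[stage2[stage1[cp >> 8] + (cp & 0xff)]]; that reader is not part of
    // this generator, so treat the formula as a sketch of the intended layout.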
    for (0..0x110000) |i| {
        const cp: u21 = @intCast(i);
        const script = flat_map.get(cp) orelse 0;

        // Index of this script value in stage3, appending it if it is new.
        const stage3_idx = blk: {
            for (stage3.items, 0..) |script_i, j| {
                if (script == script_i) break :blk j;
            }
            try stage3.append(script);
            break :blk stage3.items.len - 1;
        };

        // Add this code point's stage3 index to the current block.
        block[block_len] = @intCast(stage3_idx);
        block_len += 1;

        // Keep filling until the block is complete (or we hit the last code point).
        if (block_len < block_size and cp != 0x10ffff) continue;

        // Deduplicate: identical blocks share a single copy in stage2.
        const gop = try blocks_map.getOrPut(block);
        if (!gop.found_existing) {
            gop.value_ptr.* = @intCast(stage2.items.len);
            try stage2.appendSlice(&block);
        }

        try stage1.append(gop.value_ptr.*);
        block_len = 0;
    }

    var args_iter = try std.process.argsWithAllocator(allocator);
    defer args_iter.deinit();
    _ = args_iter.skip();
    const output_path = args_iter.next() orelse @panic("No output file arg!");

    const compressor = std.compress.flate.deflate.compressor;
    var out_file = try std.fs.cwd().createFile(output_path, .{});
    defer out_file.close();
    var out_comp = try compressor(.raw, out_file.writer(), .{ .level = .best });
    const writer = out_comp.writer();

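    // Serialized layout (native endian, inside the raw deflate stream):
    //   u16 stage1 length, then each stage1 entry as u16,
    //   u16 stage2 length, then each stage2 entry as u8,
    //   u8  stage3 length, then each stage3 entry as u8.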
    const endian = builtin.cpu.arch.endian();
    try writer.writeInt(u16, @intCast(stage1.items.len), endian);
    for (stage1.items) |i| try writer.writeInt(u16, i, endian);

    try writer.writeInt(u16, @intCast(stage2.items.len), endian);
    for (stage2.items) |i| try writer.writeInt(u8, i, endian);

    try writer.writeInt(u8, @intCast(stage3.items.len), endian);
    for (stage3.items) |i| try writer.writeInt(u8, i, endian);

    try out_comp.flush();
}