Compare commits
16 Commits
e8ddee5ab2...master

e562e30e5e
8aaceba484
c74d615131
8ccb2c3a66
ad73ea6508
875b1b6344
ea52c99fee
dbf2762982
0f4a9fcaa7
bd079b42d9
bd0d74ee6a
2208079355
98eac68929
39619e7d6b
33ab092a06
21a9753d46
build.zig (25 changed lines)

@@ -2,11 +2,26 @@ const std = @import("std");
 
 pub fn build(b: *std.Build) void {
     const target = b.standardTargetOptions(.{});
     const optimize = b.standardOptimizeOption(.{});
 
     const nice = b.addModule("nice", .{
-        .source_file = .{ .path = "src/nice.zig" },
+        .root_source_file = b.path("src/nice.zig"),
     });
 
+    const tests = b.addTest(.{
+        .name = "nice-unit-tests",
+        .root_source_file = b.path("tests/main.zig"),
+        .target = target,
+        .optimize = optimize,
+    });
+
+    tests.root_module.addImport("nice", nice);
+
+    const run_main_tests = b.addRunArtifact(tests);
+    const test_step = b.step("test", "Run tests");
+    test_step.dependOn(&b.addInstallArtifact(tests, .{}).step);
+    test_step.dependOn(&run_main_tests.step);
+
     add_examples(b, .{
         .target = target,
         .nice_mod = nice,
@@ -14,7 +29,7 @@ pub fn build(b: *std.Build) void {
 }
 
 const ExampleOptions = struct {
-    target: std.zig.CrossTarget,
+    target: std.Build.ResolvedTarget,
     nice_mod: *std.Build.Module,
 };
 
@@ -29,18 +44,18 @@ const examples = [_]Example{
     .{ .name = "reify", .file = "examples/reify.zig" },
 };
 
-pub fn add_examples(b: *std.build, options: ExampleOptions) void {
+pub fn add_examples(b: *std.Build, options: ExampleOptions) void {
     const example_step = b.step("examples", "build examples");
 
     inline for (examples) |example| {
         const ex_exe = b.addExecutable(.{
             .name = example.name,
-            .root_source_file = .{ .path = example.file },
+            .root_source_file = b.path(example.file),
             .target = options.target,
             .optimize = .Debug,
         });
 
-        ex_exe.addModule("nice", options.nice_mod);
+        ex_exe.root_module.addImport("nice", options.nice_mod);
         const install = b.addInstallArtifact(ex_exe, .{});
         example_step.dependOn(&install.step);
     }
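These hunks move build.zig to the post-0.12 `std.Build` API (`b.path`, `std.Build.ResolvedTarget`, `root_module.addImport`) and add a `zig build test` step. For orientation only, here is a minimal sketch of how a downstream build script might import the module registered by `b.addModule` above; the consumer layout, executable name, and dependency wiring are assumptions, not part of this diff.

```zig
// Hypothetical consumer build.zig (not part of this repository). Assumes the
// consumer declared this package under the name "nice" in its own build.zig.zon.
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const exe = b.addExecutable(.{
        .name = "consumer",
        .root_source_file = b.path("src/main.zig"),
        .target = target,
        .optimize = optimize,
    });

    // b.dependency resolves the package from build.zig.zon; module("nice")
    // matches the module name registered by b.addModule("nice", ...) above.
    const nice_dep = b.dependency("nice", .{ .target = target, .optimize = optimize });
    exe.root_module.addImport("nice", nice_dep.module("nice"));

    b.installArtifact(exe);
}
```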
build.zig.zon

@@ -1,5 +1,12 @@
 .{
-    .name = "nice-data",
+    .name = "nice",
     .version = "0.1.0-pre",
     .dependencies = .{},
+    .paths = .{
+        "src",
+        "build.zig",
+        "build.zig.zon",
+        "license",
+        "readme.md",
+    },
 }
examples/reify.zig

@@ -21,6 +21,7 @@ const Example = struct {
     again: ?bool,
     array: [5]i16,
     nested: [3]struct { index: usize, title: []const u8 },
+    default: u64 = 0xDEADCAFE,
 };
 
 const source =
@@ -98,5 +99,6 @@ pub fn main() !void {
         std.debug.print(" {{ index: {d}, title: {s} }}\n", .{ item.index, item.title });
     }
     std.debug.print(" ]\n", .{});
+    std.debug.print(" default: 0x{X}\n", .{loaded.value.default});
     std.debug.print("}}\n", .{});
 }
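The example gains a field with a declared default. Combined with the `allow_omitting_default_values` option introduced later in this diff, a document that never mentions that key still reifies. A rough sketch of that behavior, assuming the usual `key: value` mapping syntax and the `parseBufferTo` entry point exercised by the new tests; the struct and document here are invented for illustration.

```zig
const std = @import("std");
const nice = @import("nice");

const Defaulted = struct {
    title: []const u8,
    default: u64 = 0xDEADCAFE,
};

test "defaulted field may be omitted from the document" {
    var diagnostics = nice.Diagnostics{};
    // "default" never appears in the document, so the declared default is used.
    const parsed = try nice.parseBufferTo(Defaulted, std.testing.allocator, "title: hello\n", &diagnostics, .{});
    defer parsed.deinit();
    try std.testing.expectEqual(@as(u64, 0xDEADCAFE), parsed.value.default);
}
```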
readme.md

@@ -267,7 +267,6 @@ nests:
 }
 ```
 
-
 ## Restrictions
 
 Nice documents must be encoded in valid UTF-8 with no BOM. They must use `LF`-only newlines (`CR` characters are forbidden). Tabs and spaces cannot be mixed for indentation. Indentation *must* adhere to a consistent quantum throughout the whole document, including on comment lines. Nonprinting ASCII characters are forbidden (specifically, any character less than `0x20` (space) except for `0x09` (horizontal tab) and `0x0A` (newline)). Trailing whitespace, including lines consisting only of whitespace, is forbidden, although empty lines are permitted. Some keys and values cannot be represented (for example, map keys cannot start with the character `#`, though map values can).
@@ -286,7 +285,7 @@ Nice is not, and does not try to be, a general-purpose data serialization format
 
 ### There's No Need to Conquer the World
 
-Nice has no exhaustive specification or formal grammar. The parser is handwritten, and there are pretty much guaranteed to be some strange edge cases that weren't considered when writing it. Standardization is a good thing, generally speaking, but it's not a goal here. Perhaps this driven by the author's indolence more than deep philosophical zealotry. On the other hand, this paragraph is under the philosophy section.
+Nice has no exhaustive specification or formal grammar. The parser is handwritten, and there are pretty much guaranteed to be some strange edge cases that weren't considered when writing it. Standardization is a good thing, generally speaking, but it's not a goal here. Perhaps this is driven by the author's indolence more than deep philosophical zealotry. On the other hand, this paragraph is under the philosophy section.
 
 # The Implementation
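Most of the restrictions quoted above are byte-level rules, so they can be checked before a buffer ever reaches the parser. A standalone sketch covering two of them (CR and other nonprinting ASCII characters); this helper is not part of the library.

```zig
const std = @import("std");

/// Returns false if the buffer contains any control character other than
/// horizontal tab (0x09) and newline (0x0A); this forbids CR (0x0D) as well.
fn meetsByteRestrictions(buffer: []const u8) bool {
    for (buffer) |byte| {
        if (byte < 0x20 and byte != '\t' and byte != '\n') return false;
    }
    return true;
}

test "byte-level restrictions" {
    try std.testing.expect(meetsByteRestrictions("key: value\n"));
    try std.testing.expect(!meetsByteRestrictions("key: value\r\n"));
}
```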
@@ -50,7 +50,7 @@ pub const Options = struct {
     // If an empty document is parsed, this defines what value type should be the
     // resulting document root object. The default behavior is to emit an error if the
     // document is empty.
-    default_object: enum { string, list, map, fail } = .fail,
+    default_object: enum { scalar, list, map, fail } = .fail,
 
     // Only used by the parseTo family of functions.
     // If false, and a mapping contains additional keys that do not map to the fields of
@@ -61,11 +61,11 @@ pub const Options = struct {
     ignore_extra_fields: bool = true,
 
     // Only used by the parseTo family of functions.
-    // If true, if a struct field is an optional type and the corresponding mapping key
-    // does not exist, the object field will be set to `null`. By default, if the
-    // parsed document is missing a mapping key for a given field, an error will be
-    // raised instead.
-    treat_omitted_as_null: bool = false,
+    // If true, if a struct field has a default value associated with it and the
+    // corresponding mapping key does not exist, the object field will be set to the
+    // default value. By default, this behavior is enabled, allowing succinct
+    // representation of objects that have default fields.
+    allow_omitting_default_values: bool = true,
 
     // Only used by the parseTo family of functions.
     // If true, strings may be coerced into other scalar types, like booleans or
@@ -80,13 +80,11 @@ pub const Options = struct {
     // an error if the destination is a boolean type. By default, these comparisons are
     // case-sensitive. See the `case_insensitive_scalar_coersion` option to change
     // this.
-    boolean_scalars: struct { truthy: []const []const u8, falsy: []const []const u8 } = .{
-        .truthy = &.{ "true", "True", "yes", "on" },
-        .falsy = &.{ "false", "False", "no", "off" },
-    },
+    truthy_boolean_scalars: []const []const u8 = &.{ "true", "True", "yes", "on" },
+    falsy_boolean_scalars: []const []const u8 = &.{ "false", "False", "no", "off" },
 
     // Only used by the parseTo family of functions.
-    // A list of strings. Scalars in the doucment that match any of the values listed
+    // A list of strings. Scalars in the document that match any of the values listed
     // will be parsed to optional `null`. Any other scalar value will be parsed as the
     // optional child type if the destination type is an optional. By default, these
     // comparisons are case-sensitive. See the `case_insensitive_scalar_coersion`
@@ -99,7 +97,9 @@
     // look like source code enum literals. Any enum value missing the leading `.` will
     // result in a conversion error. If set to false, no preprocessing will be done
     // and enum values will be converted from the literal scalar/string. These two styles
-    // cannot be mixed in a single document.
+    // cannot be mixed in a single document. Note that this setting also affects how
+    // tagged unions are parsed (specifically, the union's field name must also have the
+    // leading `.` if this option is enabled.)
     expect_enum_dot: bool = true,
 
     // Only used by the parseTo family of functions.
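The renamed boolean-scalar lists and the other knobs are plain fields on `Options`, so overriding them is ordinary struct initialization. A short sketch using only fields that appear in this diff; the scalar strings mirror the new tests further down.

```zig
const std = @import("std");
const nice = @import("nice");

test "custom scalar coercion options" {
    var diagnostics = nice.Diagnostics{};
    const parsed = try nice.parseBufferTo(bool, std.testing.allocator, "CORRECT\n", &diagnostics, .{
        // replace the default truthy/falsy word lists and match case-insensitively
        .truthy_boolean_scalars = &.{"correct"},
        .falsy_boolean_scalars = &.{"incorrect"},
        .case_insensitive_scalar_coersion = true,
    });
    defer parsed.deinit();
    try std.testing.expect(parsed.value);
}
```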
@@ -59,7 +59,7 @@ pub const State = struct {
 
         switch (state.mode) {
             .initial => switch (options.default_object) {
-                .string => state.document.root = Value.emptyString(),
+                .scalar => state.document.root = Value.emptyScalar(),
                 .list => state.document.root = Value.newList(arena_alloc),
                 .map => state.document.root = Value.newMap(arena_alloc),
                 .fail => {
@@ -70,7 +70,7 @@
                 },
             .value => switch (state.value_stack.getLast().*) {
                 // we have an in-progress string, finish it.
-                .string => |*string| string.* = try state.string_builder.toOwnedSlice(arena_alloc),
+                .string => |*string| string.* = try state.string_builder.toOwnedSliceSentinel(arena_alloc, 0),
                 // if we have a dangling -, attach an empty scalar to it
                 .list => |*list| if (state.expect_shift == .indent) try list.append(Value.emptyScalar()),
                 // if we have a dangling "key:", attach an empty scalar to it
@@ -185,7 +185,7 @@
 
             if (firstpass and line.shift == .dedent) {
                 // copy the string into the document proper
-                string.* = try state.string_builder.toOwnedSlice(arena_alloc);
+                string.* = try state.string_builder.toOwnedSliceSentinel(arena_alloc, 0);
 
                 var dedent_depth = line.shift.dedent;
                 while (dedent_depth > 0) : (dedent_depth -= 1)
@@ -199,9 +199,9 @@
             .in_line => |in_line| switch (in_line) {
                 .empty => unreachable,
                 inline .line_string, .space_string, .concat_string => |str, tag| {
-                    if (tag == .line_string)
+                    if (comptime tag == .line_string)
                         try state.string_builder.append(arena_alloc, '\n');
-                    if (tag == .space_string)
+                    if (comptime tag == .space_string)
                         try state.string_builder.append(arena_alloc, ' ');
                     try state.string_builder.appendSlice(arena_alloc, str);
                 },
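The last hunk wraps the tag comparisons in `comptime`. Inside an `inline` switch prong the captured tag is comptime-known, so the comparison can be forced to resolve during compilation and the dead branch disappears from each expansion. A standalone sketch of the pattern; the union here is invented and is not the parser's type.

```zig
const std = @import("std");

const Piece = union(enum) {
    line_string: []const u8,
    space_string: []const u8,
    concat_string: []const u8,
};

fn separator(piece: Piece) ?u8 {
    switch (piece) {
        // `inline` expands one prong per tag, making `tag` comptime-known,
        // so each `comptime` comparison is decided during compilation.
        inline .line_string, .space_string, .concat_string => |str, tag| {
            _ = str;
            if (comptime tag == .line_string) return '\n';
            if (comptime tag == .space_string) return ' ';
            return null;
        },
    }
}

test "comptime tag dispatch" {
    try std.testing.expectEqual(@as(?u8, '\n'), separator(.{ .line_string = "a" }));
    try std.testing.expectEqual(@as(?u8, ' '), separator(.{ .space_string = "b" }));
    try std.testing.expectEqual(@as(?u8, null), separator(.{ .concat_string = "c" }));
}
```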
@@ -9,6 +9,11 @@
 // CONDITIONS OF ANY KIND, either express or implied.
 
 const std = @import("std");
+const hasFn = if (@hasDecl(std.meta, "trait")) struct {
+    fn hasFn(comptime T: type, comptime name: []const u8) bool {
+        return std.meta.trait.hasFn(name)(T);
+    }
+}.hasFn else std.meta.hasFn;
 
 const Options = @import("../parser.zig").Options;
 
@@ -51,7 +56,7 @@ pub fn Parsed(comptime T: type) type {
 }
 
 pub const Value = union(enum) {
-    pub const String = []const u8;
+    pub const String = [:0]const u8;
     pub const Map = std.StringArrayHashMap(Value);
     pub const List = std.ArrayList(Value);
     pub const TagType = @typeInfo(Value).Union.tag_type.?;
@@ -63,6 +68,10 @@
     map: Map,
     inline_map: Map,
 
+    pub fn FieldConverter(comptime T: type) type {
+        return *const fn (Value, std.mem.Allocator, Options) error{BadValue}!T;
+    }
+
     pub fn convertTo(self: Value, comptime T: type, allocator: std.mem.Allocator, options: Options) !T {
         switch (@typeInfo(T)) {
             .Void => {
@@ -77,14 +86,14 @@
                 inline .scalar, .string => |str, tag| {
                     if (tag == .string and !options.coerce_strings) return error.BadValue;
                     if (options.case_insensitive_scalar_coersion) {
-                        for (options.boolean_scalars.truthy) |check|
+                        for (options.truthy_boolean_scalars) |check|
                             if (std.ascii.eqlIgnoreCase(str, check)) return true;
-                        for (options.boolean_scalars.falsy) |check|
+                        for (options.falsy_boolean_scalars) |check|
                             if (std.ascii.eqlIgnoreCase(str, check)) return false;
                     } else {
-                        for (options.boolean_scalars.truthy) |check|
+                        for (options.truthy_boolean_scalars) |check|
                             if (std.mem.eql(u8, str, check)) return true;
-                        for (options.boolean_scalars.falsy) |check|
+                        for (options.falsy_boolean_scalars) |check|
                             if (std.mem.eql(u8, str, check)) return false;
                     }
 
@@ -106,7 +115,7 @@
                 switch (self) {
                     inline .scalar, .string => |str, tag| {
                         if (tag == .string and !options.coerce_strings) return error.BadValue;
-                        return try std.fmt.parseFloat(T, str, 0);
+                        return try std.fmt.parseFloat(T, str);
                     },
                     else => return error.BadValue,
                 }
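Together with the `toOwnedSliceSentinel` and `dupeZ` changes elsewhere in this diff, switching `Value.String` to `[:0]const u8` means parsed strings carry a guaranteed null terminator and coerce to C-style pointers without copying. A small language-level illustration, not library code.

```zig
const std = @import("std");

test "sentinel-terminated strings coerce to C pointers" {
    // Same shape as the new Value.String: a slice with a guaranteed 0 terminator.
    const s: [:0]const u8 = "scalar value";

    // The sentinel lives one past the end and is addressable...
    try std.testing.expectEqual(@as(u8, 0), s[s.len]);

    // ...which is what allows a copy-free coercion to a C-style string pointer.
    const c_str: [*:0]const u8 = s;
    try std.testing.expectEqual(@as(u8, 's'), c_str[0]);
}
```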
@@ -120,27 +129,29 @@
                 // probably be solved in the zig stdlib or similar.
                 switch (self) {
                     .scalar, .string => |str| {
-                        if (ptr.child == u8) {
-                            if (ptr.sentinel) |sent| {
-                                var copy = try allocator.allocSentinel(u8, str.len, @as(*const u8, @ptrCast(sent)).*);
-                                @memcpy(copy, str);
-                                return copy;
-                            }
+                        if (comptime ptr.child == u8) {
+                            if (comptime ptr.sentinel) |sentinel|
+                                if (comptime @as(*align(1) const ptr.child, @ptrCast(sentinel)).* != 0)
+                                    return error.BadValue;
+
                             return str;
                         } else {
                             return error.BadValue;
                         }
                     },
                     .list, .inline_list => |lst| {
-                        var result = try std.ArrayList(ptr.child).initCapacity(allocator, lst.items.len);
-                        errdefer result.deinit();
-                        for (lst.items) |item| {
-                            result.appendAssumeCapacity(try item.convertTo(ptr.child, allocator, options));
+                        const result = try allocator.alloc(ptr.child, lst.items.len + @intFromBool(ptr.sentinel != null));
+
+                        for (result[0..lst.items.len], lst.items) |*res, item| {
+                            res.* = try item.convertTo(ptr.child, allocator, options);
                         }
-                        if (ptr.sentinel) |sent| {
-                            return try result.toOwnedSliceSentinel(@as(*align(1) const ptr.child, @ptrCast(sent)).*);
+
+                        if (comptime ptr.sentinel) |sentinel| {
+                            const sval = @as(*align(1) const ptr.child, @ptrCast(sentinel)).*;
+                            result[lst.items.len] = sval;
+                            return result[0..lst.items.len :sval];
                         } else {
-                            return try result.toOwnedSlice();
+                            return result;
                         }
                     },
                     else => return error.BadValue,
@@ -152,7 +163,7 @@
                     result.* = try self.convertTo(ptr.child, allocator, options);
                     return result;
                 },
-                else => @compileError("Cannot deserialize into many-pointer or c-pointer " ++ @typeName(T)), // do not support many or C item pointers.
+                else => @compileError("Cannot deserialize into many-pointer or c-pointer " ++ @typeName(T)),
             },
             .Array => |arr| {
                 // TODO: There is ambiguity here because a document expecting a list
@@ -169,21 +180,19 @@
                     } else return error.BadValue;
                 },
                 .list, .inline_list => |lst| {
-                    var storage = try std.ArrayList(arr.child).initCapacity(allocator, arr.len);
-                    defer storage.deinit();
-                    for (lst.items) |item| {
-                        storage.appendAssumeCapacity(try item.convertTo(arr.child, allocator, options));
-                    }
+                    if (lst.items.len != arr.len) return error.BadValue;
                     // this may result in a big stack allocation, which is not ideal
                     var result: T = undefined;
-                    @memcpy(&result, storage.items);
+                    for (&result, lst.items) |*res, item| {
+                        res.* = try item.convertTo(arr.child, allocator, options);
+                    }
                     return result;
                 },
                 else => return error.BadValue,
             }
         },
         .Struct => |stt| {
-            if (comptime std.meta.trait.hasFn("deserializeNice")(T))
+            if (comptime hasFn(T, "deserializeNice"))
                 return T.deserializeNice(self, allocator, options);
 
             if (stt.is_tuple) {
@@ -191,8 +200,8 @@
                 .list, .inline_list => |list| {
                     if (list.items.len != stt.fields.len) return error.BadValue;
                     var result: T = undefined;
-                    inline for (stt.fields, 0..) |field, idx| {
-                        result[idx] = try list.items[idx].convertTo(field.type, allocator, options);
+                    inline for (stt.fields, &result, list.items) |field, *res, item| {
+                        res.* = try item.convertTo(field.type, allocator, options);
                     }
                     return result;
                 },
@@ -204,33 +213,28 @@
                 .map, .inline_map => |map| {
                     var result: T = undefined;
 
-                    if (options.ignore_extra_fields) {
-                        inline for (stt.fields) |field| {
-                            if (map.get(field.name)) |value| {
-                                @field(result, field.name) = try value.convertTo(field.type, allocator, options);
-                            } else if (options.treat_omitted_as_null and @typeInfo(field.type) == .Optional) {
-                                @field(result, field.name) = null;
-                            } else {
-                                return error.BadValue;
-                            }
-                        }
-                    } else {
-                        // we could iterate over each map key and do an exhaustive
-                        // comparison with each struct field name. This would save
-                        // memory and it would probably be a fair amount faster for
-                        // small structs.
-                        var clone = try map.clone();
-                        defer clone.deinit();
-                        inline for (stt.fields) |field| {
-                            if (clone.fetchSwapRemove(field.name)) |kv| {
-                                @field(result, field.name) = try kv.value.convertTo(field.type, allocator, options);
-                            } else if (options.treat_omitted_as_null and @typeInfo(field.type) == .Optional) {
-                                @field(result, field.name) = null;
-                            } else return error.BadValue;
-                        }
-                        // there were extra fields in the data
-                        if (clone.count() > 0) return error.BadValue;
-                    }
+                    if (!options.ignore_extra_fields and (map.count() > stt.fields.len))
+                        return error.BadValue;
+
+                    var use_count: usize = 0;
+                    inline for (stt.fields) |field| {
+                        if (map.get(field.name)) |val| {
+                            if (comptime hasFn(T, "niceFieldConverter") and T.niceFieldConverter(field.name) != null) {
+                                @field(result, field.name) = try T.niceFieldConverter(field.name).?(val, allocator, options);
+                            } else {
+                                @field(result, field.name) = try val.convertTo(field.type, allocator, options);
+                            }
+                            use_count += 1;
+                        } else if (options.allow_omitting_default_values) {
+                            if (comptime field.default_value) |def|
+                                @field(result, field.name) = @as(*align(1) const field.type, @ptrCast(def)).*
+                            else
+                                return error.BadValue;
+                        } else return error.BadValue;
+                    }
+                    // there were extra fields in the data
+                    if (!options.ignore_extra_fields and (map.count() > use_count))
+                        return error.BadValue;
 
                     return result;
                 },
@@ -238,7 +242,7 @@
                 }
             },
             .Enum => {
-                if (comptime std.meta.trait.hasFn("deserializeNice")(T))
+                if (comptime hasFn(T, "deserializeNice"))
                     return T.deserializeNice(self, allocator, options);
 
                 switch (self) {
@@ -263,7 +267,7 @@
                 }
             },
             .Union => |unn| {
-                if (comptime std.meta.trait.hasFn("deserializeNice")(T))
+                if (comptime hasFn(T, "deserializeNice"))
                     return T.deserializeNice(self, allocator, options);
 
                 if (unn.tag_type == null) @compileError("Cannot deserialize into untagged union " ++ @typeName(T));
@@ -342,7 +346,7 @@
     }
 
     inline fn _fromScalarOrString(alloc: std.mem.Allocator, comptime classification: TagType, input: []const u8) !Value {
-        return @unionInit(Value, @tagName(classification), try alloc.dupe(u8, input));
+        return @unionInit(Value, @tagName(classification), try alloc.dupeZ(u8, input));
     }
 
     pub inline fn emptyScalar() Value {
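Several branches above delegate to a `deserializeNice` declaration when the target type provides one, passing the raw `Value`, the allocator, and the active `Options`. A hypothetical sketch of what such a hook could look like; the type, the `#rrggbb` convention, and the `anytype` signature are assumptions inferred from the call site, not an API documented in this diff.

```zig
const std = @import("std");

// Hypothetical user type: convertTo (above) detects a declaration named
// "deserializeNice" via the hasFn shim and delegates to it instead of doing
// the usual field-by-field mapping conversion.
const Color = struct {
    r: u8,
    g: u8,
    b: u8,

    pub fn deserializeNice(value: anytype, allocator: std.mem.Allocator, options: anytype) !Color {
        _ = allocator;
        _ = options;
        // accept "#rrggbb" scalars/strings rather than a map of r/g/b keys
        const str = switch (value) {
            .scalar, .string => |s| s,
            else => return error.BadValue,
        };
        if (str.len != 7 or str[0] != '#') return error.BadValue;
        return .{
            .r = std.fmt.parseInt(u8, str[1..3], 16) catch return error.BadValue,
            .g = std.fmt.parseInt(u8, str[3..5], 16) catch return error.BadValue,
            .b = std.fmt.parseInt(u8, str[5..7], 16) catch return error.BadValue,
        };
    }
};
```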
tests/main.zig (new file, 5 lines)

@@ -0,0 +1,5 @@
comptime {
    if (@import("builtin").is_test) {
        _ = @import("./reify.zig");
    }
}
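With the `test` step added to build.zig above, this root file is what `zig build test` compiles; additional test files only need to be referenced from this `comptime` block.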
tests/reify.zig (new file, 144 lines)

@@ -0,0 +1,144 @@
const std = @import("std");

const nice = @import("nice");

fn reifyScalar(comptime scalar: []const u8, expected: anytype) !void {
    try reifyScalarWithOptions(scalar, expected, .{});
}

fn reifyScalarWithOptions(comptime scalar: []const u8, expected: anytype, options: nice.parser.Options) !void {
    const allocator = std.testing.allocator;
    var diagnostics = nice.Diagnostics{};
    const parsed = try nice.parseBufferTo(
        @TypeOf(expected),
        allocator,
        scalar ++ "\n",
        &diagnostics,
        options,
    );
    defer parsed.deinit();

    try std.testing.expectEqual(expected, parsed.value);
}

test "reify integer" {
    try reifyScalar("123", @as(u8, 123));
    try reifyScalar("0123", @as(u8, 123));
    try reifyScalar("1_23", @as(u8, 123));
    try reifyScalar("-01_23", @as(i8, -123));
}

test "reify hexadecimal" {
    try reifyScalar("0x123", @as(i64, 0x123));
    try reifyScalar("0x0123", @as(i64, 0x123));
    try reifyScalar("0x01_23", @as(i64, 0x123));
    try reifyScalar("-0x01_23", @as(i64, -0x123));
}

test "reify octal" {
    try reifyScalar("0o123", @as(i64, 0o123));
    try reifyScalar("0o0123", @as(i64, 0o123));
    try reifyScalar("0o01_23", @as(i64, 0o123));
    try reifyScalar("-0o01_23", @as(i64, -0o123));
}

test "reify binary" {
    try reifyScalar("0b1011", @as(i5, 0b1011));
    try reifyScalar("0b01011", @as(i5, 0b1011));
    try reifyScalar("0b010_11", @as(i5, 0b1011));
    try reifyScalar("-0b010_11", @as(i5, -0b1011));
}

test "reify float" {
    try reifyScalar("0.25", @as(f32, 0.25));
    try reifyScalar("0.2_5", @as(f32, 0.25));
    try reifyScalar("00.250", @as(f32, 0.25));
    try reifyScalar("-0.25", @as(f32, -0.25));
}

test "reify hexfloat" {
    try reifyScalar("0x0.25", @as(f64, 0x0.25));
    try reifyScalar("0x0.2_5", @as(f64, 0x0.25));
    try reifyScalar("0x0.250p1", @as(f64, 0x0.25p1));
    try reifyScalar("-0x0.25", @as(f64, -0x0.25));
}

test "reify true" {
    try reifyScalar("true", true);
    try reifyScalar("True", true);
    try reifyScalar("yes", true);
    try reifyScalar("on", true);
}

test "reify false" {
    try reifyScalar("false", false);
    try reifyScalar("False", false);
    try reifyScalar("no", false);
    try reifyScalar("off", false);
}

test "reify custom true" {
    const options = nice.parser.Options{ .truthy_boolean_scalars = &.{"correct"} };
    try reifyScalarWithOptions("correct", true, options);
}

test "reify true case insensitive" {
    try std.testing.expectError(error.BadValue, reifyScalar("TRUE", true));
    const options = nice.parser.Options{ .case_insensitive_scalar_coersion = true };
    try reifyScalarWithOptions("TRUE", true, options);
}

test "reify custom false" {
    const options = nice.parser.Options{ .falsy_boolean_scalars = &.{"incorrect"} };
    try reifyScalarWithOptions("incorrect", false, options);
}

test "reify false case insensitive" {
    try std.testing.expectError(error.BadValue, reifyScalar("FALSE", false));
    const options = nice.parser.Options{ .case_insensitive_scalar_coersion = true };
    try reifyScalarWithOptions("FALSE", false, options);
}

test "reify null" {
    try reifyScalar("null", @as(?u8, null));
    try reifyScalar("nil", @as(?u8, null));
    try reifyScalar("None", @as(?u8, null));
}

test "reify custom null" {
    const options = nice.parser.Options{ .null_scalars = &.{"nothing"} };
    try reifyScalarWithOptions("nothing", @as(?u8, null), options);
}

test "reify null case insensitive" {
    // this is a little weird because when the null string mismatches, it will try to
    // parse the child optional type and produce either a value or an error from that,
    // so the error received depends on whether or not the optional child type fails to
    // parse the given value.
    try std.testing.expectError(error.InvalidCharacter, reifyScalar("NULL", @as(?u8, null)));
    const options = nice.parser.Options{ .case_insensitive_scalar_coersion = true };
    try reifyScalarWithOptions("NULL", @as(?u8, null), options);
}

test "reify void" {
    // A void scalar cannot exist on its own as it is not distinguishable from an empty
    // document.
    const Void = struct { void: void };
    try reifyScalar("void:", Void{ .void = void{} });
}

test "reify void scalar" {
    const options = nice.parser.Options{ .default_object = .scalar };
    try reifyScalarWithOptions("", void{}, options);
}

test "reify enum" {
    const Enum = enum { one, two };
    try reifyScalar(".one", Enum.one);
}

test "reify enum no dot" {
    const options = nice.parser.Options{ .expect_enum_dot = false };
    const Enum = enum { one, two };
    try reifyScalarWithOptions("two", Enum.two, options);
}