structstd.json[src]

JSON parsing and stringification conforming to RFC 8259. https://datatracker.ietf.org/doc/html/rfc8259

The low-level Scanner API produces Tokens from an input slice or successive slices of input. The Reader API connects a std.io.Reader to a Scanner.

The high-level parseFromSlice and parseFromTokenSource deserialize a JSON document into a Zig type. Parse into a dynamically-typed Value to load any JSON value for runtime inspection.

The low-level writeStream emits syntax-conformant JSON tokens to a std.io.Writer. The high-level stringify serializes a Zig or Value type into JSON.

Types

TypeObjectMap[src]

Source Code

Source code
pub const ObjectMap = StringArrayHashMap(Value)

TypeArray[src]

Source Code

Source code
pub const Array = ArrayList(Value)

Type FunctionArrayHashMap[src]

A thin wrapper around std.StringArrayHashMapUnmanaged that implements jsonParse, jsonParseFromValue, and jsonStringify. This is useful when your JSON schema has an object with arbitrary data keys instead of comptime-known struct field names.

Parameters

T: type

Fields

Functions

Functiondeinit[src]

pub fn deinit(self: *@This(), allocator: Allocator) void

Parameters

self: *@This()
allocator: Allocator

Source Code

Source code
pub fn deinit(self: *@This(), allocator: Allocator) void {
    self.map.deinit(allocator);
}

FunctionjsonParse[src]

pub fn jsonParse(allocator: Allocator, source: anytype, options: ParseOptions) !@This()

Parameters

allocator: Allocator
source: anytype
options: ParseOptions

Source Code

Source code
pub fn jsonParse(allocator: Allocator, source: anytype, options: ParseOptions) !@This() {
    var map: std.StringArrayHashMapUnmanaged(T) = .empty;
    errdefer map.deinit(allocator);

    if (.object_begin != try source.next()) return error.UnexpectedToken;
    while (true) {
        const token = try source.nextAlloc(allocator, options.allocate.?);
        switch (token) {
            inline .string, .allocated_string => |k| {
                const gop = try map.getOrPut(allocator, k);
                if (gop.found_existing) {
                    switch (options.duplicate_field_behavior) {
                        .use_first => {
                            // Parse and ignore the redundant value.
                            // We don't want to skip the value, because we want type checking.
                            _ = try innerParse(T, allocator, source, options);
                            continue;
                        },
                        .@"error" => return error.DuplicateField,
                        .use_last => {},
                    }
                }
                gop.value_ptr.* = try innerParse(T, allocator, source, options);
            },
            .object_end => break,
            else => unreachable,
        }
    }
    return .{ .map = map };
}

FunctionjsonParseFromValue[src]

pub fn jsonParseFromValue(allocator: Allocator, source: Value, options: ParseOptions) !@This()

Parameters

allocator: Allocator
source: Value
options: ParseOptions

Source Code

Source code
pub fn jsonParseFromValue(allocator: Allocator, source: Value, options: ParseOptions) !@This() {
    if (source != .object) return error.UnexpectedToken;

    var map: std.StringArrayHashMapUnmanaged(T) = .empty;
    errdefer map.deinit(allocator);

    var it = source.object.iterator();
    while (it.next()) |kv| {
        try map.put(allocator, kv.key_ptr.*, try innerParseFromValue(T, allocator, kv.value_ptr.*, options));
    }
    return .{ .map = map };
}

FunctionjsonStringify[src]

pub fn jsonStringify(self: @This(), jws: anytype) !void

Parameters

self: @This()

Source Code

Source code
pub fn jsonStringify(self: @This(), jws: anytype) !void {
    try jws.beginObject();
    var it = self.map.iterator();
    while (it.next()) |kv| {
        try jws.objectField(kv.key_ptr.*);
        try jws.write(kv.value_ptr.*);
    }
    try jws.endObject();
}

Source Code

Source code
/// A thin wrapper around `std.StringArrayHashMapUnmanaged(T)` that implements
/// `jsonParse`, `jsonParseFromValue`, and `jsonStringify`. Useful when a JSON
/// schema has an object with arbitrary data keys instead of comptime-known
/// struct field names.
pub fn ArrayHashMap(comptime T: type) type {
    return struct {
        map: std.StringArrayHashMapUnmanaged(T) = .empty,

        /// Releases the map's backing storage.
        pub fn deinit(self: *@This(), allocator: Allocator) void {
            self.map.deinit(allocator);
        }

        /// Parses the next JSON value from the token `source` as an object and
        /// returns it as a map of `T`. Returns `error.UnexpectedToken` if the
        /// next value is not an object. Duplicate keys are handled according to
        /// `options.duplicate_field_behavior`.
        pub fn jsonParse(allocator: Allocator, source: anytype, options: ParseOptions) !@This() {
            var map: std.StringArrayHashMapUnmanaged(T) = .empty;
            // Free the partially-built map if any later parse step fails.
            errdefer map.deinit(allocator);

            if (.object_begin != try source.next()) return error.UnexpectedToken;
            while (true) {
                // NOTE(review): assumes `options.allocate` has been resolved to
                // non-null by the parse entry point before user code runs —
                // confirm against the callers of jsonParse.
                const token = try source.nextAlloc(allocator, options.allocate.?);
                switch (token) {
                    // Either variant carries the key bytes; `inline` instantiates
                    // a branch per payload so `k` has a concrete type in each.
                    inline .string, .allocated_string => |k| {
                        const gop = try map.getOrPut(allocator, k);
                        if (gop.found_existing) {
                            switch (options.duplicate_field_behavior) {
                                .use_first => {
                                    // Parse and ignore the redundant value.
                                    // We don't want to skip the value, because we want type checking.
                                    _ = try innerParse(T, allocator, source, options);
                                    continue;
                                },
                                .@"error" => return error.DuplicateField,
                                .use_last => {},
                            }
                        }
                        gop.value_ptr.* = try innerParse(T, allocator, source, options);
                    },
                    .object_end => break,
                    // Inside an object, only a key string or `}` can follow.
                    else => unreachable,
                }
            }
            return .{ .map = map };
        }

        /// Converts an already-parsed `Value` (which must be `.object`) into a
        /// map of `T`, parsing each member value via `innerParseFromValue`.
        pub fn jsonParseFromValue(allocator: Allocator, source: Value, options: ParseOptions) !@This() {
            if (source != .object) return error.UnexpectedToken;

            var map: std.StringArrayHashMapUnmanaged(T) = .empty;
            // Free the partially-built map if converting any member fails.
            errdefer map.deinit(allocator);

            var it = source.object.iterator();
            while (it.next()) |kv| {
                // Keys are not copied; they keep referencing `source`'s memory.
                try map.put(allocator, kv.key_ptr.*, try innerParseFromValue(T, allocator, kv.value_ptr.*, options));
            }
            return .{ .map = map };
        }

        /// Writes the map as a JSON object, one field per entry, in the map's
        /// insertion order (the iteration order of an array hash map).
        pub fn jsonStringify(self: @This(), jws: anytype) !void {
            try jws.beginObject();
            var it = self.map.iterator();
            while (it.next()) |kv| {
                try jws.objectField(kv.key_ptr.*);
                try jws.write(kv.value_ptr.*);
            }
            try jws.endObject();
        }
    };
}

Type FunctionReader[src]

Connects a std.io.Reader to a std.json.Scanner. All next*() methods here handle error.BufferUnderrun from std.json.Scanner, and then read from the reader.

Parameters

buffer_size: usize
ReaderType: type

Fields

scanner: Scanner
reader: ReaderType
buffer: [buffer_size]u8 = undefined

Error Sets

Error SetNextError[src]

Errors

anyerror means the error set is known only at runtime.

OutOfMemory Error
SyntaxError Error
UnexpectedEndOfInput Error

Source Code

Source code
pub const NextError = ReaderType.Error || Error || Allocator.Error

Error SetSkipError[src]

Errors

anyerror means the error set is known only at runtime.

OutOfMemory Error
SyntaxError Error
UnexpectedEndOfInput Error

Source Code

Source code
pub const SkipError = NextError

Error SetAllocError[src]

Errors

anyerror means the error set is known only at runtime.

OutOfMemory Error
SyntaxError Error
UnexpectedEndOfInput Error
ValueTooLong

Source Code

Source code
pub const AllocError = NextError || error{ValueTooLong}

Error SetPeekError[src]

Errors

anyerror means the error set is known only at runtime.

SyntaxError Error
UnexpectedEndOfInput Error

Source Code

Source code
pub const PeekError = ReaderType.Error || Error

Functions

Functioninit[src]

pub fn init(allocator: Allocator, io_reader: ReaderType) @This()

The allocator is only used to track [] and {} nesting levels.

Parameters

allocator: Allocator
io_reader: ReaderType

Source Code

Source code
pub fn init(allocator: Allocator, io_reader: ReaderType) @This() {
    return .{
        .scanner = Scanner.initStreaming(allocator),
        .reader = io_reader,
    };
}

Functiondeinit[src]

pub fn deinit(self: *@This()) void

Parameters

self: *@This()

Source Code

Source code
pub fn deinit(self: *@This()) void {
    self.scanner.deinit();
    self.* = undefined;
}

FunctionenableDiagnostics[src]

pub fn enableDiagnostics(self: *@This(), diagnostics: *Diagnostics) void

Calls std.json.Scanner.enableDiagnostics.

Parameters

self: *@This()
diagnostics: *Diagnostics

Source Code

Source code
pub fn enableDiagnostics(self: *@This(), diagnostics: *Diagnostics) void {
    self.scanner.enableDiagnostics(diagnostics);
}

FunctionnextAlloc[src]

pub fn nextAlloc(self: *@This(), allocator: Allocator, when: AllocWhen) AllocError!Token

Equivalent to nextAllocMax(allocator, when, default_max_value_len); See also std.json.Token for documentation of nextAlloc*() function behavior.

Parameters

self: *@This()
allocator: Allocator
when: AllocWhen

Source Code

Source code
pub fn nextAlloc(self: *@This(), allocator: Allocator, when: AllocWhen) AllocError!Token {
    return self.nextAllocMax(allocator, when, default_max_value_len);
}

FunctionnextAllocMax[src]

pub fn nextAllocMax(self: *@This(), allocator: Allocator, when: AllocWhen, max_value_len: usize) AllocError!Token

See also std.json.Token for documentation of nextAlloc*() function behavior.

Parameters

self: *@This()
allocator: Allocator
when: AllocWhen
max_value_len: usize

Source Code

Source code
pub fn nextAllocMax(self: *@This(), allocator: Allocator, when: AllocWhen, max_value_len: usize) AllocError!Token {
    const token_type = try self.peekNextTokenType();
    switch (token_type) {
        .number, .string => {
            var value_list = ArrayList(u8).init(allocator);
            errdefer {
                value_list.deinit();
            }
            if (try self.allocNextIntoArrayListMax(&value_list, when, max_value_len)) |slice| {
                return if (token_type == .number)
                    Token{ .number = slice }
                else
                    Token{ .string = slice };
            } else {
                return if (token_type == .number)
                    Token{ .allocated_number = try value_list.toOwnedSlice() }
                else
                    Token{ .allocated_string = try value_list.toOwnedSlice() };
            }
        },

        // Simple tokens never alloc.
        .object_begin,
        .object_end,
        .array_begin,
        .array_end,
        .true,
        .false,
        .null,
        .end_of_document,
        => return try self.next(),
    }
}

FunctionallocNextIntoArrayList[src]

pub fn allocNextIntoArrayList(self: *@This(), value_list: *ArrayList(u8), when: AllocWhen) AllocError!?[]const u8

Equivalent to allocNextIntoArrayListMax(value_list, when, default_max_value_len);

Parameters

self: *@This()
value_list: *ArrayList(u8)
when: AllocWhen

Source Code

Source code
pub fn allocNextIntoArrayList(self: *@This(), value_list: *ArrayList(u8), when: AllocWhen) AllocError!?[]const u8 {
    return self.allocNextIntoArrayListMax(value_list, when, default_max_value_len);
}

FunctionallocNextIntoArrayListMax[src]

pub fn allocNextIntoArrayListMax(self: *@This(), value_list: *ArrayList(u8), when: AllocWhen, max_value_len: usize) AllocError!?[]const u8

Calls std.json.Scanner.allocNextIntoArrayListMax and handles error.BufferUnderrun.

Parameters

self: *@This()
value_list: *ArrayList(u8)
when: AllocWhen
max_value_len: usize

Source Code

Source code
pub fn allocNextIntoArrayListMax(self: *@This(), value_list: *ArrayList(u8), when: AllocWhen, max_value_len: usize) AllocError!?[]const u8 {
    while (true) {
        return self.scanner.allocNextIntoArrayListMax(value_list, when, max_value_len) catch |err| switch (err) {
            error.BufferUnderrun => {
                try self.refillBuffer();
                continue;
            },
            else => |other_err| return other_err,
        };
    }
}

FunctionskipValue[src]

pub fn skipValue(self: *@This()) SkipError!void

Like std.json.Scanner.skipValue, but handles error.BufferUnderrun.

Parameters

self: *@This()

Source Code

Source code
pub fn skipValue(self: *@This()) SkipError!void {
    switch (try self.peekNextTokenType()) {
        .object_begin, .array_begin => {
            try self.skipUntilStackHeight(self.stackHeight());
        },
        .number, .string => {
            while (true) {
                switch (try self.next()) {
                    .partial_number,
                    .partial_string,
                    .partial_string_escaped_1,
                    .partial_string_escaped_2,
                    .partial_string_escaped_3,
                    .partial_string_escaped_4,
                    => continue,

                    .number, .string => break,

                    else => unreachable,
                }
            }
        },
        .true, .false, .null => {
            _ = try self.next();
        },

        .object_end, .array_end, .end_of_document => unreachable, // Attempt to skip a non-value token.
    }
}

FunctionskipUntilStackHeight[src]

pub fn skipUntilStackHeight(self: *@This(), terminal_stack_height: usize) NextError!void

Like std.json.Scanner.skipUntilStackHeight() but handles error.BufferUnderrun.

Parameters

self: *@This()
terminal_stack_height: usize

Source Code

Source code
pub fn skipUntilStackHeight(self: *@This(), terminal_stack_height: usize) NextError!void {
    while (true) {
        return self.scanner.skipUntilStackHeight(terminal_stack_height) catch |err| switch (err) {
            error.BufferUnderrun => {
                try self.refillBuffer();
                continue;
            },
            else => |other_err| return other_err,
        };
    }
}

FunctionstackHeight[src]

pub fn stackHeight(self: *const @This()) usize

Calls std.json.Scanner.stackHeight.

Parameters

self: *const @This()

Source Code

Source code
pub fn stackHeight(self: *const @This()) usize {
    return self.scanner.stackHeight();
}

FunctionensureTotalStackCapacity[src]

pub fn ensureTotalStackCapacity(self: *@This(), height: usize) Allocator.Error!void

Calls std.json.Scanner.ensureTotalStackCapacity.

Parameters

self: *@This()
height: usize

Source Code

Source code
pub fn ensureTotalStackCapacity(self: *@This(), height: usize) Allocator.Error!void {
    try self.scanner.ensureTotalStackCapacity(height);
}

Functionnext[src]

pub fn next(self: *@This()) NextError!Token

See std.json.Token for documentation of this function.

Parameters

self: *@This()

Source Code

Source code
pub fn next(self: *@This()) NextError!Token {
    while (true) {
        return self.scanner.next() catch |err| switch (err) {
            error.BufferUnderrun => {
                try self.refillBuffer();
                continue;
            },
            else => |other_err| return other_err,
        };
    }
}

FunctionpeekNextTokenType[src]

pub fn peekNextTokenType(self: *@This()) PeekError!TokenType

See std.json.Scanner.peekNextTokenType().

Parameters

self: *@This()

Source Code

Source code
pub fn peekNextTokenType(self: *@This()) PeekError!TokenType {
    while (true) {
        return self.scanner.peekNextTokenType() catch |err| switch (err) {
            error.BufferUnderrun => {
                try self.refillBuffer();
                continue;
            },
            else => |other_err| return other_err,
        };
    }
}

Source Code

Source code
/// Connects a `std.io.Reader` to a `std.json.Scanner`: every `next*()` method
/// catches `error.BufferUnderrun` from the scanner, refills an internal
/// `buffer_size`-byte buffer from the reader, and retries.
pub fn Reader(comptime buffer_size: usize, comptime ReaderType: type) type {
    return struct {
        scanner: Scanner,
        reader: ReaderType,

        // Scratch space the scanner tokenizes from; filled by `refillBuffer`.
        buffer: [buffer_size]u8 = undefined,

        /// The allocator is only used to track `[]` and `{}` nesting levels.
        pub fn init(allocator: Allocator, io_reader: ReaderType) @This() {
            return .{
                .scanner = Scanner.initStreaming(allocator),
                .reader = io_reader,
            };
        }
        /// Frees the scanner's state. Does not close or deinit the reader.
        pub fn deinit(self: *@This()) void {
            self.scanner.deinit();
            self.* = undefined;
        }

        /// Calls `std.json.Scanner.enableDiagnostics`.
        pub fn enableDiagnostics(self: *@This(), diagnostics: *Diagnostics) void {
            self.scanner.enableDiagnostics(diagnostics);
        }

        /// Errors from advancing: the reader, the scanner, or the nesting stack.
        pub const NextError = ReaderType.Error || Error || Allocator.Error;
        pub const SkipError = NextError;
        /// `NextError` plus `error.ValueTooLong` when a value exceeds `max_value_len`.
        pub const AllocError = NextError || error{ValueTooLong};
        /// Peeking excludes `Allocator.Error` (no allocation happens on peek).
        pub const PeekError = ReaderType.Error || Error;

        /// Equivalent to `nextAllocMax(allocator, when, default_max_value_len);`
        /// See also `std.json.Token` for documentation of `nextAlloc*()` function behavior.
        pub fn nextAlloc(self: *@This(), allocator: Allocator, when: AllocWhen) AllocError!Token {
            return self.nextAllocMax(allocator, when, default_max_value_len);
        }
        /// See also `std.json.Token` for documentation of `nextAlloc*()` function behavior.
        pub fn nextAllocMax(self: *@This(), allocator: Allocator, when: AllocWhen, max_value_len: usize) AllocError!Token {
            const token_type = try self.peekNextTokenType();
            switch (token_type) {
                .number, .string => {
                    var value_list = ArrayList(u8).init(allocator);
                    errdefer {
                        value_list.deinit();
                    }
                    // A non-null slice means the complete value was available in
                    // the scanner's buffer; null means the bytes were accumulated
                    // into `value_list` and ownership passes to the token.
                    if (try self.allocNextIntoArrayListMax(&value_list, when, max_value_len)) |slice| {
                        return if (token_type == .number)
                            Token{ .number = slice }
                        else
                            Token{ .string = slice };
                    } else {
                        return if (token_type == .number)
                            Token{ .allocated_number = try value_list.toOwnedSlice() }
                        else
                            Token{ .allocated_string = try value_list.toOwnedSlice() };
                    }
                },

                // Simple tokens never alloc.
                .object_begin,
                .object_end,
                .array_begin,
                .array_end,
                .true,
                .false,
                .null,
                .end_of_document,
                => return try self.next(),
            }
        }

        /// Equivalent to `allocNextIntoArrayListMax(value_list, when, default_max_value_len);`
        pub fn allocNextIntoArrayList(self: *@This(), value_list: *ArrayList(u8), when: AllocWhen) AllocError!?[]const u8 {
            return self.allocNextIntoArrayListMax(value_list, when, default_max_value_len);
        }
        /// Calls `std.json.Scanner.allocNextIntoArrayListMax` and handles `error.BufferUnderrun`.
        pub fn allocNextIntoArrayListMax(self: *@This(), value_list: *ArrayList(u8), when: AllocWhen, max_value_len: usize) AllocError!?[]const u8 {
            while (true) {
                // Retry after refilling; any other error propagates unchanged.
                return self.scanner.allocNextIntoArrayListMax(value_list, when, max_value_len) catch |err| switch (err) {
                    error.BufferUnderrun => {
                        try self.refillBuffer();
                        continue;
                    },
                    else => |other_err| return other_err,
                };
            }
        }

        /// Like `std.json.Scanner.skipValue`, but handles `error.BufferUnderrun`.
        pub fn skipValue(self: *@This()) SkipError!void {
            switch (try self.peekNextTokenType()) {
                .object_begin, .array_begin => {
                    // Consume tokens until the container we are about to enter
                    // is closed, i.e. the stack returns to its current height.
                    try self.skipUntilStackHeight(self.stackHeight());
                },
                .number, .string => {
                    // A value split across buffer refills arrives as a run of
                    // partial tokens terminated by a final .number/.string.
                    while (true) {
                        switch (try self.next()) {
                            .partial_number,
                            .partial_string,
                            .partial_string_escaped_1,
                            .partial_string_escaped_2,
                            .partial_string_escaped_3,
                            .partial_string_escaped_4,
                            => continue,

                            .number, .string => break,

                            else => unreachable,
                        }
                    }
                },
                .true, .false, .null => {
                    _ = try self.next();
                },

                .object_end, .array_end, .end_of_document => unreachable, // Attempt to skip a non-value token.
            }
        }
        /// Like `std.json.Scanner.skipUntilStackHeight()` but handles `error.BufferUnderrun`.
        pub fn skipUntilStackHeight(self: *@This(), terminal_stack_height: usize) NextError!void {
            while (true) {
                return self.scanner.skipUntilStackHeight(terminal_stack_height) catch |err| switch (err) {
                    error.BufferUnderrun => {
                        try self.refillBuffer();
                        continue;
                    },
                    else => |other_err| return other_err,
                };
            }
        }

        /// Calls `std.json.Scanner.stackHeight`.
        pub fn stackHeight(self: *const @This()) usize {
            return self.scanner.stackHeight();
        }
        /// Calls `std.json.Scanner.ensureTotalStackCapacity`.
        pub fn ensureTotalStackCapacity(self: *@This(), height: usize) Allocator.Error!void {
            try self.scanner.ensureTotalStackCapacity(height);
        }

        /// See `std.json.Token` for documentation of this function.
        pub fn next(self: *@This()) NextError!Token {
            while (true) {
                return self.scanner.next() catch |err| switch (err) {
                    error.BufferUnderrun => {
                        try self.refillBuffer();
                        continue;
                    },
                    else => |other_err| return other_err,
                };
            }
        }

        /// See `std.json.Scanner.peekNextTokenType()`.
        pub fn peekNextTokenType(self: *@This()) PeekError!TokenType {
            while (true) {
                return self.scanner.peekNextTokenType() catch |err| switch (err) {
                    error.BufferUnderrun => {
                        try self.refillBuffer();
                        continue;
                    },
                    else => |other_err| return other_err,
                };
            }
        }

        // Reads the next chunk from the reader into `buffer` and feeds it to
        // the scanner; a zero-length read signals end of input.
        fn refillBuffer(self: *@This()) ReaderType.Error!void {
            const input = self.buffer[0..try self.reader.read(self.buffer[0..])];
            if (input.len > 0) {
                self.scanner.feedInput(input);
            } else {
                self.scanner.endInput();
            }
        }
    };
}

Type FunctionParsed[src]

Parameters

T: type

Fields

value: T

Functions

Functiondeinit[src]

pub fn deinit(self: @This()) void

Parameters

self: @This()

Source Code

Source code
pub fn deinit(self: @This()) void {
    const allocator = self.arena.child_allocator;
    self.arena.deinit();
    allocator.destroy(self.arena);
}

Source Code

Source code
/// The result of a successful parse: the parsed `value` together with the
/// arena that owns every allocation it references. Call `deinit` to free
/// the whole tree at once.
pub fn Parsed(comptime T: type) type {
    return struct {
        arena: *ArenaAllocator,
        value: T,

        /// Frees the arena (and therefore `value`) and the arena struct itself.
        pub fn deinit(self: @This()) void {
            // Capture the backing allocator before tearing down the arena;
            // it is needed afterwards to destroy the arena struct, which was
            // allocated from it rather than from the arena.
            const backing_allocator = self.arena.child_allocator;
            self.arena.deinit();
            backing_allocator.destroy(self.arena);
        }
    };
}

Type FunctionParseError[src]

The error set that will be returned when parsing from *Source. Note that this may contain error.BufferUnderrun, but that error will never actually be returned.

Parameters

Source: type

Source Code

Source code
/// The error set that will be returned when parsing from `*Source`.
/// Note that this may contain `error.BufferUnderrun`, but that error will
/// never actually be returned.
pub fn ParseError(comptime Source: type) type {
    // Merge everything the token source can produce with the value-conversion
    // errors. A few of these will either always be present or present enough
    // of the time that omitting them is more confusing than always including
    // them.
    const source_error_set = Source.NextError || Source.PeekError || Source.AllocError;
    return ParseFromValueError || source_error_set;
}

Type FunctionWriteStream[src]

Writes JSON (RFC8259) formatted data to a stream.

The sequence of method calls to write JSON content must follow this grammar:

 <once> = <value>
 <value> =
   | <object>
   | <array>
   | write
   | print
   | <writeRawStream>
 <object> = beginObject ( <field> <value> )* endObject
 <field> = objectField | objectFieldRaw | <objectFieldRawStream>
 <array> = beginArray ( <value> )* endArray
 <writeRawStream> = beginWriteRaw ( stream.writeAll )* endWriteRaw
 <objectFieldRawStream> = beginObjectFieldRaw ( stream.writeAll )* endObjectFieldRaw

The safety_checks_hint parameter determines how much memory is used to enable assertions that the above grammar is being followed, e.g. tripping an assertion rather than allowing endObject to emit the final } in [[[]]}. "Depth" in this context means the depth of nested [] or {} expressions (or equivalently the amount of recursion on the <value> grammar expression above). For example, emitting the JSON [[[]]] requires a depth of 3. If .checked_to_fixed_depth is used, there is additionally an assertion that the nesting depth never exceeds the given limit. .checked_to_arbitrary_depth requires a runtime allocator for the memory. .checked_to_fixed_depth embeds the storage required in the WriteStream struct. .assumed_correct requires no space and performs none of these assertions. In ReleaseFast and ReleaseSmall mode, the given safety_checks_hint is ignored and is always treated as .assumed_correct.

Parameters

OutStream: type
safety_checks_hint: union(enum) {
    checked_to_arbitrary_depth,
    checked_to_fixed_depth: usize, // Rounded up to the nearest multiple of 8.
    assumed_correct,
}

Types

TypeError[src]

Source Code

Source code
pub const Error = switch (safety_checks) {
    .checked_to_arbitrary_depth => Stream.Error || error{OutOfMemory},
    .checked_to_fixed_depth, .assumed_correct => Stream.Error,
}

Fields

stream: OutStream
indent_level: usize = 0
next_punctuation: enum {
    the_beginning,
    none,
    comma,
    colon,
} = .the_beginning
nesting_stack: switch (safety_checks) {
    .checked_to_arbitrary_depth => BitStack,
    .checked_to_fixed_depth => |fixed_buffer_size| [(fixed_buffer_size + 7) >> 3]u8,
    .assumed_correct => void,
}
raw_streaming_mode: if (build_mode_has_safety)
    enum { none, value, objectField }
else
    void = if (build_mode_has_safety) .none else {}

Values

ConstantStream[src]

Source Code

Source code
pub const Stream = OutStream

Functions

Functioninit[src]

pub fn init(safety_allocator: Allocator, stream: OutStream, options: StringifyOptions) Self

Parameters

safety_allocator: Allocator
stream: OutStream

Source Code

Source code
pub fn init(safety_allocator: Allocator, stream: OutStream, options: StringifyOptions) Self {
    return .{
        .options = options,
        .stream = stream,
        .nesting_stack = switch (safety_checks) {
            .checked_to_arbitrary_depth => BitStack.init(safety_allocator),
            .checked_to_fixed_depth => |fixed_buffer_size| [_]u8{0} ** ((fixed_buffer_size + 7) >> 3),
            .assumed_correct => {},
        },
    };
}

Functiondeinit[src]

pub fn deinit(self: *Self) void

Only necessary with .checked_to_arbitrary_depth.

Parameters

self: *Self

Source Code

Source code
pub fn deinit(self: *Self) void {
    switch (safety_checks) {
        .checked_to_arbitrary_depth => self.nesting_stack.deinit(),
        .checked_to_fixed_depth, .assumed_correct => {},
    }
    self.* = undefined;
}

FunctionbeginArray[src]

pub fn beginArray(self: *Self) Error!void

Parameters

self: *Self

Source Code

Source code
pub fn beginArray(self: *Self) Error!void {
    if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
    try self.valueStart();
    try self.stream.writeByte('[');
    try self.pushIndentation(ARRAY_MODE);
    self.next_punctuation = .none;
}

FunctionbeginObject[src]

pub fn beginObject(self: *Self) Error!void

Parameters

self: *Self

Source Code

Source code
pub fn beginObject(self: *Self) Error!void {
    if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
    try self.valueStart();
    try self.stream.writeByte('{');
    try self.pushIndentation(OBJECT_MODE);
    self.next_punctuation = .none;
}

FunctionendArray[src]

pub fn endArray(self: *Self) Error!void

Parameters

self: *Self

Source Code

Source code
pub fn endArray(self: *Self) Error!void {
    if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
    self.popIndentation(ARRAY_MODE);
    switch (self.next_punctuation) {
        .none => {},
        .comma => {
            try self.indent();
        },
        .the_beginning, .colon => unreachable,
    }
    try self.stream.writeByte(']');
    self.valueDone();
}

FunctionendObject[src]

pub fn endObject(self: *Self) Error!void

Parameters

self: *Self

Source Code

Source code
pub fn endObject(self: *Self) Error!void {
    if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
    self.popIndentation(OBJECT_MODE);
    switch (self.next_punctuation) {
        .none => {},
        .comma => {
            try self.indent();
        },
        .the_beginning, .colon => unreachable,
    }
    try self.stream.writeByte('}');
    self.valueDone();
}

Functionprint[src]

pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) Error!void

An alternative to calling write that formats a value with std.fmt. This function does the usual punctuation and indentation formatting assuming the resulting formatted string represents a single complete value; e.g. "1", "[]", "[1,2]", not "1,2". This function may be useful for doing your own number formatting.

Parameters

self: *Self
fmt: []const u8

Source Code

Source code
pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) Error!void {
    if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
    try self.valueStart();
    try self.stream.print(fmt, args);
    self.valueDone();
}

FunctionbeginWriteRaw[src]

pub fn beginWriteRaw(self: *Self) !void

An alternative to calling write that allows you to write directly to the .stream field, e.g. with .stream.writeAll(). Call beginWriteRaw(), then write a complete value (including any quotes if necessary) directly to the .stream field, then call endWriteRaw(). This can be useful for streaming very long strings into the output without needing it all buffered in memory.

Parameters

self: *Self

Source Code

Source code
pub fn beginWriteRaw(self: *Self) !void {
    if (build_mode_has_safety) {
        assert(self.raw_streaming_mode == .none);
        self.raw_streaming_mode = .value;
    }
    try self.valueStart();
}

FunctionendWriteRaw[src]

pub fn endWriteRaw(self: *Self) void

See beginWriteRaw.

Parameters

self: *Self

Source Code

Source code
pub fn endWriteRaw(self: *Self) void {
    if (build_mode_has_safety) {
        assert(self.raw_streaming_mode == .value);
        self.raw_streaming_mode = .none;
    }
    self.valueDone();
}

FunctionobjectField[src]

pub fn objectField(self: *Self, key: []const u8) Error!void

See WriteStream for when to call this method. key is the string content of the property name. Surrounding quotes will be added and any special characters will be escaped. See also objectFieldRaw.

Parameters

self: *Self
key: []const u8

Source Code

Source code
pub fn objectField(self: *Self, key: []const u8) Error!void {
    if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
    try self.objectFieldStart();
    try encodeJsonString(key, self.options, self.stream);
    self.next_punctuation = .colon;
}

FunctionobjectFieldRaw[src]

pub fn objectFieldRaw(self: *Self, quoted_key: []const u8) Error!void

See WriteStream for when to call this method. quoted_key is the complete bytes of the key including quotes and any necessary escape sequences. A few assertions are performed on the given value to ensure that the caller of this function understands the API contract. See also objectField.

Parameters

self: *Self
quoted_key: []const u8

Source Code

Source code
pub fn objectFieldRaw(self: *Self, quoted_key: []const u8) Error!void {
    if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
    assert(quoted_key.len >= 2 and quoted_key[0] == '"' and quoted_key[quoted_key.len - 1] == '"'); // quoted_key should be "quoted".
    try self.objectFieldStart();
    try self.stream.writeAll(quoted_key);
    self.next_punctuation = .colon;
}

FunctionbeginObjectFieldRaw[src]

pub fn beginObjectFieldRaw(self: *Self) !void

In the rare case that you need to write very long object field names, this is an alternative to objectField and objectFieldRaw that allows you to write directly to the .stream field similar to beginWriteRaw. Call endObjectFieldRaw() when you're done.

Parameters

self: *Self

Source Code

Source code
pub fn beginObjectFieldRaw(self: *Self) !void {
    if (build_mode_has_safety) {
        assert(self.raw_streaming_mode == .none);
        self.raw_streaming_mode = .objectField;
    }
    try self.objectFieldStart();
}

FunctionendObjectFieldRaw[src]

pub fn endObjectFieldRaw(self: *Self) void

See beginObjectFieldRaw.

Parameters

self: *Self

Source Code

Source code
pub fn endObjectFieldRaw(self: *Self) void {
    if (build_mode_has_safety) {
        assert(self.raw_streaming_mode == .objectField);
        self.raw_streaming_mode = .none;
    }
    self.next_punctuation = .colon;
}

Functionwrite[src]

pub fn write(self: *Self, value: anytype) Error!void

Renders the given Zig value as JSON.

Supported types:

  • Zig bool -> JSON true or false.
  • Zig ?T -> null or the rendering of T.
  • Zig i32, u64, etc. -> JSON number or string.
    • When option emit_nonportable_numbers_as_strings is true, if the value is outside the range +-1<<53 (the precise integer range of f64), it is rendered as a JSON string in base 10. Otherwise, it is rendered as JSON number.
  • Zig floats -> JSON number or string.
    • If the value cannot be precisely represented by an f64, it is rendered as a JSON string. Otherwise, it is rendered as JSON number.
    • TODO: Float rendering will likely change in the future, e.g. to remove the unnecessary "e+00".
  • Zig []const u8, []u8, *[N]u8, @Vector(N, u8), and similar -> JSON string.
    • See StringifyOptions.emit_strings_as_arrays.
    • If the content is not valid UTF-8, rendered as an array of numbers instead.
  • Zig []T, [N]T, *[N]T, @Vector(N, T), and similar -> JSON array of the rendering of each item.
  • Zig tuple -> JSON array of the rendering of each item.
  • Zig struct -> JSON object with each field in declaration order.
    • If the struct declares a method pub fn jsonStringify(self: *@This(), jw: anytype) !void, it is called to do the serialization instead of the default behavior. The given jw is a pointer to this WriteStream. See std.json.Value for an example.
    • See StringifyOptions.emit_null_optional_fields.
  • Zig union(enum) -> JSON object with one field named for the active tag and a value representing the payload.
    • If the payload is void, then the emitted value is {}.
    • If the union declares a method pub fn jsonStringify(self: *@This(), jw: anytype) !void, it is called to do the serialization instead of the default behavior. The given jw is a pointer to this WriteStream.
  • Zig enum -> JSON string naming the active tag.
    • If the enum declares a method pub fn jsonStringify(self: *@This(), jw: anytype) !void, it is called to do the serialization instead of the default behavior. The given jw is a pointer to this WriteStream.
    • If the enum is non-exhaustive, unnamed values are rendered as integers.
  • Zig untyped enum literal -> JSON string naming the active tag.
  • Zig error -> JSON string naming the error.
  • Zig *T -> the rendering of T. Note there is no guard against circular-reference infinite recursion.

See also alternative functions print and beginWriteRaw. For writing object field names, use objectField instead.

Parameters

self: *Self

Source Code

Source code
pub fn write(self: *Self, value: anytype) Error!void {
    if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
    const T = @TypeOf(value);
    switch (@typeInfo(T)) {
        .int => {
            try self.valueStart();
            if (self.options.emit_nonportable_numbers_as_strings and
                (value <= -(1 << 53) or value >= (1 << 53)))
            {
                try self.stream.print("\"{}\"", .{value});
            } else {
                try self.stream.print("{}", .{value});
            }
            self.valueDone();
            return;
        },
        .comptime_int => {
            return self.write(@as(std.math.IntFittingRange(value, value), value));
        },
        .float, .comptime_float => {
            if (@as(f64, @floatCast(value)) == value) {
                try self.valueStart();
                try self.stream.print("{}", .{@as(f64, @floatCast(value))});
                self.valueDone();
                return;
            }
            try self.valueStart();
            try self.stream.print("\"{}\"", .{value});
            self.valueDone();
            return;
        },

        .bool => {
            try self.valueStart();
            try self.stream.writeAll(if (value) "true" else "false");
            self.valueDone();
            return;
        },
        .null => {
            try self.valueStart();
            try self.stream.writeAll("null");
            self.valueDone();
            return;
        },
        .optional => {
            if (value) |payload| {
                return try self.write(payload);
            } else {
                return try self.write(null);
            }
        },
        .@"enum" => |enum_info| {
            if (std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(self);
            }

            if (!enum_info.is_exhaustive) {
                inline for (enum_info.fields) |field| {
                    if (value == @field(T, field.name)) {
                        break;
                    }
                } else {
                    return self.write(@intFromEnum(value));
                }
            }

            return self.stringValue(@tagName(value));
        },
        .enum_literal => {
            return self.stringValue(@tagName(value));
        },
        .@"union" => {
            if (std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(self);
            }

            const info = @typeInfo(T).@"union";
            if (info.tag_type) |UnionTagType| {
                try self.beginObject();
                inline for (info.fields) |u_field| {
                    if (value == @field(UnionTagType, u_field.name)) {
                        try self.objectField(u_field.name);
                        if (u_field.type == void) {
                            // void value is {}
                            try self.beginObject();
                            try self.endObject();
                        } else {
                            try self.write(@field(value, u_field.name));
                        }
                        break;
                    }
                } else {
                    unreachable; // No active tag?
                }
                try self.endObject();
                return;
            } else {
                @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
            }
        },
        .@"struct" => |S| {
            if (std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(self);
            }

            if (S.is_tuple) {
                try self.beginArray();
            } else {
                try self.beginObject();
            }
            inline for (S.fields) |Field| {
                // don't include void fields
                if (Field.type == void) continue;

                var emit_field = true;

                // don't include optional fields that are null when emit_null_optional_fields is set to false
                if (@typeInfo(Field.type) == .optional) {
                    if (self.options.emit_null_optional_fields == false) {
                        if (@field(value, Field.name) == null) {
                            emit_field = false;
                        }
                    }
                }

                if (emit_field) {
                    if (!S.is_tuple) {
                        try self.objectField(Field.name);
                    }
                    try self.write(@field(value, Field.name));
                }
            }
            if (S.is_tuple) {
                try self.endArray();
            } else {
                try self.endObject();
            }
            return;
        },
        .error_set => return self.stringValue(@errorName(value)),
        .pointer => |ptr_info| switch (ptr_info.size) {
            .one => switch (@typeInfo(ptr_info.child)) {
                .array => {
                    // Coerce `*[N]T` to `[]const T`.
                    const Slice = []const std.meta.Elem(ptr_info.child);
                    return self.write(@as(Slice, value));
                },
                else => {
                    return self.write(value.*);
                },
            },
            .many, .slice => {
                if (ptr_info.size == .many and ptr_info.sentinel() == null)
                    @compileError("unable to stringify type '" ++ @typeName(T) ++ "' without sentinel");
                const slice = if (ptr_info.size == .many) std.mem.span(value) else value;

                if (ptr_info.child == u8) {
                    // This is a []const u8, or some similar Zig string.
                    if (!self.options.emit_strings_as_arrays and std.unicode.utf8ValidateSlice(slice)) {
                        return self.stringValue(slice);
                    }
                }

                try self.beginArray();
                for (slice) |x| {
                    try self.write(x);
                }
                try self.endArray();
                return;
            },
            else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
        },
        .array => {
            // Coerce `[N]T` to `*const [N]T` (and then to `[]const T`).
            return self.write(&value);
        },
        .vector => |info| {
            const array: [info.len]info.child = value;
            return self.write(&array);
        },
        else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
    }
    unreachable;
}

Source Code

Source code
pub fn WriteStream(
    comptime OutStream: type,
    comptime safety_checks_hint: union(enum) {
        checked_to_arbitrary_depth,
        checked_to_fixed_depth: usize, // Rounded up to the nearest multiple of 8.
        assumed_correct,
    },
) type {
    return struct {
        const Self = @This();
        const build_mode_has_safety = switch (@import("builtin").mode) {
            .Debug, .ReleaseSafe => true,
            .ReleaseFast, .ReleaseSmall => false,
        };
        const safety_checks: @TypeOf(safety_checks_hint) = if (build_mode_has_safety)
            safety_checks_hint
        else
            .assumed_correct;

        pub const Stream = OutStream;
        pub const Error = switch (safety_checks) {
            .checked_to_arbitrary_depth => Stream.Error || error{OutOfMemory},
            .checked_to_fixed_depth, .assumed_correct => Stream.Error,
        };

        options: StringifyOptions,

        stream: OutStream,
        indent_level: usize = 0,
        next_punctuation: enum {
            the_beginning,
            none,
            comma,
            colon,
        } = .the_beginning,

        nesting_stack: switch (safety_checks) {
            .checked_to_arbitrary_depth => BitStack,
            .checked_to_fixed_depth => |fixed_buffer_size| [(fixed_buffer_size + 7) >> 3]u8,
            .assumed_correct => void,
        },

        raw_streaming_mode: if (build_mode_has_safety)
            enum { none, value, objectField }
        else
            void = if (build_mode_has_safety) .none else {},

        /// Initializes a WriteStream over `stream`.
        /// `safety_allocator` is only used when `safety_checks` is
        /// `.checked_to_arbitrary_depth` (it backs the nesting BitStack);
        /// it is ignored in the other modes.
        pub fn init(safety_allocator: Allocator, stream: OutStream, options: StringifyOptions) Self {
            return .{
                .options = options,
                .stream = stream,
                // The nesting stack's representation is chosen at comptime by the safety mode.
                .nesting_stack = switch (safety_checks) {
                    .checked_to_arbitrary_depth => BitStack.init(safety_allocator),
                    .checked_to_fixed_depth => |fixed_buffer_size| [_]u8{0} ** ((fixed_buffer_size + 7) >> 3),
                    .assumed_correct => {},
                },
            };
        }

        /// Frees the nesting stack. Only necessary with `.checked_to_arbitrary_depth`;
        /// in the other modes this only poisons `self`.
        pub fn deinit(self: *Self) void {
            switch (safety_checks) {
                .checked_to_arbitrary_depth => self.nesting_stack.deinit(),
                .checked_to_fixed_depth, .assumed_correct => {},
            }
            self.* = undefined; // Catch use-after-deinit in safe builds.
        }

        /// Opens a JSON array (`[`). Must be balanced by a later `endArray()`.
        pub fn beginArray(self: *Self) Error!void {
            if (build_mode_has_safety) assert(self.raw_streaming_mode == .none); // No begin*Raw() window may be open.
            try self.valueStart();
            try self.stream.writeByte('[');
            try self.pushIndentation(ARRAY_MODE);
            self.next_punctuation = .none; // The next value is the first item: no comma before it.
        }

        /// Opens a JSON object (`{`). Must be balanced by a later `endObject()`.
        pub fn beginObject(self: *Self) Error!void {
            if (build_mode_has_safety) assert(self.raw_streaming_mode == .none); // No begin*Raw() window may be open.
            try self.valueStart();
            try self.stream.writeByte('{');
            try self.pushIndentation(OBJECT_MODE);
            self.next_punctuation = .none; // The next token is the first key: no comma before it.
        }

        /// Closes the innermost container, which is asserted (when checked) to be an array.
        pub fn endArray(self: *Self) Error!void {
            if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
            self.popIndentation(ARRAY_MODE);
            switch (self.next_punctuation) {
                // Empty array: render as `[]` with nothing between the brackets.
                .none => {},
                .comma => {
                    // At least one item was written; put `]` on its own (dedented) line.
                    try self.indent();
                },
                .the_beginning, .colon => unreachable,
            }
            try self.stream.writeByte(']');
            self.valueDone();
        }

        /// Closes the innermost container, which is asserted (when checked) to be an object.
        pub fn endObject(self: *Self) Error!void {
            if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
            self.popIndentation(OBJECT_MODE);
            switch (self.next_punctuation) {
                // Empty object: render as `{}` with nothing between the braces.
                .none => {},
                .comma => {
                    // At least one field was written; put `}` on its own (dedented) line.
                    try self.indent();
                },
                .the_beginning, .colon => unreachable,
            }
            try self.stream.writeByte('}');
            self.valueDone();
        }

        /// Records entering a container (`mode` is ARRAY_MODE or OBJECT_MODE)
        /// and bumps the indentation level. How (and whether) the mode bit is
        /// stored depends on the comptime safety mode.
        fn pushIndentation(self: *Self, mode: u1) !void {
            switch (safety_checks) {
                .checked_to_arbitrary_depth => {
                    try self.nesting_stack.push(mode); // May allocate; the sole source of error.OutOfMemory.
                    self.indent_level += 1;
                },
                .checked_to_fixed_depth => {
                    // Fixed buffer: asserts capacity instead of allocating.
                    BitStack.pushWithStateAssumeCapacity(&self.nesting_stack, &self.indent_level, mode);
                },
                .assumed_correct => {
                    // No mode bit is tracked; only the level, for indentation.
                    self.indent_level += 1;
                },
            }
        }
        /// Records leaving a container and drops the indentation level.
        /// When nesting is tracked, asserts the container being closed matches
        /// `assert_its_this_one` (catches mismatched end*() calls).
        fn popIndentation(self: *Self, assert_its_this_one: u1) void {
            switch (safety_checks) {
                .checked_to_arbitrary_depth => {
                    assert(self.nesting_stack.pop() == assert_its_this_one);
                    self.indent_level -= 1;
                },
                .checked_to_fixed_depth => {
                    assert(BitStack.popWithState(&self.nesting_stack, &self.indent_level) == assert_its_this_one);
                },
                .assumed_correct => {
                    // Nothing to check; just track the level.
                    self.indent_level -= 1;
                },
            }
        }

        /// Writes a newline followed by the configured indentation for the
        /// current nesting level. No-op when output is minified.
        fn indent(self: *Self) !void {
            // Resolve the whitespace option to a fill byte and a per-level repeat count.
            const fill: struct { byte: u8, per_level: usize } = switch (self.options.whitespace) {
                .minified => return,
                .indent_1 => .{ .byte = ' ', .per_level = 1 },
                .indent_2 => .{ .byte = ' ', .per_level = 2 },
                .indent_3 => .{ .byte = ' ', .per_level = 3 },
                .indent_4 => .{ .byte = ' ', .per_level = 4 },
                .indent_8 => .{ .byte = ' ', .per_level = 8 },
                .indent_tab => .{ .byte = '\t', .per_level = 1 },
            };
            try self.stream.writeByte('\n');
            try self.stream.writeByteNTimes(fill.byte, fill.per_level * self.indent_level);
        }

        /// Emits the punctuation owed before a value, asserting (when checkable)
        /// that an object key is NOT what's expected at this position.
        fn valueStart(self: *Self) !void {
            if (self.isObjectKeyExpected()) |is_it| assert(!is_it); // Call objectField*(), not write(), for object keys.
            return self.valueStartAssumeTypeOk();
        }
        /// Emits the punctuation owed before an object key, asserting (when checkable)
        /// that an object key IS what's expected at this position.
        fn objectFieldStart(self: *Self) !void {
            if (self.isObjectKeyExpected()) |is_it| assert(is_it); // Expected write(), not objectField*().
            return self.valueStartAssumeTypeOk();
        }
        /// Shared implementation of valueStart()/objectFieldStart(): writes the
        /// pending punctuation (comma/colon) and indentation before the next token.
        fn valueStartAssumeTypeOk(self: *Self) !void {
            assert(!self.isComplete()); // JSON document already complete.
            switch (self.next_punctuation) {
                .the_beginning => {
                    // No indentation for the very beginning.
                },
                .none => {
                    // First item in a container.
                    try self.indent();
                },
                .comma => {
                    // Subsequent item in a container.
                    try self.stream.writeByte(',');
                    try self.indent();
                },
                .colon => {
                    // Value following an object key; `: ` when whitespace is enabled.
                    try self.stream.writeByte(':');
                    if (self.options.whitespace != .minified) {
                        try self.stream.writeByte(' ');
                    }
                },
            }
        }
        /// Marks a complete value as written: the next sibling (if any) must be
        /// preceded by a comma.
        fn valueDone(self: *Self) void {
            self.next_punctuation = .comma;
        }

        // Only when safety is enabled:
        /// Returns null when nesting is not tracked (`.assumed_correct`).
        /// Otherwise, returns whether the writer is positioned where an object
        /// key (rather than a value) must come next.
        fn isObjectKeyExpected(self: *const Self) ?bool {
            switch (safety_checks) {
                .assumed_correct => return null,
                .checked_to_arbitrary_depth, .checked_to_fixed_depth => {
                    // A key is only expected inside a container...
                    if (self.indent_level == 0) return false;
                    // ...and never while a `:` is pending (a value follows a key).
                    if (self.next_punctuation == .colon) return false;
                    const container_mode: u1 = switch (safety_checks) {
                        .checked_to_arbitrary_depth => self.nesting_stack.peek(),
                        .checked_to_fixed_depth => BitStack.peekWithState(&self.nesting_stack, self.indent_level),
                        .assumed_correct => unreachable,
                    };
                    return container_mode == OBJECT_MODE;
                },
            }
        }
        /// True once a complete top-level value has been written:
        /// back at depth 0 with a value just finished (`.comma` pending).
        fn isComplete(self: *const Self) bool {
            return self.indent_level == 0 and self.next_punctuation == .comma;
        }

        /// An alternative to calling `write` that formats a value with `std.fmt`.
        /// This function does the usual punctuation and indentation formatting
        /// assuming the resulting formatted string represents a single complete value;
        /// e.g. `"1"`, `"[]"`, `"[1,2]"`, not `"1,2"`.
        /// This function may be useful for doing your own number formatting.
        pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) Error!void {
            if (build_mode_has_safety) assert(self.raw_streaming_mode == .none); // Not allowed inside a begin*Raw() window.
            try self.valueStart();
            try self.stream.print(fmt, args);
            self.valueDone();
        }

        /// An alternative to calling `write` that allows you to write directly to the `.stream` field, e.g. with `.stream.writeAll()`.
        /// Call `beginWriteRaw()`, then write a complete value (including any quotes if necessary) directly to the `.stream` field,
        /// then call `endWriteRaw()`.
        /// This can be useful for streaming very long strings into the output without needing it all buffered in memory.
        pub fn beginWriteRaw(self: *Self) !void {
            if (build_mode_has_safety) {
                assert(self.raw_streaming_mode == .none); // Raw windows cannot nest.
                self.raw_streaming_mode = .value;
            }
            try self.valueStart();
        }

        /// Closes a raw-value window opened by `beginWriteRaw`; see that function.
        pub fn endWriteRaw(self: *Self) void {
            if (build_mode_has_safety) {
                assert(self.raw_streaming_mode == .value); // Must match a prior beginWriteRaw().
                self.raw_streaming_mode = .none;
            }
            self.valueDone();
        }

        /// See `WriteStream` for when to call this method.
        /// `key` is the string content of the property name.
        /// Surrounding quotes will be added and any special characters will be escaped.
        /// See also `objectFieldRaw`.
        pub fn objectField(self: *Self, key: []const u8) Error!void {
            if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
            try self.objectFieldStart();
            try encodeJsonString(key, self.options, self.stream);
            self.next_punctuation = .colon; // The field's value must follow, preceded by `:`.
        }
        /// See `WriteStream` for when to call this method.
        /// `quoted_key` is the complete bytes of the key including quotes and any necessary escape sequences.
        /// A few assertions are performed on the given value to ensure that the caller of this function understands the API contract.
        /// See also `objectField`.
        pub fn objectFieldRaw(self: *Self, quoted_key: []const u8) Error!void {
            if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
            assert(quoted_key.len >= 2 and quoted_key[0] == '"' and quoted_key[quoted_key.len - 1] == '"'); // quoted_key should be "quoted".
            try self.objectFieldStart();
            try self.stream.writeAll(quoted_key); // Written verbatim; no escaping is performed.
            self.next_punctuation = .colon;
        }

        /// In the rare case that you need to write very long object field names,
        /// this is an alternative to `objectField` and `objectFieldRaw` that allows you to write directly to the `.stream` field
        /// similar to `beginWriteRaw`.
        /// Call `endObjectFieldRaw()` when you're done.
        pub fn beginObjectFieldRaw(self: *Self) !void {
            if (build_mode_has_safety) {
                assert(self.raw_streaming_mode == .none); // Raw windows cannot nest.
                self.raw_streaming_mode = .objectField;
            }
            try self.objectFieldStart();
        }

        /// Closes a raw-key window opened by `beginObjectFieldRaw`; see that function.
        pub fn endObjectFieldRaw(self: *Self) void {
            if (build_mode_has_safety) {
                assert(self.raw_streaming_mode == .objectField); // Must match a prior beginObjectFieldRaw().
                self.raw_streaming_mode = .none;
            }
            self.next_punctuation = .colon; // The field's value must follow, preceded by `:`.
        }

        /// Renders the given Zig value as JSON.
        ///
        /// Supported types:
        ///  * Zig `bool` -> JSON `true` or `false`.
        ///  * Zig `?T` -> `null` or the rendering of `T`.
        ///  * Zig `i32`, `u64`, etc. -> JSON number or string.
        ///      * When option `emit_nonportable_numbers_as_strings` is true, if the value is outside the range `+-1<<53` (the precise integer range of f64), it is rendered as a JSON string in base 10. Otherwise, it is rendered as JSON number.
        ///  * Zig floats -> JSON number or string.
        ///      * If the value cannot be precisely represented by an f64, it is rendered as a JSON string. Otherwise, it is rendered as JSON number.
        ///      * TODO: Float rendering will likely change in the future, e.g. to remove the unnecessary "e+00".
        ///  * Zig `[]const u8`, `[]u8`, `*[N]u8`, `@Vector(N, u8)`, and similar -> JSON string.
        ///      * See `StringifyOptions.emit_strings_as_arrays`.
        ///      * If the content is not valid UTF-8, rendered as an array of numbers instead.
        ///  * Zig `[]T`, `[N]T`, `*[N]T`, `@Vector(N, T)`, and similar -> JSON array of the rendering of each item.
        ///  * Zig tuple -> JSON array of the rendering of each item.
        ///  * Zig `struct` -> JSON object with each field in declaration order.
        ///      * If the struct declares a method `pub fn jsonStringify(self: *@This(), jw: anytype) !void`, it is called to do the serialization instead of the default behavior. The given `jw` is a pointer to this `WriteStream`. See `std.json.Value` for an example.
        ///      * See `StringifyOptions.emit_null_optional_fields`.
        ///  * Zig `union(enum)` -> JSON object with one field named for the active tag and a value representing the payload.
        ///      * If the payload is `void`, then the emitted value is `{}`.
        ///      * If the union declares a method `pub fn jsonStringify(self: *@This(), jw: anytype) !void`, it is called to do the serialization instead of the default behavior. The given `jw` is a pointer to this `WriteStream`.
        ///  * Zig `enum` -> JSON string naming the active tag.
        ///      * If the enum declares a method `pub fn jsonStringify(self: *@This(), jw: anytype) !void`, it is called to do the serialization instead of the default behavior. The given `jw` is a pointer to this `WriteStream`.
        ///      * If the enum is non-exhaustive, unnamed values are rendered as integers.
        ///  * Zig untyped enum literal -> JSON string naming the active tag.
        ///  * Zig error -> JSON string naming the error.
        ///  * Zig `*T` -> the rendering of `T`. Note there is no guard against circular-reference infinite recursion.
        ///
        /// See also alternative functions `print` and `beginWriteRaw`.
        /// For writing object field names, use `objectField` instead.
        pub fn write(self: *Self, value: anytype) Error!void {
            if (build_mode_has_safety) assert(self.raw_streaming_mode == .none);
            // Dispatch on the comptime type of `value`; each prong emits a
            // complete JSON value (valueStart .. valueDone) or recurses.
            const T = @TypeOf(value);
            switch (@typeInfo(T)) {
                .int => {
                    try self.valueStart();
                    // +-1<<53 is the largest range exactly representable by f64;
                    // outside it, optionally quote the number for portability.
                    if (self.options.emit_nonportable_numbers_as_strings and
                        (value <= -(1 << 53) or value >= (1 << 53)))
                    {
                        try self.stream.print("\"{}\"", .{value});
                    } else {
                        try self.stream.print("{}", .{value});
                    }
                    self.valueDone();
                    return;
                },
                .comptime_int => {
                    // Concretize to the smallest integer type that fits, then recurse.
                    return self.write(@as(std.math.IntFittingRange(value, value), value));
                },
                .float, .comptime_float => {
                    // Render as a plain number only when f64 can represent the value exactly.
                    if (@as(f64, @floatCast(value)) == value) {
                        try self.valueStart();
                        try self.stream.print("{}", .{@as(f64, @floatCast(value))});
                        self.valueDone();
                        return;
                    }
                    // Otherwise quote it to avoid silently losing precision.
                    try self.valueStart();
                    try self.stream.print("\"{}\"", .{value});
                    self.valueDone();
                    return;
                },

                .bool => {
                    try self.valueStart();
                    try self.stream.writeAll(if (value) "true" else "false");
                    self.valueDone();
                    return;
                },
                .null => {
                    try self.valueStart();
                    try self.stream.writeAll("null");
                    self.valueDone();
                    return;
                },
                .optional => {
                    if (value) |payload| {
                        return try self.write(payload);
                    } else {
                        return try self.write(null);
                    }
                },
                .@"enum" => |enum_info| {
                    // A user-provided jsonStringify method overrides default rendering.
                    if (std.meta.hasFn(T, "jsonStringify")) {
                        return value.jsonStringify(self);
                    }

                    if (!enum_info.is_exhaustive) {
                        // For non-exhaustive enums, only named values get a string;
                        // the for-else fires when no field matched.
                        inline for (enum_info.fields) |field| {
                            if (value == @field(T, field.name)) {
                                break;
                            }
                        } else {
                            return self.write(@intFromEnum(value));
                        }
                    }

                    return self.stringValue(@tagName(value));
                },
                .enum_literal => {
                    return self.stringValue(@tagName(value));
                },
                .@"union" => {
                    if (std.meta.hasFn(T, "jsonStringify")) {
                        return value.jsonStringify(self);
                    }

                    const info = @typeInfo(T).@"union";
                    if (info.tag_type) |UnionTagType| {
                        // Tagged union: a one-field object keyed by the active tag.
                        try self.beginObject();
                        inline for (info.fields) |u_field| {
                            if (value == @field(UnionTagType, u_field.name)) {
                                try self.objectField(u_field.name);
                                if (u_field.type == void) {
                                    // void value is {}
                                    try self.beginObject();
                                    try self.endObject();
                                } else {
                                    try self.write(@field(value, u_field.name));
                                }
                                break;
                            }
                        } else {
                            unreachable; // No active tag?
                        }
                        try self.endObject();
                        return;
                    } else {
                        @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
                    }
                },
                .@"struct" => |S| {
                    if (std.meta.hasFn(T, "jsonStringify")) {
                        return value.jsonStringify(self);
                    }

                    // Tuples become JSON arrays; ordinary structs become objects.
                    if (S.is_tuple) {
                        try self.beginArray();
                    } else {
                        try self.beginObject();
                    }
                    inline for (S.fields) |Field| {
                        // don't include void fields
                        if (Field.type == void) continue;

                        var emit_field = true;

                        // don't include optional fields that are null when emit_null_optional_fields is set to false
                        if (@typeInfo(Field.type) == .optional) {
                            if (self.options.emit_null_optional_fields == false) {
                                if (@field(value, Field.name) == null) {
                                    emit_field = false;
                                }
                            }
                        }

                        if (emit_field) {
                            if (!S.is_tuple) {
                                try self.objectField(Field.name);
                            }
                            try self.write(@field(value, Field.name));
                        }
                    }
                    if (S.is_tuple) {
                        try self.endArray();
                    } else {
                        try self.endObject();
                    }
                    return;
                },
                .error_set => return self.stringValue(@errorName(value)),
                .pointer => |ptr_info| switch (ptr_info.size) {
                    .one => switch (@typeInfo(ptr_info.child)) {
                        .array => {
                            // Coerce `*[N]T` to `[]const T`.
                            const Slice = []const std.meta.Elem(ptr_info.child);
                            return self.write(@as(Slice, value));
                        },
                        else => {
                            // Single-item pointer: render the pointee.
                            // NOTE: no cycle detection; circular references recurse forever.
                            return self.write(value.*);
                        },
                    },
                    .many, .slice => {
                        if (ptr_info.size == .many and ptr_info.sentinel() == null)
                            @compileError("unable to stringify type '" ++ @typeName(T) ++ "' without sentinel");
                        const slice = if (ptr_info.size == .many) std.mem.span(value) else value;

                        if (ptr_info.child == u8) {
                            // This is a []const u8, or some similar Zig string.
                            if (!self.options.emit_strings_as_arrays and std.unicode.utf8ValidateSlice(slice)) {
                                return self.stringValue(slice);
                            }
                        }

                        // Non-string (or invalid-UTF-8 / opted-out) slices become arrays.
                        try self.beginArray();
                        for (slice) |x| {
                            try self.write(x);
                        }
                        try self.endArray();
                        return;
                    },
                    else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
                },
                .array => {
                    // Coerce `[N]T` to `*const [N]T` (and then to `[]const T`).
                    return self.write(&value);
                },
                .vector => |info| {
                    // Copy the vector into an array so it can take the slice path.
                    const array: [info.len]info.child = value;
                    return self.write(&array);
                },
                else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
            }
            unreachable;
        }

        /// Emits `s` as a JSON string value at the current write position.
        /// valueStart/valueDone are the stream's separator bookkeeping hooks
        /// (presumably commas/colons for the enclosing container — defined
        /// elsewhere in this struct); encodeJsonString performs the actual
        /// quoting/escaping according to `self.options`.
        fn stringValue(self: *Self, s: []const u8) !void {
            try self.valueStart();
            try encodeJsonString(s, self.options, self.stream);
            self.valueDone();
        }
    };
}

Type Function Formatter [src]

Formats the given value using stringify.

Parameters

T: type

Fields

value: T

Functions

Function format [src]

pub fn format( self: @This(), comptime fmt_spec: []const u8, options: std.fmt.FormatOptions, writer: anytype, ) !void

Parameters

self: @This()
fmt_spec: []const u8

Source Code

Source code
/// Implements the `std.fmt` formatting interface by serializing `self.value`
/// as JSON via `stringify`. The format specifier and FormatOptions are
/// intentionally ignored; output is controlled by `self.options` instead.
pub fn format(
    self: @This(),
    comptime fmt_spec: []const u8,
    options: std.fmt.FormatOptions,
    writer: anytype,
) !void {
    // Explicit discards: JSON output ignores std.fmt's spec/options.
    _ = fmt_spec;
    _ = options;
    try stringify(self.value, self.options, writer);
}

Source Code

Source code
/// Returns a wrapper type holding a `value: T` and a `StringifyOptions` so
/// that the value is emitted as JSON (via `stringify`) when printed through
/// `std.fmt`.
pub fn Formatter(comptime T: type) type {
    return struct {
        // The value to serialize.
        value: T,
        // Controls the JSON output (whitespace, string escaping, etc.).
        options: StringifyOptions,

        /// std.fmt interface hook: serializes `value` using `stringify`.
        /// The format specifier and FormatOptions are ignored.
        pub fn format(
            self: @This(),
            comptime fmt_spec: []const u8,
            options: std.fmt.FormatOptions,
            writer: anytype,
        ) !void {
            _ = fmt_spec;
            _ = options;
            try stringify(self.value, self.options, writer);
        }
    };
}

Values

Constant default_buffer_size [src]

Used by json.reader.

Source Code

Source code
/// Used by `json.reader`: the buffer size, in bytes, for the default `Reader`.
pub const default_buffer_size = 0x1000

Constant default_max_value_len [src]

For security, the maximum size allocated to store a single string or number value is limited to 4MiB by default. This limit can be specified by calling nextAllocMax() instead of nextAlloc().

Source Code

Source code
/// For security, the maximum size allocated to store a single string or number
/// value is limited to 4MiB by default. Callers can override this limit by
/// calling nextAllocMax() instead of nextAlloc().
pub const default_max_value_len = 4 * 1024 * 1024

Error Sets

Error Set Error [src]

The parsing errors are divided into two categories:

  • SyntaxError is for clearly malformed JSON documents, such as giving an input document that isn't JSON at all.
  • UnexpectedEndOfInput is for signaling that everything's been valid so far, but the input appears to be truncated for some reason. Note that a completely empty (or whitespace-only) input will give UnexpectedEndOfInput.

Errors

anyerror means the error set is known only at runtime.

SyntaxError
UnexpectedEndOfInput

Source Code

Source code
/// SyntaxError: clearly malformed JSON. UnexpectedEndOfInput: valid so far but
/// truncated; a completely empty (or whitespace-only) input also yields this.
pub const Error = error{ SyntaxError, UnexpectedEndOfInput }

Error Set ParseFromValueError [src]

Errors

anyerror means the error set is known only at runtime.

DuplicateField
InvalidCharacter ParseIntError

The input was empty or contained an invalid character

InvalidEnumTag
InvalidNumber
LengthMismatch
MissingField
OutOfMemory Error
Overflow ParseIntError

The result cannot fit in the type specified

UnexpectedToken
UnknownField

Source Code

Source code
/// Errors possible when deserializing an already-parsed `Value` into a Zig
/// type (see `parseFromValue`); includes int/float parse errors and OOM.
pub const ParseFromValueError = std.fmt.ParseIntError || std.fmt.ParseFloatError || Allocator.Error || error{
    UnexpectedToken,
    InvalidNumber,
    Overflow,
    InvalidEnumTag,
    DuplicateField,
    UnknownField,
    MissingField,
    LengthMismatch,
}

Functions

Function validate [src]

pub fn validate(allocator: Allocator, s: []const u8) Allocator.Error!bool

Scan the input and check for malformed JSON. On SyntaxError or UnexpectedEndOfInput, returns false. Returns any errors from the allocator as-is, which is unlikely, but can be caused by extreme nesting depth in the input.

Parameters

allocator: Allocator
s: []const u8

Source Code

Source code
/// Scan the input and check for malformed JSON. Returns false on
/// SyntaxError or UnexpectedEndOfInput. Allocator errors are returned as-is;
/// they are unlikely but can be caused by extreme nesting depth in the input.
pub fn validate(allocator: Allocator, s: []const u8) Allocator.Error!bool {
    var json_scanner = Scanner.initCompleteInput(allocator, s);
    defer json_scanner.deinit();

    // Walk every token in the document; any structural error means invalid.
    while (true) {
        const token = json_scanner.next() catch |err| switch (err) {
            // Malformed or truncated input: not valid JSON.
            error.SyntaxError, error.UnexpectedEndOfInput => return false,
            // Propagate allocation failure unchanged.
            error.OutOfMemory => return error.OutOfMemory,
            // Complete input can never underrun the scanner's buffer.
            error.BufferUnderrun => unreachable,
        };
        if (token == .end_of_document) return true;
    }
}

Function reader [src]

pub fn reader(allocator: Allocator, io_reader: anytype) Reader(default_buffer_size, @TypeOf(io_reader))

Parameters

allocator: Allocator

Source Code

Source code
/// Convenience constructor: wraps `io_reader` in a `Reader` token source
/// using `default_buffer_size` for its internal buffer.
pub fn reader(allocator: Allocator, io_reader: anytype) Reader(default_buffer_size, @TypeOf(io_reader)) {
    return Reader(default_buffer_size, @TypeOf(io_reader)).init(allocator, io_reader);
}

Function isNumberFormattedLikeAnInteger [src]

pub fn isNumberFormattedLikeAnInteger(value: []const u8) bool

For the slice you get from a Token.number or Token.allocated_number, this function returns true if the number doesn't contain any fraction or exponent components, and is not -0. Note, the numeric value encoded by the value may still be an integer, such as 1.0. This function is meant to give a hint about whether integer parsing or float parsing should be used on the value. This function will not give meaningful results on non-numeric input.

Parameters

value: []const u8

Source Code

Source code
/// For the slice you get from a Token.number or Token.allocated_number,
/// returns true if the number contains no fraction or exponent component and
/// is not "-0". Note the encoded numeric value may still be an integer (e.g.
/// "1.0"); this is only a hint for choosing integer vs. float parsing.
/// Not meaningful on non-numeric input.
pub fn isNumberFormattedLikeAnInteger(value: []const u8) bool {
    // "-0" is explicitly treated as float-formatted.
    const is_negative_zero = std.mem.eql(u8, value, "-0");
    // A '.' (fraction) or 'e'/'E' (exponent) marks float formatting.
    const has_float_marker = std.mem.indexOfAny(u8, value, ".eE") != null;
    return !is_negative_zero and !has_float_marker;
}

Function parseFromSlice [src]

pub fn parseFromSlice( comptime T: type, allocator: Allocator, s: []const u8, options: ParseOptions, ) ParseError(Scanner)!Parsed(T)

Parses the json document from s and returns the result packaged in a std.json.Parsed. You must call deinit() of the returned object to clean up allocated resources. If you are using a std.heap.ArenaAllocator or similar, consider calling parseFromSliceLeaky instead. Note that error.BufferUnderrun is not actually possible to return from this function.

Parameters

T: type
allocator: Allocator
s: []const u8
options: ParseOptions

Source Code

Source code
/// Parses the json document from `s` and returns the result packaged in a
/// `Parsed(T)`. You must call deinit() on the returned object to clean up
/// allocated resources. If you are using an ArenaAllocator or similar,
/// consider calling `parseFromSliceLeaky` instead. Note that
/// error.BufferUnderrun is not actually possible to return from this function.
pub fn parseFromSlice(
    comptime T: type,
    allocator: Allocator,
    s: []const u8,
    options: ParseOptions,
) ParseError(Scanner)!Parsed(T) {
    // The scanner is only needed while parsing; the result's memory lives in
    // the arena owned by the returned Parsed(T).
    var scanner = Scanner.initCompleteInput(allocator, s);
    defer scanner.deinit();

    return parseFromTokenSource(T, allocator, &scanner, options);
}

Function parseFromSliceLeaky [src]

pub fn parseFromSliceLeaky( comptime T: type, allocator: Allocator, s: []const u8, options: ParseOptions, ) ParseError(Scanner)!T

Parses the json document from s and returns the result. Allocations made during this operation are not carefully tracked and may not be possible to individually clean up. It is recommended to use a std.heap.ArenaAllocator or similar.

Parameters

T: type
allocator: Allocator
s: []const u8
options: ParseOptions

Source Code

Source code
/// Parses the json document from `s` and returns the result. Allocations made
/// during this operation are not carefully tracked and may not be possible to
/// individually clean up; it is recommended to use an ArenaAllocator or
/// similar as `allocator`.
pub fn parseFromSliceLeaky(
    comptime T: type,
    allocator: Allocator,
    s: []const u8,
    options: ParseOptions,
) ParseError(Scanner)!T {
    var scanner = Scanner.initCompleteInput(allocator, s);
    defer scanner.deinit();

    return parseFromTokenSourceLeaky(T, allocator, &scanner, options);
}

Function parseFromTokenSource [src]

pub fn parseFromTokenSource( comptime T: type, allocator: Allocator, scanner_or_reader: anytype, options: ParseOptions, ) ParseError(@TypeOf(scanner_or_reader.*))!Parsed(T)

scanner_or_reader must be either a *std.json.Scanner with complete input or a *std.json.Reader. Note that error.BufferUnderrun is not actually possible to return from this function.

Parameters

T: type
allocator: Allocator
options: ParseOptions

Source Code

Source code
/// `scanner_or_reader` must be either a *std.json.Scanner with complete input
/// or a *std.json.Reader. The result is packaged in a `Parsed(T)` whose
/// deinit() frees everything at once. Note that error.BufferUnderrun is not
/// actually possible to return from this function.
pub fn parseFromTokenSource(
    comptime T: type,
    allocator: Allocator,
    scanner_or_reader: anytype,
    options: ParseOptions,
) ParseError(@TypeOf(scanner_or_reader.*))!Parsed(T) {
    var parsed = Parsed(T){
        .arena = try allocator.create(ArenaAllocator),
        .value = undefined,
    };
    // Unwind in reverse order of acquisition on the error path: first the
    // arena's contents, then the arena struct itself.
    errdefer allocator.destroy(parsed.arena);
    parsed.arena.* = ArenaAllocator.init(allocator);
    errdefer parsed.arena.deinit();

    // All parse-time allocations come from the arena, so the caller can free
    // them all at once via Parsed(T).deinit().
    parsed.value = try parseFromTokenSourceLeaky(T, parsed.arena.allocator(), scanner_or_reader, options);

    return parsed;
}

Function parseFromTokenSourceLeaky [src]

pub fn parseFromTokenSourceLeaky( comptime T: type, allocator: Allocator, scanner_or_reader: anytype, options: ParseOptions, ) ParseError(@TypeOf(scanner_or_reader.*))!T

scanner_or_reader must be either a *std.json.Scanner with complete input or a *std.json.Reader. Allocations made during this operation are not carefully tracked and may not be possible to individually clean up. It is recommended to use a std.heap.ArenaAllocator or similar.

Parameters

T: type
allocator: Allocator
options: ParseOptions

Source Code

Source code
/// `scanner_or_reader` must be either a *std.json.Scanner with complete input
/// or a *std.json.Reader. Allocations made during this operation are not
/// carefully tracked and may not be possible to individually clean up; an
/// ArenaAllocator or similar is recommended.
pub fn parseFromTokenSourceLeaky(
    comptime T: type,
    allocator: Allocator,
    scanner_or_reader: anytype,
    options: ParseOptions,
) ParseError(@TypeOf(scanner_or_reader.*))!T {
    // A bare Scanner is only usable here once it holds the whole document.
    if (@TypeOf(scanner_or_reader.*) == Scanner) {
        assert(scanner_or_reader.is_end_of_input);
    }

    // Fill any unset options with defaults appropriate for the source kind.
    var resolved_options = options;
    if (resolved_options.max_value_len == null) {
        // Complete input can never need more than its own length.
        resolved_options.max_value_len = if (@TypeOf(scanner_or_reader.*) == Scanner)
            scanner_or_reader.input.len
        else
            default_max_value_len;
    }
    if (resolved_options.allocate == null) {
        // A Scanner's input outlives parsing, so slices can alias it; a
        // Reader's buffer is transient, so values must always be copied.
        resolved_options.allocate = if (@TypeOf(scanner_or_reader.*) == Scanner)
            .alloc_if_needed
        else
            .alloc_always;
    }

    const result = try innerParse(T, allocator, scanner_or_reader, resolved_options);

    // Nothing may follow the parsed value except the end of the document.
    assert(.end_of_document == try scanner_or_reader.next());

    return result;
}

Function innerParse [src]

pub fn innerParse( comptime T: type, allocator: Allocator, source: anytype, options: ParseOptions, ) ParseError(@TypeOf(source.*))!T

This is an internal function called recursively during the implementation of parseFromTokenSourceLeaky and similar. It is exposed primarily to enable custom jsonParse() methods to call back into the parseFrom* system, such as if you're implementing a custom container of type T; you can call innerParse(T, ...) for each of the container's items. Note that null fields are not allowed on the options when calling this function. (The options you get in your jsonParse method has no null fields.)

Parameters

T: type
allocator: Allocator
options: ParseOptions

Source Code

Source code
/// Internal function called recursively during the implementation of
/// `parseFromTokenSourceLeaky` and similar. Exposed primarily so custom
/// jsonParse() methods can call back into the parseFrom* system, e.g. calling
/// innerParse(Item, ...) for each element of a custom container. All fields
/// of `options` must be non-null when calling this function directly.
pub fn innerParse(
    comptime T: type,
    allocator: Allocator,
    source: anytype,
    options: ParseOptions,
) ParseError(@TypeOf(source.*))!T {
    // Comptime dispatch on the target Zig type; each arm consumes exactly one
    // JSON value from `source`.
    switch (@typeInfo(T)) {
        .bool => {
            return switch (try source.next()) {
                .true => true,
                .false => false,
                else => error.UnexpectedToken,
            };
        },
        .float, .comptime_float => {
            // Accepts either a JSON number or a JSON string containing one.
            const token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
            defer freeAllocated(allocator, token);
            const slice = switch (token) {
                inline .number, .allocated_number, .string, .allocated_string => |slice| slice,
                else => return error.UnexpectedToken,
            };
            return try std.fmt.parseFloat(T, slice);
        },
        .int, .comptime_int => {
            // Accepts either a JSON number or a JSON string containing one.
            const token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
            defer freeAllocated(allocator, token);
            const slice = switch (token) {
                inline .number, .allocated_number, .string, .allocated_string => |slice| slice,
                else => return error.UnexpectedToken,
            };
            return sliceToInt(T, slice);
        },
        .optional => |optionalInfo| {
            switch (try source.peekNextTokenType()) {
                .null => {
                    // Consume the null token and map it to Zig null.
                    _ = try source.next();
                    return null;
                },
                else => {
                    return try innerParse(optionalInfo.child, allocator, source, options);
                },
            }
        },
        .@"enum" => {
            if (std.meta.hasFn(T, "jsonParse")) {
                return T.jsonParse(allocator, source, options);
            }

            // Enums accept a number or a string; sliceToEnum resolves either.
            const token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
            defer freeAllocated(allocator, token);
            const slice = switch (token) {
                inline .number, .allocated_number, .string, .allocated_string => |slice| slice,
                else => return error.UnexpectedToken,
            };
            return sliceToEnum(T, slice);
        },
        .@"union" => |unionInfo| {
            if (std.meta.hasFn(T, "jsonParse")) {
                return T.jsonParse(allocator, source, options);
            }

            if (unionInfo.tag_type == null) @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");

            // A tagged union is encoded as an object with a single field:
            // {"tag_name": payload}.
            if (.object_begin != try source.next()) return error.UnexpectedToken;

            var result: ?T = null;
            var name_token: ?Token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
            const field_name = switch (name_token.?) {
                inline .string, .allocated_string => |slice| slice,
                else => {
                    return error.UnexpectedToken;
                },
            };

            inline for (unionInfo.fields) |u_field| {
                if (std.mem.eql(u8, u_field.name, field_name)) {
                    // Free the name token now in case we're using an allocator that optimizes freeing the last allocated object.
                    // (Recursing into innerParse() might trigger more allocations.)
                    freeAllocated(allocator, name_token.?);
                    name_token = null;
                    if (u_field.type == void) {
                        // void isn't really a json type, but we can support void payload union tags with {} as a value.
                        if (.object_begin != try source.next()) return error.UnexpectedToken;
                        if (.object_end != try source.next()) return error.UnexpectedToken;
                        result = @unionInit(T, u_field.name, {});
                    } else {
                        // Recurse.
                        result = @unionInit(T, u_field.name, try innerParse(u_field.type, allocator, source, options));
                    }
                    break;
                }
            } else {
                // Didn't match anything.
                return error.UnknownField;
            }

            // The single-field object must close immediately after the payload.
            if (.object_end != try source.next()) return error.UnexpectedToken;

            return result.?;
        },

        .@"struct" => |structInfo| {
            if (structInfo.is_tuple) {
                // Tuples are encoded as fixed-length JSON arrays.
                if (.array_begin != try source.next()) return error.UnexpectedToken;

                var r: T = undefined;
                inline for (0..structInfo.fields.len) |i| {
                    r[i] = try innerParse(structInfo.fields[i].type, allocator, source, options);
                }

                if (.array_end != try source.next()) return error.UnexpectedToken;

                return r;
            }

            if (std.meta.hasFn(T, "jsonParse")) {
                return T.jsonParse(allocator, source, options);
            }

            if (.object_begin != try source.next()) return error.UnexpectedToken;

            var r: T = undefined;
            // Tracks which fields appeared, so defaults can be filled in (and
            // missing required fields diagnosed) after the object closes.
            var fields_seen = [_]bool{false} ** structInfo.fields.len;

            while (true) {
                var name_token: ?Token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
                const field_name = switch (name_token.?) {
                    inline .string, .allocated_string => |slice| slice,
                    .object_end => { // No more fields.
                        break;
                    },
                    else => {
                        return error.UnexpectedToken;
                    },
                };

                inline for (structInfo.fields, 0..) |field, i| {
                    if (field.is_comptime) @compileError("comptime fields are not supported: " ++ @typeName(T) ++ "." ++ field.name);
                    if (std.mem.eql(u8, field.name, field_name)) {
                        // Free the name token now in case we're using an allocator that optimizes freeing the last allocated object.
                        // (Recursing into innerParse() might trigger more allocations.)
                        freeAllocated(allocator, name_token.?);
                        name_token = null;
                        if (fields_seen[i]) {
                            switch (options.duplicate_field_behavior) {
                                .use_first => {
                                    // Parse and ignore the redundant value.
                                    // We don't want to skip the value, because we want type checking.
                                    _ = try innerParse(field.type, allocator, source, options);
                                    break;
                                },
                                .@"error" => return error.DuplicateField,
                                .use_last => {},
                            }
                        }
                        @field(r, field.name) = try innerParse(field.type, allocator, source, options);
                        fields_seen[i] = true;
                        break;
                    }
                } else {
                    // Didn't match anything.
                    freeAllocated(allocator, name_token.?);
                    if (options.ignore_unknown_fields) {
                        try source.skipValue();
                    } else {
                        return error.UnknownField;
                    }
                }
            }
            try fillDefaultStructValues(T, &r, &fields_seen);
            return r;
        },

        .array => |arrayInfo| {
            switch (try source.peekNextTokenType()) {
                .array_begin => {
                    // Typical array.
                    return internalParseArray(T, arrayInfo.child, arrayInfo.len, allocator, source, options);
                },
                .string => {
                    if (arrayInfo.child != u8) return error.UnexpectedToken;
                    // Fixed-length string.

                    // The source may deliver the string in partial chunks
                    // (including 1-4 byte escape expansions); copy each chunk
                    // into place and require the total length to match exactly.
                    var r: T = undefined;
                    var i: usize = 0;
                    while (true) {
                        switch (try source.next()) {
                            .string => |slice| {
                                // Final chunk: total length must equal the array length.
                                if (i + slice.len != r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..slice.len], slice);
                                break;
                            },
                            .partial_string => |slice| {
                                if (i + slice.len > r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..slice.len], slice);
                                i += slice.len;
                            },
                            .partial_string_escaped_1 => |arr| {
                                if (i + arr.len > r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..arr.len], arr[0..]);
                                i += arr.len;
                            },
                            .partial_string_escaped_2 => |arr| {
                                if (i + arr.len > r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..arr.len], arr[0..]);
                                i += arr.len;
                            },
                            .partial_string_escaped_3 => |arr| {
                                if (i + arr.len > r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..arr.len], arr[0..]);
                                i += arr.len;
                            },
                            .partial_string_escaped_4 => |arr| {
                                if (i + arr.len > r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..arr.len], arr[0..]);
                                i += arr.len;
                            },
                            else => unreachable,
                        }
                    }

                    return r;
                },

                else => return error.UnexpectedToken,
            }
        },

        .vector => |vecInfo| {
            switch (try source.peekNextTokenType()) {
                .array_begin => {
                    return internalParseArray(T, vecInfo.child, vecInfo.len, allocator, source, options);
                },
                else => return error.UnexpectedToken,
            }
        },

        .pointer => |ptrInfo| {
            switch (ptrInfo.size) {
                .one => {
                    // Allocate a single item and parse into it.
                    const r: *ptrInfo.child = try allocator.create(ptrInfo.child);
                    r.* = try innerParse(ptrInfo.child, allocator, source, options);
                    return r;
                },
                .slice => {
                    switch (try source.peekNextTokenType()) {
                        .array_begin => {
                            _ = try source.next();

                            // Typical array.
                            var arraylist = ArrayList(ptrInfo.child).init(allocator);
                            while (true) {
                                switch (try source.peekNextTokenType()) {
                                    .array_end => {
                                        _ = try source.next();
                                        break;
                                    },
                                    else => {},
                                }

                                try arraylist.ensureUnusedCapacity(1);
                                arraylist.appendAssumeCapacity(try innerParse(ptrInfo.child, allocator, source, options));
                            }

                            if (ptrInfo.sentinel()) |s| {
                                return try arraylist.toOwnedSliceSentinel(s);
                            }

                            return try arraylist.toOwnedSlice();
                        },
                        .string => {
                            if (ptrInfo.child != u8) return error.UnexpectedToken;

                            // Dynamic length string.
                            if (ptrInfo.sentinel()) |s| {
                                // Use our own array list so we can append the sentinel.
                                var value_list = ArrayList(u8).init(allocator);
                                _ = try source.allocNextIntoArrayList(&value_list, .alloc_always);
                                return try value_list.toOwnedSliceSentinel(s);
                            }
                            if (ptrInfo.is_const) {
                                switch (try source.nextAllocMax(allocator, options.allocate.?, options.max_value_len.?)) {
                                    inline .string, .allocated_string => |slice| return slice,
                                    else => unreachable,
                                }
                            } else {
                                // Have to allocate to get a mutable copy.
                                switch (try source.nextAllocMax(allocator, .alloc_always, options.max_value_len.?)) {
                                    .allocated_string => |slice| return slice,
                                    else => unreachable,
                                }
                            }
                        },
                        else => return error.UnexpectedToken,
                    }
                },
                else => @compileError("Unable to parse into type '" ++ @typeName(T) ++ "'"),
            }
        },
        else => @compileError("Unable to parse into type '" ++ @typeName(T) ++ "'"),
    }
    unreachable;
}

Function parseFromValue [src]

pub fn parseFromValue( comptime T: type, allocator: Allocator, source: Value, options: ParseOptions, ) ParseFromValueError!Parsed(T)

Like parseFromSlice, but the input is an already-parsed std.json.Value object. Only options.ignore_unknown_fields is used from options.

Parameters

T: type
allocator: Allocator
source: Value
options: ParseOptions

Source Code

Source code
/// Like `parseFromSlice`, but the input is an already-parsed `std.json.Value`
/// object. Only `options.ignore_unknown_fields` is used from `options`. The
/// caller must call deinit() on the returned `Parsed(T)`.
pub fn parseFromValue(
    comptime T: type,
    allocator: Allocator,
    source: Value,
    options: ParseOptions,
) ParseFromValueError!Parsed(T) {
    var parsed = Parsed(T){
        .arena = try allocator.create(ArenaAllocator),
        .value = undefined,
    };
    // Unwind in reverse order of acquisition on the error path.
    errdefer allocator.destroy(parsed.arena);
    parsed.arena.* = ArenaAllocator.init(allocator);
    errdefer parsed.arena.deinit();

    // Allocations come from the arena; Parsed(T).deinit() frees all at once.
    parsed.value = try parseFromValueLeaky(T, parsed.arena.allocator(), source, options);

    return parsed;
}

Function parseFromValueLeaky [src]

pub fn parseFromValueLeaky( comptime T: type, allocator: Allocator, source: Value, options: ParseOptions, ) ParseFromValueError!T

Parameters

T: type
allocator: Allocator
source: Value
options: ParseOptions

Source Code

Source code
/// Like `parseFromValue`, but allocations are not tracked for individual
/// cleanup; an ArenaAllocator or similar is recommended as `allocator`.
pub fn parseFromValueLeaky(
    comptime T: type,
    allocator: Allocator,
    source: Value,
    options: ParseOptions,
) ParseFromValueError!T {
    // I guess this function doesn't need to exist,
    // but the flow of the sourcecode is easy to follow and grouped nicely with
    // this pub redirect function near the top and the implementation near the bottom.
    return innerParseFromValue(T, allocator, source, options);
}

Function innerParseFromValue [src]

pub fn innerParseFromValue( comptime T: type, allocator: Allocator, source: Value, options: ParseOptions, ) ParseFromValueError!T

This is an internal function called recursively during the implementation of parseFromValueLeaky. It is exposed primarily to enable custom jsonParseFromValue() methods to call back into the parseFromValue* system, such as if you're implementing a custom container of type T; you can call innerParseFromValue(T, ...) for each of the container's items.

Parameters

T: type
allocator: Allocator
source: Value
options: ParseOptions

Source Code

Source code
pub fn innerParseFromValue(
    comptime T: type,
    allocator: Allocator,
    source: Value,
    options: ParseOptions,
) ParseFromValueError!T {
    switch (@typeInfo(T)) {
        .bool => {
            switch (source) {
                .bool => |b| return b,
                else => return error.UnexpectedToken,
            }
        },
        .float, .comptime_float => {
            switch (source) {
                .float => |f| return @as(T, @floatCast(f)),
                .integer => |i| return @as(T, @floatFromInt(i)),
                .number_string, .string => |s| return std.fmt.parseFloat(T, s),
                else => return error.UnexpectedToken,
            }
        },
        .int, .comptime_int => {
            switch (source) {
                .float => |f| {
                    if (@round(f) != f) return error.InvalidNumber;
                    if (f > std.math.maxInt(T)) return error.Overflow;
                    if (f < std.math.minInt(T)) return error.Overflow;
                    return @as(T, @intFromFloat(f));
                },
                .integer => |i| {
                    if (i > std.math.maxInt(T)) return error.Overflow;
                    if (i < std.math.minInt(T)) return error.Overflow;
                    return @as(T, @intCast(i));
                },
                .number_string, .string => |s| {
                    return sliceToInt(T, s);
                },
                else => return error.UnexpectedToken,
            }
        },
        .optional => |optionalInfo| {
            switch (source) {
                .null => return null,
                else => return try innerParseFromValue(optionalInfo.child, allocator, source, options),
            }
        },
        .@"enum" => {
            if (std.meta.hasFn(T, "jsonParseFromValue")) {
                return T.jsonParseFromValue(allocator, source, options);
            }

            switch (source) {
                .float => return error.InvalidEnumTag,
                .integer => |i| return std.meta.intToEnum(T, i),
                .number_string, .string => |s| return sliceToEnum(T, s),
                else => return error.UnexpectedToken,
            }
        },
        .@"union" => |unionInfo| {
            if (std.meta.hasFn(T, "jsonParseFromValue")) {
                return T.jsonParseFromValue(allocator, source, options);
            }

            if (unionInfo.tag_type == null) @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");

            if (source != .object) return error.UnexpectedToken;
            if (source.object.count() != 1) return error.UnexpectedToken;

            var it = source.object.iterator();
            const kv = it.next().?;
            const field_name = kv.key_ptr.*;

            inline for (unionInfo.fields) |u_field| {
                if (std.mem.eql(u8, u_field.name, field_name)) {
                    if (u_field.type == void) {
                        // void isn't really a json type, but we can support void payload union tags with {} as a value.
                        if (kv.value_ptr.* != .object) return error.UnexpectedToken;
                        if (kv.value_ptr.*.object.count() != 0) return error.UnexpectedToken;
                        return @unionInit(T, u_field.name, {});
                    }
                    // Recurse.
                    return @unionInit(T, u_field.name, try innerParseFromValue(u_field.type, allocator, kv.value_ptr.*, options));
                }
            }
            // Didn't match anything.
            return error.UnknownField;
        },

        .@"struct" => |structInfo| {
            if (structInfo.is_tuple) {
                if (source != .array) return error.UnexpectedToken;
                if (source.array.items.len != structInfo.fields.len) return error.UnexpectedToken;

                var r: T = undefined;
                inline for (0..structInfo.fields.len, source.array.items) |i, item| {
                    r[i] = try innerParseFromValue(structInfo.fields[i].type, allocator, item, options);
                }

                return r;
            }

            if (std.meta.hasFn(T, "jsonParseFromValue")) {
                return T.jsonParseFromValue(allocator, source, options);
            }

            if (source != .object) return error.UnexpectedToken;

            var r: T = undefined;
            var fields_seen = [_]bool{false} ** structInfo.fields.len;

            var it = source.object.iterator();
            while (it.next()) |kv| {
                const field_name = kv.key_ptr.*;

                inline for (structInfo.fields, 0..) |field, i| {
                    if (field.is_comptime) @compileError("comptime fields are not supported: " ++ @typeName(T) ++ "." ++ field.name);
                    if (std.mem.eql(u8, field.name, field_name)) {
                        assert(!fields_seen[i]); // Can't have duplicate keys in a Value.object.
                        @field(r, field.name) = try innerParseFromValue(field.type, allocator, kv.value_ptr.*, options);
                        fields_seen[i] = true;
                        break;
                    }
                } else {
                    // Didn't match anything.
                    if (!options.ignore_unknown_fields) return error.UnknownField;
                }
            }
            try fillDefaultStructValues(T, &r, &fields_seen);
            return r;
        },

        .array => |arrayInfo| {
            switch (source) {
                .array => |array| {
                    // Typical array.
                    return innerParseArrayFromArrayValue(T, arrayInfo.child, arrayInfo.len, allocator, array, options);
                },
                .string => |s| {
                    if (arrayInfo.child != u8) return error.UnexpectedToken;
                    // Fixed-length string.

                    if (s.len != arrayInfo.len) return error.LengthMismatch;

                    var r: T = undefined;
                    @memcpy(r[0..], s);
                    return r;
                },

                else => return error.UnexpectedToken,
            }
        },

        .vector => |vecInfo| {
            switch (source) {
                .array => |array| {
                    return innerParseArrayFromArrayValue(T, vecInfo.child, vecInfo.len, allocator, array, options);
                },
                else => return error.UnexpectedToken,
            }
        },

        .pointer => |ptrInfo| {
            switch (ptrInfo.size) {
                .one => {
                    const r: *ptrInfo.child = try allocator.create(ptrInfo.child);
                    r.* = try innerParseFromValue(ptrInfo.child, allocator, source, options);
                    return r;
                },
                .slice => {
                    switch (source) {
                        .array => |array| {
                            const r = if (ptrInfo.sentinel()) |sentinel|
                                try allocator.allocSentinel(ptrInfo.child, array.items.len, sentinel)
                            else
                                try allocator.alloc(ptrInfo.child, array.items.len);

                            for (array.items, r) |item, *dest| {
                                dest.* = try innerParseFromValue(ptrInfo.child, allocator, item, options);
                            }

                            return r;
                        },
                        .string => |s| {
                            if (ptrInfo.child != u8) return error.UnexpectedToken;
                            // Dynamic length string.

                            const r = if (ptrInfo.sentinel()) |sentinel|
                                try allocator.allocSentinel(ptrInfo.child, s.len, sentinel)
                            else
                                try allocator.alloc(ptrInfo.child, s.len);
                            @memcpy(r[0..], s);

                            return r;
                        },
                        else => return error.UnexpectedToken,
                    }
                },
                else => @compileError("Unable to parse into type '" ++ @typeName(T) ++ "'"),
            }
        },
        else => @compileError("Unable to parse into type '" ++ @typeName(T) ++ "'"),
    }
}

Function stringify [src]

pub fn stringify( value: anytype, options: StringifyOptions, out_stream: anytype, ) @TypeOf(out_stream).Error!void

Writes the given value to the std.io.Writer stream. See WriteStream for how the given value is serialized into JSON. The maximum nesting depth of the output JSON document is 256. See also stringifyMaxDepth and stringifyArbitraryDepth.

Parameters

Source Code

Source code
/// Serializes `value` as JSON to the `std.io.Writer` `out_stream`.
/// See `WriteStream` for how values are serialized. The maximum nesting
/// depth of the output document is 256; see also `stringifyMaxDepth` and
/// `stringifyArbitraryDepth`.
pub fn stringify(
    value: anytype,
    options: StringifyOptions,
    out_stream: anytype,
) @TypeOf(out_stream).Error!void {
    var stream = writeStream(out_stream, options);
    defer stream.deinit();
    try stream.write(value);
}

Function stringifyMaxDepth [src]

pub fn stringifyMaxDepth( value: anytype, options: StringifyOptions, out_stream: anytype, comptime max_depth: ?usize, ) @TypeOf(out_stream).Error!void

Like stringify with configurable nesting depth. max_depth is rounded up to the nearest multiple of 8. Give null for max_depth to disable some safety checks and allow arbitrary nesting depth. See writeStreamMaxDepth for more info.

Parameters

max_depth: ?usize

Source Code

Source code
/// Like `stringify` with configurable nesting depth.
/// `max_depth` is rounded up to the nearest multiple of 8; pass `null` to
/// disable safety checks and allow arbitrary nesting depth.
/// See `writeStreamMaxDepth` for more info.
pub fn stringifyMaxDepth(
    value: anytype,
    options: StringifyOptions,
    out_stream: anytype,
    comptime max_depth: ?usize,
) @TypeOf(out_stream).Error!void {
    var jw = writeStreamMaxDepth(out_stream, options, max_depth);
    // Consistent with `stringify` and `stringifyArbitraryDepth`: deinit
    // invalidates the write stream when we are done with it.
    defer jw.deinit();
    try jw.write(value);
}

Function stringifyArbitraryDepth [src]

pub fn stringifyArbitraryDepth( allocator: Allocator, value: anytype, options: StringifyOptions, out_stream: anytype, ) WriteStream(@TypeOf(out_stream), .checked_to_arbitrary_depth).Error!void

Like stringify but takes an allocator to facilitate safety checks while allowing arbitrary nesting depth. These safety checks can be helpful when debugging custom jsonStringify implementations; See WriteStream.

Parameters

allocator: Allocator

Source Code

Source code
/// Like `stringify` but takes an allocator so the grammar safety checks can
/// track arbitrary nesting depth. Helpful when debugging custom
/// `jsonStringify` implementations; see `WriteStream`.
pub fn stringifyArbitraryDepth(
    allocator: Allocator,
    value: anytype,
    options: StringifyOptions,
    out_stream: anytype,
) WriteStream(@TypeOf(out_stream), .checked_to_arbitrary_depth).Error!void {
    var stream = writeStreamArbitraryDepth(allocator, out_stream, options);
    defer stream.deinit();
    try stream.write(value);
}

Function stringifyAlloc [src]

pub fn stringifyAlloc( allocator: Allocator, value: anytype, options: StringifyOptions, ) error{OutOfMemory}![]u8

Calls stringifyArbitraryDepth and stores the result in dynamically allocated memory instead of taking a std.io.Writer.

Caller owns returned memory.

Parameters

allocator: Allocator

Source Code

Source code
/// Calls `stringifyArbitraryDepth` and returns the result in dynamically
/// allocated memory instead of writing to a `std.io.Writer`.
/// Caller owns the returned slice and must free it with `allocator`.
pub fn stringifyAlloc(
    allocator: Allocator,
    value: anytype,
    options: StringifyOptions,
) error{OutOfMemory}![]u8 {
    var buffer = std.ArrayList(u8).init(allocator);
    // Free the partial output only on the error path; on success ownership
    // transfers to the caller via toOwnedSlice.
    errdefer buffer.deinit();
    try stringifyArbitraryDepth(allocator, value, options, buffer.writer());
    return buffer.toOwnedSlice();
}

Function writeStream [src]

pub fn writeStream( out_stream: anytype, options: StringifyOptions, ) WriteStream(@TypeOf(out_stream), .{ .checked_to_fixed_depth = 256 })

See WriteStream for documentation. Equivalent to calling writeStreamMaxDepth with a depth of 256.

The caller does not need to call deinit() on the returned object.

Parameters

Source Code

Source code
/// See `WriteStream` for documentation.
/// Equivalent to calling `writeStreamMaxDepth` with a depth of 256.
/// The caller does not need to call `deinit()` on the returned object.
pub fn writeStream(
    out_stream: anytype,
    options: StringifyOptions,
) WriteStream(@TypeOf(out_stream), .{ .checked_to_fixed_depth = 256 }) {
    // Fixed-depth checking needs no allocator, hence `undefined` here.
    return WriteStream(@TypeOf(out_stream), .{ .checked_to_fixed_depth = 256 })
        .init(undefined, out_stream, options);
}

Function writeStreamMaxDepth [src]

pub fn writeStreamMaxDepth( out_stream: anytype, options: StringifyOptions, comptime max_depth: ?usize, ) WriteStream( @TypeOf(out_stream), if (max_depth) |d| .{ .checked_to_fixed_depth = d } else .assumed_correct, )

See WriteStream for documentation. The returned object includes 1 bit of size per max_depth to enable safety checks on the order of method calls; see the grammar in the WriteStream documentation. max_depth is rounded up to the nearest multiple of 8. If the nesting depth exceeds max_depth, it is detectable illegal behavior. Give null for max_depth to disable safety checks for the grammar and allow arbitrary nesting depth. In ReleaseFast and ReleaseSmall, max_depth is ignored, effectively equivalent to passing null. Alternatively, see writeStreamArbitraryDepth to do safety checks to arbitrary depth.

The caller does not need to call deinit() on the returned object.

Parameters

max_depth: ?usize

Source Code

Source code
/// See `WriteStream` for documentation.
/// `max_depth` bounds the grammar safety checks (1 bit of state per level,
/// rounded up to a multiple of 8); pass `null` to disable them entirely.
/// The caller does not need to call `deinit()` on the returned object.
pub fn writeStreamMaxDepth(
    out_stream: anytype,
    options: StringifyOptions,
    comptime max_depth: ?usize,
) WriteStream(
    @TypeOf(out_stream),
    if (max_depth) |d| .{ .checked_to_fixed_depth = d } else .assumed_correct,
) {
    // Name the concrete stream type once; it must match the return type above.
    const Stream = WriteStream(
        @TypeOf(out_stream),
        if (max_depth) |d| .{ .checked_to_fixed_depth = d } else .assumed_correct,
    );
    // Neither fixed-depth nor unchecked operation needs an allocator.
    return Stream.init(undefined, out_stream, options);
}

Function writeStreamArbitraryDepth [src]

pub fn writeStreamArbitraryDepth( allocator: Allocator, out_stream: anytype, options: StringifyOptions, ) WriteStream(@TypeOf(out_stream), .checked_to_arbitrary_depth)

See WriteStream for documentation. This version of the write stream enables safety checks to arbitrarily deep nesting levels by using the given allocator. The caller should call deinit() on the returned object to free allocated memory.

In ReleaseFast and ReleaseSmall mode, this function is effectively equivalent to calling writeStreamMaxDepth(..., null); in those build modes, the allocator is not used.

Parameters

allocator: Allocator

Source Code

Source code
/// See `WriteStream` for documentation.
/// Enables grammar safety checks to arbitrarily deep nesting by using
/// `allocator`. The caller should call `deinit()` on the returned object.
pub fn writeStreamArbitraryDepth(
    allocator: Allocator,
    out_stream: anytype,
    options: StringifyOptions,
) WriteStream(@TypeOf(out_stream), .checked_to_arbitrary_depth) {
    const Stream = WriteStream(@TypeOf(out_stream), .checked_to_arbitrary_depth);
    return Stream.init(allocator, out_stream, options);
}

Function encodeJsonString [src]

pub fn encodeJsonString(string: []const u8, options: StringifyOptions, writer: anytype) !void

Write string to writer as a JSON encoded string.

Parameters

string: []const u8

Source Code

Source code
/// Writes `string` to `writer` as a complete JSON string, including the
/// surrounding double quotes. Escaping is delegated to `encodeJsonStringChars`.
pub fn encodeJsonString(string: []const u8, options: StringifyOptions, writer: anytype) !void {
    try writer.writeByte('"');
    try encodeJsonStringChars(string, options, writer);
    try writer.writeByte('"');
}

Function encodeJsonStringChars [src]

pub fn encodeJsonStringChars(chars: []const u8, options: StringifyOptions, writer: anytype) !void

Write chars to writer as JSON encoded string characters.

Parameters

chars: []const u8

Source Code

Source code
/// Writes `chars` to `writer` as the interior of a JSON-encoded string
/// (no surrounding quotes). Runs of bytes that need no escaping are
/// flushed in bulk rather than byte-by-byte.
/// NOTE(review): the `catch unreachable` decodes assume `chars` is valid
/// UTF-8 when `options.escape_unicode` is set — confirm at call sites.
pub fn encodeJsonStringChars(chars: []const u8, options: StringifyOptions, writer: anytype) !void {
    // chars[flushed..idx] is the pending run of bytes to emit verbatim.
    var flushed: usize = 0;
    var idx: usize = 0;
    if (options.escape_unicode) {
        while (idx < chars.len) : (idx += 1) {
            switch (chars[idx]) {
                // Printable ASCII that needs no escape: stays in the pending run.
                0x20...0x21, 0x23...0x5B, 0x5D...0x7E => {},
                // Control characters, backslash, and quote must always be escaped.
                0x00...0x1F, '\\', '\"' => {
                    try writer.writeAll(chars[flushed..idx]);
                    try outputSpecialEscape(chars[idx], writer);
                    flushed = idx + 1;
                },
                // DEL and non-ASCII: decode the UTF-8 sequence and emit \uXXXX.
                0x7F...0xFF => {
                    try writer.writeAll(chars[flushed..idx]);
                    const seq_len = std.unicode.utf8ByteSequenceLength(chars[idx]) catch unreachable;
                    const codepoint = std.unicode.utf8Decode(chars[idx..][0..seq_len]) catch unreachable;
                    try outputUnicodeEscape(codepoint, writer);
                    // Skip the continuation bytes we just consumed.
                    idx += seq_len - 1;
                    flushed = idx + 1;
                },
            }
        }
    } else {
        while (idx < chars.len) : (idx += 1) {
            switch (chars[idx]) {
                // Everything except controls, backslash, and quote passes through.
                0x20...0x21, 0x23...0x5B, 0x5D...0xFF => {},
                0x00...0x1F, '\\', '\"' => {
                    try writer.writeAll(chars[flushed..idx]);
                    try outputSpecialEscape(chars[idx], writer);
                    flushed = idx + 1;
                },
            }
        }
    }
    // Flush the trailing verbatim run.
    try writer.writeAll(chars[flushed..chars.len]);
}

Function fmt [src]

pub fn fmt(value: anytype, options: StringifyOptions) Formatter(@TypeOf(value))

Returns a formatter that formats the given value using stringify.

Parameters

Example Usage

test fmt {
    const expectFmt = std.testing.expectFmt;
    // A scalar stringifies to its bare JSON representation.
    try expectFmt("123", "{}", .{fmt(@as(u32, 123), .{})});
    // A struct (including nested structs) stringifies as a JSON object.
    try expectFmt(
        \\{"num":927,"msg":"hello","sub":{"mybool":true}}
    , "{}", .{fmt(struct {
        num: u32,
        msg: []const u8,
        sub: struct {
            mybool: bool,
        },
    }{
        .num = 927,
        .msg = "hello",
        .sub = .{ .mybool = true },
    }, .{})});
}

Source Code

Source code
/// Returns a formatter that formats the given value as JSON using `stringify`.
pub fn fmt(value: anytype, options: StringifyOptions) Formatter(@TypeOf(value)) {
    // The result type is known from the signature, so an anonymous literal coerces.
    return .{ .value = value, .options = options };
}

Source Code

Source code
//! JSON parsing and stringification conforming to RFC 8259. https://datatracker.ietf.org/doc/html/rfc8259
//!
//! The low-level `Scanner` API produces `Token`s from an input slice or successive slices of inputs,
//! The `Reader` API connects a `std.io.Reader` to a `Scanner`.
//!
//! The high-level `parseFromSlice` and `parseFromTokenSource` deserialize a JSON document into a Zig type.
//! Parse into a dynamically-typed `Value` to load any JSON value for runtime inspection.
//!
//! The low-level `writeStream` emits syntax-conformant JSON tokens to a `std.io.Writer`.
//! The high-level `stringify` serializes a Zig or `Value` type into JSON.

const builtin = @import("builtin");
const testing = @import("std").testing;
const ArrayList = @import("std").ArrayList;

test Scanner {
    var s = Scanner.initCompleteInput(testing.allocator, "{\"foo\": 123}\n");
    defer s.deinit();
    // The scanner yields one token per JSON syntax element, in document order.
    try testing.expectEqual(Token.object_begin, try s.next());
    try testing.expectEqualSlices(u8, "foo", (try s.next()).string);
    try testing.expectEqualSlices(u8, "123", (try s.next()).number);
    try testing.expectEqual(Token.object_end, try s.next());
    try testing.expectEqual(Token.end_of_document, try s.next());
}

test parseFromSlice {
    // \u escapes in the input are decoded into the resulting slice.
    var str_result = try parseFromSlice([]const u8, testing.allocator, "\"a\\u0020b\"", .{});
    defer str_result.deinit();
    try testing.expectEqualSlices(u8, "a b", str_result.value);

    const T = struct { a: i32 = -1, b: [2]u8 };
    var struct_result = try parseFromSlice(T, testing.allocator, "{\"b\":\"xy\"}", .{});
    defer struct_result.deinit();
    try testing.expectEqual(@as(i32, -1), struct_result.value.a); // default value
    try testing.expectEqualSlices(u8, "xy", struct_result.value.b[0..]);
}

test Value {
    // Parsing into `Value` accepts any JSON document for runtime inspection.
    var result = try parseFromSlice(Value, testing.allocator, "{\"anything\": \"goes\"}", .{});
    defer result.deinit();
    try testing.expectEqualSlices(u8, "goes", result.value.object.get("anything").?.string);
}

test writeStream {
    var buffer = ArrayList(u8).init(testing.allocator);
    defer buffer.deinit();
    var ws = writeStream(buffer.writer(), .{ .whitespace = .indent_2 });
    defer ws.deinit();
    // Emit {"foo": 123} token by token.
    try ws.beginObject();
    try ws.objectField("foo");
    try ws.write(123);
    try ws.endObject();
    const expected =
        \\{
        \\  "foo": 123
        \\}
    ;
    try testing.expectEqualSlices(u8, expected, buffer.items);
}

test stringify {
    var buffer = ArrayList(u8).init(testing.allocator);
    defer buffer.deinit();

    // Struct fields are emitted in declaration order.
    const T = struct { a: i32, b: []const u8 };
    try stringify(T{ .a = 123, .b = "xy" }, .{}, buffer.writer());
    try testing.expectEqualSlices(u8, "{\"a\":123,\"b\":\"xy\"}", buffer.items);
}

// Dynamically-typed JSON values.
pub const ObjectMap = @import("json/dynamic.zig").ObjectMap;
pub const Array = @import("json/dynamic.zig").Array;
pub const Value = @import("json/dynamic.zig").Value;

// Map wrapper for objects with arbitrary (non-comptime-known) keys.
pub const ArrayHashMap = @import("json/hashmap.zig").ArrayHashMap;

// Low-level tokenizing / scanning API.
pub const validate = @import("json/scanner.zig").validate;
pub const Error = @import("json/scanner.zig").Error;
pub const reader = @import("json/scanner.zig").reader;
pub const default_buffer_size = @import("json/scanner.zig").default_buffer_size;
pub const Token = @import("json/scanner.zig").Token;
pub const TokenType = @import("json/scanner.zig").TokenType;
pub const Diagnostics = @import("json/scanner.zig").Diagnostics;
pub const AllocWhen = @import("json/scanner.zig").AllocWhen;
pub const default_max_value_len = @import("json/scanner.zig").default_max_value_len;
pub const Reader = @import("json/scanner.zig").Reader;
pub const Scanner = @import("json/scanner.zig").Scanner;
pub const isNumberFormattedLikeAnInteger = @import("json/scanner.zig").isNumberFormattedLikeAnInteger;

// High-level parsing into statically-known Zig types.
pub const ParseOptions = @import("json/static.zig").ParseOptions;
pub const Parsed = @import("json/static.zig").Parsed;
pub const parseFromSlice = @import("json/static.zig").parseFromSlice;
pub const parseFromSliceLeaky = @import("json/static.zig").parseFromSliceLeaky;
pub const parseFromTokenSource = @import("json/static.zig").parseFromTokenSource;
pub const parseFromTokenSourceLeaky = @import("json/static.zig").parseFromTokenSourceLeaky;
pub const innerParse = @import("json/static.zig").innerParse;
pub const parseFromValue = @import("json/static.zig").parseFromValue;
pub const parseFromValueLeaky = @import("json/static.zig").parseFromValueLeaky;
pub const innerParseFromValue = @import("json/static.zig").innerParseFromValue;
pub const ParseError = @import("json/static.zig").ParseError;
pub const ParseFromValueError = @import("json/static.zig").ParseFromValueError;

// Serialization (stringify / write-stream) API.
pub const StringifyOptions = @import("json/stringify.zig").StringifyOptions;
pub const stringify = @import("json/stringify.zig").stringify;
pub const stringifyMaxDepth = @import("json/stringify.zig").stringifyMaxDepth;
pub const stringifyArbitraryDepth = @import("json/stringify.zig").stringifyArbitraryDepth;
pub const stringifyAlloc = @import("json/stringify.zig").stringifyAlloc;
pub const writeStream = @import("json/stringify.zig").writeStream;
pub const writeStreamMaxDepth = @import("json/stringify.zig").writeStreamMaxDepth;
pub const writeStreamArbitraryDepth = @import("json/stringify.zig").writeStreamArbitraryDepth;
pub const WriteStream = @import("json/stringify.zig").WriteStream;
pub const encodeJsonString = @import("json/stringify.zig").encodeJsonString;
pub const encodeJsonStringChars = @import("json/stringify.zig").encodeJsonStringChars;

// std.fmt integration.
pub const Formatter = @import("json/fmt.zig").Formatter;
pub const fmt = @import("json/fmt.zig").fmt;

test {
    // Referencing the sub-modules here ensures their test blocks are
    // compiled and run along with this file's tests.
    _ = @import("json/test.zig");
    _ = @import("json/scanner.zig");
    _ = @import("json/dynamic.zig");
    _ = @import("json/hashmap.zig");
    _ = @import("json/static.zig");
    _ = @import("json/stringify.zig");
    _ = @import("json/JSONTestSuite_test.zig");
}