struct std.crypto.blake3.Blake3 [src]

An incremental hasher that can accept any number of writes.

Types

Type Writer [src]

Source code
pub const Writer = std.io.Writer(*Blake3, Error, write)

Fields

chunk_state: ChunkState
key: [8]u32
cv_stack: [54][8]u32 = undefined
cv_stack_len: u8 = 0
flags: u8

Values

Constant BLOCK_LEN [src]

Source code
const BLOCK_LEN: usize = 64

Constant OUT_LEN [src]

Source code
const OUT_LEN: usize = 32

Constant KEY_LEN [src]

Source code
const KEY_LEN: usize = 32

Error Sets

Error Set Error [src]

Source code
pub const Error = error{}

Functions

Function init [src]

pub fn init(options: Options) Blake3

Construct a new Blake3 for the hash function, with an optional key.

Parameters

options: Options

Source code
pub fn init(options: Options) Blake3 {
    if (options.key) |key| {
        const key_words = wordsFromLittleEndianBytes(8, key);
        return Blake3.init_internal(key_words, KEYED_HASH);
    } else {
        return Blake3.init_internal(IV, 0);
    }
}
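
For illustration, a minimal sketch of both modes of init, unkeyed and keyed. It assumes the type is reachable as std.crypto.hash.Blake3 (its usual public alias); the message and key bytes below are arbitrary placeholders.

const std = @import("std");
const Blake3 = std.crypto.hash.Blake3; // assumed public alias for this type

test "init: unkeyed and keyed (sketch)" {
    // Unkeyed mode: default Options.
    var plain = Blake3.init(.{});
    plain.update("message");
    var digest: [Blake3.digest_length]u8 = undefined;
    plain.final(&digest);

    // Keyed mode: the key must be exactly key_length (32) bytes.
    const key = [_]u8{0x42} ** Blake3.key_length;
    var keyed = Blake3.init(.{ .key = key });
    keyed.update("message");
    var mac: [Blake3.digest_length]u8 = undefined;
    keyed.final(&mac);

    // A keyed digest differs from the unkeyed digest of the same message.
    try std.testing.expect(!std.mem.eql(u8, &digest, &mac));
}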

Function initKdf [src]

pub fn initKdf(context: []const u8, options: KdfOptions) Blake3

Construct a new Blake3 for the key derivation function. The context string should be hardcoded, globally unique, and application-specific.

Parameters

context: []const u8
options: KdfOptions

Source code
pub fn initKdf(context: []const u8, options: KdfOptions) Blake3 {
    _ = options;
    var context_hasher = Blake3.init_internal(IV, DERIVE_KEY_CONTEXT);
    context_hasher.update(context);
    var context_key: [KEY_LEN]u8 = undefined;
    context_hasher.final(context_key[0..]);
    const context_key_words = wordsFromLittleEndianBytes(8, context_key);
    return Blake3.init_internal(context_key_words, DERIVE_KEY_MATERIAL);
}
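
A sketch of key derivation; the context strings below are hypothetical, and a real application would hardcode its own globally unique ones. Deriving from the same input material under two different contexts yields independent keys.

const std = @import("std");
const Blake3 = std.crypto.hash.Blake3; // assumed public alias for this type

test "initKdf: distinct contexts give distinct keys (sketch)" {
    const ikm = "input key material"; // placeholder secret

    var kdf1 = Blake3.initKdf("example.com 2025 file encryption v1", .{});
    kdf1.update(ikm);
    var k1: [Blake3.key_length]u8 = undefined;
    kdf1.final(&k1);

    var kdf2 = Blake3.initKdf("example.com 2025 MAC key v1", .{});
    kdf2.update(ikm);
    var k2: [Blake3.key_length]u8 = undefined;
    kdf2.final(&k2);

    try std.testing.expect(!std.mem.eql(u8, &k1, &k2));
}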

Function hash [src]

pub fn hash(b: []const u8, out: []u8, options: Options) void

Hash b in one shot, writing out.len output bytes to out.

Parameters

b: []const u8
out: []u8
options: Options

Source code
pub fn hash(b: []const u8, out: []u8, options: Options) void {
    var d = Blake3.init(options);
    d.update(b);
    d.final(out);
}
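
As a sanity check, the one-shot helper agrees with the incremental init/update/final sequence it wraps (a sketch, under the same std.crypto.hash.Blake3 alias assumption as above):

const std = @import("std");
const Blake3 = std.crypto.hash.Blake3; // assumed public alias for this type

test "hash equals incremental update/final (sketch)" {
    const msg = "hello world";

    var one_shot: [Blake3.digest_length]u8 = undefined;
    Blake3.hash(msg, &one_shot, .{});

    var hasher = Blake3.init(.{});
    hasher.update(msg);
    var incremental: [Blake3.digest_length]u8 = undefined;
    hasher.final(&incremental);

    try std.testing.expectEqualSlices(u8, &one_shot, &incremental);
}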

Function update [src]

pub fn update(self: *Blake3, input_slice: []const u8) void

Add input to the hash state. This can be called any number of times.

Parameters

self: *Blake3
input_slice: []const u8

Source code
pub fn update(self: *Blake3, input_slice: []const u8) void {
    var input = input_slice;
    while (input.len > 0) {
        // If the current chunk is complete, finalize it and reset the
        // chunk state. More input is coming, so this chunk is not ROOT.
        if (self.chunk_state.len() == CHUNK_LEN) {
            const chunk_cv = self.chunk_state.output().chainingValue();
            const total_chunks = self.chunk_state.chunk_counter + 1;
            self.addChunkChainingValue(chunk_cv, total_chunks);
            self.chunk_state = ChunkState.init(self.key, total_chunks, self.flags);
        }

        // Compress input bytes into the current chunk state.
        const want = CHUNK_LEN - self.chunk_state.len();
        const take = @min(want, input.len);
        self.chunk_state.update(input[0..take]);
        input = input[take..];
    }
}
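
Because update only buffers bytes and finalizes whole chunks, how the input is split across calls does not affect the digest. A sketch (same alias assumption as above):

const std = @import("std");
const Blake3 = std.crypto.hash.Blake3; // assumed public alias for this type

test "update is split-invariant (sketch)" {
    var a = Blake3.init(.{});
    a.update("hello ");
    a.update("world");

    var b = Blake3.init(.{});
    b.update("hello world");

    var out_a: [Blake3.digest_length]u8 = undefined;
    var out_b: [Blake3.digest_length]u8 = undefined;
    a.final(&out_a);
    b.final(&out_b);

    try std.testing.expectEqualSlices(u8, &out_a, &out_b);
}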

Function final [src]

pub fn final(self: *const Blake3, out_slice: []u8) void

Finalize the hash and write any number of output bytes.

Parameters

self: *const Blake3
out_slice: []u8

Source code
pub fn final(self: *const Blake3, out_slice: []u8) void {
    // Starting with the Output from the current chunk, compute all the
    // parent chaining values along the right edge of the tree, until we
    // have the root Output.
    var output = self.chunk_state.output();
    var parent_nodes_remaining: usize = self.cv_stack_len;
    while (parent_nodes_remaining > 0) {
        parent_nodes_remaining -= 1;
        output = parentOutput(
            self.cv_stack[parent_nodes_remaining],
            output.chainingValue(),
            self.key,
            self.flags,
        );
    }
    output.rootOutputBytes(out_slice);
}
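
Since final takes self by const pointer, it does not consume the hasher and may be called more than once; and because BLAKE3 is an extendable-output function, out_slice may be any length, with longer outputs extending shorter ones. A sketch:

const std = @import("std");
const Blake3 = std.crypto.hash.Blake3; // assumed public alias for this type

test "final: extendable output (sketch)" {
    var hasher = Blake3.init(.{});
    hasher.update("hello world");

    // Request 64 output bytes, then the default 32; final does not
    // mutate the hasher, so both calls see the same state.
    var long: [64]u8 = undefined;
    hasher.final(&long);
    var short: [Blake3.digest_length]u8 = undefined;
    hasher.final(&short);

    // The 32-byte digest is a prefix of the longer output.
    try std.testing.expectEqualSlices(u8, &short, long[0..32]);
}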

Function writer [src]

pub fn writer(self: *Blake3) Writer

Return a std.io.Writer that passes written bytes to update.

Parameters

self: *Blake3

Source code
pub fn writer(self: *Blake3) Writer {
    return .{ .context = self };
}
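
The adapter makes a hasher usable anywhere a std.io.Writer is expected; since Error is the empty set, writes cannot actually fail. A sketch feeding formatted output into the hash:

const std = @import("std");
const Blake3 = std.crypto.hash.Blake3; // assumed public alias for this type

test "writer: feeding formatted data (sketch)" {
    var hasher = Blake3.init(.{});
    const w = hasher.writer();
    try w.writeAll("header:");
    try w.print("{d}", .{123});

    var via_writer: [Blake3.digest_length]u8 = undefined;
    hasher.final(&via_writer);

    // Equivalent to updating with the concatenated bytes directly.
    var direct = Blake3.init(.{});
    direct.update("header:123");
    var expected: [Blake3.digest_length]u8 = undefined;
    direct.final(&expected);

    try std.testing.expectEqualSlices(u8, &expected, &via_writer);
}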

Source code
pub const Blake3 = struct {
    pub const Options = struct { key: ?[digest_length]u8 = null };
    pub const KdfOptions = struct {};

    chunk_state: ChunkState,
    key: [8]u32,
    cv_stack: [54][8]u32 = undefined, // Space for 54 subtree chaining values:
    cv_stack_len: u8 = 0, // 2^54 * CHUNK_LEN = 2^64
    flags: u8,

    pub const block_length = BLOCK_LEN;
    pub const digest_length = OUT_LEN;
    pub const key_length = KEY_LEN;

    fn init_internal(key: [8]u32, flags: u8) Blake3 {
        return Blake3{
            .chunk_state = ChunkState.init(key, 0, flags),
            .key = key,
            .flags = flags,
        };
    }

    /// Construct a new `Blake3` for the hash function, with an optional key
    pub fn init(options: Options) Blake3 {
        if (options.key) |key| {
            const key_words = wordsFromLittleEndianBytes(8, key);
            return Blake3.init_internal(key_words, KEYED_HASH);
        } else {
            return Blake3.init_internal(IV, 0);
        }
    }

    /// Construct a new `Blake3` for the key derivation function. The context
    /// string should be hardcoded, globally unique, and application-specific.
    pub fn initKdf(context: []const u8, options: KdfOptions) Blake3 {
        _ = options;
        var context_hasher = Blake3.init_internal(IV, DERIVE_KEY_CONTEXT);
        context_hasher.update(context);
        var context_key: [KEY_LEN]u8 = undefined;
        context_hasher.final(context_key[0..]);
        const context_key_words = wordsFromLittleEndianBytes(8, context_key);
        return Blake3.init_internal(context_key_words, DERIVE_KEY_MATERIAL);
    }

    pub fn hash(b: []const u8, out: []u8, options: Options) void {
        var d = Blake3.init(options);
        d.update(b);
        d.final(out);
    }

    fn pushCv(self: *Blake3, cv: [8]u32) void {
        self.cv_stack[self.cv_stack_len] = cv;
        self.cv_stack_len += 1;
    }

    fn popCv(self: *Blake3) [8]u32 {
        self.cv_stack_len -= 1;
        return self.cv_stack[self.cv_stack_len];
    }

    // Section 5.1.2 of the BLAKE3 spec explains this algorithm in more detail.
    fn addChunkChainingValue(self: *Blake3, first_cv: [8]u32, total_chunks: u64) void {
        // This chunk might complete some subtrees. For each completed subtree,
        // its left child will be the current top entry in the CV stack, and
        // its right child will be the current value of `new_cv`. Pop each left
        // child off the stack, merge it with `new_cv`, and overwrite `new_cv`
        // with the result. After all these merges, push the final value of
        // `new_cv` onto the stack. The number of completed subtrees is given
        // by the number of trailing 0-bits in the new total number of chunks.
        var new_cv = first_cv;
        var chunk_counter = total_chunks;
        while (chunk_counter & 1 == 0) {
            new_cv = parentCv(self.popCv(), new_cv, self.key, self.flags);
            chunk_counter >>= 1;
        }
        self.pushCv(new_cv);
    }

    /// Add input to the hash state. This can be called any number of times.
    pub fn update(self: *Blake3, input_slice: []const u8) void {
        var input = input_slice;
        while (input.len > 0) {
            // If the current chunk is complete, finalize it and reset the
            // chunk state. More input is coming, so this chunk is not ROOT.
            if (self.chunk_state.len() == CHUNK_LEN) {
                const chunk_cv = self.chunk_state.output().chainingValue();
                const total_chunks = self.chunk_state.chunk_counter + 1;
                self.addChunkChainingValue(chunk_cv, total_chunks);
                self.chunk_state = ChunkState.init(self.key, total_chunks, self.flags);
            }

            // Compress input bytes into the current chunk state.
            const want = CHUNK_LEN - self.chunk_state.len();
            const take = @min(want, input.len);
            self.chunk_state.update(input[0..take]);
            input = input[take..];
        }
    }

    /// Finalize the hash and write any number of output bytes.
    pub fn final(self: *const Blake3, out_slice: []u8) void {
        // Starting with the Output from the current chunk, compute all the
        // parent chaining values along the right edge of the tree, until we
        // have the root Output.
        var output = self.chunk_state.output();
        var parent_nodes_remaining: usize = self.cv_stack_len;
        while (parent_nodes_remaining > 0) {
            parent_nodes_remaining -= 1;
            output = parentOutput(
                self.cv_stack[parent_nodes_remaining],
                output.chainingValue(),
                self.key,
                self.flags,
            );
        }
        output.rootOutputBytes(out_slice);
    }

    pub const Error = error{};
    pub const Writer = std.io.Writer(*Blake3, Error, write);

    fn write(self: *Blake3, bytes: []const u8) Error!usize {
        self.update(bytes);
        return bytes.len;
    }

    pub fn writer(self: *Blake3) Writer {
        return .{ .context = self };
    }
};