/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.codec.digest;

import java.util.Arrays;
import java.util.Objects;

/**
 * Implements the Blake3 algorithm providing a {@linkplain #initHash() hash function} with extensible output (XOF), a
 * {@linkplain #initKeyedHash(byte[]) keyed hash function} (MAC, PRF), and a
 * {@linkplain #initKeyDerivationFunction(byte[]) key derivation function} (KDF). Blake3 has a 128-bit security level
 * and a default output length of 256 bits (32 bytes) which can be extended up to 2<sup>64</sup> bytes.
 * <h2>Hashing</h2>
 * <p>Hash mode calculates the same output hash given the same input bytes and can be used as both a message digest and
 * an extensible output function.</p>
 * <pre>{@code
 *      Blake3 hasher = Blake3.initHash();
 *      hasher.update("Hello, world!".getBytes(StandardCharsets.UTF_8));
 *      byte[] hash = new byte[32];
 *      hasher.doFinalize(hash);
 * }</pre>
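 * <p>Because the hash output is extensible, finalizing into a larger array produces a longer digest whose leading
 * 32 bytes are the default hash; for example, a 64-byte output for the same input:</p>
 * <pre>{@code
 *      Blake3 xof = Blake3.initHash();
 *      xof.update("Hello, world!".getBytes(StandardCharsets.UTF_8));
 *      byte[] extendedHash = new byte[64];
 *      xof.doFinalize(extendedHash);
 * }</pre>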
 * <h2>Keyed Hashing</h2>
 * <p>Keyed hashes take a 32-byte secret key and calculate a message authentication code on some input bytes. These
 * also work as pseudo-random functions (PRFs) with extensible output similar to the extensible hash output. Note that
 * Blake3 keyed hashes have the same performance as plain hashes; the key is used in initialization in place of the
 * standard initialization vector used for plain hashing.</p>
 * <pre>{@code
 *      SecureRandom random = SecureRandom.getInstanceStrong();
 *      byte[] key = new byte[32];
 *      random.nextBytes(key);
 *      Blake3 hasher = Blake3.initKeyedHash(key);
 *      hasher.update("Hello, Alice!".getBytes(StandardCharsets.UTF_8));
 *      byte[] mac = new byte[32];
 *      hasher.doFinalize(mac);
 * }</pre>
 * <h2>Key Derivation</h2>
 * <p>A specific hash mode for deriving session keys and other derived keys in a unique key derivation context
 * identified by some sequence of bytes. These context strings should be unique but do not need to be kept secret.
 * Additional input data is hashed for key material which can be finalized to derive subkeys.</p>
 * <pre>{@code
 *      String context = "org.apache.commons.codec.digest.Blake3Example";
 *      byte[] sharedSecret = ...;
 *      byte[] senderId = ...;
 *      byte[] recipientId = ...;
 *      Blake3 kdf = Blake3.initKeyDerivationFunction(context.getBytes(StandardCharsets.UTF_8));
 *      kdf.update(sharedSecret);
 *      kdf.update(senderId);
 *      kdf.update(recipientId);
 *      byte[] txKey = new byte[32];
 *      byte[] rxKey = new byte[32];
 *      kdf.doFinalize(txKey);
 *      kdf.doFinalize(rxKey);
 * }</pre>
 * <p>
 * Adapted from the ISC-licensed O(1) Cryptography library by Matt Sicker and ported from the reference public domain
 * implementation by Jack O'Connor.
 * </p>
 *
 * @see <a href="https://github.com/BLAKE3-team/BLAKE3">BLAKE3 hash function</a>
 * @since 1.16
 */
public final class Blake3 {

    private static final int BLOCK_LEN = 64;
    private static final int BLOCK_INTS = BLOCK_LEN / Integer.BYTES;
    private static final int KEY_LEN = 32;
    private static final int KEY_INTS = KEY_LEN / Integer.BYTES;
    private static final int OUT_LEN = 32;
    private static final int CHUNK_LEN = 1024;
    private static final int CHAINING_VALUE_INTS = 8;

    /**
     * Standard hash key used for plain hashes; same initialization vector as Blake2s.
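     * These constants are also the initial hash values of SHA-256 (the fractional parts of the square roots of the
     * first eight primes).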
     */
    private static final int[] IV =
            { 0x6A09E667, 0xBB67AE85, 0x3C6EF372, 0xA54FF53A, 0x510E527F, 0x9B05688C, 0x1F83D9AB, 0x5BE0CD19 };

    // domain flags
    private static final int CHUNK_START = 1;
    private static final int CHUNK_END = 1 << 1;
    private static final int PARENT = 1 << 2;
    private static final int ROOT = 1 << 3;
    private static final int KEYED_HASH = 1 << 4;
    private static final int DERIVE_KEY_CONTEXT = 1 << 5;
    private static final int DERIVE_KEY_MATERIAL = 1 << 6;

    /**
     * Pre-permuted for all 7 rounds; the second row (2,6,3,...) indicates the base permutation.
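     * Each subsequent row applies that permutation to the previous one, i.e.
     * {@code MSG_SCHEDULE[r + 1][i] == MSG_SCHEDULE[1][MSG_SCHEDULE[r][i]]}, so each round can index message words
     * directly without permuting them in place between rounds.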
     */
    private static final byte[][] MSG_SCHEDULE = {
            { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
            { 2, 6, 3, 10, 7, 0, 4, 13, 1, 11, 12, 5, 9, 14, 15, 8 },
            { 3, 4, 10, 12, 13, 2, 7, 14, 6, 5, 9, 0, 11, 15, 8, 1 },
            { 10, 7, 12, 9, 14, 3, 13, 15, 4, 0, 11, 2, 5, 8, 1, 6 },
            { 12, 13, 9, 11, 15, 10, 14, 8, 7, 2, 5, 3, 0, 1, 6, 4 },
            { 9, 14, 11, 5, 8, 12, 15, 1, 13, 3, 0, 10, 2, 6, 4, 7 },
            { 11, 15, 5, 0, 1, 9, 8, 6, 14, 10, 2, 12, 3, 4, 7, 13 }
    };

    private final EngineState engineState;

    private Blake3(final int[] key, final int flags) {
        engineState = new EngineState(key, flags);
    }

    /**
     * Resets this instance back to its initial state when it was first constructed.
     * @return this
     */
    public Blake3 reset() {
        engineState.reset();
        return this;
    }

    /**
     * Updates this hash state using the provided bytes.
     *
     * @param in source array to update data from
     * @return this
     * @throws NullPointerException if in is null
     */
    public Blake3 update(final byte[] in) {
        return update(in, 0, in.length);
    }

    /**
     * Updates this hash state using the provided bytes at an offset.
     *
     * @param in     source array to update data from
     * @param offset where in the array to begin reading bytes
     * @param length number of bytes to update
     * @return this
     * @throws NullPointerException      if in is null
     * @throws IndexOutOfBoundsException if offset or length are negative or if offset + length is greater than the
     *                                   length of the provided array
     */
    public Blake3 update(final byte[] in, final int offset, final int length) {
        checkBufferArgs(in, offset, length);
        engineState.inputData(in, offset, length);
        return this;
    }

    /**
     * Finalizes hash output data that depends on the sequence of updated bytes preceding this invocation and any
     * previously finalized bytes. Note that this can finalize up to 2<sup>64</sup> bytes per instance.
     *
     * @param out destination array to finalize bytes into
     * @return this
     * @throws NullPointerException if out is null
     */
    public Blake3 doFinalize(final byte[] out) {
        return doFinalize(out, 0, out.length);
    }

    /**
     * Finalizes an arbitrary number of bytes into the provided output array that depends on the sequence of previously
     * updated and finalized bytes. Note that this can finalize up to 2<sup>64</sup> bytes per instance.
     *
     * @param out    destination array to finalize bytes into
     * @param offset where in the array to begin writing bytes to
     * @param length number of bytes to finalize
     * @return this
     * @throws NullPointerException      if out is null
     * @throws IndexOutOfBoundsException if offset or length are negative or if offset + length is greater than the
     *                                   length of the provided array
     */
    public Blake3 doFinalize(final byte[] out, final int offset, final int length) {
        checkBufferArgs(out, offset, length);
        engineState.outputHash(out, offset, length);
        return this;
    }

    /**
     * Squeezes and returns an arbitrary number of bytes dependent on the sequence of previously absorbed and squeezed bytes.
     *
     * @param nrBytes number of bytes to finalize
     * @return requested number of finalized bytes
     * @throws IllegalArgumentException if nrBytes is negative
     */
    public byte[] doFinalize(final int nrBytes) {
        if (nrBytes < 0) {
            throw new IllegalArgumentException("Requested bytes must be non-negative");
        }
        final byte[] hash = new byte[nrBytes];
        doFinalize(hash);
        return hash;
    }

    /**
     * Constructs a fresh Blake3 hash function. The instance returned functions as an arbitrary length message digest.
     *
     * @return fresh Blake3 instance in hashed mode
     */
    public static Blake3 initHash() {
        return new Blake3(IV, 0);
    }

    /**
     * Constructs a fresh Blake3 keyed hash function. The instance returned functions as a pseudorandom function (PRF) or as a
     * message authentication code (MAC).
     *
     * @param key 32-byte secret key
     * @return fresh Blake3 instance in keyed mode using the provided key
     * @throws NullPointerException     if key is null
     * @throws IllegalArgumentException if key is not 32 bytes
     */
    public static Blake3 initKeyedHash(final byte[] key) {
        Objects.requireNonNull(key);
        if (key.length != KEY_LEN) {
            throw new IllegalArgumentException("Blake3 keys must be 32 bytes");
        }
        return new Blake3(unpackInts(key, KEY_INTS), KEYED_HASH);
    }

    /**
     * Constructs a fresh Blake3 key derivation function using the provided key derivation context byte string.
     * The instance returned functions as a key-derivation function which can further absorb key material
     * before squeezing derived key data.
     *
     * @param kdfContext a globally unique key-derivation context byte string to separate key derivation contexts from each other
     * @return fresh Blake3 instance in key derivation mode
     * @throws NullPointerException if kdfContext is null
     */
    public static Blake3 initKeyDerivationFunction(final byte[] kdfContext) {
        Objects.requireNonNull(kdfContext);
        final EngineState kdf = new EngineState(IV, DERIVE_KEY_CONTEXT);
        kdf.inputData(kdfContext, 0, kdfContext.length);
        final byte[] key = new byte[KEY_LEN];
        kdf.outputHash(key, 0, key.length);
        return new Blake3(unpackInts(key, KEY_INTS), DERIVE_KEY_MATERIAL);
    }

    /**
     * Calculates the Blake3 hash of the provided data.
     *
     * @param data source array to absorb data from
     * @return 32-byte hash squeezed from the provided data
     * @throws NullPointerException if data is null
     */
    public static byte[] hash(final byte[] data) {
        return Blake3.initHash().update(data).doFinalize(OUT_LEN);
    }

    /**
     * Calculates the Blake3 keyed hash (MAC) of the provided data.
     *
     * @param key  32-byte secret key
     * @param data source array to absorb data from
     * @return 32-byte MAC squeezed from the provided data
     * @throws NullPointerException if key or data are null
     */
    public static byte[] keyedHash(final byte[] key, final byte[] data) {
        return Blake3.initKeyedHash(key).update(data).doFinalize(OUT_LEN);
    }

    private static void checkBufferArgs(final byte[] buffer, final int offset, final int length) {
        Objects.requireNonNull(buffer);
        if (offset < 0) {
            throw new IndexOutOfBoundsException("Offset must be non-negative");
        }
        if (length < 0) {
            throw new IndexOutOfBoundsException("Length must be non-negative");
        }
        final int bufferLength = buffer.length;
        if (offset > bufferLength - length) {
            throw new IndexOutOfBoundsException(
                    "Offset " + offset + " and length " + length + " out of bounds with buffer length " + bufferLength);
        }
    }

    private static void packInt(final int value, final byte[] dst, final int off, final int len) {
        for (int i = 0; i < len; i++) {
            dst[off + i] = (byte) (value >>> i * Byte.SIZE);
        }
    }

    private static int unpackInt(final byte[] buf, final int off) {
        return buf[off] & 0xFF | (buf[off + 1] & 0xFF) << 8 | (buf[off + 2] & 0xFF) << 16 | (buf[off + 3] & 0xFF) << 24;
    }

    private static int[] unpackInts(final byte[] buf, final int nrInts) {
        final int[] values = new int[nrInts];
        for (int i = 0, off = 0; i < nrInts; i++, off += Integer.BYTES) {
            values[i] = unpackInt(buf, off);
        }
        return values;
    }

    /**
     * The mixing function, G, which mixes either a column or a diagonal of the 16-word state using two message words.
     */
    private static void g(
            final int[] state, final int a, final int b, final int c, final int d, final int mx, final int my) {
        state[a] += state[b] + mx;
        state[d] = Integer.rotateRight(state[d] ^ state[a], 16);
        state[c] += state[d];
        state[b] = Integer.rotateRight(state[b] ^ state[c], 12);
        state[a] += state[b] + my;
        state[d] = Integer.rotateRight(state[d] ^ state[a], 8);
        state[c] += state[d];
        state[b] = Integer.rotateRight(state[b] ^ state[c], 7);
    }

    private static void round(final int[] state, final int[] msg, final byte[] schedule) {
        // Mix the columns.
        g(state, 0, 4, 8, 12, msg[schedule[0]], msg[schedule[1]]);
        g(state, 1, 5, 9, 13, msg[schedule[2]], msg[schedule[3]]);
        g(state, 2, 6, 10, 14, msg[schedule[4]], msg[schedule[5]]);
        g(state, 3, 7, 11, 15, msg[schedule[6]], msg[schedule[7]]);

        // Mix the diagonals.
        g(state, 0, 5, 10, 15, msg[schedule[8]], msg[schedule[9]]);
        g(state, 1, 6, 11, 12, msg[schedule[10]], msg[schedule[11]]);
        g(state, 2, 7, 8, 13, msg[schedule[12]], msg[schedule[13]]);
        g(state, 3, 4, 9, 14, msg[schedule[14]], msg[schedule[15]]);
    }

    private static int[] compress(
            final int[] chainingValue, final int[] blockWords, final int blockLength, final long counter,
            final int flags) {
        // Initialize the 16-word state: the 8-word input chaining value, the first four IV words, the 64-bit
        // counter split into low and high words, the block length, and the domain separation flags.
        final int[] state = Arrays.copyOf(chainingValue, BLOCK_INTS);
        System.arraycopy(IV, 0, state, 8, 4);
        state[12] = (int) counter;
        state[13] = (int) (counter >> Integer.SIZE);
        state[14] = blockLength;
        state[15] = flags;
        for (int i = 0; i < 7; i++) {
            final byte[] schedule = MSG_SCHEDULE[i];
            round(state, blockWords, schedule);
        }
        // Feed forward: fold the upper half into the lower half and XOR the input chaining value into the upper half.
        for (int i = 0; i < state.length / 2; i++) {
            state[i] ^= state[i + 8];
            state[i + 8] ^= chainingValue[i];
        }
        return state;
    }

    private static Output parentOutput(
            final int[] leftChildCV, final int[] rightChildCV, final int[] key, final int flags) {
        final int[] blockWords = Arrays.copyOf(leftChildCV, BLOCK_INTS);
        System.arraycopy(rightChildCV, 0, blockWords, 8, CHAINING_VALUE_INTS);
        return new Output(key.clone(), blockWords, 0, BLOCK_LEN, flags | PARENT);
    }

    private static int[] parentChainingValue(
            final int[] leftChildCV, final int[] rightChildCV, final int[] key, final int flags) {
        return parentOutput(leftChildCV, rightChildCV, key, flags).chainingValue();
    }

    /**
     * Represents the state just prior to either producing an eight-word chaining value or any number of output bytes
     * when the ROOT flag is set.
     */
    private static class Output {
        private final int[] inputChainingValue;
        private final int[] blockWords;
        private final long counter;
        private final int blockLength;
        private final int flags;

        private Output(
                final int[] inputChainingValue, final int[] blockWords, final long counter, final int blockLength,
                final int flags) {
            this.inputChainingValue = inputChainingValue;
            this.blockWords = blockWords;
            this.counter = counter;
            this.blockLength = blockLength;
            this.flags = flags;
        }

        private int[] chainingValue() {
            return Arrays
                    .copyOf(compress(inputChainingValue, blockWords, blockLength, counter, flags), CHAINING_VALUE_INTS);
        }

        private void rootOutputBytes(final byte[] out, int offset, int length) {
            int outputBlockCounter = 0;
            while (length > 0) {
                // Each compression with the ROOT flag set produces a 16-word (64-byte) block of the extended
                // output stream; the output block counter selects successive blocks.
                int chunkLength = Math.min(OUT_LEN * 2, length);
                length -= chunkLength;
                final int[] words =
                        compress(inputChainingValue, blockWords, blockLength, outputBlockCounter++, flags | ROOT);
                int wordCounter = 0;
                // Pack as many whole or partial words as the caller requested.
                while (chunkLength > 0) {
                    final int wordLength = Math.min(Integer.BYTES, chunkLength);
                    packInt(words[wordCounter++], out, offset, wordLength);
                    offset += wordLength;
                    chunkLength -= wordLength;
                }
            }
        }
    }

    private static class ChunkState {
        private int[] chainingValue;
        private final long chunkCounter;
        private final int flags;

        private final byte[] block = new byte[BLOCK_LEN];
        private int blockLength;
        private int blocksCompressed;

        private ChunkState(final int[] key, final long chunkCounter, final int flags) {
            chainingValue = key;
            this.chunkCounter = chunkCounter;
            this.flags = flags;
        }

        private int length() {
            return BLOCK_LEN * blocksCompressed + blockLength;
        }

        private int startFlag() {
            return blocksCompressed == 0 ? CHUNK_START : 0;
        }

        private void update(final byte[] input, int offset, int length) {
            while (length > 0) {
                if (blockLength == BLOCK_LEN) {
                    // If the block buffer is full, compress it and clear it. More
                    // input is coming, so this compression is not CHUNK_END.
                    final int[] blockWords = unpackInts(block, BLOCK_INTS);
                    chainingValue = Arrays.copyOf(
                            compress(chainingValue, blockWords, BLOCK_LEN, chunkCounter, flags | startFlag()),
                            CHAINING_VALUE_INTS);
                    blocksCompressed++;
                    blockLength = 0;
                    Arrays.fill(block, (byte) 0);
                }

                final int want = BLOCK_LEN - blockLength;
                final int take = Math.min(want, length);
                System.arraycopy(input, offset, block, blockLength, take);
                blockLength += take;
                offset += take;
                length -= take;
            }
        }

        private Output output() {
            final int[] blockWords = unpackInts(block, BLOCK_INTS);
            final int outputFlags = flags | startFlag() | CHUNK_END;
            return new Output(chainingValue, blockWords, chunkCounter, blockLength, outputFlags);
        }
    }

    private static class EngineState {
        private final int[] key;
        private final int flags;
        // Space for 54 subtree chaining values: 2^54 * CHUNK_LEN = 2^64
        // No more than 54 entries can ever be added to this stack (after updating 2^64 bytes and not finalizing any)
        // so we preallocate the stack here. This can be smaller in environments where the data limit is expected to
        // be much lower.
        private final int[][] cvStack = new int[54][];
        private int stackLen;
        private ChunkState state;

        private EngineState(final int[] key, final int flags) {
            this.key = key;
            this.flags = flags;
            state = new ChunkState(key, 0, flags);
        }

        private void inputData(final byte[] in, int offset, int length) {
            while (length > 0) {
                // If the current chunk is complete, finalize it and reset the
                // chunk state. More input is coming, so this chunk is not ROOT.
                if (state.length() == CHUNK_LEN) {
                    final int[] chunkCV = state.output().chainingValue();
                    final long totalChunks = state.chunkCounter + 1;
                    addChunkCV(chunkCV, totalChunks);
                    state = new ChunkState(key, totalChunks, flags);
                }

                // Compress input bytes into the current chunk state.
                final int want = CHUNK_LEN - state.length();
                final int take = Math.min(want, length);
                state.update(in, offset, take);
                offset += take;
                length -= take;
            }
        }

        private void outputHash(final byte[] out, final int offset, final int length) {
            // Starting with the Output from the current chunk, compute all the
            // parent chaining values along the right edge of the tree, until we
            // have the root Output.
            Output output = state.output();
            int parentNodesRemaining = stackLen;
            while (parentNodesRemaining-- > 0) {
                final int[] parentCV = cvStack[parentNodesRemaining];
                output = parentOutput(parentCV, output.chainingValue(), key, flags);
            }
            output.rootOutputBytes(out, offset, length);
        }

        private void reset() {
            stackLen = 0;
            Arrays.fill(cvStack, null);
            state = new ChunkState(key, 0, flags);
        }

        // Section 5.1.2 of the BLAKE3 spec explains this algorithm in more detail.
        private void addChunkCV(final int[] firstCV, final long totalChunks) {
            // This chunk might complete some subtrees. For each completed subtree,
            // its left child will be the current top entry in the CV stack, and
            // its right child will be the current value of `newCV`. Pop each left
            // child off the stack, merge it with `newCV`, and overwrite `newCV`
            // with the result. After all these merges, push the final value of
            // `newCV` onto the stack. The number of completed subtrees is given
            // by the number of trailing 0-bits in the new total number of chunks.
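            // For example, after the 4th chunk (totalChunks == 0b100, two trailing zero bits), two merges occur:
            // the CVs of chunks 3 and 4 form a 2-chunk subtree, which is then merged with the stacked 2-chunk
            // subtree of chunks 1 and 2 before the resulting 4-chunk CV is pushed.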
            int[] newCV = firstCV;
            long chunkCounter = totalChunks;
            while ((chunkCounter & 1) == 0) {
                newCV = parentChainingValue(popCV(), newCV, key, flags);
                chunkCounter >>= 1;
            }
            pushCV(newCV);
        }

        private void pushCV(final int[] cv) {
            cvStack[stackLen++] = cv;
        }

        private int[] popCV() {
            return cvStack[--stackLen];
        }
    }

}