/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Common values and helper functions for the ChaCha and XChaCha stream ciphers.
 *
 * XChaCha extends ChaCha's nonce to 192 bits, while provably retaining ChaCha's
 * security.  Here they share the same key size, tfm context, and setkey
 * function; only their IV size and encrypt/decrypt function differ.
 *
 * The ChaCha paper specifies 20, 12, and 8-round variants.  In general, it is
 * recommended to use the 20-round variant ChaCha20.  However, the other
 * variants can be needed in some performance-sensitive scenarios.  The generic
 * ChaCha code currently allows only the 20 and 12-round variants.
 */

#ifndef _CRYPTO_CHACHA_H
#define _CRYPTO_CHACHA_H

#include <asm/unaligned.h>
#include <linux/types.h>

/* 32-bit stream position, then 96-bit nonce (RFC7539 convention) */
#define CHACHA_IV_SIZE		16

#define CHACHA_KEY_SIZE		32
#define CHACHA_BLOCK_SIZE	64
#define CHACHAPOLY_IV_SIZE	12

#define CHACHA_STATE_WORDS	(CHACHA_BLOCK_SIZE / sizeof(u32))

/* 192-bit nonce, then 64-bit stream position */
#define XCHACHA_IV_SIZE		32

void chacha_block_generic(u32 *state, u8 *stream, int nrounds);
static inline void chacha20_block(u32 *state, u8 *stream)
{
	chacha_block_generic(state, stream, 20);
}

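/*
 * Illustrative sketch (not an interface defined by this header): generating
 * raw ChaCha20 keystream one 64-byte block at a time.  key_words[] is assumed
 * to already hold the 256-bit key as little-endian 32-bit words, and iv[] the
 * 16-byte counter||nonce IV described above; successive calls are assumed to
 * yield successive keystream blocks.
 *
 *	u32 state[CHACHA_STATE_WORDS];
 *	u8 block[CHACHA_BLOCK_SIZE];
 *
 *	chacha_init(state, key_words, iv);
 *	chacha20_block(state, block);		(first 64 keystream bytes)
 *	chacha20_block(state, block);		(next 64 keystream bytes)
 */
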
void hchacha_block_arch(const u32 *state, u32 *out, int nrounds);
void hchacha_block_generic(const u32 *state, u32 *out, int nrounds);

static inline void hchacha_block(const u32 *state, u32 *out, int nrounds)
{
	if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
		hchacha_block_arch(state, out, nrounds);
	else
		hchacha_block_generic(state, out, nrounds);
}
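
/*
 * Sketch of how hchacha_block() is typically used to build XChaCha (an
 * illustration only, not an API provided here): the key and the first 128
 * bits of the 192-bit nonce are condensed into a 256-bit subkey, which then
 * keys an ordinary ChaCha invocation using the remaining 64 nonce bits.
 * key_words[] (little-endian 32-bit key words) and nonce (the raw XChaCha
 * nonce bytes) are the caller's own names:
 *
 *	u32 h_state[CHACHA_STATE_WORDS];
 *	u32 subkey[CHACHA_KEY_SIZE / sizeof(u32)];
 *	int i;
 *
 *	chacha_init_consts(h_state);
 *	for (i = 0; i < 8; i++)
 *		h_state[4 + i] = key_words[i];			(key: words 4..11)
 *	for (i = 0; i < 4; i++)
 *		h_state[12 + i] = get_unaligned_le32(nonce + 4 * i);	(first 128 nonce bits)
 *	hchacha_block(h_state, subkey, 20);			(256-bit subkey out)
 */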

/* The four ChaCha constant words, spelling "expand 32-byte k" */
static inline void chacha_init_consts(u32 *state)
{
	state[0]  = 0x61707865; /* "expa" */
	state[1]  = 0x3320646e; /* "nd 3" */
	state[2]  = 0x79622d32; /* "2-by" */
	state[3]  = 0x6b206574; /* "te k" */
}

void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv);
static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)
{
	chacha_init_consts(state);
	/* words 4..11: the 256-bit key */
	state[4]  = key[0];
	state[5]  = key[1];
	state[6]  = key[2];
	state[7]  = key[3];
	state[8]  = key[4];
	state[9]  = key[5];
	state[10] = key[6];
	state[11] = key[7];
	/* words 12..15: the IV */
	state[12] = get_unaligned_le32(iv +  0);
	state[13] = get_unaligned_le32(iv +  4);
	state[14] = get_unaligned_le32(iv +  8);
	state[15] = get_unaligned_le32(iv + 12);
}
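
/*
 * Note: the key is consumed as eight 32-bit words.  A caller starting from
 * the raw 32-byte key would be expected to load it little-endian first, e.g.
 * with a helper along these lines (chacha_load_key() is illustrative only,
 * not something this header defines):
 *
 *	static inline void chacha_load_key(u32 *k, const u8 *raw)
 *	{
 *		int i;
 *
 *		for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
 *			k[i] = get_unaligned_le32(raw + i * sizeof(u32));
 *	}
 */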

static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
{
	if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
		chacha_init_arch(state, key, iv);
	else
		chacha_init_generic(state, key, iv);
}

void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
		       unsigned int bytes, int nrounds);
void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
			  unsigned int bytes, int nrounds);

static inline void chacha_crypt(u32 *state, u8 *dst, const u8 *src,
				unsigned int bytes, int nrounds)
{
	if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
		chacha_crypt_arch(state, dst, src, bytes, nrounds);
	else
		chacha_crypt_generic(state, dst, src, bytes, nrounds);
}

static inline void chacha20_crypt(u32 *state, u8 *dst, const u8 *src,
				  unsigned int bytes)
{
	chacha_crypt(state, dst, src, bytes, 20);
}

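/*
 * Putting it together — a minimal sketch of one-shot ChaCha20 encryption with
 * this library interface.  key_words[] (little-endian 32-bit key words), iv[]
 * (16-byte counter||nonce), src, dst and len are the caller's; the names are
 * illustrative, not defined by this header:
 *
 *	u32 state[CHACHA_STATE_WORDS];
 *
 *	chacha_init(state, key_words, iv);
 *	chacha20_crypt(state, dst, src, len);
 *
 * Decryption is the same operation, since the keystream is simply XORed into
 * the data.
 */
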
#endif /* _CRYPTO_CHACHA_H */