// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright 2024 NXP
 *
 * Implementation of Cipher GCM functions
 */
#include <assert.h>
#include <caam_common.h>
#include <caam_desc_ccb_defines.h>
#include <caam_utils_mem.h>
#include <caam_utils_status.h>
#include <stdint.h>
#include <string.h>
#include <string_ext.h>
#include <utee_defines.h>

#include "local.h"

/*
 * Default GCM nonce length
 * The CAAM errata workaround is used if the nonce size is not the
 * default one
 */
#define AES_GCM_DEFAULT_NONCE_LEN 12
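
/*
 * With the default 12-byte nonce, GCM forms the initial counter block
 * as Y0 = nonce || 0x00000001, so the 32-bit counter starts at 1 and
 * cannot roll over within the allowed message length. For any other
 * nonce length, Y0 = GHASH(nonce), so the initial counter value is
 * effectively arbitrary and may roll over mid-message, which is the
 * case the errata workaround below handles.
 */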

/*
 * Context format in GCM mode
 */
struct gcm_caam_ctx_layout {
	/*
	 * 128 bits MAC value
	 */
	uint32_t mac[4];
	/*
	 * 128 bits counter value Ctr(i)
	 */
	uint32_t yi[4];
	/*
	 * 128 bits initial counter value Ctr(0)
	 */
	uint32_t y0[4];
};

/*
 * Update the cipher operation with complete blocks; only the last
 * block may be a partial block.
 *
 * @caam_ctx AE Cipher context
 * @encrypt  Encrypt or decrypt direction
 * @src      Source data to encrypt/decrypt
 * @dst      [out] Destination data encrypted/decrypted
 * @final    Last block flag
 */
static bool caam_ae_do_block_gcm(struct caam_ae_ctx *caam_ctx, bool encrypt,
				 struct caamdmaobj *src, struct caamdmaobj *dst,
				 bool final)
{
	/*
	 * CAAM Errata:
	 * When running GCM with an IV that is not 12 bytes (96 bits),
	 * it is possible to "roll over" the 32-bit counter value.
	 * When this occurs (unless the counter starts at -1),
	 * the AES operation will generate an incorrect MAC.
	 * This occurs even when -1 is used as the counter for the last block.
	 * The problem is that the 32-bit counter will overflow into the h
	 * value, thus corrupting the MAC.
	 * In order to reliably work around this issue,
	 * the AES operation must be stopped after initialization to
	 * determine the counter value to see whether/when it will roll over.
	 * Then, before the offending block is processed,
	 * the context needs to be saved. The one block gets processed twice:
	 * GCM, restore MAC, GMAC over its ciphertext,
	 * then patch up the message and AAD lengths, and carry on as normal.
	 */
	uint32_t *desc = NULL;
	struct gcm_caam_ctx_layout ctx = { };
	size_t input_length = 0;
	/* Current 32-bit counter word of the saved context (Yi, big endian) */
	uint32_t processed_blocks = 0;
	/* Counter value once this request has been processed */
	uint32_t counter_value = 0;
	/* Size of the block during which the counter rolls over */
	uint32_t corrupted_block_size = 0;
	/* Number of bytes that can be processed before the roll-over block */
	uint32_t yi_1s_complement = 0;
	/* Number of bytes left after the roll-over block */
	uint32_t remaining_len = 0;

	if (!caam_ctx)
		return false;

	desc = caam_ctx->descriptor;

	/*
	 * for encrypt:
	 * 1) Run GCM until we get to the block which will
	 *    roll over the counter.
	 * 2) Save the current ghash value
	 * 3) Encrypt that one block (creating a bad hash value)
	 * 4) Restore the hash value
	 * 5) Save current AAD len
	 * 6) Run ciphertext of the block in as AAD
	 * 7) Restore the AAD len
	 * 8) Run GCM on the rest of the message
	 * 9) Compute and store the MAC/tag
	 *
	 * for decrypt:
	 * 1) Run GCM until we get to the block which will
	 *    roll over the counter.
	 * 2) Save the current ghash value
	 * 3) Decrypt that one block (creating a bad hash value)
	 * 4) Restore the hash value
	 * 5) Save current AAD len
	 * 6) Run ciphertext of the block in as AAD
	 * 7) Restore the AAD len
	 * 8) Run GCM on the rest of the message
	 * 9) Compute and store the MAC/tag
	 */

	if (!src || src->orig.length == 0 ||
	    caam_ctx->nonce.length == AES_GCM_DEFAULT_NONCE_LEN)
		return false;

	memcpy(&ctx, caam_ctx->ctx.data, sizeof(struct gcm_caam_ctx_layout));
	processed_blocks = TEE_U32_FROM_BIG_ENDIAN(ctx.yi[3]);
	input_length = src->orig.length;
	counter_value = processed_blocks + ROUNDUP_DIV(input_length, 16);

	/*
	 * Check for 32-bit counter overflow: if the counter does not
	 * wrap within this request, the default GCM path is sufficient.
	 */
	if (counter_value >= processed_blocks)
		return false;

	assert(dst);

	yi_1s_complement = SHIFT_U32(UINT32_MAX - processed_blocks, 4);
	if ((yi_1s_complement + TEE_AES_BLOCK_SIZE) > input_length)
		corrupted_block_size = input_length - yi_1s_complement;
	else
		corrupted_block_size = TEE_AES_BLOCK_SIZE;
	remaining_len = input_length - (yi_1s_complement +
					corrupted_block_size);
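
	/*
	 * Worked example, with assumed values: processed_blocks =
	 * 0xfffffffe and input_length = 100 bytes (7 blocks) give
	 * counter_value = (0xfffffffe + 7) mod 2^32 = 5, which is below
	 * processed_blocks, so the workaround runs. Then
	 * yi_1s_complement = (0xffffffff - 0xfffffffe) * 16 = 16 bytes
	 * are processed normally, the next corrupted_block_size = 16
	 * bytes receive the special handling, and remaining_len =
	 * 100 - 32 = 68 bytes are processed as usual afterwards.
	 */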

	caam_desc_seq_out(desc, dst);
	caam_dmaobj_cache_push(dst);

	caam_desc_seq_in(desc, src);
	caam_dmaobj_cache_push(src);

	/* operation: cls1-op aes gcm update enc/dec */
	caam_desc_add_word(desc, CIPHER_UPDATE(caam_ctx->alg->type, encrypt));

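	/*
	 * Step 1: run GCM normally on the yi_1s_complement bytes that
	 * precede the roll-over block. The FIFO length fields below are
	 * 0 with the EXT flag set, so each command takes its byte count
	 * from the extension word that follows it.
	 */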
	caam_desc_add_word(desc, FIFO_LD_SEQ(MSG, 0) | FIFO_STORE_EXT |
				 CMD_CLASS(CLASS_1) |
				 FIFO_LOAD_ACTION(LAST_C1));
	caam_desc_add_word(desc, yi_1s_complement);

	caam_desc_add_word(desc, FIFO_ST_SEQ(MSG_DATA, 0) | FIFO_STORE_EXT);
	caam_desc_add_word(desc, yi_1s_complement);

	/* jump: class1-done all-match[] always-jump offset=[01] local->[15] */
	caam_desc_add_word(desc,
			   JUMP_C1_LOCAL(ALL_COND_TRUE, JMP_COND(NONE), 1));
156
157 /*
158 * move: class1-ctx+0 -> math2, len=TEE_AES_BLOCK_SIZE wait
159 * Save the current ghash value
160 */
161 caam_desc_add_word(desc, MOVE_WAIT(C1_CTX_REG, MATH_REG2, 0,
162 TEE_AES_BLOCK_SIZE));
163
164 /*
165 * ld: ind-clrw len=4 offs=0 imm
166 * clrw: clr_c1mode clr_c1datas reset_cls1_done reset_cls1_cha
167 * clr_c2_ctx
168 */
169 caam_desc_add_word(desc, LD_IMM(CLASS_NO, REG_CLEAR_WRITTEN, 4));
170 caam_desc_add_word(desc, CLR_WR_RST_C1_MDE | CLR_WR_RST_C1_DSZ |
171 CLR_WR_RST_C1_CHA | CLR_WR_RST_C1_DNE |
172 CLR_WR_RST_C2_CTX);
173
174 /*
175 * Encrypt that one block (creating a bad hash value)
176 * operation: cls1-op aes gcm update enc/dec
177 */
178 caam_desc_add_word(desc, CIPHER_UPDATE(caam_ctx->alg->type, encrypt));
179
	if (encrypt) {
		/* seqfifold: class1 msg-last1 len=corrupted_block_size */
		caam_desc_add_word(desc,
				   FIFO_LD_SEQ(MSG, corrupted_block_size) |
				   CMD_CLASS(CLASS_1) |
				   FIFO_LOAD_ACTION(LAST_C1));

		/* move: ofifo -> class2-ctx+0, len=corrupted_block_size wait */
		caam_desc_add_word(desc, MOVE_WAIT(OFIFO, C2_CTX_REG, 0,
						   corrupted_block_size));

		/* seqstr: ccb2 ctx len=vseqoutsz offs=0 */
		caam_desc_add_word(desc, ST_NOIMM_SEQ(CLASS_2, REG_CTX,
						      corrupted_block_size));
	} else {
		/* seqfifold: both msg-last2-last1 len=corrupted_block_size */
		caam_desc_add_word(desc,
				   FIFO_LD_SEQ(MSG, corrupted_block_size) |
				   CMD_CLASS(CLASS_DECO) |
				   FIFO_LOAD_ACTION(LAST_C1) |
				   FIFO_LOAD_ACTION(LAST_C2));

		/*
		 * move: class2-alnblk -> class2-ctx+0,
		 * len=corrupted_block_size (aux_ms)
		 */
		caam_desc_add_word(desc, MOVE(DECO_ALIGN, C2_CTX_REG, 0,
					      corrupted_block_size) |
					 MOVE_AUX(0x2));

		/* seqfifostr: msg len=vseqoutsz */
		caam_desc_add_word(desc,
				   FIFO_ST_SEQ(MSG_DATA, corrupted_block_size));
	}
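
	/*
	 * In both directions the ciphertext of the corrupted block must
	 * later be fed back through GHASH as AAD (step 6). When
	 * encrypting, that ciphertext is captured from the output FIFO;
	 * when decrypting, the input is already ciphertext, so it is
	 * captured from the DECO alignment block instead.
	 */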

	/* jump: class1-done all-match[] always-jump offset=[01] local->[23] */
	caam_desc_add_word(desc,
			   JUMP_C1_LOCAL(ALL_COND_TRUE, JMP_COND(NONE), 1));

	/*
	 * Restore the hash value
	 * move: math2 -> class1-ctx+0, len=TEE_AES_BLOCK_SIZE wait
	 */
	caam_desc_add_word(desc, MOVE_WAIT(MATH_REG2, C1_CTX_REG, 0,
					   TEE_AES_BLOCK_SIZE));

	/*
	 * ld: ind-clrw len=4 offs=0 imm
	 * clrw: clr_c1mode clr_c1datas reset_cls1_done reset_cls1_cha
	 */
	caam_desc_add_word(desc, LD_IMM(CLASS_NO, REG_CLEAR_WRITTEN, 4));
	caam_desc_add_word(desc, CLR_WR_RST_C1_MDE | CLR_WR_RST_C1_DSZ |
				 CLR_WR_RST_C1_CHA | CLR_WR_RST_C1_DNE);

	/*
	 * Save current AAD len
	 * move: class1-ctx+48 -> math2, len=8 wait
	 */
	caam_desc_add_word(desc, MOVE_WAIT(C1_CTX_REG, MATH_REG2, 48, 8));

	/*
	 * Run ciphertext of the block in as AAD
	 * move: class2-ctx+0 -> ififo, len=corrupted_block_size
	 */
	caam_desc_add_word(desc,
			   MOVE(C2_CTX_REG, IFIFO, 0, corrupted_block_size));

	/*
	 * ld: ind-nfsl len=4 offs=0 imm
	 * <nfifo_entry: ififo->class1 type=aad/pka1 lc1 len=16>
	 */
	caam_desc_add_word(desc, LD_IMM(CLASS_NO, REG_NFIFO_n_SIZE,
					sizeof(uint32_t)));
	caam_desc_add_word(desc, NFIFO_NOPAD(C1, NFIFO_LC1, IFIFO, AAD,
					     corrupted_block_size));

	/* operation: cls1-op aes gcm update enc/dec */
	caam_desc_add_word(desc, CIPHER_UPDATE(caam_ctx->alg->type, encrypt));

	/* jump: class1-done all-match[] always-jump offset=[01] local->[32] */
	caam_desc_add_word(desc,
			   JUMP_C1_LOCAL(ALL_COND_TRUE, JMP_COND(NONE), 1));

	/*
	 * Restore the AAD len
	 * move: math2 -> class1-ctx+48, len=8 wait
	 */
	caam_desc_add_word(desc, MOVE_WAIT(MATH_REG2, C1_CTX_REG, 48, 8));

	/*
	 * Run GCM on the rest of the message
	 * ld: ind-clrw len=4 offs=0 imm
	 * clrw: clr_c1mode clr_c1datas reset_cls1_done reset_cls1_cha
	 */
	caam_desc_add_word(desc, LD_IMM(CLASS_NO, REG_CLEAR_WRITTEN, 4));
	caam_desc_add_word(desc, CLR_WR_RST_C1_MDE | CLR_WR_RST_C1_DSZ |
				 CLR_WR_RST_C1_CHA | CLR_WR_RST_C1_DNE);

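	/*
	 * Steps 8 and 9: process the rest of the message; on the last
	 * update, finalize the operation to compute the MAC/tag.
	 */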
	if (final)
		caam_desc_add_word(desc,
				   CIPHER_FINAL(caam_ctx->alg->type, encrypt));
	else
		caam_desc_add_word(desc,
				   CIPHER_UPDATE(caam_ctx->alg->type, encrypt));

	/* ptr incremented by max. 7 */
	caam_desc_add_word(desc, FIFO_LD_SEQ(MSG, 0) | FIFO_STORE_EXT |
				 CMD_CLASS(CLASS_1) |
				 FIFO_LOAD_ACTION(LAST_C1));
	caam_desc_add_word(desc, remaining_len);

	caam_desc_add_word(desc, FIFO_ST_SEQ(MSG_DATA, 0) | FIFO_STORE_EXT);
	caam_desc_add_word(desc, remaining_len);

	return true;
}

TEE_Result caam_ae_initialize_gcm(struct drvcrypt_authenc_init *dinit)
{
	enum caam_status retstatus = CAAM_FAILURE;
	struct caam_ae_ctx *caam_ctx = NULL;

	if (!dinit || !dinit->ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	caam_ctx = dinit->ctx;

	if (dinit->nonce.data && dinit->nonce.length) {
		retstatus = caam_cpy_buf(&caam_ctx->nonce, dinit->nonce.data,
					 dinit->nonce.length);
		AE_TRACE("Copy Nonce returned 0x%" PRIx32, retstatus);
		if (retstatus)
			return caam_status_to_tee_result(retstatus);
	}

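	/*
	 * The saved nonce length lets caam_ae_do_block_gcm() detect a
	 * non-default nonce and apply the errata workaround.
	 */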
	caam_ctx->do_block = caam_ae_do_block_gcm;

	/* Initialize the AAD buffer */
	caam_ctx->buf_aad.max = dinit->aad_len;

	return TEE_SUCCESS;
}

TEE_Result caam_ae_final_gcm(struct drvcrypt_authenc_final *dfinal)
{
	TEE_Result ret = TEE_ERROR_GENERIC;
	struct caam_ae_ctx *caam_ctx = NULL;

	if (!dfinal || !dfinal->ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	caam_ctx = dfinal->ctx;

	ret = caam_ae_do_update(caam_ctx, &dfinal->src, &dfinal->dst, true);
	if (ret)
		return ret;

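	/*
	 * For encryption, return the tag computed by CAAM; for
	 * decryption, compare the expected tag in constant time.
	 */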
	if (caam_ctx->tag_length) {
		if (dfinal->tag.length < caam_ctx->tag_length)
			return TEE_ERROR_BAD_PARAMETERS;

		if (caam_ctx->encrypt) {
			memcpy(dfinal->tag.data, caam_ctx->ctx.data,
			       caam_ctx->tag_length);
			dfinal->tag.length = caam_ctx->tag_length;
		} else {
			if (consttime_memcmp(dfinal->tag.data,
					     caam_ctx->ctx.data,
					     caam_ctx->tag_length))
				return TEE_ERROR_MAC_INVALID;
		}
	}

	return TEE_SUCCESS;
}