/*
 * Copyright (c) 2014, Linaro Limited
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <stdlib.h>
#include <string.h>
#include <string_ext.h>
#include <utee_defines.h>
#include <tee/tee_cryp_utl.h>
#include <tee/tee_cryp_provider.h>
#include <kernel/tee_time.h>
#include <rng_support.h>
#include <initcall.h>

#if !defined(CFG_WITH_SOFTWARE_PRNG)
TEE_Result get_rng_array(void *buffer, int len)
{
	char *buf_char = buffer;
	int i;

	if (buf_char == NULL)
		return TEE_ERROR_BAD_PARAMETERS;

	for (i = 0; i < len; i++)
		buf_char[i] = hw_get_random_byte();

	return TEE_SUCCESS;
}
#endif

TEE_Result tee_hash_get_digest_size(uint32_t algo, size_t *size)
{
	switch (algo) {
	case TEE_ALG_MD5:
	case TEE_ALG_HMAC_MD5:
		*size = TEE_MD5_HASH_SIZE;
		break;
	case TEE_ALG_SHA1:
	case TEE_ALG_HMAC_SHA1:
	case TEE_ALG_DSA_SHA1:
		*size = TEE_SHA1_HASH_SIZE;
		break;
	case TEE_ALG_SHA224:
	case TEE_ALG_HMAC_SHA224:
	case TEE_ALG_DSA_SHA224:
		*size = TEE_SHA224_HASH_SIZE;
		break;
	case TEE_ALG_SHA256:
	case TEE_ALG_HMAC_SHA256:
	case TEE_ALG_DSA_SHA256:
		*size = TEE_SHA256_HASH_SIZE;
		break;
	case TEE_ALG_SHA384:
	case TEE_ALG_HMAC_SHA384:
		*size = TEE_SHA384_HASH_SIZE;
		break;
	case TEE_ALG_SHA512:
	case TEE_ALG_HMAC_SHA512:
		*size = TEE_SHA512_HASH_SIZE;
		break;
	default:
		return TEE_ERROR_NOT_SUPPORTED;
	}

	return TEE_SUCCESS;
}

TEE_Result tee_hash_createdigest(uint32_t algo, const uint8_t *data,
				 size_t datalen, uint8_t *digest,
				 size_t digestlen)
{
	TEE_Result res = TEE_ERROR_BAD_STATE;
	void *ctx = NULL;
	size_t ctxsize;

	if (crypto_ops.hash.get_ctx_size == NULL ||
	    crypto_ops.hash.init == NULL ||
	    crypto_ops.hash.update == NULL ||
	    crypto_ops.hash.final == NULL)
		return TEE_ERROR_NOT_IMPLEMENTED;

	if (crypto_ops.hash.get_ctx_size(algo, &ctxsize) != TEE_SUCCESS) {
		res = TEE_ERROR_NOT_SUPPORTED;
		goto out;
	}

	ctx = malloc(ctxsize);
	if (ctx == NULL) {
		res = TEE_ERROR_OUT_OF_MEMORY;
		goto out;
	}

	if (crypto_ops.hash.init(ctx, algo) != TEE_SUCCESS)
		goto out;

	if (datalen != 0) {
		if (crypto_ops.hash.update(ctx, algo, data, datalen)
		    != TEE_SUCCESS)
			goto out;
	}

	if (crypto_ops.hash.final(ctx, algo, digest, digestlen) != TEE_SUCCESS)
		goto out;

	res = TEE_SUCCESS;

out:
	if (ctx)
		free(ctx);

	return res;
}
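
/*
 * Illustrative sketch (not compiled): one way a caller might combine
 * tee_hash_get_digest_size() and tee_hash_createdigest() to compute a
 * SHA-256 digest of a buffer. The local names buf and buf_len are
 * examples only, not part of this file.
 *
 *	uint8_t digest[TEE_SHA256_HASH_SIZE];
 *	size_t digest_len;
 *
 *	if (tee_hash_get_digest_size(TEE_ALG_SHA256, &digest_len) !=
 *	    TEE_SUCCESS)
 *		return TEE_ERROR_NOT_SUPPORTED;
 *
 *	return tee_hash_createdigest(TEE_ALG_SHA256, buf, buf_len,
 *				     digest, digest_len);
 */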

TEE_Result tee_mac_get_digest_size(uint32_t algo, size_t *size)
{
	switch (algo) {
	case TEE_ALG_HMAC_MD5:
	case TEE_ALG_HMAC_SHA224:
	case TEE_ALG_HMAC_SHA1:
	case TEE_ALG_HMAC_SHA256:
	case TEE_ALG_HMAC_SHA384:
	case TEE_ALG_HMAC_SHA512:
		return tee_hash_get_digest_size(algo, size);
	case TEE_ALG_AES_CBC_MAC_NOPAD:
	case TEE_ALG_AES_CBC_MAC_PKCS5:
	case TEE_ALG_AES_CMAC:
		*size = TEE_AES_BLOCK_SIZE;
		return TEE_SUCCESS;
	case TEE_ALG_DES_CBC_MAC_NOPAD:
	case TEE_ALG_DES_CBC_MAC_PKCS5:
	case TEE_ALG_DES3_CBC_MAC_NOPAD:
	case TEE_ALG_DES3_CBC_MAC_PKCS5:
		*size = TEE_DES_BLOCK_SIZE;
		return TEE_SUCCESS;
	default:
		return TEE_ERROR_NOT_SUPPORTED;
	}
}

TEE_Result tee_cipher_get_block_size(uint32_t algo, size_t *size)
{
	switch (algo) {
	case TEE_ALG_AES_CBC_MAC_NOPAD:
	case TEE_ALG_AES_CBC_MAC_PKCS5:
	case TEE_ALG_AES_CMAC:
	case TEE_ALG_AES_ECB_NOPAD:
	case TEE_ALG_AES_CBC_NOPAD:
	case TEE_ALG_AES_CTR:
	case TEE_ALG_AES_CTS:
	case TEE_ALG_AES_XTS:
	case TEE_ALG_AES_CCM:
	case TEE_ALG_AES_GCM:
		*size = 16;
		break;

	case TEE_ALG_DES_CBC_MAC_NOPAD:
	case TEE_ALG_DES_CBC_MAC_PKCS5:
	case TEE_ALG_DES_ECB_NOPAD:
	case TEE_ALG_DES_CBC_NOPAD:
	case TEE_ALG_DES3_CBC_MAC_NOPAD:
	case TEE_ALG_DES3_CBC_MAC_PKCS5:
	case TEE_ALG_DES3_ECB_NOPAD:
	case TEE_ALG_DES3_CBC_NOPAD:
		*size = 8;
		break;

	default:
		return TEE_ERROR_NOT_SUPPORTED;
	}

	return TEE_SUCCESS;
}
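
/*
 * Illustrative sketch (not compiled): using the helpers above to size a MAC
 * output buffer and to verify that a cipher input length is block aligned.
 * The local names mac_len, block_size and data_len are examples only.
 *
 *	size_t mac_len;
 *	size_t block_size;
 *
 *	if (tee_mac_get_digest_size(TEE_ALG_AES_CMAC, &mac_len) != TEE_SUCCESS)
 *		return TEE_ERROR_NOT_SUPPORTED;
 *	(mac_len is now TEE_AES_BLOCK_SIZE)
 *
 *	if (tee_cipher_get_block_size(TEE_ALG_AES_CBC_NOPAD, &block_size) !=
 *	    TEE_SUCCESS)
 *		return TEE_ERROR_NOT_SUPPORTED;
 *	if (data_len % block_size)
 *		return TEE_ERROR_BAD_PARAMETERS;
 */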

TEE_Result tee_do_cipher_update(void *ctx, uint32_t algo,
				TEE_OperationMode mode, bool last_block,
				const uint8_t *data, size_t len, uint8_t *dst)
{
	TEE_Result res;
	size_t block_size;

	if (mode != TEE_MODE_ENCRYPT && mode != TEE_MODE_DECRYPT)
		return TEE_ERROR_BAD_PARAMETERS;

	if (crypto_ops.cipher.update == NULL)
		return TEE_ERROR_NOT_IMPLEMENTED;

	/*
	 * Check that the block contains the correct amount of data, except
	 * for the last block in XTS, CTR and CTS mode.
	 */
	res = tee_cipher_get_block_size(algo, &block_size);
	if (res != TEE_SUCCESS)
		return res;
	if ((len % block_size) != 0) {
		if (!last_block && algo != TEE_ALG_AES_CTR)
			return TEE_ERROR_BAD_PARAMETERS;

		switch (algo) {
		case TEE_ALG_AES_ECB_NOPAD:
		case TEE_ALG_DES_ECB_NOPAD:
		case TEE_ALG_DES3_ECB_NOPAD:
		case TEE_ALG_AES_CBC_NOPAD:
		case TEE_ALG_DES_CBC_NOPAD:
		case TEE_ALG_DES3_CBC_NOPAD:
			return TEE_ERROR_BAD_PARAMETERS;

		case TEE_ALG_AES_CTR:
		case TEE_ALG_AES_XTS:
		case TEE_ALG_AES_CTS:
			/*
			 * These modes don't require padding for the last
			 * block.
			 *
			 * This isn't entirely true, both XTS and CTS can only
			 * encrypt a minimum of one block and also need at
			 * least one complete block in the last update to
			 * finish the encryption. The algorithms are supposed
			 * to detect that, we're only making sure that all
			 * data fed up to that point consists of complete
			 * blocks.
			 */
			break;

		default:
			return TEE_ERROR_NOT_SUPPORTED;
		}
	}

	return crypto_ops.cipher.update(ctx, algo, mode, last_block, data, len,
					dst);
}
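
/*
 * Illustrative sketch (not compiled): feeding two block-aligned chunks to
 * tee_do_cipher_update() with an already initialized AES-CBC cipher context.
 * The names ctx, chunk1, chunk2 and dst are examples only; every chunk
 * except the last must be a multiple of the block size.
 *
 *	res = tee_do_cipher_update(ctx, TEE_ALG_AES_CBC_NOPAD,
 *				   TEE_MODE_ENCRYPT, false,
 *				   chunk1, 32, dst);
 *	if (res == TEE_SUCCESS)
 *		res = tee_do_cipher_update(ctx, TEE_ALG_AES_CBC_NOPAD,
 *					   TEE_MODE_ENCRYPT, true,
 *					   chunk2, 16, dst + 32);
 */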

/*
 * From http://en.wikipedia.org/wiki/Ciphertext_stealing
 *
 * CBC ciphertext stealing encryption using a standard
 * CBC interface:
 * 1. Pad the last partial plaintext block with 0.
 * 2. Encrypt the whole padded plaintext using the
 *    standard CBC mode.
 * 3. Swap the last two ciphertext blocks.
 * 4. Truncate the ciphertext to the length of the
 *    original plaintext.
 *
 * CBC ciphertext stealing decryption using a standard
 * CBC interface:
 * 1. Dn = Decrypt (K, Cn-1). Decrypt the second to last
 *    ciphertext block.
 * 2. Cn = Cn || Tail (Dn, B-M). Pad the ciphertext to the
 *    nearest multiple of the block size using the last
 *    B-M bits of block cipher decryption of the
 *    second-to-last ciphertext block.
 * 3. Swap the last two ciphertext blocks.
 * 4. Decrypt the (modified) ciphertext using the standard
 *    CBC mode.
 * 5. Truncate the plaintext to the length of the original
 *    ciphertext.
 */
TEE_Result tee_aes_cbc_cts_update(void *cbc_ctx, void *ecb_ctx,
				  TEE_OperationMode mode, bool last_block,
				  const uint8_t *data, size_t len,
				  uint8_t *dst)
{
	TEE_Result res;
	int nb_blocks, len_last_block, block_size = 16;
	uint8_t tmp_block[64], tmp2_block[64];

	if (!last_block)
		return tee_do_cipher_update(cbc_ctx, TEE_ALG_AES_CBC_NOPAD,
					    mode, last_block, data, len, dst);

	/* Compute the last block length and check constraints */
	nb_blocks = ((len + block_size - 1) / block_size);
	if (nb_blocks < 2)
		return TEE_ERROR_BAD_STATE;
	len_last_block = len % block_size;
	if (len_last_block == 0)
		len_last_block = block_size;

	if (mode == TEE_MODE_ENCRYPT) {
		memcpy(tmp_block,
		       data + ((nb_blocks - 1) * block_size),
		       len_last_block);
		memset(tmp_block + len_last_block,
		       0,
		       block_size - len_last_block);

		res = tee_do_cipher_update(cbc_ctx, TEE_ALG_AES_CBC_NOPAD,
					   mode, 0, data,
					   (nb_blocks - 1) * block_size, dst);
		if (res != TEE_SUCCESS)
			return res;

		memcpy(dst + (nb_blocks - 1) * block_size,
		       dst + (nb_blocks - 2) * block_size,
		       len_last_block);

		res = tee_do_cipher_update(cbc_ctx, TEE_ALG_AES_CBC_NOPAD,
					   mode, 0, tmp_block, block_size,
					   dst + (nb_blocks - 2) * block_size);
		if (res != TEE_SUCCESS)
			return res;
	} else {
		/* 1. Decrypt the second to last ciphertext block */
		res = tee_do_cipher_update(ecb_ctx, TEE_ALG_AES_ECB_NOPAD,
					   mode, 0,
					   data + (nb_blocks - 2) * block_size,
					   block_size, tmp2_block);
		if (res != TEE_SUCCESS)
			return res;

		/* 2. Cn = Cn || Tail (Dn, B-M) */
		memcpy(tmp_block, data + ((nb_blocks - 1) * block_size),
		       len_last_block);
		memcpy(tmp_block + len_last_block, tmp2_block + len_last_block,
		       block_size - len_last_block);

		/* 3. Swap the last two ciphertext blocks */
		/* done by passing the correct buffers in step 4. */

		/* 4. Decrypt the (modified) ciphertext */
		if (nb_blocks > 2) {
			res = tee_do_cipher_update(cbc_ctx,
						   TEE_ALG_AES_CBC_NOPAD, mode,
						   0, data,
						   (nb_blocks - 2) *
						   block_size, dst);
			if (res != TEE_SUCCESS)
				return res;
		}

		res = tee_do_cipher_update(cbc_ctx, TEE_ALG_AES_CBC_NOPAD,
					   mode, 0, tmp_block, block_size,
					   dst +
					   ((nb_blocks - 2) * block_size));
		if (res != TEE_SUCCESS)
			return res;

		res = tee_do_cipher_update(cbc_ctx, TEE_ALG_AES_CBC_NOPAD,
					   mode, 0, data +
					   ((nb_blocks - 2) * block_size),
					   block_size, tmp_block);
		if (res != TEE_SUCCESS)
			return res;

		/* 5. Truncate the plaintext */
		memcpy(dst + (nb_blocks - 1) * block_size, tmp_block,
		       len_last_block);
	}
	return TEE_SUCCESS;
}

TEE_Result tee_prng_add_entropy(const uint8_t *in, size_t len)
{
	return crypto_rng_add_entropy(in, len);
}

/*
 * Override this in your platform code to feed the PRNG with
 * platform-specific jitter entropy. This default implementation does not
 * deliver much entropy and is here for backwards compatibility.
 */
__weak void plat_prng_add_jitter_entropy(void)
{
	TEE_Time current;

	if (tee_time_get_sys_time(&current) == TEE_SUCCESS)
		tee_prng_add_entropy((uint8_t *)&current, sizeof(current));
}

__weak void plat_prng_add_jitter_entropy_norpc(void)
{
#ifndef CFG_SECURE_TIME_SOURCE_REE
	plat_prng_add_jitter_entropy();
#endif
}

static TEE_Result tee_cryp_init(void)
{
	if (crypto_ops.init)
		return crypto_ops.init();

	return TEE_SUCCESS;
}

service_init(tee_cryp_init);
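
/*
 * Illustrative sketch (not compiled): a platform could override the weak
 * plat_prng_add_jitter_entropy() above along these lines, feeding the PRNG
 * with a fast-moving counter. read_platform_counter() is a hypothetical
 * helper standing in for whatever jitter source the platform provides.
 *
 *	void plat_prng_add_jitter_entropy(void)
 *	{
 *		uint64_t tick = read_platform_counter();
 *
 *		tee_prng_add_entropy((uint8_t *)&tick, sizeof(tick));
 *	}
 */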