source: azure_iot_hub_f767zi/trunk/wolfssl-4.4.0/wolfcrypt/src/sha256.c @ 457

Last change on this file since 457 was 457, checked in by coas-nagasima, 4 years ago

Add files

  • Property svn:eol-style set to native
  • Property svn:mime-type set to text/x-csrc;charset=UTF-8
File size: 48.1 KB
1/* sha256.c
2 *
3 * Copyright (C) 2006-2020 wolfSSL Inc.
4 *
5 * This file is part of wolfSSL.
6 *
7 * wolfSSL is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; either version 2 of the License, or
10 * (at your option) any later version.
11 *
12 * wolfSSL is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
16 *
17 * You should have received a copy of the GNU General Public License
18 * along with this program; if not, write to the Free Software
19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
20 */
21
22#ifdef HAVE_CONFIG_H
23 #include <config.h>
24#endif
25
26#include <wolfssl/wolfcrypt/settings.h>
27
28/*
29 * SHA256 Build Options:
30 * USE_SLOW_SHA256: Reduces code size by not partially unrolling
31 (~2KB smaller and ~25% slower) (default OFF)
32 * WOLFSSL_SHA256_BY_SPEC: Uses the Ch/Maj based on SHA256 specification
33 (default ON)
34 * WOLFSSL_SHA256_ALT_CH_MAJ: Alternate Ch/Maj that is easier for compilers to
35 optimize and recognize as SHA256 (default OFF)
36 * SHA256_MANY_REGISTERS: A SHA256 version that keeps all data in registers
37 and partial unrolled (default OFF)
38 */
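/*
 * Example sketch (not part of the upstream file), assuming a
 * WOLFSSL_USER_SETTINGS style build: the options above are plain
 * preprocessor defines, so they can be selected in user_settings.h or on
 * the compiler command line, e.g.:
 *
 *     // user_settings.h
 *     #define USE_SLOW_SHA256            // smaller code, ~25% slower
 *     #define WOLFSSL_SHA256_ALT_CH_MAJ  // compiler-friendly Ch/Maj
 *
 *   or:  cc -DUSE_SLOW_SHA256 -DWOLFSSL_SHA256_ALT_CH_MAJ ...
 */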
39
40/* Default SHA256 to use Ch/Maj based on specification */
41#if !defined(WOLFSSL_SHA256_BY_SPEC) && !defined(WOLFSSL_SHA256_ALT_CH_MAJ)
42 #define WOLFSSL_SHA256_BY_SPEC
43#endif
44
45
46#if !defined(NO_SHA256) && !defined(WOLFSSL_ARMASM)
47
48#if defined(HAVE_FIPS) && \
49 defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2)
50
51 /* set NO_WRAPPERS before headers, use direct internal f()s not wrappers */
52 #define FIPS_NO_WRAPPERS
53
54 #ifdef USE_WINDOWS_API
55 #pragma code_seg(".fipsA$d")
56 #pragma const_seg(".fipsB$d")
57 #endif
58#endif
59
60#include <wolfssl/wolfcrypt/sha256.h>
61#include <wolfssl/wolfcrypt/error-crypt.h>
62#include <wolfssl/wolfcrypt/cpuid.h>
63#include <wolfssl/wolfcrypt/hash.h>
64
65#ifdef WOLF_CRYPTO_CB
66 #include <wolfssl/wolfcrypt/cryptocb.h>
67#endif
68
69/* fips wrapper calls, user can call direct */
70#if defined(HAVE_FIPS) && \
71 (!defined(HAVE_FIPS_VERSION) || (HAVE_FIPS_VERSION < 2))
72
73 int wc_InitSha256(wc_Sha256* sha)
74 {
75 if (sha == NULL) {
76 return BAD_FUNC_ARG;
77 }
78 return InitSha256_fips(sha);
79 }
80 int wc_InitSha256_ex(wc_Sha256* sha, void* heap, int devId)
81 {
82 (void)heap;
83 (void)devId;
84 if (sha == NULL) {
85 return BAD_FUNC_ARG;
86 }
87 return InitSha256_fips(sha);
88 }
89 int wc_Sha256Update(wc_Sha256* sha, const byte* data, word32 len)
90 {
91 if (sha == NULL || (data == NULL && len > 0)) {
92 return BAD_FUNC_ARG;
93 }
94
95 if (data == NULL && len == 0) {
96 /* valid, but do nothing */
97 return 0;
98 }
99
100 return Sha256Update_fips(sha, data, len);
101 }
102 int wc_Sha256Final(wc_Sha256* sha, byte* out)
103 {
104 if (sha == NULL || out == NULL) {
105 return BAD_FUNC_ARG;
106 }
107 return Sha256Final_fips(sha, out);
108 }
109 void wc_Sha256Free(wc_Sha256* sha)
110 {
111 (void)sha;
112 /* Not supported in FIPS */
113 }
114
115#else /* else build without fips, or for FIPS v2 */
116
117
118#if defined(WOLFSSL_TI_HASH)
119 /* #include <wolfcrypt/src/port/ti/ti-hash.c> included by wc_port.c */
120#elif defined(WOLFSSL_CRYPTOCELL)
121 /* wc_port.c includes wolfcrypt/src/port/arm/cryptoCellHash.c */
122#else
123
124#include <wolfssl/wolfcrypt/logging.h>
125
126#ifdef NO_INLINE
127 #include <wolfssl/wolfcrypt/misc.h>
128#else
129 #define WOLFSSL_MISC_INCLUDED
130 #include <wolfcrypt/src/misc.c>
131#endif
132
133#ifdef WOLFSSL_DEVCRYPTO_HASH
134 #include <wolfssl/wolfcrypt/port/devcrypto/wc_devcrypto.h>
135#endif
136
137
138
139#if defined(USE_INTEL_SPEEDUP)
140 #if defined(__GNUC__) && ((__GNUC__ < 4) || \
141 (__GNUC__ == 4 && __GNUC_MINOR__ <= 8))
142 #undef NO_AVX2_SUPPORT
143 #define NO_AVX2_SUPPORT
144 #endif
145 #if defined(__clang__) && ((__clang_major__ < 3) || \
146 (__clang_major__ == 3 && __clang_minor__ <= 5))
147 #define NO_AVX2_SUPPORT
148 #elif defined(__clang__) && defined(NO_AVX2_SUPPORT)
149 #undef NO_AVX2_SUPPORT
150 #endif
151
152 #define HAVE_INTEL_AVX1
153 #ifndef NO_AVX2_SUPPORT
154 #define HAVE_INTEL_AVX2
155 #endif
156#endif /* USE_INTEL_SPEEDUP */
157
158#if defined(HAVE_INTEL_AVX2)
159 #define HAVE_INTEL_RORX
160#endif
161
162
163#if !defined(WOLFSSL_PIC32MZ_HASH) && !defined(STM32_HASH_SHA2) && \
164 (!defined(WOLFSSL_IMX6_CAAM) || defined(NO_IMX6_CAAM_HASH)) && \
165 !defined(WOLFSSL_AFALG_HASH) && !defined(WOLFSSL_DEVCRYPTO_HASH) && \
166 (!defined(WOLFSSL_ESP32WROOM32_CRYPT) || defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)) && \
167 (!defined(WOLFSSL_RENESAS_TSIP_CRYPT) || defined(NO_WOLFSSL_RENESAS_TSIP_HASH))
168
169static int InitSha256(wc_Sha256* sha256)
170{
171 int ret = 0;
172
173 if (sha256 == NULL)
174 return BAD_FUNC_ARG;
175
176 XMEMSET(sha256->digest, 0, sizeof(sha256->digest));
177 sha256->digest[0] = 0x6A09E667L;
178 sha256->digest[1] = 0xBB67AE85L;
179 sha256->digest[2] = 0x3C6EF372L;
180 sha256->digest[3] = 0xA54FF53AL;
181 sha256->digest[4] = 0x510E527FL;
182 sha256->digest[5] = 0x9B05688CL;
183 sha256->digest[6] = 0x1F83D9ABL;
184 sha256->digest[7] = 0x5BE0CD19L;
185
186 sha256->buffLen = 0;
187 sha256->loLen = 0;
188 sha256->hiLen = 0;
189#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
190 sha256->flags = 0;
191#endif
192
193 return ret;
194}
195#endif
196
197
198/* Hardware Acceleration */
199#if defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)
200
201 /* in case intel instructions aren't available, plus we need the K[] global */
202 #define NEED_SOFT_SHA256
203
204 /*****
205 Intel AVX1/AVX2 Macro Control Structure
206
207 #define HAVE_INTEL_AVX1
208 #define HAVE_INTEL_AVX2
209
210 #define HAVE_INTEL_RORX
211
212
213 int InitSha256(wc_Sha256* sha256) {
214 Save/Recover XMM, YMM
215 ...
216 }
217
218 #if defined(HAVE_INTEL_AVX1)|| defined(HAVE_INTEL_AVX2)
219 Transform_Sha256(); Function prototype
220 #else
221 Transform_Sha256() { }
222 int Sha256Final() {
223 Save/Recover XMM, YMM
224 ...
225 }
226 #endif
227
228 #if defined(HAVE_INTEL_AVX1)|| defined(HAVE_INTEL_AVX2)
229 #if defined(HAVE_INTEL_RORX)
230 #define RND with rorx instruction
231 #else
232 #define RND
233 #endif
234 #endif
235
236 #if defined(HAVE_INTEL_AVX1)
237
238 #define XMM Instructions/inline asm
239
240 int Transform_Sha256() {
241 Stitched Message Sched/Round
242 }
243
244 #elif defined(HAVE_INTEL_AVX2)
245
246 #define YMM Instructions/inline asm
247
248 int Transform_Sha256() {
249 More granular Stitched Message Sched/Round
250 }
251
252 #endif
253
254 */
255
256 /* Each platform needs to query cpuid to see if the AVX1/AVX2 (and BMI2)
257 * instructions used here are supported. Also, let's set up a macro for proper linkage w/o ABI conflicts
258 */
259
260 /* #if defined(HAVE_INTEL_AVX1/2) at the tail of sha256 */
261 static int Transform_Sha256(wc_Sha256* sha256, const byte* data);
262
263#ifdef __cplusplus
264 extern "C" {
265#endif
266
267 #if defined(HAVE_INTEL_AVX1)
268 extern int Transform_Sha256_AVX1(wc_Sha256 *sha256, const byte* data);
269 extern int Transform_Sha256_AVX1_Len(wc_Sha256* sha256,
270 const byte* data, word32 len);
271 #endif
272 #if defined(HAVE_INTEL_AVX2)
273 extern int Transform_Sha256_AVX2(wc_Sha256 *sha256, const byte* data);
274 extern int Transform_Sha256_AVX2_Len(wc_Sha256* sha256,
275 const byte* data, word32 len);
276 #ifdef HAVE_INTEL_RORX
277 extern int Transform_Sha256_AVX1_RORX(wc_Sha256 *sha256, const byte* data);
278 extern int Transform_Sha256_AVX1_RORX_Len(wc_Sha256* sha256,
279 const byte* data, word32 len);
280 extern int Transform_Sha256_AVX2_RORX(wc_Sha256 *sha256, const byte* data);
281 extern int Transform_Sha256_AVX2_RORX_Len(wc_Sha256* sha256,
282 const byte* data, word32 len);
283 #endif /* HAVE_INTEL_RORX */
284 #endif /* HAVE_INTEL_AVX2 */
285
286#ifdef __cplusplus
287 } /* extern "C" */
288#endif
289
290 static int (*Transform_Sha256_p)(wc_Sha256* sha256, const byte* data);
291 /* = Transform_Sha256 */
292 static int (*Transform_Sha256_Len_p)(wc_Sha256* sha256, const byte* data,
293 word32 len);
294 /* = NULL */
295 static int transform_check = 0;
296 static word32 intel_flags;
297
298 #define XTRANSFORM(S, D) (*Transform_Sha256_p)((S),(D))
299 #define XTRANSFORM_LEN(S, D, L) (*Transform_Sha256_Len_p)((S),(D),(L))
300
301 static void Sha256_SetTransform(void)
302 {
303
304 if (transform_check)
305 return;
306
307 intel_flags = cpuid_get_flags();
308
309 #ifdef HAVE_INTEL_AVX2
310 if (1 && IS_INTEL_AVX2(intel_flags)) {
311 #ifdef HAVE_INTEL_RORX
312 if (IS_INTEL_BMI2(intel_flags)) {
313 Transform_Sha256_p = Transform_Sha256_AVX2_RORX;
314 Transform_Sha256_Len_p = Transform_Sha256_AVX2_RORX_Len;
315 }
316 else
317 #endif
318 if (1)
319 {
320 Transform_Sha256_p = Transform_Sha256_AVX2;
321 Transform_Sha256_Len_p = Transform_Sha256_AVX2_Len;
322 }
323 #ifdef HAVE_INTEL_RORX
324 else {
325 Transform_Sha256_p = Transform_Sha256_AVX1_RORX;
326 Transform_Sha256_Len_p = Transform_Sha256_AVX1_RORX_Len;
327 }
328 #endif
329 }
330 else
331 #endif
332 #ifdef HAVE_INTEL_AVX1
333 if (IS_INTEL_AVX1(intel_flags)) {
334 Transform_Sha256_p = Transform_Sha256_AVX1;
335 Transform_Sha256_Len_p = Transform_Sha256_AVX1_Len;
336 }
337 else
338 #endif
339 {
340 Transform_Sha256_p = Transform_Sha256;
341 Transform_Sha256_Len_p = NULL;
342 }
343
344 transform_check = 1;
345 }
346
347 int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
348 {
349 int ret = 0;
350 if (sha256 == NULL)
351 return BAD_FUNC_ARG;
352
353 sha256->heap = heap;
354 #ifdef WOLF_CRYPTO_CB
355 sha256->devId = devId;
356 #endif
357
358 ret = InitSha256(sha256);
359 if (ret != 0)
360 return ret;
361
362 /* choose best Transform function under this runtime environment */
363 Sha256_SetTransform();
364
365 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
366 ret = wolfAsync_DevCtxInit(&sha256->asyncDev,
367 WOLFSSL_ASYNC_MARKER_SHA256, sha256->heap, devId);
368 #else
369 (void)devId;
370 #endif /* WOLFSSL_ASYNC_CRYPT */
371
372 return ret;
373 }
374
375#elif defined(FREESCALE_LTC_SHA)
376 int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
377 {
378 (void)heap;
379 (void)devId;
380
381 LTC_HASH_Init(LTC_BASE, &sha256->ctx, kLTC_Sha256, NULL, 0);
382
383 return 0;
384 }
385
386#elif defined(FREESCALE_MMCAU_SHA)
387
388 #ifdef FREESCALE_MMCAU_CLASSIC_SHA
389 #include "cau_api.h"
390 #else
391 #include "fsl_mmcau.h"
392 #endif
393
394 #define XTRANSFORM(S, D) Transform_Sha256((S),(D))
395 #define XTRANSFORM_LEN(S, D, L) Transform_Sha256_Len((S),(D),(L))
396
397 #ifndef WC_HASH_DATA_ALIGNMENT
398 /* these hardware APIs require 4-byte (word32) alignment */
399 #define WC_HASH_DATA_ALIGNMENT 4
400 #endif
401
402 int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
403 {
404 int ret = 0;
405
406 (void)heap;
407 (void)devId;
408
409 ret = wolfSSL_CryptHwMutexLock();
410 if (ret != 0) {
411 return ret;
412 }
413
414 #ifdef FREESCALE_MMCAU_CLASSIC_SHA
415 cau_sha256_initialize_output(sha256->digest);
416 #else
417 MMCAU_SHA256_InitializeOutput((uint32_t*)sha256->digest);
418 #endif
419 wolfSSL_CryptHwMutexUnLock();
420
421 sha256->buffLen = 0;
422 sha256->loLen = 0;
423 sha256->hiLen = 0;
424 #ifdef WOLFSSL_SMALL_STACK_CACHE
425 sha256->W = NULL;
426 #endif
427
428 return ret;
429 }
430
431 static int Transform_Sha256(wc_Sha256* sha256, const byte* data)
432 {
433 int ret = wolfSSL_CryptHwMutexLock();
434 if (ret == 0) {
435 #ifdef FREESCALE_MMCAU_CLASSIC_SHA
436 cau_sha256_hash_n((byte*)data, 1, sha256->digest);
437 #else
438 MMCAU_SHA256_HashN((byte*)data, 1, sha256->digest);
439 #endif
440 wolfSSL_CryptHwMutexUnLock();
441 }
442 return ret;
443 }
444
445 static int Transform_Sha256_Len(wc_Sha256* sha256, const byte* data,
446 word32 len)
447 {
448 int ret = wolfSSL_CryptHwMutexLock();
449 if (ret == 0) {
450 #if defined(WC_HASH_DATA_ALIGNMENT) && WC_HASH_DATA_ALIGNMENT > 0
451 if ((size_t)data % WC_HASH_DATA_ALIGNMENT) {
452 /* data pointer is NOT aligned,
453 * so copy and perform one block at a time */
454 byte* local = (byte*)sha256->buffer;
455 while (len >= WC_SHA256_BLOCK_SIZE) {
456 XMEMCPY(local, data, WC_SHA256_BLOCK_SIZE);
457 #ifdef FREESCALE_MMCAU_CLASSIC_SHA
458 cau_sha256_hash_n(local, 1, sha256->digest);
459 #else
460 MMCAU_SHA256_HashN(local, 1, sha256->digest);
461 #endif
462 data += WC_SHA256_BLOCK_SIZE;
463 len -= WC_SHA256_BLOCK_SIZE;
464 }
465 }
466 else
467 #endif
468 {
469 #ifdef FREESCALE_MMCAU_CLASSIC_SHA
470 cau_sha256_hash_n((byte*)data, len/WC_SHA256_BLOCK_SIZE,
471 sha256->digest);
472 #else
473 MMCAU_SHA256_HashN((byte*)data, len/WC_SHA256_BLOCK_SIZE,
474 sha256->digest);
475 #endif
476 }
477 wolfSSL_CryptHwMutexUnLock();
478 }
479 return ret;
480 }
481
482#elif defined(WOLFSSL_PIC32MZ_HASH)
483 #include <wolfssl/wolfcrypt/port/pic32/pic32mz-crypt.h>
484
485#elif defined(STM32_HASH_SHA2)
486
487 /* Supports CubeMX HAL or Standard Peripheral Library */
488
489 int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
490 {
491 if (sha256 == NULL)
492 return BAD_FUNC_ARG;
493
494 (void)devId;
495 (void)heap;
496
497 wc_Stm32_Hash_Init(&sha256->stmCtx);
498 return 0;
499 }
500
501 int wc_Sha256Update(wc_Sha256* sha256, const byte* data, word32 len)
502 {
503 int ret = 0;
504
505 if (sha256 == NULL || (data == NULL && len > 0)) {
506 return BAD_FUNC_ARG;
507 }
508
509 ret = wolfSSL_CryptHwMutexLock();
510 if (ret == 0) {
511 ret = wc_Stm32_Hash_Update(&sha256->stmCtx,
512 HASH_AlgoSelection_SHA256, data, len);
513 wolfSSL_CryptHwMutexUnLock();
514 }
515 return ret;
516 }
517
518 int wc_Sha256Final(wc_Sha256* sha256, byte* hash)
519 {
520 int ret = 0;
521
522 if (sha256 == NULL || hash == NULL) {
523 return BAD_FUNC_ARG;
524 }
525
526 ret = wolfSSL_CryptHwMutexLock();
527 if (ret == 0) {
528 ret = wc_Stm32_Hash_Final(&sha256->stmCtx,
529 HASH_AlgoSelection_SHA256, hash, WC_SHA256_DIGEST_SIZE);
530 wolfSSL_CryptHwMutexUnLock();
531 }
532
533 (void)wc_InitSha256(sha256); /* reset state */
534
535 return ret;
536 }
537
538#elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_HASH)
539 /* functions defined in wolfcrypt/src/port/caam/caam_sha256.c */
540
541#elif defined(WOLFSSL_AFALG_HASH)
542 /* implemented in wolfcrypt/src/port/af_alg/afalg_hash.c */
543
544#elif defined(WOLFSSL_DEVCRYPTO_HASH)
545 /* implemented in wolfcrypt/src/port/devcrypto/devcrypt_hash.c */
546
547#elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_HASH)
548 #include "hal_data.h"
549
550 #ifndef WOLFSSL_SCE_SHA256_HANDLE
551 #define WOLFSSL_SCE_SHA256_HANDLE g_sce_hash_0
552 #endif
553
554 #define WC_SHA256_DIGEST_WORD_SIZE 16
555 #define XTRANSFORM(S, D) wc_Sha256SCE_XTRANSFORM((S), (D))
556 static int wc_Sha256SCE_XTRANSFORM(wc_Sha256* sha256, const byte* data)
557 {
558 if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
559 CRYPTO_WORD_ENDIAN_LITTLE)
560 {
561 ByteReverseWords((word32*)data, (word32*)data,
562 WC_SHA256_BLOCK_SIZE);
563 ByteReverseWords(sha256->digest, sha256->digest,
564 WC_SHA256_DIGEST_SIZE);
565 }
566
567 if (WOLFSSL_SCE_SHA256_HANDLE.p_api->hashUpdate(
568 WOLFSSL_SCE_SHA256_HANDLE.p_ctrl, (word32*)data,
569 WC_SHA256_DIGEST_WORD_SIZE, sha256->digest) != SSP_SUCCESS){
570 WOLFSSL_MSG("Unexpected hardware return value");
571 return WC_HW_E;
572 }
573
574 if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
575 CRYPTO_WORD_ENDIAN_LITTLE)
576 {
577 ByteReverseWords((word32*)data, (word32*)data,
578 WC_SHA256_BLOCK_SIZE);
579 ByteReverseWords(sha256->digest, sha256->digest,
580 WC_SHA256_DIGEST_SIZE);
581 }
582
583 return 0;
584 }
585
586
587 int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
588 {
589 int ret = 0;
590 if (sha256 == NULL)
591 return BAD_FUNC_ARG;
592
593 sha256->heap = heap;
594
595 ret = InitSha256(sha256);
596 if (ret != 0)
597 return ret;
598
599 (void)devId;
600
601 return ret;
602 }
603
604#elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
605 !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
606
607 #define NEED_SOFT_SHA256
608
609 static int InitSha256(wc_Sha256* sha256)
610 {
611 int ret = 0;
612
613 if (sha256 == NULL)
614 return BAD_FUNC_ARG;
615
616 XMEMSET(sha256->digest, 0, sizeof(sha256->digest));
617 sha256->digest[0] = 0x6A09E667L;
618 sha256->digest[1] = 0xBB67AE85L;
619 sha256->digest[2] = 0x3C6EF372L;
620 sha256->digest[3] = 0xA54FF53AL;
621 sha256->digest[4] = 0x510E527FL;
622 sha256->digest[5] = 0x9B05688CL;
623 sha256->digest[6] = 0x1F83D9ABL;
624 sha256->digest[7] = 0x5BE0CD19L;
625
626 sha256->buffLen = 0;
627 sha256->loLen = 0;
628 sha256->hiLen = 0;
629
630 /* always start firstblock = 1 when using hw engine */
631 sha256->ctx.isfirstblock = 1;
632 sha256->ctx.sha_type = SHA2_256;
633 if(sha256->ctx.mode == ESP32_SHA_HW) {
634 /* release hw */
635 esp_sha_hw_unlock();
636 }
637 /* always set mode to INIT; whether HW or SW is used
638 * is determined at the first call of update()
639 */
640 sha256->ctx.mode = ESP32_SHA_INIT;
641
642 return ret;
643 }
644 int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
645 {
646 int ret = 0;
647
648 if (sha256 == NULL)
649 return BAD_FUNC_ARG;
650
651 XMEMSET(sha256, 0, sizeof(wc_Sha256));
652 sha256->ctx.mode = ESP32_SHA_INIT;
653 sha256->ctx.isfirstblock = 1;
654 (void)devId;
655
656 ret = InitSha256(sha256);
657
658 return ret;
659 }
660
661#elif defined(WOLFSSL_RENESAS_TSIP_CRYPT) && \
662 !defined(NO_WOLFSSL_RENESAS_TSIP_CRYPT_HASH)
663
664 /* implemented in wolfcrypt/src/port/Renesas/renesas_tsip_sha.c */
665
666#else
667 #define NEED_SOFT_SHA256
668
669 int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
670 {
671 int ret = 0;
672 if (sha256 == NULL)
673 return BAD_FUNC_ARG;
674
675 sha256->heap = heap;
676 #ifdef WOLF_CRYPTO_CB
677 sha256->devId = devId;
678 sha256->devCtx = NULL;
679 #endif
680
681 ret = InitSha256(sha256);
682 if (ret != 0)
683 return ret;
684
685 #ifdef WOLFSSL_SMALL_STACK_CACHE
686 sha256->W = NULL;
687 #endif
688
689 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
690 ret = wolfAsync_DevCtxInit(&sha256->asyncDev,
691 WOLFSSL_ASYNC_MARKER_SHA256, sha256->heap, devId);
692 #else
693 (void)devId;
694 #endif /* WOLFSSL_ASYNC_CRYPT */
695
696 return ret;
697 }
698#endif /* End Hardware Acceleration */
699
700#ifdef NEED_SOFT_SHA256
701
702 static const ALIGN32 word32 K[64] = {
703 0x428A2F98L, 0x71374491L, 0xB5C0FBCFL, 0xE9B5DBA5L, 0x3956C25BL,
704 0x59F111F1L, 0x923F82A4L, 0xAB1C5ED5L, 0xD807AA98L, 0x12835B01L,
705 0x243185BEL, 0x550C7DC3L, 0x72BE5D74L, 0x80DEB1FEL, 0x9BDC06A7L,
706 0xC19BF174L, 0xE49B69C1L, 0xEFBE4786L, 0x0FC19DC6L, 0x240CA1CCL,
707 0x2DE92C6FL, 0x4A7484AAL, 0x5CB0A9DCL, 0x76F988DAL, 0x983E5152L,
708 0xA831C66DL, 0xB00327C8L, 0xBF597FC7L, 0xC6E00BF3L, 0xD5A79147L,
709 0x06CA6351L, 0x14292967L, 0x27B70A85L, 0x2E1B2138L, 0x4D2C6DFCL,
710 0x53380D13L, 0x650A7354L, 0x766A0ABBL, 0x81C2C92EL, 0x92722C85L,
711 0xA2BFE8A1L, 0xA81A664BL, 0xC24B8B70L, 0xC76C51A3L, 0xD192E819L,
712 0xD6990624L, 0xF40E3585L, 0x106AA070L, 0x19A4C116L, 0x1E376C08L,
713 0x2748774CL, 0x34B0BCB5L, 0x391C0CB3L, 0x4ED8AA4AL, 0x5B9CCA4FL,
714 0x682E6FF3L, 0x748F82EEL, 0x78A5636FL, 0x84C87814L, 0x8CC70208L,
715 0x90BEFFFAL, 0xA4506CEBL, 0xBEF9A3F7L, 0xC67178F2L
716 };
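 /* Note: per FIPS 180-4, K[0..63] are the first 32 bits of the fractional
  * parts of the cube roots of the first 64 primes, and the initial digest
  * values set in InitSha256() come from the square roots of the first
  * 8 primes. */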
717
718/* Both versions of Ch and Maj are logically the same, but with the second set
719 the compilers can recognize them better for optimization */
720#ifdef WOLFSSL_SHA256_BY_SPEC
721 /* SHA256 math based on specification */
722 #define Ch(x,y,z) ((z) ^ ((x) & ((y) ^ (z))))
723 #define Maj(x,y,z) ((((x) | (y)) & (z)) | ((x) & (y)))
724#else
725 /* SHA256 math reworked for easier compiler optimization */
726 #define Ch(x,y,z) ((((y) ^ (z)) & (x)) ^ (z))
727 #define Maj(x,y,z) ((((x) ^ (y)) & ((y) ^ (z))) ^ (y))
728#endif
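/* Quick check that the two forms agree (a sketch, not from the upstream
 * source): for Ch,
 *   ((y ^ z) & x) ^ z == z ^ (x & (y ^ z))
 * by commutativity of XOR, and both select y when x==1 and z when x==0.
 * Both Maj forms compute the bitwise majority; e.g. with single bits
 * (x,y,z) = (1,0,1):
 *   spec: ((1|0) & 1) | (1 & 0) = 1,   alt: ((1^0) & (0^1)) ^ 0 = 1. */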
729 #define R(x, n) (((x) & 0xFFFFFFFFU) >> (n))
730
731 #define S(x, n) rotrFixed(x, n)
732 #define Sigma0(x) (S(x, 2) ^ S(x, 13) ^ S(x, 22))
733 #define Sigma1(x) (S(x, 6) ^ S(x, 11) ^ S(x, 25))
734 #define Gamma0(x) (S(x, 7) ^ S(x, 18) ^ R(x, 3))
735 #define Gamma1(x) (S(x, 17) ^ S(x, 19) ^ R(x, 10))
736
737 #define a(i) S[(0-i) & 7]
738 #define b(i) S[(1-i) & 7]
739 #define c(i) S[(2-i) & 7]
740 #define d(i) S[(3-i) & 7]
741 #define e(i) S[(4-i) & 7]
742 #define f(i) S[(5-i) & 7]
743 #define g(i) S[(6-i) & 7]
744 #define h(i) S[(7-i) & 7]
745
746 #ifndef XTRANSFORM
747 #define XTRANSFORM(S, D) Transform_Sha256((S),(D))
748 #endif
749
750#ifndef SHA256_MANY_REGISTERS
751 #define RND(j) \
752 t0 = h(j) + Sigma1(e(j)) + Ch(e(j), f(j), g(j)) + K[i+j] + W[i+j]; \
753 t1 = Sigma0(a(j)) + Maj(a(j), b(j), c(j)); \
754 d(j) += t0; \
755 h(j) = t0 + t1
756
757 static int Transform_Sha256(wc_Sha256* sha256, const byte* data)
758 {
759 word32 S[8], t0, t1;
760 int i;
761
762 #ifdef WOLFSSL_SMALL_STACK_CACHE
763 word32* W = sha256->W;
764 if (W == NULL) {
765 W = (word32*)XMALLOC(sizeof(word32) * WC_SHA256_BLOCK_SIZE, NULL,
766 DYNAMIC_TYPE_DIGEST);
767 if (W == NULL)
768 return MEMORY_E;
769 sha256->W = W;
770 }
771 #elif defined(WOLFSSL_SMALL_STACK)
772 word32* W;
773 W = (word32*)XMALLOC(sizeof(word32) * WC_SHA256_BLOCK_SIZE, NULL,
774 DYNAMIC_TYPE_TMP_BUFFER);
775 if (W == NULL)
776 return MEMORY_E;
777 #else
778 word32 W[WC_SHA256_BLOCK_SIZE];
779 #endif
780
781 /* Copy context->state[] to working vars */
782 for (i = 0; i < 8; i++)
783 S[i] = sha256->digest[i];
784
785 for (i = 0; i < 16; i++)
786 W[i] = *((word32*)&data[i*sizeof(word32)]);
787
788 for (i = 16; i < WC_SHA256_BLOCK_SIZE; i++)
789 W[i] = Gamma1(W[i-2]) + W[i-7] + Gamma0(W[i-15]) + W[i-16];
790
791 #ifdef USE_SLOW_SHA256
792 /* not unrolled - ~2k smaller and ~25% slower */
793 for (i = 0; i < WC_SHA256_BLOCK_SIZE; i += 8) {
794 int j;
795 for (j = 0; j < 8; j++) { /* braces needed here for macros {} */
796 RND(j);
797 }
798 }
799 #else
800 /* partially loop unrolled */
801 for (i = 0; i < WC_SHA256_BLOCK_SIZE; i += 8) {
802 RND(0); RND(1); RND(2); RND(3);
803 RND(4); RND(5); RND(6); RND(7);
804 }
805 #endif /* USE_SLOW_SHA256 */
806
807 /* Add the working vars back into digest state[] */
808 for (i = 0; i < 8; i++) {
809 sha256->digest[i] += S[i];
810 }
811
812 #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_SMALL_STACK_CACHE)
813 XFREE(W, NULL, DYNAMIC_TYPE_TMP_BUFFER);
814 #endif
815 return 0;
816 }
817#else
818 /* SHA256 version that keeps all data in registers */
819 #define SCHED1(j) (W[j] = *((word32*)&data[j*sizeof(word32)]))
820 #define SCHED(j) ( \
821 W[ j & 15] += \
822 Gamma1(W[(j-2) & 15])+ \
823 W[(j-7) & 15] + \
824 Gamma0(W[(j-15) & 15]) \
825 )
826
827 #define RND1(j) \
828 t0 = h(j) + Sigma1(e(j)) + Ch(e(j), f(j), g(j)) + K[i+j] + SCHED1(j); \
829 t1 = Sigma0(a(j)) + Maj(a(j), b(j), c(j)); \
830 d(j) += t0; \
831 h(j) = t0 + t1
832 #define RNDN(j) \
833 t0 = h(j) + Sigma1(e(j)) + Ch(e(j), f(j), g(j)) + K[i+j] + SCHED(j); \
834 t1 = Sigma0(a(j)) + Maj(a(j), b(j), c(j)); \
835 d(j) += t0; \
836 h(j) = t0 + t1
837
838 static int Transform_Sha256(wc_Sha256* sha256, const byte* data)
839 {
840 word32 S[8], t0, t1;
841 int i;
842 word32 W[WC_SHA256_BLOCK_SIZE/sizeof(word32)];
843
844 /* Copy digest to working vars */
845 S[0] = sha256->digest[0];
846 S[1] = sha256->digest[1];
847 S[2] = sha256->digest[2];
848 S[3] = sha256->digest[3];
849 S[4] = sha256->digest[4];
850 S[5] = sha256->digest[5];
851 S[6] = sha256->digest[6];
852 S[7] = sha256->digest[7];
853
854 i = 0;
855 RND1( 0); RND1( 1); RND1( 2); RND1( 3);
856 RND1( 4); RND1( 5); RND1( 6); RND1( 7);
857 RND1( 8); RND1( 9); RND1(10); RND1(11);
858 RND1(12); RND1(13); RND1(14); RND1(15);
859 /* 64 operations, partially loop unrolled */
860 for (i = 16; i < 64; i += 16) {
861 RNDN( 0); RNDN( 1); RNDN( 2); RNDN( 3);
862 RNDN( 4); RNDN( 5); RNDN( 6); RNDN( 7);
863 RNDN( 8); RNDN( 9); RNDN(10); RNDN(11);
864 RNDN(12); RNDN(13); RNDN(14); RNDN(15);
865 }
866
867 /* Add the working vars back into digest */
868 sha256->digest[0] += S[0];
869 sha256->digest[1] += S[1];
870 sha256->digest[2] += S[2];
871 sha256->digest[3] += S[3];
872 sha256->digest[4] += S[4];
873 sha256->digest[5] += S[5];
874 sha256->digest[6] += S[6];
875 sha256->digest[7] += S[7];
876
877 return 0;
878 }
879#endif /* SHA256_MANY_REGISTERS */
880#endif
881/* End wc_ software implementation */
882
883
884#ifdef XTRANSFORM
885
886 static WC_INLINE void AddLength(wc_Sha256* sha256, word32 len)
887 {
888 word32 tmp = sha256->loLen;
889 if ((sha256->loLen += len) < tmp) {
890 sha256->hiLen++; /* carry low to high */
891 }
892 }
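 /* Worked example of the carry detection above (assuming 32-bit word32):
  * with loLen = 0xFFFFFFFC and len = 8 the unsigned addition wraps to
  * loLen = 4, which is smaller than the saved value, so hiLen is
  * incremented; (hiLen:loLen) thus form a 64-bit byte count that
  * Sha256Final() later converts to bits. */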
893
894 /* do block size increments/updates */
895 static WC_INLINE int Sha256Update(wc_Sha256* sha256, const byte* data, word32 len)
896 {
897 int ret = 0;
898 word32 blocksLen;
899 byte* local;
900
901 if (sha256 == NULL || (data == NULL && len > 0)) {
902 return BAD_FUNC_ARG;
903 }
904
905 if (data == NULL && len == 0) {
906 /* valid, but do nothing */
907 return 0;
908 }
909
910 /* check that internal buffLen is valid */
911 if (sha256->buffLen >= WC_SHA256_BLOCK_SIZE) {
912 return BUFFER_E;
913 }
914
915 /* add length for final */
916 AddLength(sha256, len);
917
918 local = (byte*)sha256->buffer;
919
920 /* process any remainder from previous operation */
921 if (sha256->buffLen > 0) {
922 blocksLen = min(len, WC_SHA256_BLOCK_SIZE - sha256->buffLen);
923 XMEMCPY(&local[sha256->buffLen], data, blocksLen);
924
925 sha256->buffLen += blocksLen;
926 data += blocksLen;
927 len -= blocksLen;
928
929 if (sha256->buffLen == WC_SHA256_BLOCK_SIZE) {
930 #if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
931 #if defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)
932 if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
933 #endif
934 {
935 ByteReverseWords(sha256->buffer, sha256->buffer,
936 WC_SHA256_BLOCK_SIZE);
937 }
938 #endif
939
940 #if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
941 !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
942 if (sha256->ctx.mode == ESP32_SHA_INIT){
943 esp_sha_try_hw_lock(&sha256->ctx);
944 }
945 if (sha256->ctx.mode == ESP32_SHA_SW){
946 ret = XTRANSFORM(sha256, (const byte*)local);
947 } else {
948 esp_sha256_process(sha256, (const byte*)local);
949 }
950 #else
951 ret = XTRANSFORM(sha256, (const byte*)local);
952 #endif
953
954 if (ret == 0)
955 sha256->buffLen = 0;
956 else
957 len = 0; /* error */
958 }
959 }
960
961 /* process blocks */
962 #ifdef XTRANSFORM_LEN
963 #if defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)
964 if (Transform_Sha256_Len_p != NULL)
965 #endif
966 {
967 /* get number of blocks */
968 /* 64-1 = 0x3F (~ Inverted = 0xFFFFFFC0) */
969 /* len (masked by 0xFFFFFFC0) returns block aligned length */
970 blocksLen = len & ~(WC_SHA256_BLOCK_SIZE-1);
971 if (blocksLen > 0) {
972 /* Byte reversal and alignment handled in function if required */
973 XTRANSFORM_LEN(sha256, data, blocksLen);
974 data += blocksLen;
975 len -= blocksLen;
976 }
977 }
978 #if defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)
979 else
980 #endif
981 #endif /* XTRANSFORM_LEN */
982 #if !defined(XTRANSFORM_LEN) || defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)
983 {
984 while (len >= WC_SHA256_BLOCK_SIZE) {
985 word32* local32 = sha256->buffer;
986 /* optimization to avoid memcpy if data pointer is properly aligned */
987 /* Intel transform function requires use of sha256->buffer */
988 /* Little Endian requires byte swap, so can't use data directly */
989 #if defined(WC_HASH_DATA_ALIGNMENT) && !defined(LITTLE_ENDIAN_ORDER) && \
990 !defined(HAVE_INTEL_AVX1) && !defined(HAVE_INTEL_AVX2)
991 if (((size_t)data % WC_HASH_DATA_ALIGNMENT) == 0) {
992 local32 = (word32*)data;
993 }
994 else
995 #endif
996 {
997 XMEMCPY(local32, data, WC_SHA256_BLOCK_SIZE);
998 }
999
1000 data += WC_SHA256_BLOCK_SIZE;
1001 len -= WC_SHA256_BLOCK_SIZE;
1002
1003 #if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
1004 #if defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)
1005 if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
1006 #endif
1007 {
1008 ByteReverseWords(local32, local32, WC_SHA256_BLOCK_SIZE);
1009 }
1010 #endif
1011
1012 #if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
1013 !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
1014 if (sha256->ctx.mode == ESP32_SHA_INIT){
1015 esp_sha_try_hw_lock(&sha256->ctx);
1016 }
1017 if (sha256->ctx.mode == ESP32_SHA_SW){
1018 ret = XTRANSFORM(sha256, (const byte*)local32);
1019 } else {
1020 esp_sha256_process(sha256, (const byte*)local32);
1021 }
1022 #else
1023 ret = XTRANSFORM(sha256, (const byte*)local32);
1024 #endif
1025
1026 if (ret != 0)
1027 break;
1028 }
1029 }
1030 #endif
1031
1032 /* save remainder */
1033 if (len > 0) {
1034 XMEMCPY(local, data, len);
1035 sha256->buffLen = len;
1036 }
1037
1038 return ret;
1039 }
1040
1041 int wc_Sha256Update(wc_Sha256* sha256, const byte* data, word32 len)
1042 {
1043 if (sha256 == NULL || (data == NULL && len > 0)) {
1044 return BAD_FUNC_ARG;
1045 }
1046
1047 if (data == NULL && len == 0) {
1048 /* valid, but do nothing */
1049 return 0;
1050 }
1051
1052 #ifdef WOLF_CRYPTO_CB
1053 if (sha256->devId != INVALID_DEVID) {
1054 int ret = wc_CryptoCb_Sha256Hash(sha256, data, len, NULL);
1055 if (ret != CRYPTOCB_UNAVAILABLE)
1056 return ret;
1057 /* fall-through when unavailable */
1058 }
1059 #endif
1060 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
1061 if (sha256->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA256) {
1062 #if defined(HAVE_INTEL_QA)
1063 return IntelQaSymSha256(&sha256->asyncDev, NULL, data, len);
1064 #endif
1065 }
1066 #endif /* WOLFSSL_ASYNC_CRYPT */
1067
1068 return Sha256Update(sha256, data, len);
1069 }
1070
1071 static WC_INLINE int Sha256Final(wc_Sha256* sha256)
1072 {
1073
1074 int ret;
1075 byte* local;
1076
1077 if (sha256 == NULL) {
1078 return BAD_FUNC_ARG;
1079 }
1080
1081 local = (byte*)sha256->buffer;
1082 local[sha256->buffLen++] = 0x80; /* add 1 */
1083
1084 /* pad with zeros */
1085 if (sha256->buffLen > WC_SHA256_PAD_SIZE) {
1086 XMEMSET(&local[sha256->buffLen], 0,
1087 WC_SHA256_BLOCK_SIZE - sha256->buffLen);
1088 sha256->buffLen += WC_SHA256_BLOCK_SIZE - sha256->buffLen;
1089
1090 #if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
1091 #if defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)
1092 if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
1093 #endif
1094 {
1095 ByteReverseWords(sha256->buffer, sha256->buffer,
1096 WC_SHA256_BLOCK_SIZE);
1097 }
1098 #endif
1099
1100 #if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
1101 !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
1102 if (sha256->ctx.mode == ESP32_SHA_INIT) {
1103 esp_sha_try_hw_lock(&sha256->ctx);
1104 }
1105 if (sha256->ctx.mode == ESP32_SHA_SW) {
1106 ret = XTRANSFORM(sha256, (const byte*)local);
1107 } else {
1108 ret = esp_sha256_process(sha256, (const byte*)local);
1109 }
1110 #else
1111 ret = XTRANSFORM(sha256, (const byte*)local);
1112 #endif
1113 if (ret != 0)
1114 return ret;
1115
1116 sha256->buffLen = 0;
1117 }
1118 XMEMSET(&local[sha256->buffLen], 0,
1119 WC_SHA256_PAD_SIZE - sha256->buffLen);
1120
1121 /* put lengths in bits */
1122 sha256->hiLen = (sha256->loLen >> (8 * sizeof(sha256->loLen) - 3)) +
1123 (sha256->hiLen << 3);
1124 sha256->loLen = sha256->loLen << 3;
1125
1126 /* store lengths */
1127 #if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
1128 #if defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)
1129 if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
1130 #endif
1131 {
1132 ByteReverseWords(sha256->buffer, sha256->buffer,
1133 WC_SHA256_BLOCK_SIZE);
1134 }
1135 #endif
1136 /* ! length ordering dependent on digest endian type ! */
1137 XMEMCPY(&local[WC_SHA256_PAD_SIZE], &sha256->hiLen, sizeof(word32));
1138 XMEMCPY(&local[WC_SHA256_PAD_SIZE + sizeof(word32)], &sha256->loLen,
1139 sizeof(word32));
1140
1141 #if defined(FREESCALE_MMCAU_SHA) || defined(HAVE_INTEL_AVX1) || \
1142 defined(HAVE_INTEL_AVX2)
1143 /* Kinetis requires only these bytes reversed */
1144 #if defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)
1145 if (IS_INTEL_AVX1(intel_flags) || IS_INTEL_AVX2(intel_flags))
1146 #endif
1147 {
1148 ByteReverseWords(
1149 &sha256->buffer[WC_SHA256_PAD_SIZE / sizeof(word32)],
1150 &sha256->buffer[WC_SHA256_PAD_SIZE / sizeof(word32)],
1151 2 * sizeof(word32));
1152 }
1153 #endif
1154
1155 #if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
1156 !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
1157 if (sha256->ctx.mode == ESP32_SHA_INIT) {
1158 esp_sha_try_hw_lock(&sha256->ctx);
1159 }
1160 if (sha256->ctx.mode == ESP32_SHA_SW) {
1161 ret = XTRANSFORM(sha256, (const byte*)local);
1162 } else {
1163 ret = esp_sha256_digest_process(sha256, 1);
1164 }
1165 #else
1166 ret = XTRANSFORM(sha256, (const byte*)local);
1167 #endif
1168
1169 return ret;
1170 }
1171
1172 int wc_Sha256FinalRaw(wc_Sha256* sha256, byte* hash)
1173 {
1174 #ifdef LITTLE_ENDIAN_ORDER
1175 word32 digest[WC_SHA256_DIGEST_SIZE / sizeof(word32)];
1176 #endif
1177
1178 if (sha256 == NULL || hash == NULL) {
1179 return BAD_FUNC_ARG;
1180 }
1181
1182 #ifdef LITTLE_ENDIAN_ORDER
1183 ByteReverseWords((word32*)digest, (word32*)sha256->digest,
1184 WC_SHA256_DIGEST_SIZE);
1185 XMEMCPY(hash, digest, WC_SHA256_DIGEST_SIZE);
1186 #else
1187 XMEMCPY(hash, sha256->digest, WC_SHA256_DIGEST_SIZE);
1188 #endif
1189
1190 return 0;
1191 }
1192
1193 int wc_Sha256Final(wc_Sha256* sha256, byte* hash)
1194 {
1195 int ret;
1196
1197 if (sha256 == NULL || hash == NULL) {
1198 return BAD_FUNC_ARG;
1199 }
1200
1201 #ifdef WOLF_CRYPTO_CB
1202 if (sha256->devId != INVALID_DEVID) {
1203 ret = wc_CryptoCb_Sha256Hash(sha256, NULL, 0, hash);
1204 if (ret != CRYPTOCB_UNAVAILABLE)
1205 return ret;
1206 /* fall-through when unavailable */
1207 }
1208 #endif
1209
1210 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
1211 if (sha256->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA256) {
1212 #if defined(HAVE_INTEL_QA)
1213 return IntelQaSymSha256(&sha256->asyncDev, hash, NULL,
1214 WC_SHA256_DIGEST_SIZE);
1215 #endif
1216 }
1217 #endif /* WOLFSSL_ASYNC_CRYPT */
1218
1219 ret = Sha256Final(sha256);
1220 if (ret != 0)
1221 return ret;
1222
1223 #if defined(LITTLE_ENDIAN_ORDER)
1224 ByteReverseWords(sha256->digest, sha256->digest, WC_SHA256_DIGEST_SIZE);
1225 #endif
1226 XMEMCPY(hash, sha256->digest, WC_SHA256_DIGEST_SIZE);
1227
1228 return InitSha256(sha256); /* reset state */
1229 }
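 /* Usage sketch (illustrative only, using the wc_ API defined in this file):
  *
  *     byte digest[WC_SHA256_DIGEST_SIZE];
  *     wc_Sha256 sha;
  *
  *     if (wc_InitSha256(&sha) == 0) {
  *         if (wc_Sha256Update(&sha, (const byte*)"abc", 3) == 0)
  *             (void)wc_Sha256Final(&sha, digest); // also re-inits the state
  *         wc_Sha256Free(&sha);
  *     }
  */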
1230
1231#endif /* XTRANSFORM */
1232
1233#ifdef WOLFSSL_SHA224
1234
1235#ifdef STM32_HASH_SHA2
1236
1237 /* Supports CubeMX HAL or Standard Peripheral Library */
1238
1239 int wc_InitSha224_ex(wc_Sha224* sha224, void* heap, int devId)
1240 {
1241 if (sha224 == NULL)
1242 return BAD_FUNC_ARG;
1243
1244 (void)devId;
1245 (void)heap;
1246
1247 wc_Stm32_Hash_Init(&sha224->stmCtx);
1248 return 0;
1249 }
1250
1251 int wc_Sha224Update(wc_Sha224* sha224, const byte* data, word32 len)
1252 {
1253 int ret = 0;
1254
1255 if (sha224 == NULL || (data == NULL && len > 0)) {
1256 return BAD_FUNC_ARG;
1257 }
1258
1259 ret = wolfSSL_CryptHwMutexLock();
1260 if (ret == 0) {
1261 ret = wc_Stm32_Hash_Update(&sha224->stmCtx,
1262 HASH_AlgoSelection_SHA224, data, len);
1263 wolfSSL_CryptHwMutexUnLock();
1264 }
1265 return ret;
1266 }
1267
1268 int wc_Sha224Final(wc_Sha224* sha224, byte* hash)
1269 {
1270 int ret = 0;
1271
1272 if (sha224 == NULL || hash == NULL) {
1273 return BAD_FUNC_ARG;
1274 }
1275
1276 ret = wolfSSL_CryptHwMutexLock();
1277 if (ret == 0) {
1278 ret = wc_Stm32_Hash_Final(&sha224->stmCtx,
1279 HASH_AlgoSelection_SHA224, hash, WC_SHA224_DIGEST_SIZE);
1280 wolfSSL_CryptHwMutexUnLock();
1281 }
1282
1283 (void)wc_InitSha224(sha224); /* reset state */
1284
1285 return ret;
1286 }
1287
1288#elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_HASH)
1289 /* functions defined in wolfcrypt/src/port/caam/caam_sha256.c */
1290
1291#elif defined(WOLFSSL_AFALG_HASH)
1292 #error SHA224 currently not supported with AF_ALG enabled
1293
1294#elif defined(WOLFSSL_DEVCRYPTO_HASH)
1295 /* implemented in wolfcrypt/src/port/devcrypto/devcrypt_hash.c */
1296
1297#else
1298
1299 #define NEED_SOFT_SHA224
1300
1301
1302 static int InitSha224(wc_Sha224* sha224)
1303 {
1304 int ret = 0;
1305
1306 if (sha224 == NULL) {
1307 return BAD_FUNC_ARG;
1308 }
1309
1310 sha224->digest[0] = 0xc1059ed8;
1311 sha224->digest[1] = 0x367cd507;
1312 sha224->digest[2] = 0x3070dd17;
1313 sha224->digest[3] = 0xf70e5939;
1314 sha224->digest[4] = 0xffc00b31;
1315 sha224->digest[5] = 0x68581511;
1316 sha224->digest[6] = 0x64f98fa7;
1317 sha224->digest[7] = 0xbefa4fa4;
1318
1319 sha224->buffLen = 0;
1320 sha224->loLen = 0;
1321 sha224->hiLen = 0;
1322
1323 #if defined(HAVE_INTEL_AVX1)|| defined(HAVE_INTEL_AVX2)
1324 /* choose best Transform function under this runtime environment */
1325 Sha256_SetTransform();
1326 #endif
1327 #if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
1328 sha224->flags = 0;
1329 #endif
1330
1331 return ret;
1332 }
1333
1334#endif
1335
1336#ifdef NEED_SOFT_SHA224
1337 int wc_InitSha224_ex(wc_Sha224* sha224, void* heap, int devId)
1338 {
1339 int ret = 0;
1340
1341 if (sha224 == NULL)
1342 return BAD_FUNC_ARG;
1343
1344 sha224->heap = heap;
1345
1346 ret = InitSha224(sha224);
1347 if (ret != 0)
1348 return ret;
1349
1350 #ifdef WOLFSSL_SMALL_STACK_CACHE
1351 sha224->W = NULL;
1352 #endif
1353
1354 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
1355 ret = wolfAsync_DevCtxInit(&sha224->asyncDev,
1356 WOLFSSL_ASYNC_MARKER_SHA224, sha224->heap, devId);
1357 #else
1358 (void)devId;
1359 #endif /* WOLFSSL_ASYNC_CRYPT */
1360
1361 return ret;
1362 }
1363
1364 int wc_Sha224Update(wc_Sha224* sha224, const byte* data, word32 len)
1365 {
1366 int ret;
1367
1368 if (sha224 == NULL || (data == NULL && len > 0)) {
1369 return BAD_FUNC_ARG;
1370 }
1371
1372 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
1373 if (sha224->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA224) {
1374 #if defined(HAVE_INTEL_QA)
1375 return IntelQaSymSha224(&sha224->asyncDev, NULL, data, len);
1376 #endif
1377 }
1378 #endif /* WOLFSSL_ASYNC_CRYPT */
1379
1380 ret = Sha256Update((wc_Sha256*)sha224, data, len);
1381
1382 return ret;
1383 }
1384
1385 int wc_Sha224Final(wc_Sha224* sha224, byte* hash)
1386 {
1387 int ret;
1388
1389 if (sha224 == NULL || hash == NULL) {
1390 return BAD_FUNC_ARG;
1391 }
1392
1393 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
1394 if (sha224->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA224) {
1395 #if defined(HAVE_INTEL_QA)
1396 return IntelQaSymSha224(&sha224->asyncDev, hash, NULL,
1397 WC_SHA224_DIGEST_SIZE);
1398 #endif
1399 }
1400 #endif /* WOLFSSL_ASYNC_CRYPT */
1401
1402 ret = Sha256Final((wc_Sha256*)sha224);
1403 if (ret != 0)
1404 return ret;
1405
1406 #if defined(LITTLE_ENDIAN_ORDER)
1407 ByteReverseWords(sha224->digest, sha224->digest, WC_SHA224_DIGEST_SIZE);
1408 #endif
1409 XMEMCPY(hash, sha224->digest, WC_SHA224_DIGEST_SIZE);
1410
1411 return InitSha224(sha224); /* reset state */
1412 }
1413#endif /* end of SHA224 software implementation */
1414
1415 int wc_InitSha224(wc_Sha224* sha224)
1416 {
1417 return wc_InitSha224_ex(sha224, NULL, INVALID_DEVID);
1418 }
1419
1420 void wc_Sha224Free(wc_Sha224* sha224)
1421 {
1422 if (sha224 == NULL)
1423 return;
1424
1425#ifdef WOLFSSL_SMALL_STACK_CACHE
1426 if (sha224->W != NULL) {
1427 XFREE(sha224->W, NULL, DYNAMIC_TYPE_DIGEST);
1428 sha224->W = NULL;
1429 }
1430#endif
1431
1432 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
1433 wolfAsync_DevCtxFree(&sha224->asyncDev, WOLFSSL_ASYNC_MARKER_SHA224);
1434 #endif /* WOLFSSL_ASYNC_CRYPT */
1435
1436 #ifdef WOLFSSL_PIC32MZ_HASH
1437 wc_Sha256Pic32Free(sha224);
1438 #endif
1439 }
1440#endif /* WOLFSSL_SHA224 */
1441
1442
1443int wc_InitSha256(wc_Sha256* sha256)
1444{
1445 return wc_InitSha256_ex(sha256, NULL, INVALID_DEVID);
1446}
1447
1448void wc_Sha256Free(wc_Sha256* sha256)
1449{
1450 if (sha256 == NULL)
1451 return;
1452
1453#ifdef WOLFSSL_SMALL_STACK_CACHE
1454 if (sha256->W != NULL) {
1455 XFREE(sha256->W, NULL, DYNAMIC_TYPE_DIGEST);
1456 sha256->W = NULL;
1457 }
1458#endif
1459
1460#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
1461 wolfAsync_DevCtxFree(&sha256->asyncDev, WOLFSSL_ASYNC_MARKER_SHA256);
1462#endif /* WOLFSSL_ASYNC_CRYPT */
1463#ifdef WOLFSSL_PIC32MZ_HASH
1464 wc_Sha256Pic32Free(sha256);
1465#endif
1466#if defined(WOLFSSL_AFALG_HASH)
1467 if (sha256->alFd > 0) {
1468 close(sha256->alFd);
1469 sha256->alFd = -1; /* avoid possible double close on socket */
1470 }
1471 if (sha256->rdFd > 0) {
1472 close(sha256->rdFd);
1473 sha256->rdFd = -1; /* avoid possible double close on socket */
1474 }
1475#endif /* WOLFSSL_AFALG_HASH */
1476#ifdef WOLFSSL_DEVCRYPTO_HASH
1477 wc_DevCryptoFree(&sha256->ctx);
1478#endif /* WOLFSSL_DEVCRYPTO */
1479#if (defined(WOLFSSL_AFALG_HASH) && defined(WOLFSSL_AFALG_HASH_KEEP)) || \
1480 (defined(WOLFSSL_DEVCRYPTO_HASH) && defined(WOLFSSL_DEVCRYPTO_HASH_KEEP)) || \
1481 (defined(WOLFSSL_RENESAS_TSIP_CRYPT) && \
1482 !defined(NO_WOLFSSL_RENESAS_TSIP_CRYPT_HASH))
1483 if (sha256->msg != NULL) {
1484 XFREE(sha256->msg, sha256->heap, DYNAMIC_TYPE_TMP_BUFFER);
1485 sha256->msg = NULL;
1486 }
1487#endif
1488}
1489
1490#endif /* !WOLFSSL_TI_HASH */
1491#endif /* HAVE_FIPS */
1492
1493
1494#ifndef WOLFSSL_TI_HASH
1495#ifdef WOLFSSL_SHA224
1496 int wc_Sha224GetHash(wc_Sha224* sha224, byte* hash)
1497 {
1498 int ret;
1499 wc_Sha224 tmpSha224;
1500
1501 if (sha224 == NULL || hash == NULL)
1502 return BAD_FUNC_ARG;
1503
1504 ret = wc_Sha224Copy(sha224, &tmpSha224);
1505 if (ret == 0) {
1506 ret = wc_Sha224Final(&tmpSha224, hash);
1507 wc_Sha224Free(&tmpSha224);
1508 }
1509 return ret;
1510 }
1511 int wc_Sha224Copy(wc_Sha224* src, wc_Sha224* dst)
1512 {
1513 int ret = 0;
1514
1515 if (src == NULL || dst == NULL)
1516 return BAD_FUNC_ARG;
1517
1518 XMEMCPY(dst, src, sizeof(wc_Sha224));
1519 #ifdef WOLFSSL_SMALL_STACK_CACHE
1520 dst->W = NULL;
1521 #endif
1522
1523 #ifdef WOLFSSL_ASYNC_CRYPT
1524 ret = wolfAsync_DevCopy(&src->asyncDev, &dst->asyncDev);
1525 #endif
1526 #if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
1527 dst->flags |= WC_HASH_FLAG_ISCOPY;
1528 #endif
1529
1530 return ret;
1531 }
1532
1533#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
1534 int wc_Sha224SetFlags(wc_Sha224* sha224, word32 flags)
1535 {
1536 if (sha224) {
1537 sha224->flags = flags;
1538 }
1539 return 0;
1540 }
1541 int wc_Sha224GetFlags(wc_Sha224* sha224, word32* flags)
1542 {
1543 if (sha224 && flags) {
1544 *flags = sha224->flags;
1545 }
1546 return 0;
1547 }
1548#endif
1549
1550#endif /* WOLFSSL_SHA224 */
1551
1552#ifdef WOLFSSL_AFALG_HASH
1553 /* implemented in wolfcrypt/src/port/af_alg/afalg_hash.c */
1554
1555#elif defined(WOLFSSL_DEVCRYPTO_HASH)
1556 /* implemented in wolfcrypt/src/port/devcrypto/devcrypt_hash.c */
1557
1558#elif defined(WOLFSSL_RENESAS_TSIP_CRYPT) && \
1559 !defined(NO_WOLFSSL_RENESAS_TSIP_CRYPT_HASH)
1560
1561 /* implemented in wolfcrypt/src/port/Renesas/renesas_tsip_sha.c */
1562#else
1563
1564int wc_Sha256GetHash(wc_Sha256* sha256, byte* hash)
1565{
1566 int ret;
1567 wc_Sha256 tmpSha256;
1568
1569 if (sha256 == NULL || hash == NULL)
1570 return BAD_FUNC_ARG;
1571
1572#if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
1573 !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
1574 if(sha256->ctx.mode == ESP32_SHA_INIT){
1575 esp_sha_try_hw_lock(&sha256->ctx);
1576 }
1577 if(sha256->ctx.mode == ESP32_SHA_HW)
1578 {
1579 esp_sha256_digest_process(sha256, 0);
1580 }
1581#endif
1582 ret = wc_Sha256Copy(sha256, &tmpSha256);
1583 if (ret == 0) {
1584 ret = wc_Sha256Final(&tmpSha256, hash);
1585#if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
1586 !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
1587 sha256->ctx.mode = ESP32_SHA_SW;
1588#endif
1589
1590 wc_Sha256Free(&tmpSha256);
1591 }
1592 return ret;
1593}
1594int wc_Sha256Copy(wc_Sha256* src, wc_Sha256* dst)
1595{
1596 int ret = 0;
1597
1598 if (src == NULL || dst == NULL)
1599 return BAD_FUNC_ARG;
1600
1601 XMEMCPY(dst, src, sizeof(wc_Sha256));
1602#ifdef WOLFSSL_SMALL_STACK_CACHE
1603 dst->W = NULL;
1604#endif
1605
1606#ifdef WOLFSSL_ASYNC_CRYPT
1607 ret = wolfAsync_DevCopy(&src->asyncDev, &dst->asyncDev);
1608#endif
1609#ifdef WOLFSSL_PIC32MZ_HASH
1610 ret = wc_Pic32HashCopy(&src->cache, &dst->cache);
1611#endif
1612#if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
1613 !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
1614 dst->ctx.mode = src->ctx.mode;
1615 dst->ctx.isfirstblock = src->ctx.isfirstblock;
1616 dst->ctx.sha_type = src->ctx.sha_type;
1617#endif
1618#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
1619 dst->flags |= WC_HASH_FLAG_ISCOPY;
1620#endif
1621
1622 return ret;
1623}
1624#endif
1625
1626#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
1627int wc_Sha256SetFlags(wc_Sha256* sha256, word32 flags)
1628{
1629 if (sha256) {
1630 sha256->flags = flags;
1631 }
1632 return 0;
1633}
1634int wc_Sha256GetFlags(wc_Sha256* sha256, word32* flags)
1635{
1636 if (sha256 && flags) {
1637 *flags = sha256->flags;
1638 }
1639 return 0;
1640}
1641#endif
1642#endif /* !WOLFSSL_TI_HASH */
1643
1644#endif /* NO_SHA256 */