source: azure_iot_hub_f767zi/trunk/wolfssl-4.7.0/wolfcrypt/src/sha256.c@ 464

Last change on this file since 464 was 464, checked in by coas-nagasima, 3 years ago

WolfSSLとAzure IoT SDKを更新

  • Property svn:eol-style set to native
  • Property svn:mime-type set to text/x-csrc;charset=UTF-8
File size: 51.5 KB
Line 
1/* sha256.c
2 *
3 * Copyright (C) 2006-2020 wolfSSL Inc.
4 *
5 * This file is part of wolfSSL.
6 *
7 * wolfSSL is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; either version 2 of the License, or
10 * (at your option) any later version.
11 *
12 * wolfSSL is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
16 *
17 * You should have received a copy of the GNU General Public License
18 * along with this program; if not, write to the Free Software
19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
20 */
21
22/* For more info on the algorithm, see https://tools.ietf.org/html/rfc6234 */
23/*
24
25DESCRIPTION
26This library provides the interface to SHA-256 secure hash algorithms.
27SHA-256 performs processing on message blocks to produce a final hash digest
28output. It can be used to hash a message, M, having a length of L bits,
29where 0 <= L < 2^64.
30
31*/
32#ifdef HAVE_CONFIG_H
33 #include <config.h>
34#endif
35
36#include <wolfssl/wolfcrypt/settings.h>
37
38/*
39 * SHA256 Build Options:
40 * USE_SLOW_SHA256: Reduces code size by not partially unrolling
41 (~2KB smaller and ~25% slower) (default OFF)
42 * WOLFSSL_SHA256_BY_SPEC: Uses the Ch/Maj based on SHA256 specification
43 (default ON)
44 * WOLFSSL_SHA256_ALT_CH_MAJ: Alternate Ch/Maj that is easier for compilers to
45 optimize and recognize as SHA256 (default OFF)
46 * SHA256_MANY_REGISTERS: A SHA256 version that keeps all data in registers
47 and partial unrolled (default OFF)
48 */
49
50/* Default SHA256 to use Ch/Maj based on specification */
51#if !defined(WOLFSSL_SHA256_BY_SPEC) && !defined(WOLFSSL_SHA256_ALT_CH_MAJ)
52 #define WOLFSSL_SHA256_BY_SPEC
53#endif
54
55
56#if !defined(NO_SHA256) && !defined(WOLFSSL_ARMASM)
57
58#if defined(HAVE_FIPS) && \
59 defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2)
60
61 /* set NO_WRAPPERS before headers, use direct internal f()s not wrappers */
62 #define FIPS_NO_WRAPPERS
63
64 #ifdef USE_WINDOWS_API
65 #pragma code_seg(".fipsA$d")
66 #pragma const_seg(".fipsB$d")
67 #endif
68#endif
69
70#include <wolfssl/wolfcrypt/sha256.h>
71#include <wolfssl/wolfcrypt/error-crypt.h>
72#include <wolfssl/wolfcrypt/cpuid.h>
73#include <wolfssl/wolfcrypt/hash.h>
74
75#ifdef WOLF_CRYPTO_CB
76 #include <wolfssl/wolfcrypt/cryptocb.h>
77#endif
78
79/* fips wrapper calls, user can call direct */
80#if defined(HAVE_FIPS) && \
81 (!defined(HAVE_FIPS_VERSION) || (HAVE_FIPS_VERSION < 2))
82
83 int wc_InitSha256(wc_Sha256* sha)
84 {
85 if (sha == NULL) {
86 return BAD_FUNC_ARG;
87 }
88 return InitSha256_fips(sha);
89 }
90 int wc_InitSha256_ex(wc_Sha256* sha, void* heap, int devId)
91 {
92 (void)heap;
93 (void)devId;
94 if (sha == NULL) {
95 return BAD_FUNC_ARG;
96 }
97 return InitSha256_fips(sha);
98 }
99 int wc_Sha256Update(wc_Sha256* sha, const byte* data, word32 len)
100 {
101 if (sha == NULL || (data == NULL && len > 0)) {
102 return BAD_FUNC_ARG;
103 }
104
105 if (data == NULL && len == 0) {
106 /* valid, but do nothing */
107 return 0;
108 }
109
110 return Sha256Update_fips(sha, data, len);
111 }
112 int wc_Sha256Final(wc_Sha256* sha, byte* out)
113 {
114 if (sha == NULL || out == NULL) {
115 return BAD_FUNC_ARG;
116 }
117 return Sha256Final_fips(sha, out);
118 }
119 void wc_Sha256Free(wc_Sha256* sha)
120 {
121 (void)sha;
122 /* Not supported in FIPS */
123 }
124
125#else /* else build without fips, or for FIPS v2 */
126
127
128#if defined(WOLFSSL_TI_HASH)
129 /* #include <wolfcrypt/src/port/ti/ti-hash.c> included by wc_port.c */
130#elif defined(WOLFSSL_CRYPTOCELL)
131 /* wc_port.c includes wolfcrypt/src/port/arm/cryptoCellHash.c */
132
133#elif defined(WOLFSSL_IMXRT_DCP)
134
135#elif defined(WOLFSSL_PSOC6_CRYPTO)
136
137
138#else
139
140#include <wolfssl/wolfcrypt/logging.h>
141
142#ifdef NO_INLINE
143 #include <wolfssl/wolfcrypt/misc.h>
144#else
145 #define WOLFSSL_MISC_INCLUDED
146 #include <wolfcrypt/src/misc.c>
147#endif
148
149#ifdef WOLFSSL_DEVCRYPTO_HASH
150 #include <wolfssl/wolfcrypt/port/devcrypto/wc_devcrypto.h>
151#endif
152
153
154
155#if defined(USE_INTEL_SPEEDUP)
156 #if defined(__GNUC__) && ((__GNUC__ < 4) || \
157 (__GNUC__ == 4 && __GNUC_MINOR__ <= 8))
158 #undef NO_AVX2_SUPPORT
159 #define NO_AVX2_SUPPORT
160 #endif
161 #if defined(__clang__) && ((__clang_major__ < 3) || \
162 (__clang_major__ == 3 && __clang_minor__ <= 5))
163 #define NO_AVX2_SUPPORT
164 #elif defined(__clang__) && defined(NO_AVX2_SUPPORT)
165 #undef NO_AVX2_SUPPORT
166 #endif
167
168 #define HAVE_INTEL_AVX1
169 #ifndef NO_AVX2_SUPPORT
170 #define HAVE_INTEL_AVX2
171 #endif
172#endif /* USE_INTEL_SPEEDUP */
173
174#if defined(HAVE_INTEL_AVX2)
175 #define HAVE_INTEL_RORX
176#endif
177
178
179#if !defined(WOLFSSL_PIC32MZ_HASH) && !defined(STM32_HASH_SHA2) && \
180 (!defined(WOLFSSL_IMX6_CAAM) || defined(NO_IMX6_CAAM_HASH)) && \
181 !defined(WOLFSSL_AFALG_HASH) && !defined(WOLFSSL_DEVCRYPTO_HASH) && \
182 (!defined(WOLFSSL_ESP32WROOM32_CRYPT) || defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)) && \
183 (!defined(WOLFSSL_RENESAS_TSIP_CRYPT) || defined(NO_WOLFSSL_RENESAS_TSIP_HASH)) && \
184 !defined(WOLFSSL_PSOC6_CRYPTO) && !defined(WOLFSSL_IMXRT_DCP) && !defined(WOLFSSL_SILABS_SE_ACCEL)
185
186
187static int InitSha256(wc_Sha256* sha256)
188{
189 int ret = 0;
190
191 if (sha256 == NULL)
192 return BAD_FUNC_ARG;
193
194 XMEMSET(sha256->digest, 0, sizeof(sha256->digest));
195 sha256->digest[0] = 0x6A09E667L;
196 sha256->digest[1] = 0xBB67AE85L;
197 sha256->digest[2] = 0x3C6EF372L;
198 sha256->digest[3] = 0xA54FF53AL;
199 sha256->digest[4] = 0x510E527FL;
200 sha256->digest[5] = 0x9B05688CL;
201 sha256->digest[6] = 0x1F83D9ABL;
202 sha256->digest[7] = 0x5BE0CD19L;
203
204 sha256->buffLen = 0;
205 sha256->loLen = 0;
206 sha256->hiLen = 0;
207#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
208 sha256->flags = 0;
209#endif
210
211 return ret;
212}
213#endif
214
215
216/* Hardware Acceleration */
217#if defined(USE_INTEL_SPEEDUP) && (defined(HAVE_INTEL_AVX1) || \
218 defined(HAVE_INTEL_AVX2))
219
220 /* in case intel instructions aren't available, plus we need the K[] global */
221 #define NEED_SOFT_SHA256
222
223 /*****
224 Intel AVX1/AVX2 Macro Control Structure
225
226 #define HAVE_INTEL_AVX1
227 #define HAVE_INTEL_AVX2
228
229 #define HAVE_INTEL_RORX
230
231
232 int InitSha256(wc_Sha256* sha256) {
233 Save/Recover XMM, YMM
234 ...
235 }
236
237 #if defined(HAVE_INTEL_AVX1)|| defined(HAVE_INTEL_AVX2)
238 Transform_Sha256(); Function prototype
239 #else
240 Transform_Sha256() { }
241 int Sha256Final() {
242 Save/Recover XMM, YMM
243 ...
244 }
245 #endif
246
247 #if defined(HAVE_INTEL_AVX1)|| defined(HAVE_INTEL_AVX2)
248 #if defined(HAVE_INTEL_RORX
249 #define RND with rorx instruction
250 #else
251 #define RND
252 #endif
253 #endif
254
255 #if defined(HAVE_INTEL_AVX1)
256
257 #define XMM Instructions/inline asm
258
259 int Transform_Sha256() {
260 Stitched Message Sched/Round
261 }
262
263 #elif defined(HAVE_INTEL_AVX2)
264
265 #define YMM Instructions/inline asm
266
267 int Transform_Sha256() {
268 More granular Stitched Message Sched/Round
269 }
270
271 #endif
272
273 */
274
275 /* Each platform needs to query info type 1 from cpuid to see if aesni is
276 * supported. Also, let's setup a macro for proper linkage w/o ABI conflicts
277 */
278
279 /* #if defined(HAVE_INTEL_AVX1/2) at the tail of sha256 */
280 static int Transform_Sha256(wc_Sha256* sha256, const byte* data);
281
282#ifdef __cplusplus
283 extern "C" {
284#endif
285
286 #if defined(HAVE_INTEL_AVX1)
287 extern int Transform_Sha256_AVX1(wc_Sha256 *sha256, const byte* data);
288 extern int Transform_Sha256_AVX1_Len(wc_Sha256* sha256,
289 const byte* data, word32 len);
290 #endif
291 #if defined(HAVE_INTEL_AVX2)
292 extern int Transform_Sha256_AVX2(wc_Sha256 *sha256, const byte* data);
293 extern int Transform_Sha256_AVX2_Len(wc_Sha256* sha256,
294 const byte* data, word32 len);
295 #ifdef HAVE_INTEL_RORX
296 extern int Transform_Sha256_AVX1_RORX(wc_Sha256 *sha256, const byte* data);
297 extern int Transform_Sha256_AVX1_RORX_Len(wc_Sha256* sha256,
298 const byte* data, word32 len);
299 extern int Transform_Sha256_AVX2_RORX(wc_Sha256 *sha256, const byte* data);
300 extern int Transform_Sha256_AVX2_RORX_Len(wc_Sha256* sha256,
301 const byte* data, word32 len);
302 #endif /* HAVE_INTEL_RORX */
303 #endif /* HAVE_INTEL_AVX2 */
304
305#ifdef __cplusplus
306 } /* extern "C" */
307#endif
308
309 static int (*Transform_Sha256_p)(wc_Sha256* sha256, const byte* data);
310 /* = _Transform_Sha256 */
311 static int (*Transform_Sha256_Len_p)(wc_Sha256* sha256, const byte* data,
312 word32 len);
313 /* = NULL */
314 static int transform_check = 0;
315 static word32 intel_flags;
316 static int Transform_Sha256_is_vectorized = 0;
317
    /* Dispatch a single 64-byte block through the runtime-selected
     * transform function pointer.  SAVE/RESTORE_VECTOR_REGISTERS()
     * bracket the call only when the selected implementation uses
     * vector (AVX) registers. */
    static WC_INLINE int inline_XTRANSFORM(wc_Sha256* S, const byte* D) {
        int ret;
        if (Transform_Sha256_is_vectorized)
            SAVE_VECTOR_REGISTERS();
        ret = (*Transform_Sha256_p)(S, D);
        if (Transform_Sha256_is_vectorized)
            RESTORE_VECTOR_REGISTERS();
        return ret;
    }
#define XTRANSFORM(...) inline_XTRANSFORM(__VA_ARGS__)
328
    /* Dispatch L bytes (multiple of the block size) through the
     * runtime-selected multi-block transform.  Caller must ensure
     * Transform_Sha256_Len_p is non-NULL (see Sha256Update). */
    static WC_INLINE int inline_XTRANSFORM_LEN(wc_Sha256* S, const byte* D, word32 L) {
        int ret;
        if (Transform_Sha256_is_vectorized)
            SAVE_VECTOR_REGISTERS();
        ret = (*Transform_Sha256_Len_p)(S, D, L);
        if (Transform_Sha256_is_vectorized)
            RESTORE_VECTOR_REGISTERS();
        return ret;
    }
#define XTRANSFORM_LEN(...) inline_XTRANSFORM_LEN(__VA_ARGS__)
339
    /* One-time runtime selection of the fastest SHA-256 transform for
     * this CPU, based on cpuid feature flags.  Falls back to the plain
     * C implementation when no AVX support is detected.
     * NOTE(review): guarded only by the transform_check flag — first
     * call is presumably expected before threads race; confirm callers. */
    static void Sha256_SetTransform(void)
    {

        if (transform_check)
            return;

        intel_flags = cpuid_get_flags();

    #ifdef HAVE_INTEL_AVX2
        if (1 && IS_INTEL_AVX2(intel_flags)) {
        #ifdef HAVE_INTEL_RORX
            /* AVX2 + BMI2 (rorx) is the fastest available path */
            if (IS_INTEL_BMI2(intel_flags)) {
                Transform_Sha256_p = Transform_Sha256_AVX2_RORX;
                Transform_Sha256_Len_p = Transform_Sha256_AVX2_RORX_Len;
                Transform_Sha256_is_vectorized = 1;
            }
            else
        #endif
            if (1)
            {
                Transform_Sha256_p = Transform_Sha256_AVX2;
                Transform_Sha256_Len_p = Transform_Sha256_AVX2_Len;
                Transform_Sha256_is_vectorized = 1;
            }
        #ifdef HAVE_INTEL_RORX
            /* NOTE(review): this branch is unreachable — the preceding
             * "if (1)" always selects AVX2; kept as-is to mirror the
             * upstream structure. */
            else {
                Transform_Sha256_p = Transform_Sha256_AVX1_RORX;
                Transform_Sha256_Len_p = Transform_Sha256_AVX1_RORX_Len;
                Transform_Sha256_is_vectorized = 1;
            }
        #endif
        }
        else
    #endif
    #ifdef HAVE_INTEL_AVX1
        if (IS_INTEL_AVX1(intel_flags)) {
            Transform_Sha256_p = Transform_Sha256_AVX1;
            Transform_Sha256_Len_p = Transform_Sha256_AVX1_Len;
            Transform_Sha256_is_vectorized = 1;
        }
        else
    #endif
        {
            /* plain C fallback; no batched multi-block variant */
            Transform_Sha256_p = Transform_Sha256;
            Transform_Sha256_Len_p = NULL;
            Transform_Sha256_is_vectorized = 0;
        }

        transform_check = 1;
    }
390
    /* Initialize a SHA-256 context (x86 speedup build).
     * heap is stored as the allocation hint; devId is consumed only when
     * crypto callbacks or async crypt are compiled in.
     * Returns 0 on success or BAD_FUNC_ARG on a NULL context. */
    int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
    {
        int ret = 0;
        if (sha256 == NULL)
            return BAD_FUNC_ARG;

        sha256->heap = heap;
    #ifdef WOLF_CRYPTO_CB
        sha256->devId = devId;
    #endif
    #ifdef WOLFSSL_SMALL_STACK_CACHE
        sha256->W = NULL;  /* message schedule buffer, allocated lazily */
    #endif

        ret = InitSha256(sha256);
        if (ret != 0)
            return ret;

        /* choose best Transform function under this runtime environment */
        Sha256_SetTransform();

    #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
        ret = wolfAsync_DevCtxInit(&sha256->asyncDev,
                            WOLFSSL_ASYNC_MARKER_SHA256, sha256->heap, devId);
    #else
        (void)devId;
    #endif /* WOLFSSL_ASYNC_CRYPT */

        return ret;
    }
421
422#elif defined(FREESCALE_LTC_SHA)
423 int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
424 {
425 (void)heap;
426 (void)devId;
427
428 LTC_HASH_Init(LTC_BASE, &sha256->ctx, kLTC_Sha256, NULL, 0);
429
430 return 0;
431 }
432
433#elif defined(FREESCALE_MMCAU_SHA)
434
435 #ifdef FREESCALE_MMCAU_CLASSIC_SHA
436 #include "cau_api.h"
437 #else
438 #include "fsl_mmcau.h"
439 #endif
440
441 #define XTRANSFORM(S, D) Transform_Sha256((S),(D))
442 #define XTRANSFORM_LEN(S, D, L) Transform_Sha256_Len((S),(D),(L))
443
444 #ifndef WC_HASH_DATA_ALIGNMENT
445 /* these hardware API's require 4 byte (word32) alignment */
446 #define WC_HASH_DATA_ALIGNMENT 4
447 #endif
448
    /* Initialize SHA-256 state via the Freescale MMCAU coprocessor.
     * The crypto hardware mutex serializes access to the CAU block.
     * heap and devId are unused by this port. */
    int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
    {
        int ret = 0;

        (void)heap;
        (void)devId;

        ret = wolfSSL_CryptHwMutexLock();
        if (ret != 0) {
            return ret;
        }

    #ifdef FREESCALE_MMCAU_CLASSIC_SHA
        cau_sha256_initialize_output(sha256->digest);
    #else
        MMCAU_SHA256_InitializeOutput((word32*)sha256->digest);
    #endif
        wolfSSL_CryptHwMutexUnLock();

        sha256->buffLen = 0;
        sha256->loLen = 0;
        sha256->hiLen = 0;
    #ifdef WOLFSSL_SMALL_STACK_CACHE
        sha256->W = NULL;  /* message schedule buffer, allocated lazily */
    #endif

        return ret;
    }
477
    /* Hash exactly one 64-byte block with the MMCAU engine, under the
     * hardware mutex.  Returns 0 or the mutex-lock error code. */
    static int Transform_Sha256(wc_Sha256* sha256, const byte* data)
    {
        int ret = wolfSSL_CryptHwMutexLock();
        if (ret == 0) {
    #ifdef FREESCALE_MMCAU_CLASSIC_SHA
            cau_sha256_hash_n((byte*)data, 1, sha256->digest);
    #else
            MMCAU_SHA256_HashN((byte*)data, 1, sha256->digest);
    #endif
            wolfSSL_CryptHwMutexUnLock();
        }
        return ret;
    }
491
    /* Hash len bytes (a multiple of the block size) with the MMCAU
     * engine.  If the data pointer does not satisfy the hardware's
     * 4-byte alignment requirement, blocks are staged one at a time
     * through the context's aligned buffer. */
    static int Transform_Sha256_Len(wc_Sha256* sha256, const byte* data,
        word32 len)
    {
        int ret = wolfSSL_CryptHwMutexLock();
        if (ret == 0) {
    #if defined(WC_HASH_DATA_ALIGNMENT) && WC_HASH_DATA_ALIGNMENT > 0
            if ((size_t)data % WC_HASH_DATA_ALIGNMENT) {
                /* data pointer is NOT aligned,
                 * so copy and perform one block at a time */
                byte* local = (byte*)sha256->buffer;
                while (len >= WC_SHA256_BLOCK_SIZE) {
                    XMEMCPY(local, data, WC_SHA256_BLOCK_SIZE);
                #ifdef FREESCALE_MMCAU_CLASSIC_SHA
                    cau_sha256_hash_n(local, 1, sha256->digest);
                #else
                    MMCAU_SHA256_HashN(local, 1, sha256->digest);
                #endif
                    data += WC_SHA256_BLOCK_SIZE;
                    len  -= WC_SHA256_BLOCK_SIZE;
                }
            }
            else
    #endif
            {
                /* aligned: let the engine consume all blocks at once */
    #ifdef FREESCALE_MMCAU_CLASSIC_SHA
                cau_sha256_hash_n((byte*)data, len/WC_SHA256_BLOCK_SIZE,
                    sha256->digest);
    #else
                MMCAU_SHA256_HashN((byte*)data, len/WC_SHA256_BLOCK_SIZE,
                    sha256->digest);
    #endif
            }
            wolfSSL_CryptHwMutexUnLock();
        }
        return ret;
    }
528
529#elif defined(WOLFSSL_PIC32MZ_HASH)
530 #include <wolfssl/wolfcrypt/port/pic32/pic32mz-crypt.h>
531
532#elif defined(STM32_HASH_SHA2)
533
534 /* Supports CubeMX HAL or Standard Peripheral Library */
535
536 int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
537 {
538 if (sha256 == NULL)
539 return BAD_FUNC_ARG;
540
541 (void)devId;
542 (void)heap;
543
544 XMEMSET(sha256, 0, sizeof(wc_Sha256));
545 wc_Stm32_Hash_Init(&sha256->stmCtx);
546 return 0;
547 }
548
549 int wc_Sha256Update(wc_Sha256* sha256, const byte* data, word32 len)
550 {
551 int ret = 0;
552
553 if (sha256 == NULL || (data == NULL && len > 0)) {
554 return BAD_FUNC_ARG;
555 }
556
557 ret = wolfSSL_CryptHwMutexLock();
558 if (ret == 0) {
559 ret = wc_Stm32_Hash_Update(&sha256->stmCtx,
560 HASH_AlgoSelection_SHA256, data, len);
561 wolfSSL_CryptHwMutexUnLock();
562 }
563 return ret;
564 }
565
566 int wc_Sha256Final(wc_Sha256* sha256, byte* hash)
567 {
568 int ret = 0;
569
570 if (sha256 == NULL || hash == NULL) {
571 return BAD_FUNC_ARG;
572 }
573
574 ret = wolfSSL_CryptHwMutexLock();
575 if (ret == 0) {
576 ret = wc_Stm32_Hash_Final(&sha256->stmCtx,
577 HASH_AlgoSelection_SHA256, hash, WC_SHA256_DIGEST_SIZE);
578 wolfSSL_CryptHwMutexUnLock();
579 }
580
581 (void)wc_InitSha256(sha256); /* reset state */
582
583 return ret;
584 }
585
586#elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_HASH)
587 /* functions defined in wolfcrypt/src/port/caam/caam_sha256.c */
588
589#elif defined(WOLFSSL_AFALG_HASH)
590 /* implemented in wolfcrypt/src/port/af_alg/afalg_hash.c */
591
592#elif defined(WOLFSSL_DEVCRYPTO_HASH)
593 /* implemented in wolfcrypt/src/port/devcrypto/devcrypt_hash.c */
594
595#elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_HASH)
596 #include "hal_data.h"
597
598 #ifndef WOLFSSL_SCE_SHA256_HANDLE
599 #define WOLFSSL_SCE_SHA256_HANDLE g_sce_hash_0
600 #endif
601
602 #define WC_SHA256_DIGEST_WORD_SIZE 16
603 #define XTRANSFORM(S, D) wc_Sha256SCE_XTRANSFORM((S), (D))
    /* Feed one 64-byte block to the Renesas SCE hash engine.  On
     * little-endian parts both the input block and the digest are
     * byte-swapped in place before the hardware call and swapped back
     * afterwards.
     * NOTE(review): the input is written through a cast that discards
     * const — callers must pass a writable buffer (sha256->buffer
     * satisfies this); confirm no caller passes user data directly.
     * Returns 0 on success or WC_HW_E on a hardware failure. */
    static int wc_Sha256SCE_XTRANSFORM(wc_Sha256* sha256, const byte* data)
    {
        if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
                CRYPTO_WORD_ENDIAN_LITTLE)
        {
            ByteReverseWords((word32*)data, (word32*)data,
                WC_SHA256_BLOCK_SIZE);
            ByteReverseWords(sha256->digest, sha256->digest,
                WC_SHA256_DIGEST_SIZE);
        }

        if (WOLFSSL_SCE_SHA256_HANDLE.p_api->hashUpdate(
                WOLFSSL_SCE_SHA256_HANDLE.p_ctrl, (word32*)data,
                WC_SHA256_DIGEST_WORD_SIZE, sha256->digest) != SSP_SUCCESS){
            WOLFSSL_MSG("Unexpected hardware return value");
            return WC_HW_E;
        }

        /* undo the in-place swaps so caller-visible data is unchanged */
        if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag ==
                CRYPTO_WORD_ENDIAN_LITTLE)
        {
            ByteReverseWords((word32*)data, (word32*)data,
                WC_SHA256_BLOCK_SIZE);
            ByteReverseWords(sha256->digest, sha256->digest,
                WC_SHA256_DIGEST_SIZE);
        }

        return 0;
    }
633
634
635 int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
636 {
637 int ret = 0;
638 if (sha256 == NULL)
639 return BAD_FUNC_ARG;
640
641 sha256->heap = heap;
642
643 ret = InitSha256(sha256);
644 if (ret != 0)
645 return ret;
646
647 (void)devId;
648
649 return ret;
650 }
651
652#elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
653 !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
654
655 #define NEED_SOFT_SHA256
656
    /* Reset the SHA-256 state for the ESP32 hardware/software hybrid:
     * load the FIPS 180-4 initial hash values, clear the counters,
     * release the hardware engine if this context currently holds it,
     * and re-arm the HW-vs-SW decision for the next update() call. */
    static int InitSha256(wc_Sha256* sha256)
    {
        int ret = 0;

        if (sha256 == NULL)
            return BAD_FUNC_ARG;

        XMEMSET(sha256->digest, 0, sizeof(sha256->digest));
        sha256->digest[0] = 0x6A09E667L;
        sha256->digest[1] = 0xBB67AE85L;
        sha256->digest[2] = 0x3C6EF372L;
        sha256->digest[3] = 0xA54FF53AL;
        sha256->digest[4] = 0x510E527FL;
        sha256->digest[5] = 0x9B05688CL;
        sha256->digest[6] = 0x1F83D9ABL;
        sha256->digest[7] = 0x5BE0CD19L;

        sha256->buffLen = 0;
        sha256->loLen = 0;
        sha256->hiLen = 0;

        /* always start firstblock = 1 when using hw engine */
        sha256->ctx.isfirstblock = 1;
        sha256->ctx.sha_type = SHA2_256;
        if(sha256->ctx.mode == ESP32_SHA_HW) {
            /* release hw */
            esp_sha_hw_unlock();
        }
        /* always set mode as INIT
         * whether using HW or SW is determined at first call of update()
         */
        sha256->ctx.mode = ESP32_SHA_INIT;

        return ret;
    }
692 int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
693 {
694 int ret = 0;
695
696 if (sha256 == NULL)
697 return BAD_FUNC_ARG;
698
699 XMEMSET(sha256, 0, sizeof(wc_Sha256));
700 sha256->ctx.mode = ESP32_SHA_INIT;
701 sha256->ctx.isfirstblock = 1;
702 (void)devId;
703
704 ret = InitSha256(sha256);
705
706 return ret;
707 }
708
709#elif defined(WOLFSSL_RENESAS_TSIP_CRYPT) && \
710 !defined(NO_WOLFSSL_RENESAS_TSIP_CRYPT_HASH)
711
712 /* implemented in wolfcrypt/src/port/Renesas/renesas_tsip_sha.c */
713
714#elif defined(WOLFSSL_PSOC6_CRYPTO)
715
716 /* implemented in wolfcrypt/src/port/cypress/psoc6_crypto.c */
717
718#elif defined(WOLFSSL_IMXRT_DCP)
719 #include <wolfssl/wolfcrypt/port/nxp/dcp_port.h>
720 /* implemented in wolfcrypt/src/port/nxp/dcp_port.c */
721
722#elif defined(WOLFSSL_SILABS_SE_ACCEL)
723 /* implemented in wolfcrypt/src/port/silabs/silabs_hash.c */
724
725#else
726 #define NEED_SOFT_SHA256
727
    /* Software-only initialization: store the heap hint and (when
     * crypto callbacks are enabled) the device id, reset the hash
     * state, and optionally set up the async device context. */
    int wc_InitSha256_ex(wc_Sha256* sha256, void* heap, int devId)
    {
        int ret = 0;
        if (sha256 == NULL)
            return BAD_FUNC_ARG;

        sha256->heap = heap;
    #ifdef WOLF_CRYPTO_CB
        sha256->devId = devId;
        sha256->devCtx = NULL;
    #endif
    #ifdef WOLFSSL_SMALL_STACK_CACHE
        sha256->W = NULL;  /* message schedule buffer, allocated lazily */
    #endif

        ret = InitSha256(sha256);
        if (ret != 0)
            return ret;

    #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
        ret = wolfAsync_DevCtxInit(&sha256->asyncDev,
                            WOLFSSL_ASYNC_MARKER_SHA256, sha256->heap, devId);
    #else
        (void)devId;
    #endif /* WOLFSSL_ASYNC_CRYPT */

        return ret;
    }
756#endif /* End Hardware Acceleration */
757
758#ifdef NEED_SOFT_SHA256
759
    /* SHA-256 round constants K[0..63]: the first 32 bits of the
     * fractional parts of the cube roots of the first 64 primes
     * (FIPS 180-4, section 4.2.2). */
    static const FLASH_QUALIFIER ALIGN32 word32 K[64] = {
        0x428A2F98L, 0x71374491L, 0xB5C0FBCFL, 0xE9B5DBA5L, 0x3956C25BL,
        0x59F111F1L, 0x923F82A4L, 0xAB1C5ED5L, 0xD807AA98L, 0x12835B01L,
        0x243185BEL, 0x550C7DC3L, 0x72BE5D74L, 0x80DEB1FEL, 0x9BDC06A7L,
        0xC19BF174L, 0xE49B69C1L, 0xEFBE4786L, 0x0FC19DC6L, 0x240CA1CCL,
        0x2DE92C6FL, 0x4A7484AAL, 0x5CB0A9DCL, 0x76F988DAL, 0x983E5152L,
        0xA831C66DL, 0xB00327C8L, 0xBF597FC7L, 0xC6E00BF3L, 0xD5A79147L,
        0x06CA6351L, 0x14292967L, 0x27B70A85L, 0x2E1B2138L, 0x4D2C6DFCL,
        0x53380D13L, 0x650A7354L, 0x766A0ABBL, 0x81C2C92EL, 0x92722C85L,
        0xA2BFE8A1L, 0xA81A664BL, 0xC24B8B70L, 0xC76C51A3L, 0xD192E819L,
        0xD6990624L, 0xF40E3585L, 0x106AA070L, 0x19A4C116L, 0x1E376C08L,
        0x2748774CL, 0x34B0BCB5L, 0x391C0CB3L, 0x4ED8AA4AL, 0x5B9CCA4FL,
        0x682E6FF3L, 0x748F82EEL, 0x78A5636FL, 0x84C87814L, 0x8CC70208L,
        0x90BEFFFAL, 0xA4506CEBL, 0xBEF9A3F7L, 0xC67178F2L
    };
775
/* Both versions of Ch and Maj are logically the same, but with the second set
   the compilers can recognize them better for optimization */
#ifdef WOLFSSL_SHA256_BY_SPEC
    /* SHA256 math based on specification */
    #define Ch(x,y,z)       ((z) ^ ((x) & ((y) ^ (z))))
    #define Maj(x,y,z)      ((((x) | (y)) & (z)) | ((x) & (y)))
#else
    /* SHA256 math reworked for easier compiler optimization */
    #define Ch(x,y,z)       ((((y) ^ (z)) & (x)) ^ (z))
    #define Maj(x,y,z)      ((((x) ^ (y)) & ((y) ^ (z))) ^ (y))
#endif
    /* R: logical right shift; mask keeps the result correct if word32
     * is wider than 32 bits on some platform */
    #define R(x, n)         (((x) & 0xFFFFFFFFU) >> (n))

    /* S: rotate right by n bits */
    #define S(x, n)         rotrFixed(x, n)
    #define Sigma0(x)       (S(x, 2) ^ S(x, 13) ^ S(x, 22))
    #define Sigma1(x)       (S(x, 6) ^ S(x, 11) ^ S(x, 25))
    #define Gamma0(x)       (S(x, 7) ^ S(x, 18) ^ R(x, 3))
    #define Gamma1(x)       (S(x, 17) ^ S(x, 19) ^ R(x, 10))

    /* Rotating aliases for the eight working variables: each round's
     * "shift" of a..h becomes an index change into S[], not a copy. */
    #define a(i) S[(0-i) & 7]
    #define b(i) S[(1-i) & 7]
    #define c(i) S[(2-i) & 7]
    #define d(i) S[(3-i) & 7]
    #define e(i) S[(4-i) & 7]
    #define f(i) S[(5-i) & 7]
    #define g(i) S[(6-i) & 7]
    #define h(i) S[(7-i) & 7]

    /* default single-block dispatch when no HW port defined one */
    #ifndef XTRANSFORM
     #define XTRANSFORM(S, D) Transform_Sha256((S),(D))
    #endif
807
808#ifndef SHA256_MANY_REGISTERS
    /* One SHA-256 round; j (0..7) selects the register rotation. */
    #define RND(j) \
         t0 = h(j) + Sigma1(e(j)) + Ch(e(j), f(j), g(j)) + K[i+j] + W[i+j]; \
         t1 = Sigma0(a(j)) + Maj(a(j), b(j), c(j)); \
         d(j) += t0; \
         h(j) = t0 + t1

    /* Software compression of one 64-byte block with the message
     * schedule W fully expanded to 64 words.  data must already be in
     * big-endian word order (the caller byte-swaps on little-endian)
     * and word-aligned (loaded via a word32 cast below). */
    static int Transform_Sha256(wc_Sha256* sha256, const byte* data)
    {
        word32 S[8], t0, t1;
        int i;

    #ifdef WOLFSSL_SMALL_STACK_CACHE
        /* W is cached on the context and reused across blocks */
        word32* W = sha256->W;
        if (W == NULL) {
            W = (word32*)XMALLOC(sizeof(word32) * WC_SHA256_BLOCK_SIZE, NULL,
                                                           DYNAMIC_TYPE_DIGEST);
            if (W == NULL)
                return MEMORY_E;
            sha256->W = W;
        }
    #elif defined(WOLFSSL_SMALL_STACK)
        word32* W;
        W = (word32*)XMALLOC(sizeof(word32) * WC_SHA256_BLOCK_SIZE, NULL,
                                                       DYNAMIC_TYPE_TMP_BUFFER);
        if (W == NULL)
            return MEMORY_E;
    #else
        word32 W[WC_SHA256_BLOCK_SIZE];
    #endif

        /* Copy context->state[] to working vars */
        for (i = 0; i < 8; i++)
            S[i] = sha256->digest[i];

        /* load the 16 message words (word-aligned access assumed) */
        for (i = 0; i < 16; i++)
            W[i] = *((word32*)&data[i*sizeof(word32)]);

        /* expand the message schedule to 64 words */
        for (i = 16; i < WC_SHA256_BLOCK_SIZE; i++)
            W[i] = Gamma1(W[i-2]) + W[i-7] + Gamma0(W[i-15]) + W[i-16];

    #ifdef USE_SLOW_SHA256
        /* not unrolled - ~2k smaller and ~25% slower */
        for (i = 0; i < WC_SHA256_BLOCK_SIZE; i += 8) {
            int j;
            for (j = 0; j < 8; j++) { /* braces needed here for macros {} */
                RND(j);
            }
        }
    #else
        /* partially loop unrolled */
        for (i = 0; i < WC_SHA256_BLOCK_SIZE; i += 8) {
            RND(0); RND(1); RND(2); RND(3);
            RND(4); RND(5); RND(6); RND(7);
        }
    #endif /* USE_SLOW_SHA256 */

        /* Add the working vars back into digest state[] */
        for (i = 0; i < 8; i++) {
            sha256->digest[i] += S[i];
        }

    #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_SMALL_STACK_CACHE)
        XFREE(W, NULL, DYNAMIC_TYPE_TMP_BUFFER);
    #endif
        return 0;
    }
875#else
    /* SHA256 version that keeps all data in registers */
    /* SCHED1: load message word j from the input (big-endian word
     * order and word alignment assumed) */
    #define SCHED1(j) (W[j] = *((word32*)&data[j*sizeof(word32)]))
    /* SCHED: expand the schedule in place over a 16-word ring buffer */
    #define SCHED(j) (                      \
                 W[ j     & 15] +=          \
                     Gamma1(W[(j-2)  & 15])+ \
                     W[(j-7)  & 15] +       \
                     Gamma0(W[(j-15) & 15]) \
             )

    /* round for i+j < 16: consumes a freshly loaded message word */
    #define RND1(j) \
         t0 = h(j) + Sigma1(e(j)) + Ch(e(j), f(j), g(j)) + K[i+j] + SCHED1(j); \
         t1 = Sigma0(a(j)) + Maj(a(j), b(j), c(j)); \
         d(j) += t0; \
         h(j) = t0 + t1
    /* round for i+j >= 16: expands the schedule on the fly */
    #define RNDN(j) \
         t0 = h(j) + Sigma1(e(j)) + Ch(e(j), f(j), g(j)) + K[i+j] + SCHED(j); \
         t1 = Sigma0(a(j)) + Maj(a(j), b(j), c(j)); \
         d(j) += t0; \
         h(j) = t0 + t1

    /* Compression of one block using a 16-word rolling schedule, so W[]
     * and the working variables can stay in registers. */
    static int Transform_Sha256(wc_Sha256* sha256, const byte* data)
    {
        word32 S[8], t0, t1;
        int i;
        word32 W[WC_SHA256_BLOCK_SIZE/sizeof(word32)];

        /* Copy digest to working vars */
        S[0] = sha256->digest[0];
        S[1] = sha256->digest[1];
        S[2] = sha256->digest[2];
        S[3] = sha256->digest[3];
        S[4] = sha256->digest[4];
        S[5] = sha256->digest[5];
        S[6] = sha256->digest[6];
        S[7] = sha256->digest[7];

        i = 0;
        RND1( 0); RND1( 1); RND1( 2); RND1( 3);
        RND1( 4); RND1( 5); RND1( 6); RND1( 7);
        RND1( 8); RND1( 9); RND1(10); RND1(11);
        RND1(12); RND1(13); RND1(14); RND1(15);
        /* 64 operations, partially loop unrolled */
        for (i = 16; i < 64; i += 16) {
            RNDN( 0); RNDN( 1); RNDN( 2); RNDN( 3);
            RNDN( 4); RNDN( 5); RNDN( 6); RNDN( 7);
            RNDN( 8); RNDN( 9); RNDN(10); RNDN(11);
            RNDN(12); RNDN(13); RNDN(14); RNDN(15);
        }

        /* Add the working vars back into digest */
        sha256->digest[0] += S[0];
        sha256->digest[1] += S[1];
        sha256->digest[2] += S[2];
        sha256->digest[3] += S[3];
        sha256->digest[4] += S[4];
        sha256->digest[5] += S[5];
        sha256->digest[6] += S[6];
        sha256->digest[7] += S[7];

        return 0;
    }
937#endif /* SHA256_MANY_REGISTERS */
938#endif
939/* End wc_ software implementation */
940
941
942#ifdef XTRANSFORM
943
944 static WC_INLINE void AddLength(wc_Sha256* sha256, word32 len)
945 {
946 word32 tmp = sha256->loLen;
947 if ((sha256->loLen += len) < tmp) {
948 sha256->hiLen++; /* carry low to high */
949 }
950 }
951
    /* do block size increments/updates */
    /* Core update: drain any partial block held in sha256->buffer, then
     * hash all whole 64-byte blocks from data (batched when a multi-
     * block transform is available), and stash the remaining tail back
     * into the buffer for the next call or for finalization. */
    static WC_INLINE int Sha256Update(wc_Sha256* sha256, const byte* data, word32 len)
    {
        int ret = 0;
        word32 blocksLen;
        byte* local;

        if (sha256 == NULL || (data == NULL && len > 0)) {
            return BAD_FUNC_ARG;
        }

        if (data == NULL && len == 0) {
            /* valid, but do nothing */
            return 0;
        }

        /* check that internal buffLen is valid */
        if (sha256->buffLen >= WC_SHA256_BLOCK_SIZE) {
            return BUFFER_E;
        }

        /* add length for final */
        AddLength(sha256, len);

        local = (byte*)sha256->buffer;

        /* process any remainder from previous operation */
        if (sha256->buffLen > 0) {
            blocksLen = min(len, WC_SHA256_BLOCK_SIZE - sha256->buffLen);
            XMEMCPY(&local[sha256->buffLen], data, blocksLen);

            sha256->buffLen += blocksLen;
            data            += blocksLen;
            len             -= blocksLen;

            if (sha256->buffLen == WC_SHA256_BLOCK_SIZE) {
                /* buffer filled a whole block: byte-swap (software path
                 * on little-endian only) and hash it */
    #if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
        #if defined(USE_INTEL_SPEEDUP) && \
            (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
            if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
        #endif
            {
                ByteReverseWords(sha256->buffer, sha256->buffer,
                    WC_SHA256_BLOCK_SIZE);
            }
    #endif

    #if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
        !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
            if (sha256->ctx.mode == ESP32_SHA_INIT){
                esp_sha_try_hw_lock(&sha256->ctx);
            }
            if (sha256->ctx.mode == ESP32_SHA_SW){
                ret = XTRANSFORM(sha256, (const byte*)local);
            } else {
                esp_sha256_process(sha256, (const byte*)local);
            }
    #else
            ret = XTRANSFORM(sha256, (const byte*)local);
    #endif

            if (ret == 0)
                sha256->buffLen = 0;
            else
                len = 0; /* error */
            }
        }

        /* process blocks */
    #ifdef XTRANSFORM_LEN
        #if defined(USE_INTEL_SPEEDUP) && \
            (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
        if (Transform_Sha256_Len_p != NULL)
        #endif
        {
            /* get number of blocks */
            /* 64-1 = 0x3F (~ Inverted = 0xFFFFFFC0) */
            /* len (masked by 0xFFFFFFC0) returns block aligned length */
            blocksLen = len & ~(WC_SHA256_BLOCK_SIZE-1);
            if (blocksLen > 0) {
                /* Byte reversal and alignment handled in function if required */
                XTRANSFORM_LEN(sha256, data, blocksLen);
                data += blocksLen;
                len  -= blocksLen;
            }
        }
        #if defined(USE_INTEL_SPEEDUP) && \
            (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
        else
        #endif
    #endif /* XTRANSFORM_LEN */
    #if !defined(XTRANSFORM_LEN) || (defined(USE_INTEL_SPEEDUP) && \
            (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)))
        {
            /* fall back to one block at a time through sha256->buffer */
            while (len >= WC_SHA256_BLOCK_SIZE) {
                word32* local32 = sha256->buffer;
                /* optimization to avoid memcpy if data pointer is properly aligned */
                /* Intel transform function requires use of sha256->buffer */
                /* Little Endian requires byte swap, so can't use data directly */
    #if defined(WC_HASH_DATA_ALIGNMENT) && !defined(LITTLE_ENDIAN_ORDER) && \
        !(defined(USE_INTEL_SPEEDUP) && \
            (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)))
                if (((size_t)data % WC_HASH_DATA_ALIGNMENT) == 0) {
                    local32 = (word32*)data;
                }
                else
    #endif
                {
                    XMEMCPY(local32, data, WC_SHA256_BLOCK_SIZE);
                }

                data += WC_SHA256_BLOCK_SIZE;
                len  -= WC_SHA256_BLOCK_SIZE;

    #if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
        #if defined(USE_INTEL_SPEEDUP) && \
            (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
                if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
        #endif
                {
                    ByteReverseWords(local32, local32, WC_SHA256_BLOCK_SIZE);
                }
    #endif

    #if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
        !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
                if (sha256->ctx.mode == ESP32_SHA_INIT){
                    esp_sha_try_hw_lock(&sha256->ctx);
                }
                if (sha256->ctx.mode == ESP32_SHA_SW){
                    ret = XTRANSFORM(sha256, (const byte*)local32);
                } else {
                    esp_sha256_process(sha256, (const byte*)local32);
                }
    #else
                ret = XTRANSFORM(sha256, (const byte*)local32);
    #endif

                if (ret != 0)
                    break;
            }
        }
    #endif

        /* save remainder */
        if (ret == 0 && len > 0) {
            XMEMCPY(local, data, len);
            sha256->buffLen = len;
        }

        return ret;
    }
1104
    /* Public SHA-256 update entry point.  Routes to a crypto callback
     * or async device when one is registered; otherwise runs the
     * software/accelerated Sha256Update().  NULL data is accepted as a
     * no-op when len == 0. */
    int wc_Sha256Update(wc_Sha256* sha256, const byte* data, word32 len)
    {
        if (sha256 == NULL || (data == NULL && len > 0)) {
            return BAD_FUNC_ARG;
        }

        if (data == NULL && len == 0) {
            /* valid, but do nothing */
            return 0;
        }

    #ifdef WOLF_CRYPTO_CB
        if (sha256->devId != INVALID_DEVID) {
            int ret = wc_CryptoCb_Sha256Hash(sha256, data, len, NULL);
            if (ret != CRYPTOCB_UNAVAILABLE)
                return ret;
            /* fall-through when unavailable */
        }
    #endif
    #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
        if (sha256->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA256) {
        #if defined(HAVE_INTEL_QA)
            return IntelQaSymSha256(&sha256->asyncDev, NULL, data, len);
        #endif
        }
    #endif /* WOLFSSL_ASYNC_CRYPT */

        return Sha256Update(sha256, data, len);
    }
1134
/* Finalize the SHA-256 message per FIPS 180-4: append the mandatory 0x80
 * byte, zero-pad, append the total message length in bits, and run the
 * final block transform(s). On return sha256->digest holds the digest
 * words (byte order handled by the callers).
 *
 * sha256  hash state; buffer/buffLen/loLen/hiLen are consumed
 * returns 0 on success, BAD_FUNC_ARG when sha256 is NULL, otherwise the
 *         error code propagated from the transform
 */
static WC_INLINE int Sha256Final(wc_Sha256* sha256)
{

    int ret;
    byte* local;

    if (sha256 == NULL) {
        return BAD_FUNC_ARG;
    }

    /* view the partial-block word buffer as bytes for padding */
    local = (byte*)sha256->buffer;
    local[sha256->buffLen++] = 0x80; /* add 1 */

    /* pad with zeros */
    if (sha256->buffLen > WC_SHA256_PAD_SIZE) {
        /* no room left in this block for the 8 length bytes: zero-fill,
         * process it, and continue padding in a fresh block below */
        XMEMSET(&local[sha256->buffLen], 0,
            WC_SHA256_BLOCK_SIZE - sha256->buffLen);
        sha256->buffLen += WC_SHA256_BLOCK_SIZE - sha256->buffLen;

    #if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
    #if defined(USE_INTEL_SPEEDUP) && \
        (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
        if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
    #endif
        {
            /* generic software transform expects big-endian words */
            ByteReverseWords(sha256->buffer, sha256->buffer,
                WC_SHA256_BLOCK_SIZE);
        }
    #endif

    #if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
        !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
        /* ESP32: try to claim HW engine; fall back to SW transform */
        if (sha256->ctx.mode == ESP32_SHA_INIT) {
            esp_sha_try_hw_lock(&sha256->ctx);
        }
        if (sha256->ctx.mode == ESP32_SHA_SW) {
            ret = XTRANSFORM(sha256, (const byte*)local);
        } else {
            ret = esp_sha256_process(sha256, (const byte*)local);
        }
    #else
        ret = XTRANSFORM(sha256, (const byte*)local);
    #endif
        if (ret != 0)
            return ret;

        sha256->buffLen = 0;
    }
    /* zero the remainder of the (possibly fresh) block up to the
     * length field */
    XMEMSET(&local[sha256->buffLen], 0,
        WC_SHA256_PAD_SIZE - sha256->buffLen);

    /* put lengths in bits */
    /* 64-bit bit-count = (hiLen:loLen byte count) << 3, carrying the
     * top 3 bits of loLen into hiLen */
    sha256->hiLen = (sha256->loLen >> (8 * sizeof(sha256->loLen) - 3)) +
                 (sha256->hiLen << 3);
    sha256->loLen = sha256->loLen << 3;

    /* store lengths */
#if defined(LITTLE_ENDIAN_ORDER) && !defined(FREESCALE_MMCAU_SHA)
#if defined(USE_INTEL_SPEEDUP) && \
        (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
    if (!IS_INTEL_AVX1(intel_flags) && !IS_INTEL_AVX2(intel_flags))
#endif
    {
        /* swap data words before appending the (native-order) lengths */
        ByteReverseWords(sha256->buffer, sha256->buffer,
            WC_SHA256_BLOCK_SIZE);
    }
#endif
    /* ! length ordering dependent on digest endian type ! */
    XMEMCPY(&local[WC_SHA256_PAD_SIZE], &sha256->hiLen, sizeof(word32));
    XMEMCPY(&local[WC_SHA256_PAD_SIZE + sizeof(word32)], &sha256->loLen,
            sizeof(word32));

#if defined(FREESCALE_MMCAU_SHA) || (defined(USE_INTEL_SPEEDUP) && \
        (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2)))
    /* Kinetis requires only these bytes reversed */
#if defined(USE_INTEL_SPEEDUP) && \
        (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
    if (IS_INTEL_AVX1(intel_flags) || IS_INTEL_AVX2(intel_flags))
#endif
    {
        /* only the two length words need swapping on these paths */
        ByteReverseWords(
            &sha256->buffer[WC_SHA256_PAD_SIZE / sizeof(word32)],
            &sha256->buffer[WC_SHA256_PAD_SIZE / sizeof(word32)],
            2 * sizeof(word32));
    }
#endif

#if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
    !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
    if (sha256->ctx.mode == ESP32_SHA_INIT) {
        esp_sha_try_hw_lock(&sha256->ctx);
    }
    if (sha256->ctx.mode == ESP32_SHA_SW) {
        ret = XTRANSFORM(sha256, (const byte*)local);
    } else {
        /* hardware path: 1 => copy digest out of the HW engine */
        ret = esp_sha256_digest_process(sha256, 1);
    }
#else
    ret = XTRANSFORM(sha256, (const byte*)local);
#endif

    return ret;
}
1238
1239 int wc_Sha256FinalRaw(wc_Sha256* sha256, byte* hash)
1240 {
1241 #ifdef LITTLE_ENDIAN_ORDER
1242 word32 digest[WC_SHA256_DIGEST_SIZE / sizeof(word32)];
1243 #endif
1244
1245 if (sha256 == NULL || hash == NULL) {
1246 return BAD_FUNC_ARG;
1247 }
1248
1249 #ifdef LITTLE_ENDIAN_ORDER
1250 ByteReverseWords((word32*)digest, (word32*)sha256->digest,
1251 WC_SHA256_DIGEST_SIZE);
1252 XMEMCPY(hash, digest, WC_SHA256_DIGEST_SIZE);
1253 #else
1254 XMEMCPY(hash, sha256->digest, WC_SHA256_DIGEST_SIZE);
1255 #endif
1256
1257 return 0;
1258 }
1259
1260 int wc_Sha256Final(wc_Sha256* sha256, byte* hash)
1261 {
1262 int ret;
1263
1264 if (sha256 == NULL || hash == NULL) {
1265 return BAD_FUNC_ARG;
1266 }
1267
1268 #ifdef WOLF_CRYPTO_CB
1269 if (sha256->devId != INVALID_DEVID) {
1270 ret = wc_CryptoCb_Sha256Hash(sha256, NULL, 0, hash);
1271 if (ret != CRYPTOCB_UNAVAILABLE)
1272 return ret;
1273 /* fall-through when unavailable */
1274 }
1275 #endif
1276
1277 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
1278 if (sha256->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA256) {
1279 #if defined(HAVE_INTEL_QA)
1280 return IntelQaSymSha256(&sha256->asyncDev, hash, NULL,
1281 WC_SHA256_DIGEST_SIZE);
1282 #endif
1283 }
1284 #endif /* WOLFSSL_ASYNC_CRYPT */
1285
1286 ret = Sha256Final(sha256);
1287 if (ret != 0)
1288 return ret;
1289
1290 #if defined(LITTLE_ENDIAN_ORDER)
1291 ByteReverseWords(sha256->digest, sha256->digest, WC_SHA256_DIGEST_SIZE);
1292 #endif
1293 XMEMCPY(hash, sha256->digest, WC_SHA256_DIGEST_SIZE);
1294
1295 return InitSha256(sha256); /* reset state */
1296 }
1297
1298#endif /* XTRANSFORM */
1299
1300#ifdef WOLFSSL_SHA224
1301
1302#ifdef STM32_HASH_SHA2
1303
1304 /* Supports CubeMX HAL or Standard Peripheral Library */
1305
1306 int wc_InitSha224_ex(wc_Sha224* sha224, void* heap, int devId)
1307 {
1308 if (sha224 == NULL)
1309 return BAD_FUNC_ARG;
1310
1311 (void)devId;
1312 (void)heap;
1313
1314 XMEMSET(sha224, 0, sizeof(wc_Sha224));
1315 wc_Stm32_Hash_Init(&sha224->stmCtx);
1316 return 0;
1317 }
1318
1319 int wc_Sha224Update(wc_Sha224* sha224, const byte* data, word32 len)
1320 {
1321 int ret = 0;
1322
1323 if (sha224 == NULL || (data == NULL && len > 0)) {
1324 return BAD_FUNC_ARG;
1325 }
1326
1327 ret = wolfSSL_CryptHwMutexLock();
1328 if (ret == 0) {
1329 ret = wc_Stm32_Hash_Update(&sha224->stmCtx,
1330 HASH_AlgoSelection_SHA224, data, len);
1331 wolfSSL_CryptHwMutexUnLock();
1332 }
1333 return ret;
1334 }
1335
1336 int wc_Sha224Final(wc_Sha224* sha224, byte* hash)
1337 {
1338 int ret = 0;
1339
1340 if (sha224 == NULL || hash == NULL) {
1341 return BAD_FUNC_ARG;
1342 }
1343
1344 ret = wolfSSL_CryptHwMutexLock();
1345 if (ret == 0) {
1346 ret = wc_Stm32_Hash_Final(&sha224->stmCtx,
1347 HASH_AlgoSelection_SHA224, hash, WC_SHA224_DIGEST_SIZE);
1348 wolfSSL_CryptHwMutexUnLock();
1349 }
1350
1351 (void)wc_InitSha224(sha224); /* reset state */
1352
1353 return ret;
1354 }
1355
1356#elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_HASH)
1357 /* functions defined in wolfcrypt/src/port/caam/caam_sha256.c */
1358
1359#elif defined(WOLFSSL_AFALG_HASH)
1360 #error SHA224 currently not supported with AF_ALG enabled
1361
1362#elif defined(WOLFSSL_DEVCRYPTO_HASH)
1363 /* implemented in wolfcrypt/src/port/devcrypto/devcrypt_hash.c */
1364
1365#elif defined(WOLFSSL_SILABS_SE_ACCEL)
1366 /* implemented in wolfcrypt/src/port/silabs/silabs_hash.c */
1367
1368#else
1369
1370 #define NEED_SOFT_SHA224
1371
1372
1373 static int InitSha224(wc_Sha224* sha224)
1374 {
1375 int ret = 0;
1376
1377 if (sha224 == NULL) {
1378 return BAD_FUNC_ARG;
1379 }
1380
1381 sha224->digest[0] = 0xc1059ed8;
1382 sha224->digest[1] = 0x367cd507;
1383 sha224->digest[2] = 0x3070dd17;
1384 sha224->digest[3] = 0xf70e5939;
1385 sha224->digest[4] = 0xffc00b31;
1386 sha224->digest[5] = 0x68581511;
1387 sha224->digest[6] = 0x64f98fa7;
1388 sha224->digest[7] = 0xbefa4fa4;
1389
1390 sha224->buffLen = 0;
1391 sha224->loLen = 0;
1392 sha224->hiLen = 0;
1393
1394 #if defined(USE_INTEL_SPEEDUP) && \
1395 (defined(HAVE_INTEL_AVX1) || defined(HAVE_INTEL_AVX2))
1396 /* choose best Transform function under this runtime environment */
1397 Sha256_SetTransform();
1398 #endif
1399 #if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
1400 sha224->flags = 0;
1401 #endif
1402
1403 return ret;
1404 }
1405
1406#endif
1407
1408#ifdef NEED_SOFT_SHA224
1409 int wc_InitSha224_ex(wc_Sha224* sha224, void* heap, int devId)
1410 {
1411 int ret = 0;
1412
1413 if (sha224 == NULL)
1414 return BAD_FUNC_ARG;
1415
1416 sha224->heap = heap;
1417 #ifdef WOLFSSL_SMALL_STACK_CACHE
1418 sha224->W = NULL;
1419 #endif
1420
1421 ret = InitSha224(sha224);
1422 if (ret != 0)
1423 return ret;
1424
1425 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
1426 ret = wolfAsync_DevCtxInit(&sha224->asyncDev,
1427 WOLFSSL_ASYNC_MARKER_SHA224, sha224->heap, devId);
1428 #else
1429 (void)devId;
1430 #endif /* WOLFSSL_ASYNC_CRYPT */
1431
1432 return ret;
1433 }
1434
1435 int wc_Sha224Update(wc_Sha224* sha224, const byte* data, word32 len)
1436 {
1437 int ret;
1438
1439 if (sha224 == NULL || (data == NULL && len > 0)) {
1440 return BAD_FUNC_ARG;
1441 }
1442
1443 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
1444 if (sha224->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA224) {
1445 #if defined(HAVE_INTEL_QA)
1446 return IntelQaSymSha224(&sha224->asyncDev, NULL, data, len);
1447 #endif
1448 }
1449 #endif /* WOLFSSL_ASYNC_CRYPT */
1450
1451 ret = Sha256Update((wc_Sha256*)sha224, data, len);
1452
1453 return ret;
1454 }
1455
1456 int wc_Sha224Final(wc_Sha224* sha224, byte* hash)
1457 {
1458 int ret;
1459
1460 if (sha224 == NULL || hash == NULL) {
1461 return BAD_FUNC_ARG;
1462 }
1463
1464 #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
1465 if (sha224->asyncDev.marker == WOLFSSL_ASYNC_MARKER_SHA224) {
1466 #if defined(HAVE_INTEL_QA)
1467 return IntelQaSymSha224(&sha224->asyncDev, hash, NULL,
1468 WC_SHA224_DIGEST_SIZE);
1469 #endif
1470 }
1471 #endif /* WOLFSSL_ASYNC_CRYPT */
1472
1473 ret = Sha256Final((wc_Sha256*)sha224);
1474 if (ret != 0)
1475 return ret;
1476
1477 #if defined(LITTLE_ENDIAN_ORDER)
1478 ByteReverseWords(sha224->digest, sha224->digest, WC_SHA224_DIGEST_SIZE);
1479 #endif
1480 XMEMCPY(hash, sha224->digest, WC_SHA224_DIGEST_SIZE);
1481
1482 return InitSha224(sha224); /* reset state */
1483 }
1484#endif /* end of SHA224 software implementation */
1485
    /* Initialize a SHA-224 state with the default heap hint and no
     * device offload (INVALID_DEVID).
     *
     * sha224  state to initialize
     * returns 0 on success, BAD_FUNC_ARG when sha224 is NULL
     */
    int wc_Sha224(wc_Sha224* sha224);
    int wc_InitSha224(wc_Sha224* sha224)
    {
        return wc_InitSha224_ex(sha224, NULL, INVALID_DEVID);
    }
1490
    /* Release resources attached to a SHA-224 state (scratch W buffer,
     * async context, PIC32MZ cache). Does not free the struct itself;
     * safe to call with NULL.
     */
    void wc_Sha224Free(wc_Sha224* sha224)
    {
        if (sha224 == NULL)
            return;

#ifdef WOLFSSL_SMALL_STACK_CACHE
    /* free the lazily-allocated W[] scratch area */
    if (sha224->W != NULL) {
        XFREE(sha224->W, NULL, DYNAMIC_TYPE_DIGEST);
        sha224->W = NULL;
    }
#endif

    #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA224)
        wolfAsync_DevCtxFree(&sha224->asyncDev, WOLFSSL_ASYNC_MARKER_SHA224);
    #endif /* WOLFSSL_ASYNC_CRYPT */

    #ifdef WOLFSSL_PIC32MZ_HASH
        wc_Sha256Pic32Free(sha224);
    #endif
    }
1511#endif /* WOLFSSL_SHA224 */
1512
1513
/* Initialize a SHA-256 state with the default heap hint and no device
 * offload (INVALID_DEVID).
 *
 * sha256  state to initialize
 * returns 0 on success, BAD_FUNC_ARG when sha256 is NULL
 */
int wc_InitSha256(wc_Sha256* sha256)
{
    return wc_InitSha256_ex(sha256, NULL, INVALID_DEVID);
}
1518
/* Release all port-specific resources attached to a SHA-256 state:
 * scratch W buffer, async context, PIC32MZ cache, AF_ALG sockets,
 * /dev/crypto session, retained message buffer, and IMXRT DCP handle.
 * Does not free the struct itself; safe to call with NULL.
 */
void wc_Sha256Free(wc_Sha256* sha256)
{
    if (sha256 == NULL)
        return;

#ifdef WOLFSSL_SMALL_STACK_CACHE
    /* free the lazily-allocated W[] scratch area */
    if (sha256->W != NULL) {
        XFREE(sha256->W, NULL, DYNAMIC_TYPE_DIGEST);
        sha256->W = NULL;
    }
#endif

#if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_SHA256)
    wolfAsync_DevCtxFree(&sha256->asyncDev, WOLFSSL_ASYNC_MARKER_SHA256);
#endif /* WOLFSSL_ASYNC_CRYPT */
#ifdef WOLFSSL_PIC32MZ_HASH
    wc_Sha256Pic32Free(sha256);
#endif
#if defined(WOLFSSL_AFALG_HASH)
    /* close the AF_ALG algorithm and request sockets */
    if (sha256->alFd > 0) {
        close(sha256->alFd);
        sha256->alFd = -1; /* avoid possible double close on socket */
    }
    if (sha256->rdFd > 0) {
        close(sha256->rdFd);
        sha256->rdFd = -1; /* avoid possible double close on socket */
    }
#endif /* WOLFSSL_AFALG_HASH */
#ifdef WOLFSSL_DEVCRYPTO_HASH
    wc_DevCryptoFree(&sha256->ctx);
#endif /* WOLFSSL_DEVCRYPTO */
#if (defined(WOLFSSL_AFALG_HASH) && defined(WOLFSSL_AFALG_HASH_KEEP)) || \
    (defined(WOLFSSL_DEVCRYPTO_HASH) && defined(WOLFSSL_DEVCRYPTO_HASH_KEEP)) || \
    (defined(WOLFSSL_RENESAS_TSIP_CRYPT) && \
    !defined(NO_WOLFSSL_RENESAS_TSIP_CRYPT_HASH))
    /* ports that retain the whole message free their copy here */
    if (sha256->msg != NULL) {
        XFREE(sha256->msg, sha256->heap, DYNAMIC_TYPE_TMP_BUFFER);
        sha256->msg = NULL;
    }
#endif
#ifdef WOLFSSL_IMXRT_DCP
    DCPSha256Free(sha256);
#endif
}
1563
1564#endif /* !WOLFSSL_TI_HASH */
1565#endif /* HAVE_FIPS */
1566
1567
1568#ifndef WOLFSSL_TI_HASH
1569#ifdef WOLFSSL_SHA224
1570 int wc_Sha224GetHash(wc_Sha224* sha224, byte* hash)
1571 {
1572 int ret;
1573 wc_Sha224 tmpSha224;
1574
1575 if (sha224 == NULL || hash == NULL)
1576 return BAD_FUNC_ARG;
1577
1578 ret = wc_Sha224Copy(sha224, &tmpSha224);
1579 if (ret == 0) {
1580 ret = wc_Sha224Final(&tmpSha224, hash);
1581 wc_Sha224Free(&tmpSha224);
1582 }
1583 return ret;
1584 }
    /* Duplicate a SHA-224 state into dst so dst can be finalized
     * independently of src. Port-specific internal pointers are re-homed
     * to dst after the raw struct copy.
     *
     * src     state to copy from
     * dst     state to copy into (previous contents overwritten)
     * returns 0 on success, BAD_FUNC_ARG on NULL, or an async copy error
     */
    int wc_Sha224Copy(wc_Sha224* src, wc_Sha224* dst)
    {
        int ret = 0;

        if (src == NULL || dst == NULL)
            return BAD_FUNC_ARG;

        XMEMCPY(dst, src, sizeof(wc_Sha224));
    #ifdef WOLFSSL_SMALL_STACK_CACHE
        /* dst must allocate its own W[]; never share src's buffer */
        dst->W = NULL;
    #endif

    #ifdef WOLFSSL_SILABS_SE_ACCEL
        /* fix up internal self-pointers to reference dst, not src */
        dst->silabsCtx.hash_ctx.cmd_ctx = &(dst->silabsCtx.cmd_ctx);
        dst->silabsCtx.hash_ctx.hash_type_ctx = &(dst->silabsCtx.hash_type_ctx);
    #endif

    #ifdef WOLFSSL_ASYNC_CRYPT
        ret = wolfAsync_DevCopy(&src->asyncDev, &dst->asyncDev);
    #endif
    #if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
        dst->flags |= WC_HASH_FLAG_ISCOPY;
    #endif

        return ret;
    }
1611
1612#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
1613 int wc_Sha224SetFlags(wc_Sha224* sha224, word32 flags)
1614 {
1615 if (sha224) {
1616 sha224->flags = flags;
1617 }
1618 return 0;
1619 }
1620 int wc_Sha224GetFlags(wc_Sha224* sha224, word32* flags)
1621 {
1622 if (sha224 && flags) {
1623 *flags = sha224->flags;
1624 }
1625 return 0;
1626 }
1627#endif
1628
1629#endif /* WOLFSSL_SHA224 */
1630
1631#ifdef WOLFSSL_AFALG_HASH
1632 /* implemented in wolfcrypt/src/port/af_alg/afalg_hash.c */
1633
1634#elif defined(WOLFSSL_DEVCRYPTO_HASH)
1635 /* implemented in wolfcrypt/src/port/devcrypto/devcrypt_hash.c */
1636
1637#elif defined(WOLFSSL_RENESAS_TSIP_CRYPT) && \
1638 !defined(NO_WOLFSSL_RENESAS_TSIP_CRYPT_HASH)
1639
1640 /* implemented in wolfcrypt/src/port/Renesas/renesas_tsip_sha.c */
1641#elif defined(WOLFSSL_PSOC6_CRYPTO)
1642 /* implemented in wolfcrypt/src/port/cypress/psoc6_crypto.c */
1643#elif defined(WOLFSSL_IMXRT_DCP)
1644 /* implemented in wolfcrypt/src/port/nxp/dcp_port.c */
1645#else
1646
1647int wc_Sha256GetHash(wc_Sha256* sha256, byte* hash)
1648{
1649 int ret;
1650 wc_Sha256 tmpSha256;
1651
1652 if (sha256 == NULL || hash == NULL)
1653 return BAD_FUNC_ARG;
1654
1655#if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
1656 !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
1657 if(sha256->ctx.mode == ESP32_SHA_INIT){
1658 esp_sha_try_hw_lock(&sha256->ctx);
1659 }
1660 if(sha256->ctx.mode == ESP32_SHA_HW)
1661 {
1662 esp_sha256_digest_process(sha256, 0);
1663 }
1664#endif
1665 ret = wc_Sha256Copy(sha256, &tmpSha256);
1666 if (ret == 0) {
1667 ret = wc_Sha256Final(&tmpSha256, hash);
1668#if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
1669 !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
1670 sha256->ctx.mode = ESP32_SHA_SW;
1671#endif
1672
1673 wc_Sha256Free(&tmpSha256);
1674 }
1675 return ret;
1676}
/* Duplicate a SHA-256 state into dst so dst can be finalized
 * independently of src. Port-specific internal pointers and caches are
 * re-homed to dst after the raw struct copy.
 *
 * src     state to copy from
 * dst     state to copy into (previous contents overwritten)
 * returns 0 on success, BAD_FUNC_ARG on NULL, or an async/PIC32 copy
 *         error
 */
int wc_Sha256Copy(wc_Sha256* src, wc_Sha256* dst)
{
    int ret = 0;

    if (src == NULL || dst == NULL)
        return BAD_FUNC_ARG;

    XMEMCPY(dst, src, sizeof(wc_Sha256));
#ifdef WOLFSSL_SMALL_STACK_CACHE
    /* dst must allocate its own W[]; never share src's buffer */
    dst->W = NULL;
#endif

#ifdef WOLFSSL_SILABS_SE_ACCEL
    /* fix up internal self-pointers to reference dst, not src */
    dst->silabsCtx.hash_ctx.cmd_ctx = &(dst->silabsCtx.cmd_ctx);
    dst->silabsCtx.hash_ctx.hash_type_ctx = &(dst->silabsCtx.hash_type_ctx);
#endif

#ifdef WOLFSSL_ASYNC_CRYPT
    ret = wolfAsync_DevCopy(&src->asyncDev, &dst->asyncDev);
#endif
#ifdef WOLFSSL_PIC32MZ_HASH
    ret = wc_Pic32HashCopy(&src->cache, &dst->cache);
#endif
#if defined(WOLFSSL_ESP32WROOM32_CRYPT) && \
    !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_HASH)
    dst->ctx.mode = src->ctx.mode;
    dst->ctx.isfirstblock = src->ctx.isfirstblock;
    dst->ctx.sha_type = src->ctx.sha_type;
#endif
#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
    dst->flags |= WC_HASH_FLAG_ISCOPY;
#endif

    return ret;
}
1712#endif
1713
1714#if defined(WOLFSSL_HASH_FLAGS) || defined(WOLF_CRYPTO_CB)
1715int wc_Sha256SetFlags(wc_Sha256* sha256, word32 flags)
1716{
1717 if (sha256) {
1718 sha256->flags = flags;
1719 }
1720 return 0;
1721}
1722int wc_Sha256GetFlags(wc_Sha256* sha256, word32* flags)
1723{
1724 if (sha256 && flags) {
1725 *flags = sha256->flags;
1726 }
1727 return 0;
1728}
1729#endif
1730#endif /* !WOLFSSL_TI_HASH */
1731
1732#endif /* NO_SHA256 */
Note: See TracBrowser for help on using the repository browser.