/*
 * Copyright (c) 2022-2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <stddef.h>
#include <hvb_sysdeps.h>
#include "hvb_crypto.h"
#include "hvb_hash_sha256.h"

#ifndef htobe32
#define htobe32(value) \
    ((((value)&0x000000FF) << 24) | (((value)&0x0000FF00) << 8) | (((value)&0x00FF0000) >> 8) | \
     (((value)&0xFF000000) >> 24))
#endif

#define word2byte(w) ((w) * sizeof(uint32_t))
#define PAD_BLK_WORD_SIZE_SHA256 (BLK_WORD_SIZE_SHA256 * 2)
#define PAD_BLK_BYTE_SIZE_SHA256 word2byte(PAD_BLK_WORD_SIZE_SHA256)
#define PAD_INFO_BYTE_LEN_SHA256 8

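/*
 * Rotate/shift helpers and the SHA-256 logical functions from FIPS 180-4:
 * sigma_0/sigma_1 are the upper-case Sigma round functions, sigma_2/sigma_3
 * the lower-case sigma message-schedule functions, and maj/ch the majority
 * and choose functions.
 */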
#define shr(x, n) (((uint32_t)(x)) >> (n))
#define rotr(x, n) (shr(x, n) | (((uint32_t)(x)) << (32 - (n))))

#define sigma_0(x) (rotr(x, 2) ^ rotr(x, 13) ^ rotr(x, 22))
#define sigma_1(x) (rotr(x, 6) ^ rotr(x, 11) ^ rotr(x, 25))
#define sigma_2(x) (rotr(x, 7) ^ rotr(x, 18) ^ shr(x, 3))
#define sigma_3(x) (rotr(x, 17) ^ rotr(x, 19) ^ shr(x, 10))

#define maj(x, y, z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
#define ch(x, y, z) (((x) & (y)) ^ ((~(x)) & (z)))
static const uint32_t const_key[] = {
    0x428A2F98, 0x71374491, 0xB5C0FBCF, 0xE9B5DBA5,
    0x3956C25B, 0x59F111F1, 0x923F82A4, 0xAB1C5ED5,
    0xD807AA98, 0x12835B01, 0x243185BE, 0x550C7DC3,
    0x72BE5D74, 0x80DEB1FE, 0x9BDC06A7, 0xC19BF174,
    0xE49B69C1, 0xEFBE4786, 0x0FC19DC6, 0x240CA1CC,
    0x2DE92C6F, 0x4A7484AA, 0x5CB0A9DC, 0x76F988DA,
    0x983E5152, 0xA831C66D, 0xB00327C8, 0xBF597FC7,
    0xC6E00BF3, 0xD5A79147, 0x06CA6351, 0x14292967,
    0x27B70A85, 0x2E1B2138, 0x4D2C6DFC, 0x53380D13,
    0x650A7354, 0x766A0ABB, 0x81C2C92E, 0x92722C85,
    0xA2BFE8A1, 0xA81A664B, 0xC24B8B70, 0xC76C51A3,
    0xD192E819, 0xD6990624, 0xF40E3585, 0x106AA070,
    0x19A4C116, 0x1E376C08, 0x2748774C, 0x34B0BCB5,
    0x391C0CB3, 0x4ED8AA4A, 0x5B9CCA4F, 0x682E6FF3,
    0x748F82EE, 0x78A5636F, 0x84C87814, 0x8CC70208,
    0x90BEFFFA, 0xA4506CEB, 0xBEF9A3F7, 0xC67178F2,
};

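/* SHA-256 initial hash value H(0) (FIPS 180-4, section 5.3.3). */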
static uint32_t sha256_iv_init[IV_WORD_SIZE_SHA256] = {
    0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
};

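/* Read a 32-bit word from the byte stream in big-endian order. */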
static inline uint32_t bigend_read_word(const uint8_t *data)
{
    uint32_t res;

    res = data[0];
    res = (res << 8) | data[1];
    res = (res << 8) | data[2];
    res = (res << 8) | data[3];

    return res;
}

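/* Message schedule: W[t] = sigma_3(W[t-2]) + W[t-7] + sigma_2(W[t-15]) + W[t-16]. */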
static inline uint32_t w_schedule(uint32_t w[64], uint32_t t)
{
    return sigma_3(w[t - 2]) + w[t - 7] + sigma_2(w[t - 15]) + w[t - 16];
}

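/*
 * Shift the working variables one position after each round: regs[3] already
 * holds d + T1 (the new e) and regs[7] holds T1 + T2 (the new a); the old g
 * value is carried into h.
 */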
static inline void rotate_regs(uint32_t regs[8])
{
    uint32_t backup;
    backup = regs[6];
    regs[6] = regs[5];
    regs[5] = regs[4];
    regs[4] = regs[3];
    regs[3] = regs[2];
    regs[2] = regs[1];
    regs[1] = regs[0];
    regs[0] = regs[7];
    regs[7] = backup;
}

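/* Run the 64-round SHA-256 compression function over one 64-byte block. */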
static void sha256_block_calc(uint32_t regs[8], const uint8_t *data)
{
    uint32_t t1;
    uint32_t w[64];
    uint32_t t;

    for (t = 0; t < 64; t++, data += 4) {
        w[t] = t < 16 ? bigend_read_word(data) : w_schedule(w, t);
        t1 = regs[7] + sigma_1(regs[4]) + ch(regs[4], regs[5], regs[6]) + const_key[t] + w[t];
        regs[3] += t1;
        regs[7] = sigma_0(regs[0]) + maj(regs[0], regs[1], regs[2]) + t1;

        rotate_regs(regs);
    }
}

int sha256_data_blk_update(uint32_t *iv, const void *msg, uint64_t len);

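/*
 * Compress len bytes of complete 64-byte blocks into iv in place. Declared
 * weak so that a platform can override it, e.g. with a hardware-accelerated
 * implementation.
 */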
__attribute__((weak)) int sha256_data_blk_update(uint32_t *iv, const void *msg, uint64_t len)
{
    uint32_t regs[8];
    const uint8_t *pdata = msg;
    uint64_t i;
    uint32_t j;

    for (i = 0; i < len / 64; i++, pdata += 64) {
        for (j = 0; j < 8; j++) {
            regs[j] = iv[j];
        }

        sha256_block_calc(regs, pdata);

        for (j = 0; j < 8; j++) {
            iv[j] += regs[j];
        }
    }
    return 0;
}

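/*
 * Append the final SHA-256 padding (a 0x80 byte, zero fill and the 64-bit
 * big-endian total bit length) to the remaining left_len bytes and compress
 * the resulting one or two blocks.
 */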
static void hash_sha256_pad_update(uint32_t *iv, const void *left_msg, uint64_t left_len, uint64_t total_bit_len)
{
    uint32_t pad_word_len;
    uint32_t sha256_pad[PAD_BLK_WORD_SIZE_SHA256];
    uint8_t *pad_ptr = NULL;
    uint32_t fill_zero_len;

    if (left_len != 0) {
        hvb_memcpy(sha256_pad, left_msg, (uint32_t)left_len);
    }

    pad_ptr = (uint8_t *)sha256_pad;
    pad_ptr[left_len] = 0x80; // padding 0x80
    left_len++;

    if (left_len + PAD_INFO_BYTE_LEN_SHA256 <= BLK_BYTE_SIZE_SHA256) {
        pad_word_len = BLK_WORD_SIZE_SHA256;
    } else {
        pad_word_len = PAD_BLK_WORD_SIZE_SHA256;
    }

    fill_zero_len = word2byte(pad_word_len) - (uint32_t)left_len - PAD_INFO_BYTE_LEN_SHA256;
    hvb_memset(pad_ptr + left_len, 0, fill_zero_len);

    sha256_pad[pad_word_len - 1] = htobe32((uint32_t)total_bit_len);
    total_bit_len = total_bit_len >> 32;
    sha256_pad[pad_word_len - 2] = htobe32((uint32_t)total_bit_len);

    sha256_data_blk_update(iv, sha256_pad, word2byte(pad_word_len));
}

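/* Convert the state words to big-endian and copy the digest (IV_BYTE_SIZE_SHA256 bytes) to out. */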
static int hash_sha256_output_iv(uint32_t *iv, uint8_t *out, uint32_t out_len)
{
    if (out == NULL) {
        return HASH_ERR_PARAM_NULL;
    }

    if (out_len < IV_BYTE_SIZE_SHA256) {
        return HASH_ERR_OUTBUF_NO_ENOUGH;
    }

    for (int i = 0; i < IV_WORD_SIZE_SHA256; i++) {
        iv[i] = htobe32(iv[i]);
    }

    hvb_memcpy(out, iv, IV_BYTE_SIZE_SHA256);

    return HASH_OK;
}

int hash_sha256_single(const void *msg, uint32_t msg_len, uint8_t *out, uint32_t out_len)
{
    uint64_t data_size;
    uint64_t total_bit_len;
    uint32_t iv[IV_WORD_SIZE_SHA256];

    if (msg == NULL || out == NULL) {
        return HASH_ERR_PARAM_NULL;
    }

    if (msg_len == 0) {
        return HASH_ERR_BUF_LEN;
    }

    total_bit_len = (uint64_t)msg_len * 8; // 8 bits per byte
    if (total_bit_len < msg_len) {
        return HASH_ERR_TOTAL_LEN;
    }

    hvb_memcpy(iv, sha256_iv_init, sizeof(sha256_iv_init));

    data_size = (msg_len / BLK_BYTE_SIZE_SHA256) * BLK_BYTE_SIZE_SHA256;

    if (data_size > 0) {
        sha256_data_blk_update(iv, msg, data_size);
    }

    hash_sha256_pad_update(iv, (uint8_t *)msg + data_size, msg_len - data_size, total_bit_len);

    return hash_sha256_output_iv(iv, out, out_len);
}
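/*
 * Usage sketch (illustrative only, not part of the library): one-shot hashing
 * of a complete buffer. The message and digest buffer below are placeholders.
 *
 *     uint8_t digest[IV_BYTE_SIZE_SHA256];
 *     const char msg[] = "abc";
 *
 *     if (hash_sha256_single(msg, sizeof(msg) - 1, digest, sizeof(digest)) != HASH_OK) {
 *         // handle the error
 *     }
 */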

static uint32_t hash_alg_get_blklen(enum hash_alg_type alg_type)
{
    switch (alg_type) {
        case HASH_ALG_SHA256:
            return BLK_BYTE_SIZE_SHA256;
        default:
            return 0;
    }
    return 0;
}

int hash_ctx_init(struct hash_ctx_t *hash_ctx, enum hash_alg_type alg_type)
{
    if (alg_type != HASH_ALG_SHA256) {
        return HASH_ERR_ALG_NO_SUPPORT;
    }

    if (hash_ctx == NULL) {
        return HASH_ERR_PARAM_NULL;
    }

    hash_ctx->alg_type = (uint32_t)alg_type;
    hash_ctx->buf_len = 0;
    hash_ctx->total_len = 0;

    (void)hvb_memcpy(hash_ctx->iv, sha256_iv_init, sizeof(sha256_iv_init));

    return HASH_OK;
}

int hash_calc_update(struct hash_ctx_t *hash_ctx, const void *msg, uint32_t msg_len)
{
    uint32_t left_len;
    uint32_t blk_len;
    uint32_t calc_len;

    if (msg_len == 0) {
        return HASH_OK;
    }

    if (hash_ctx == NULL || msg == NULL) {
        return HASH_ERR_PARAM_NULL;
    }

    blk_len = hash_alg_get_blklen(hash_ctx->alg_type);
    if (blk_len == 0) {
        return HASH_ERR_ALG_NO_SUPPORT;
    }

    if (hash_ctx->buf_len >= blk_len) {
        return HASH_ERR_BUF_LEN;
    }

    hash_ctx->total_len = hash_ctx->total_len + msg_len;

    left_len = blk_len - hash_ctx->buf_len;

    if (hash_ctx->buf_len != 0 && msg_len >= left_len) {
        hvb_memcpy(hash_ctx->blk_buf + hash_ctx->buf_len, msg, left_len);
        (void)sha256_data_blk_update(hash_ctx->iv, hash_ctx->blk_buf, blk_len);

        hash_ctx->buf_len = 0;

        msg_len = msg_len - left_len;
        msg = (uint8_t *)msg + left_len;
    }

    if (msg_len >= blk_len) {
        calc_len = msg_len / blk_len * blk_len;
        sha256_data_blk_update(hash_ctx->iv, msg, calc_len);

        msg_len = msg_len - calc_len;
        msg = (uint8_t *)msg + calc_len;
    }

    if (msg_len != 0) {
        hvb_memcpy(hash_ctx->blk_buf + hash_ctx->buf_len, msg, msg_len);
        hash_ctx->buf_len = hash_ctx->buf_len + msg_len;
    }

    return HASH_OK;
}

int hash_calc_do_final(struct hash_ctx_t *hash_ctx, const void *msg, uint32_t msg_len, uint8_t *out, uint32_t out_len)
{
    uint64_t total_bit_len;
    int ret;

    ret = hash_calc_update(hash_ctx, msg, msg_len);
    if (ret != HASH_OK) {
        return ret;
    }

    total_bit_len = hash_ctx->total_len * 8;
    if (total_bit_len <= hash_ctx->total_len) {
        return HASH_ERR_TOTAL_LEN;
    }

    hash_sha256_pad_update(hash_ctx->iv, hash_ctx->blk_buf, hash_ctx->buf_len, total_bit_len);

    return hash_sha256_output_iv(hash_ctx->iv, out, out_len);
}
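/*
 * Usage sketch (illustrative only): incremental hashing with the context API.
 * part1/part2 and their lengths are placeholders for caller-supplied chunks;
 * passing NULL/0 to hash_calc_do_final finalizes without extra data.
 *
 *     struct hash_ctx_t ctx;
 *     uint8_t digest[IV_BYTE_SIZE_SHA256];
 *
 *     if (hash_ctx_init(&ctx, HASH_ALG_SHA256) != HASH_OK) {
 *         // handle the error
 *     }
 *     (void)hash_calc_update(&ctx, part1, part1_len);
 *     (void)hash_calc_update(&ctx, part2, part2_len);
 *     (void)hash_calc_do_final(&ctx, NULL, 0, digest, sizeof(digest));
 */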