#if defined(CRYPTOPP_AVX2_AVAILABLE) && defined(CRYPTOPP_ENABLE_64BIT_SSE)

#if defined(CRYPTOPP_AVX2_AVAILABLE)
# include <emmintrin.h>
# include <immintrin.h>
#endif

// GCC 4.5 and later provide x86intrin.h for these intrinsics
#if (CRYPTOPP_GCC_VERSION >= 40500)
# include <x86intrin.h>
#endif

ANONYMOUS_NAMESPACE_BEGIN
/* LSH constants */

const unsigned int LSH256_MSG_BLK_BYTE_LEN = 128;
const unsigned int LSH256_HASH_VAL_MAX_BYTE_LEN = 32;

const unsigned int CV_WORD_LEN = 16;
const unsigned int CONST_WORD_LEN = 8;
const unsigned int NUM_STEPS = 26;

const unsigned int ROT_EVEN_ALPHA = 29;
const unsigned int ROT_EVEN_BETA = 1;
const unsigned int ROT_ODD_ALPHA = 5;
const unsigned int ROT_ODD_BETA = 17;

const unsigned int LSH_TYPE_256_256 = 0x0000020;
const unsigned int LSH_TYPE_256_224 = 0x000001C;

/* Error codes */

const unsigned int LSH_SUCCESS = 0x0;
const unsigned int LSH_ERR_INVALID_DATABITLEN = 0x2403;
const unsigned int LSH_ERR_INVALID_STATE = 0x2404;

/* Indexes into the word32 state array */

const unsigned int AlgorithmType = 80;
const unsigned int RemainingBits = 81;
ANONYMOUS_NAMESPACE_END

NAMESPACE_BEGIN(CryptoPP)
NAMESPACE_BEGIN(LSH)

// IV and step-constant tables defined in the scalar LSH-256 implementation
extern const word32 LSH256_IV224[CV_WORD_LEN];
extern const word32 LSH256_IV256[CV_WORD_LEN];
extern const word32 LSH256_StepConstants[CONST_WORD_LEN * NUM_STEPS];

NAMESPACE_END  // LSH
NAMESPACE_END  // CryptoPP

ANONYMOUS_NAMESPACE_BEGIN
using CryptoPP::byte;
using CryptoPP::word32;
using CryptoPP::GetBlock;

// Fixed-width aliases used by the LSH reference code
typedef byte lsh_u8;
typedef word32 lsh_u32, lsh_uint, lsh_err, lsh_type;

using CryptoPP::LSH::LSH256_IV224;
using CryptoPP::LSH::LSH256_IV256;
using CryptoPP::LSH::LSH256_StepConstants;
struct LSH256_AVX2_Context
{
    LSH256_AVX2_Context(word32* state, word32 algType, word32& remainingBitLength) :
        cv_l(state+0), cv_r(state+8), sub_msgs(state+16),
        last_block(reinterpret_cast<byte*>(state+48)),
        remain_databitlen(remainingBitLength),
        alg_type(static_cast<lsh_type>(algType)) {}

    lsh_u32* cv_l;             // left chaining variable (8 words)
    lsh_u32* cv_r;             // right chaining variable (8 words)
    lsh_u32* sub_msgs;         // even/odd sub-message buffers (32 words)
    lsh_u8*  last_block;       // partial message block (128 bytes)
    lsh_u32& remain_databitlen;
    lsh_type alg_type;
};
struct LSH256_AVX2_Internal
{
    LSH256_AVX2_Internal(word32* state) :
        submsg_e_l(state+16), submsg_e_r(state+24),
        submsg_o_l(state+32), submsg_o_r(state+40) { }

    lsh_u32* submsg_e_l;  // even left sub-message
    lsh_u32* submsg_e_r;  // even right sub-message
    lsh_u32* submsg_o_l;  // odd left sub-message
    lsh_u32* submsg_o_r;  // odd right sub-message
};
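/* LSH AlgType helpers. The alg_type word is assumed to encode the digest
   length in bytes in its low 16 bits and the number of truncated bits in
   its top byte, matching LSH_TYPE_256_256 and LSH_TYPE_256_224 above. */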
inline bool LSH_IS_LSH512(lsh_uint val) {
    return (val & 0xf0000) == 0;
}

inline lsh_uint LSH_GET_SMALL_HASHBIT(lsh_uint val) {
    return val >> 24;
}

inline lsh_uint LSH_GET_HASHBYTE(lsh_uint val) {
    return val & 0xffff;
}

inline lsh_uint LSH_GET_HASHBIT(lsh_uint val) {
    return (LSH_GET_HASHBYTE(val) << 3) - LSH_GET_SMALL_HASHBIT(val);
}
// Scalar helpers carried over from the reference code; unused on the AVX2 path
inline lsh_u32 loadLE32(lsh_u32 v) {
    return CryptoPP::ConditionalByteReverse(CryptoPP::LITTLE_ENDIAN_ORDER, v);
}

lsh_u32 ROTL(lsh_u32 x, lsh_u32 r) {
    return CryptoPP::rotlFixed(x, r);
}
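// Load one 128-byte message block into the four 32-byte sub-message
// buffers (even/odd, left/right) with unaligned 256-bit loads.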
inline void load_msg_blk(LSH256_AVX2_Internal* i_state, const lsh_u8 msgblk[LSH256_MSG_BLK_BYTE_LEN])
{
    lsh_u32* submsg_e_l = i_state->submsg_e_l;
    lsh_u32* submsg_e_r = i_state->submsg_e_r;
    lsh_u32* submsg_o_l = i_state->submsg_o_l;
    lsh_u32* submsg_o_r = i_state->submsg_o_r;

    _mm256_storeu_si256(M256_CAST(submsg_e_l+0),
        _mm256_loadu_si256(CONST_M256_CAST(msgblk+0)));
    _mm256_storeu_si256(M256_CAST(submsg_e_r+0),
        _mm256_loadu_si256(CONST_M256_CAST(msgblk+32)));
    _mm256_storeu_si256(M256_CAST(submsg_o_l+0),
        _mm256_loadu_si256(CONST_M256_CAST(msgblk+64)));
    _mm256_storeu_si256(M256_CAST(submsg_o_r+0),
        _mm256_loadu_si256(CONST_M256_CAST(msgblk+96)));
}
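// Message expansion, even step: each even sub-message becomes the odd
// sub-message plus a word-permuted copy of the previous even sub-message.
// The word permutation is carried out as an in-lane byte shuffle ('mask').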
inline void msg_exp_even(LSH256_AVX2_Internal* i_state)
{
    lsh_u32* submsg_e_l = i_state->submsg_e_l;
    lsh_u32* submsg_e_r = i_state->submsg_e_r;
    lsh_u32* submsg_o_l = i_state->submsg_o_l;
    lsh_u32* submsg_o_r = i_state->submsg_o_r;

    const __m256i mask = _mm256_set_epi32(0x1b1a1918, 0x17161514,
        0x13121110, 0x1f1e1d1c, 0x07060504, 0x03020100, 0x0b0a0908, 0x0f0e0d0c);

    _mm256_storeu_si256(M256_CAST(submsg_e_l+0), _mm256_add_epi32(
        _mm256_loadu_si256(CONST_M256_CAST(submsg_o_l+0)),
        _mm256_shuffle_epi8(
            _mm256_loadu_si256(CONST_M256_CAST(submsg_e_l+0)), mask)));
    _mm256_storeu_si256(M256_CAST(submsg_e_r+0), _mm256_add_epi32(
        _mm256_loadu_si256(CONST_M256_CAST(submsg_o_r+0)),
        _mm256_shuffle_epi8(
            _mm256_loadu_si256(CONST_M256_CAST(submsg_e_r+0)), mask)));
}
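// Message expansion, odd step: the same permute-and-add with the even and
// odd sub-messages swapped.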
inline void msg_exp_odd(LSH256_AVX2_Internal* i_state)
{
    lsh_u32* submsg_e_l = i_state->submsg_e_l;
    lsh_u32* submsg_e_r = i_state->submsg_e_r;
    lsh_u32* submsg_o_l = i_state->submsg_o_l;
    lsh_u32* submsg_o_r = i_state->submsg_o_r;

    const __m256i mask = _mm256_set_epi32(0x1b1a1918, 0x17161514,
        0x13121110, 0x1f1e1d1c, 0x07060504, 0x03020100, 0x0b0a0908, 0x0f0e0d0c);

    _mm256_storeu_si256(M256_CAST(submsg_o_l+0), _mm256_add_epi32(
        _mm256_loadu_si256(CONST_M256_CAST(submsg_e_l+0)),
        _mm256_shuffle_epi8(
            _mm256_loadu_si256(CONST_M256_CAST(submsg_o_l+0)), mask)));
    _mm256_storeu_si256(M256_CAST(submsg_o_r+0), _mm256_add_epi32(
        _mm256_loadu_si256(CONST_M256_CAST(submsg_e_r+0)),
        _mm256_shuffle_epi8(
            _mm256_loadu_si256(CONST_M256_CAST(submsg_o_r+0)), mask)));
}
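// Point p_const_v at the i-th group of eight step constants.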
inline void load_sc(const lsh_u32** p_const_v, size_t i)
{
    *p_const_v = &LSH256_StepConstants[i];
}
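// XOR the even sub-messages into the left and right chaining variables.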
inline void msg_add_even(lsh_u32 cv_l[8], lsh_u32 cv_r[8], LSH256_AVX2_Internal* i_state)
{
    lsh_u32* submsg_e_l = i_state->submsg_e_l;
    lsh_u32* submsg_e_r = i_state->submsg_e_r;

    _mm256_storeu_si256(M256_CAST(cv_l+0), _mm256_xor_si256(
        _mm256_loadu_si256(CONST_M256_CAST(cv_l+0)),
        _mm256_loadu_si256(CONST_M256_CAST(submsg_e_l+0))));
    _mm256_storeu_si256(M256_CAST(cv_r+0), _mm256_xor_si256(
        _mm256_loadu_si256(CONST_M256_CAST(cv_r+0)),
        _mm256_loadu_si256(CONST_M256_CAST(submsg_e_r+0))));
}
inline void msg_add_odd(lsh_u32 cv_l[8], lsh_u32 cv_r[8], LSH256_AVX2_Internal* i_state)
{
    lsh_u32* submsg_o_l = i_state->submsg_o_l;
    lsh_u32* submsg_o_r = i_state->submsg_o_r;

    _mm256_storeu_si256(M256_CAST(cv_l), _mm256_xor_si256(
        _mm256_loadu_si256(CONST_M256_CAST(cv_l)),
        _mm256_loadu_si256(CONST_M256_CAST(submsg_o_l))));
    _mm256_storeu_si256(M256_CAST(cv_r), _mm256_xor_si256(
        _mm256_loadu_si256(CONST_M256_CAST(cv_r)),
        _mm256_loadu_si256(CONST_M256_CAST(submsg_o_r))));
}
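// Word-wise modular addition: the first argument is updated with the sum.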
inline void add_blk(lsh_u32 cv_l[8], lsh_u32 cv_r[8])
{
    _mm256_storeu_si256(M256_CAST(cv_l), _mm256_add_epi32(
        _mm256_loadu_si256(CONST_M256_CAST(cv_l)),
        _mm256_loadu_si256(CONST_M256_CAST(cv_r))));
}
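// Rotate each 32-bit word left by R bits. AVX2 has no 32-bit rotate, so it
// is emulated with a shift-left/shift-right/OR sequence.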
template <unsigned int R>
inline void rotate_blk(lsh_u32 cv[8])
{
    _mm256_storeu_si256(M256_CAST(cv), _mm256_or_si256(
        _mm256_slli_epi32(_mm256_loadu_si256(CONST_M256_CAST(cv)), R),
        _mm256_srli_epi32(_mm256_loadu_si256(CONST_M256_CAST(cv)), 32-R)));
}
inline void xor_with_const(lsh_u32 cv_l[8], const lsh_u32 const_v[8])
{
    _mm256_storeu_si256(M256_CAST(cv_l), _mm256_xor_si256(
        _mm256_loadu_si256(CONST_M256_CAST(cv_l)),
        _mm256_loadu_si256(CONST_M256_CAST(const_v))));
}
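// Gamma rotation of the right chaining variable. The per-word rotation
// amounts are multiples of 8 bits, so the whole step reduces to a byte shuffle.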
inline void rotate_msg_gamma(lsh_u32 cv_r[8])
{
    // gamma rotation amounts: { 0, 8, 16, 24, 24, 16, 8, 0 } bits per word
    _mm256_storeu_si256(M256_CAST(cv_r+0),
        _mm256_shuffle_epi8(_mm256_loadu_si256(CONST_M256_CAST(cv_r+0)),
            _mm256_set_epi8(
                15,14,13,12, 10,9,8,11, 5,4,7,6, 0,3,2,1,
                12,15,14,13, 9,8,11,10, 6,5,4,7, 3,2,1,0)));
}
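// Word permutation of the sixteen chaining-variable words: shuffle words
// within each 128-bit lane, then exchange lanes between cv_l and cv_r.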
inline void word_perm(lsh_u32 cv_l[8], lsh_u32 cv_r[8])
{
    __m256i temp = _mm256_shuffle_epi32(
        _mm256_loadu_si256(CONST_M256_CAST(cv_l)), _MM_SHUFFLE(3,1,0,2));
    _mm256_storeu_si256(M256_CAST(cv_r),
        _mm256_shuffle_epi32(
            _mm256_loadu_si256(CONST_M256_CAST(cv_r)), _MM_SHUFFLE(1,2,3,0)));
    _mm256_storeu_si256(M256_CAST(cv_l),
        _mm256_permute2x128_si256(temp,
            _mm256_loadu_si256(CONST_M256_CAST(cv_r)), _MM_SHUFFLE(0,3,0,1)));
    _mm256_storeu_si256(M256_CAST(cv_r),
        _mm256_permute2x128_si256(temp,
            _mm256_loadu_si256(CONST_M256_CAST(cv_r)), _MM_SHUFFLE(0,2,0,0)));
}
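// One mix step: add, rotate by Alpha, XOR the step constants, add, rotate
// by Beta, add, then the gamma byte-rotation of cv_r.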
template <unsigned int Alpha, unsigned int Beta>
inline void mix(lsh_u32 cv_l[8], lsh_u32 cv_r[8], const lsh_u32 const_v[8])
{
    add_blk(cv_l, cv_r);
    rotate_blk<Alpha>(cv_l);
    xor_with_const(cv_l, const_v);
    add_blk(cv_r, cv_l);
    rotate_blk<Beta>(cv_r);
    add_blk(cv_l, cv_r);
    rotate_msg_gamma(cv_r);
}
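// Compress one 128-byte block: inject the message, run a mix step and a
// word permutation NUM_STEPS times (alternating even/odd constants and
// message expansion), then inject the expanded message once more.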
inline void compress(LSH256_AVX2_Context* ctx, const lsh_u8 pdMsgBlk[LSH256_MSG_BLK_BYTE_LEN])
{
    LSH256_AVX2_Internal s_state(ctx->cv_l);
    LSH256_AVX2_Internal* i_state = &s_state;

    const lsh_u32* const_v = NULL;
    lsh_u32* cv_l = ctx->cv_l;
    lsh_u32* cv_r = ctx->cv_r;

    load_msg_blk(i_state, pdMsgBlk);

    msg_add_even(cv_l, cv_r, i_state);
    load_sc(&const_v, 0);
    mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
    word_perm(cv_l, cv_r);

    msg_add_odd(cv_l, cv_r, i_state);
    load_sc(&const_v, 8);
    mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
    word_perm(cv_l, cv_r);

    for (size_t i = 1; i < NUM_STEPS / 2; i++)
    {
        msg_exp_even(i_state);
        msg_add_even(cv_l, cv_r, i_state);
        load_sc(&const_v, 16 * i);
        mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);

        msg_exp_odd(i_state);
        msg_add_odd(cv_l, cv_r, i_state);
        load_sc(&const_v, 16 * i + 8);
        mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);
    }

    msg_exp_even(i_state);
    msg_add_even(cv_l, cv_r, i_state);
}
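// Load a precomputed IV into the chaining variables. The IV tables are
// expected to be 32-byte aligned, hence the aligned loads on them.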
inline void load_iv(lsh_u32 cv_l[8], lsh_u32 cv_r[8], const lsh_u32 iv[16])
{
    _mm256_storeu_si256(M256_CAST(cv_l+0),
        _mm256_load_si256(CONST_M256_CAST(iv+0)));
    _mm256_storeu_si256(M256_CAST(cv_r+0),
        _mm256_load_si256(CONST_M256_CAST(iv+8)));
}

inline void zero_iv(lsh_u32 cv_l[8], lsh_u32 cv_r[8])
{
    _mm256_storeu_si256(M256_CAST(cv_l+0), _mm256_setzero_si256());
    _mm256_storeu_si256(M256_CAST(cv_r+0), _mm256_setzero_si256());
}
inline void zero_submsgs(LSH256_AVX2_Context* ctx)
{
    lsh_u32* sub_msgs = ctx->sub_msgs;

    _mm256_storeu_si256(M256_CAST(sub_msgs+ 0), _mm256_setzero_si256());
    _mm256_storeu_si256(M256_CAST(sub_msgs+ 8), _mm256_setzero_si256());
    _mm256_storeu_si256(M256_CAST(sub_msgs+16), _mm256_setzero_si256());
    _mm256_storeu_si256(M256_CAST(sub_msgs+24), _mm256_setzero_si256());
}
inline void init224(LSH256_AVX2_Context* ctx)
{
    zero_submsgs(ctx);
    load_iv(ctx->cv_l, ctx->cv_r, LSH256_IV224);
}

inline void init256(LSH256_AVX2_Context* ctx)
{
    zero_submsgs(ctx);
    load_iv(ctx->cv_l, ctx->cv_r, LSH256_IV256);
}
inline void fin(LSH256_AVX2_Context* ctx)
{
    _mm256_storeu_si256(M256_CAST(ctx->cv_l+0), _mm256_xor_si256(
        _mm256_loadu_si256(CONST_M256_CAST(ctx->cv_l+0)),
        _mm256_loadu_si256(CONST_M256_CAST(ctx->cv_r+0))));
}
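// Copy the digest out of cv_l and mask any trailing bits for truncated
// output lengths.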
inline void get_hash(LSH256_AVX2_Context* ctx, lsh_u8* pbHashVal)
{
    lsh_uint alg_type = ctx->alg_type;
    lsh_uint hash_val_byte_len = LSH_GET_HASHBYTE(alg_type);
    lsh_uint hash_val_bit_len = LSH_GET_SMALL_HASHBIT(alg_type);

    memcpy(pbHashVal, ctx->cv_l, hash_val_byte_len);
    if (hash_val_bit_len){
        pbHashVal[hash_val_byte_len-1] &= (((lsh_u8)0xff) << hash_val_bit_len);
    }
}
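// Initialize the context. LSH-256-224 and LSH-256-256 use precomputed IVs;
// any other type derives its IV by running the step function over a state
// seeded with the output-length parameters.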
lsh_err lsh256_init_avx2(LSH256_AVX2_Context* ctx)
{
    lsh_u32 alg_type = ctx->alg_type;
    const lsh_u32* const_v = NULL;
    ctx->remain_databitlen = 0;

    switch (alg_type)
    {
    case LSH_TYPE_256_256: init256(ctx); return LSH_SUCCESS;
    case LSH_TYPE_256_224: init224(ctx); return LSH_SUCCESS;
    default: break;
    }

    lsh_u32* cv_l = ctx->cv_l;
    lsh_u32* cv_r = ctx->cv_r;

    zero_submsgs(ctx);
    zero_iv(cv_l, cv_r);
    cv_l[0] = LSH256_HASH_VAL_MAX_BYTE_LEN;
    cv_l[1] = LSH_GET_HASHBIT(alg_type);

    for (size_t i = 0; i < NUM_STEPS / 2; i++)
    {
        load_sc(&const_v, i * 16);
        mix<ROT_EVEN_ALPHA, ROT_EVEN_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);

        load_sc(&const_v, i * 16 + 8);
        mix<ROT_ODD_ALPHA, ROT_ODD_BETA>(cv_l, cv_r, const_v);
        word_perm(cv_l, cv_r);
    }

    return LSH_SUCCESS;
}
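// Absorb message bytes. Input is buffered in last_block until a full
// 128-byte block is available, then compressed; whole blocks in the input
// are compressed directly from the caller's buffer.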
lsh_err lsh256_update_avx2(LSH256_AVX2_Context* ctx, const lsh_u8* data, size_t databitlen)
{
    if (databitlen == 0){
        return LSH_SUCCESS;
    }

    // The Crypto++ wrapper only feeds whole bytes, so the sub-byte
    // remainders of the reference code are fixed at zero here.
    size_t databytelen = databitlen >> 3;
    const size_t pos2 = 0;

    size_t remain_msg_byte = ctx->remain_databitlen >> 3;
    const size_t remain_msg_bit = 0;

    if (remain_msg_byte >= LSH256_MSG_BLK_BYTE_LEN){
        return LSH_ERR_INVALID_STATE;
    }
    if (remain_msg_bit > 0){
        return LSH_ERR_INVALID_DATABITLEN;
    }

    if (databytelen + remain_msg_byte < LSH256_MSG_BLK_BYTE_LEN)
    {
        // Not enough for a full block; buffer the input and return.
        memcpy(ctx->last_block + remain_msg_byte, data, databytelen);
        ctx->remain_databitlen += (lsh_uint)databitlen;
        remain_msg_byte += (lsh_uint)databytelen;
        if (pos2){
            ctx->last_block[remain_msg_byte] = data[databytelen] & ((0xff >> pos2) ^ 0xff);
        }
        return LSH_SUCCESS;
    }

    if (remain_msg_byte > 0){
        // Top up the partially filled block and compress it.
        size_t more_byte = LSH256_MSG_BLK_BYTE_LEN - remain_msg_byte;
        memcpy(ctx->last_block + remain_msg_byte, data, more_byte);
        compress(ctx, ctx->last_block);
        data += more_byte;
        databytelen -= more_byte;
        remain_msg_byte = 0;
        ctx->remain_databitlen = 0;
    }

    while (databytelen >= LSH256_MSG_BLK_BYTE_LEN)
    {
        compress(ctx, data);
        data += LSH256_MSG_BLK_BYTE_LEN;
        databytelen -= LSH256_MSG_BLK_BYTE_LEN;
    }

    if (databytelen > 0){
        memcpy(ctx->last_block, data, databytelen);
        ctx->remain_databitlen = (lsh_uint)(databytelen << 3);
    }

    if (pos2){
        ctx->last_block[databytelen] = data[databytelen] & ((0xff >> pos2) ^ 0xff);
        ctx->remain_databitlen += pos2;
    }

    return LSH_SUCCESS;
}
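// Finalize: pad the last block with 0x80 and zeros, compress it, fold the
// chaining variables (cv_l ^= cv_r), and emit the digest.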
lsh_err lsh256_final_avx2(LSH256_AVX2_Context* ctx, lsh_u8* hashval)
{
    size_t remain_msg_byte = ctx->remain_databitlen >> 3;
    const size_t remain_msg_bit = 0;

    if (remain_msg_byte >= LSH256_MSG_BLK_BYTE_LEN){
        return LSH_ERR_INVALID_STATE;
    }

    if (remain_msg_bit){
        ctx->last_block[remain_msg_byte] |= (0x1 << (7 - remain_msg_bit));
    }
    else{
        ctx->last_block[remain_msg_byte] = 0x80;
    }
    memset(ctx->last_block + remain_msg_byte + 1, 0, LSH256_MSG_BLK_BYTE_LEN - remain_msg_byte - 1);

    compress(ctx, ctx->last_block);

    fin(ctx);
    get_hash(ctx, hashval);

    return LSH_SUCCESS;
}
ANONYMOUS_NAMESPACE_END
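// Crypto++-facing wrappers. The word32 state array holds cv_l, cv_r, the
// sub-messages and the partial block, with the algorithm type and the
// remaining-bit count at indexes AlgorithmType and RemainingBits.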
NAMESPACE_BEGIN(CryptoPP)

void LSH256_Base_Restart_AVX2(word32* state)
{
    state[RemainingBits] = 0;
    LSH256_AVX2_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
    lsh_err err = lsh256_init_avx2(&ctx);

    if (err != LSH_SUCCESS)
        throw Exception(Exception::OTHER_ERROR, "LSH256_Base: lsh256_init_avx2 failed");
}
void LSH256_Base_Update_AVX2(word32* state, const byte *input, size_t size)
{
    LSH256_AVX2_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
    lsh_err err = lsh256_update_avx2(&ctx, input, 8*size);

    if (err != LSH_SUCCESS)
        throw Exception(Exception::OTHER_ERROR, "LSH256_Base: lsh256_update_avx2 failed");
}
void LSH256_Base_TruncatedFinal_AVX2(word32* state, byte *hash, size_t)
{
    LSH256_AVX2_Context ctx(state, state[AlgorithmType], state[RemainingBits]);
    lsh_err err = lsh256_final_avx2(&ctx, hash);

    if (err != LSH_SUCCESS)
        throw Exception(Exception::OTHER_ERROR, "LSH256_Base: lsh256_final_avx2 failed");
}

NAMESPACE_END

#endif  // CRYPTOPP_AVX2_AVAILABLE