/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM3 AVX accelerated transform,
 * specified in: https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02
 */

#include <linux/linkage.h>
#include <linux/cfi_types.h>
#include <asm/frame.h>

#define K1 -208106958 /* 0xf3988a32 */
#define K2 -416213915 /* 0xe7311465 */
#define K3 -832427829 /* 0xce6228cb */
#define K4 -1664855657 /* 0x9cc45197 */
#define K7 -433943364 /* 0xe6228cbc */
#define K8 -867886727 /* 0xcc451979 */
#define K9 -1735773453 /* 0x988a32f3 */
#define K12 -1001285732 /* 0xc451979c */
#define K13 -2002571463 /* 0x88a32f39 */
#define K16 -1651869049 /* 0x9d8a7a87 */
#define K19 -330050500 /* 0xec53d43c */
#define K20 -660100999 /* 0xd8a7a879 */
#define K21 -1320201997 /* 0xb14f50f3 */
#define K23 -985840690 /* 0xc53d43ce */
#define K24 -1971681379 /* 0x8a7a879d */
#define K28 -1482130984 /* 0xa7a879d8 */
#define K30 -1633556638 /* 0x9ea1e762 */
#define K33 -183551212 /* 0xf50f3b14 */
#define K34 -367102423 /* 0xea1e7629 */
#define K35 -734204845 /* 0xd43cec53 */
#define K36 -1468409689 /* 0xa879d8a7 */
#define K38 -1578671458 /* 0xa1e7629e */
#define K40 -2019718534 /* 0x879d8a7a */
#define K45 -206483632 /* 0xf3b14f50 */
#define K46 -412967263 /* 0xe7629ea1 */
#define K47 -825934525 /* 0xcec53d43 */
#define K48 -1651869049 /* 0x9d8a7a87 */
#define K51 -330050500 /* 0xec53d43c */
#define K52 -660100999 /* 0xd8a7a879 */
#define K53 -1320201997 /* 0xb14f50f3 */
#define K55 -985840690 /* 0xc53d43ce */
#define K56 -1971681379 /* 0x8a7a879d */
#define K60 -1482130984 /* 0xa7a879d8 */
#define K62 -1633556638 /* 0x9ea1e762 */
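
/*
 * The K constants above are the SM3 round constant rotated left by the round
 * number: K[j] = rol32(T, j mod 32), with T = 0x79cc4519 for rounds 0-15 and
 * T = 0x7a879d8a for rounds 16-63; the decimal values are the same 32-bit
 * words written as signed integers. A minimal C sketch of the derivation
 * (rol32() and sm3_k() are illustrative helpers, not part of this file):
 *
 *	#include <stdint.h>
 *
 *	static inline uint32_t rol32(uint32_t x, unsigned int n)
 *	{
 *		return (x << (n & 31)) | (x >> ((32 - n) & 31));
 *	}
 *
 *	static uint32_t sm3_k(unsigned int j)	// round index, 0..63
 *	{
 *		uint32_t t = (j < 16) ? 0x79cc4519 : 0x7a879d8a;
 *
 *		return rol32(t, j & 31);	// e.g. sm3_k(1) == 0xf3988a32 == K1
 *	}
 */
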
#define a	%r8d
#define h	%r15d

#define roll3(v, src, dst) \
	rorxl $(32-(v)), src, dst;

#define addl2(a, out) \
	leal (a, out), out;
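
/*
 * Note: addl2() does "out += a" with lea instead of add, so the addition
 * neither reads nor writes EFLAGS and stays independent of surrounding
 * flag-setting instructions.
 */
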
#define R(i, a, b, c, d, e, f, g, h, round, widx, wtype) \
	/* rol(a, 12) => t0 */ \
	roll3mov(12, a, t0); /* rorxl here would reduce perf by 6% on zen3 */ \
	/* h + w1 => h */ \
	addl wtype##_W1_ADDR(round, widx), h; \
	/* h + t1 => h */ \
	addl2(t1, h); \
	/* FF##i(a,b,c) => t1 */ \
	FF##i(a, b, c, t1, t2); \
	/* h + t2 => h */ \
	addl2(t2, h); \
	/* P0(h) => h */ \
	roll3(9, h, t2); \
	roll3(17, h, t1); \
	xorl t2, h; \
	xorl t1, h;
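
/*
 * For reference, one SM3 round as specified in the draft, in plain C
 * (a sketch of what R() computes per round; sm3_round(), rol32() and
 * sm3_k() are illustrative helpers, and "wj"/"wjp" correspond to the W1
 * and W1W2 words, i.e. W[j] and W[j] ^ W[j + 4]):
 *
 *	static void sm3_round(uint32_t s[8], unsigned int j,
 *			      uint32_t wj, uint32_t wjp)
 *	{
 *		uint32_t a = s[0], b = s[1], c = s[2], d = s[3];
 *		uint32_t e = s[4], f = s[5], g = s[6], h = s[7];
 *		uint32_t ss1 = rol32(rol32(a, 12) + e + sm3_k(j), 7);
 *		uint32_t ss2 = ss1 ^ rol32(a, 12);
 *		uint32_t ff = (j < 16) ? a ^ b ^ c : (a & b) | (a & c) | (b & c);
 *		uint32_t gg = (j < 16) ? e ^ f ^ g : (e & f) | (~e & g);
 *		uint32_t tt1 = ff + d + ss2 + wjp;
 *		uint32_t tt2 = gg + h + ss1 + wj;
 *
 *		s[3] = c; s[2] = rol32(b, 9); s[1] = a; s[0] = tt1;
 *		s[7] = g; s[6] = rol32(f, 19); s[5] = e;
 *		s[4] = tt2 ^ rol32(tt2, 9) ^ rol32(tt2, 17);	// P0(tt2)
 *	}
 *
 * The assembly avoids the final register shuffle by rotating the a..h
 * argument order between successive R1()/R2() invocations instead.
 */
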
#define R1(a, b, c, d, e, f, g, h, round, widx, wtype) \
	R(1, a, b, c, d, e, f, g, h, round, widx, wtype)

#define R2(a, b, c, d, e, f, g, h, round, widx, wtype) \
	R(2, a, b, c, d, e, f, g, h, round, widx, wtype)
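
/*
 * R1() is the rounds 0-15 variant and R2() the rounds 16-63 variant; they
 * differ only in the boolean functions selected via FF##i/GG##i. In C terms
 * (a sketch; the in-file FF/GG macros also take scratch registers):
 *
 *	#define FF1(x, y, z)	((x) ^ (y) ^ (z))
 *	#define FF2(x, y, z)	(((x) & (y)) | ((x) & (z)) | ((y) & (z)))
 *	#define GG1(x, y, z)	((x) ^ (y) ^ (z))
 *	#define GG2(x, y, z)	(((x) & (y)) | (~(x) & (z)))
 *
 * The round sequence below switches from R1() to R2() at round 16.
 */
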
/* Byte-swapped input address. */
	(STACK_W + ((((round) / 3) - 4) % 2) * 64 + (offs) + ((widx) * 4))(%rsp)

/* Rounds 1-12, byte-swapped input block addresses. */

/* Rounds 1-12, expanded input block addresses. */
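
/*
 * In the stack-address expression above, ((((round) / 3) - 4) % 2) picks one
 * of two 64-byte stack slots, so successive 3-round groups of expanded
 * message words alternate between two buffers. A quick C check of the index
 * it produces (illustrative only):
 *
 *	#include <stdio.h>
 *
 *	int main(void)
 *	{
 *		for (int round = 12; round <= 63; round += 3)
 *			printf("%d -> slot %d\n", round, ((round / 3) - 4) % 2);
 *		return 0;	// prints 12 -> 0, 15 -> 1, 18 -> 0, 21 -> 1, ...
 *	}
 */
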
	/* Load (w[i - 16]) => XTMP0 */ \
	/* Load (w[i - 13]) => XTMP1 */ \
	/* w[i - 9] == w3 */ \
	/* w[i - 3] == w5 */ \
	vpsrld $(32-15), w5, XTMP3; \
	vpsrld $(32-7), XTMP1, XTMP1; \
	/* w[i - 6] == XMM4 */ \
	vpsrld $(32-15), XTMP0, XTMP6; \
	vpsrld $(32-23), XTMP0, XTMP3; \
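
/*
 * The vpslld/vpsrld pairs above build 32-bit rotates from shift pairs; the
 * quantity they compute is the SM3 message expansion. A scalar C sketch
 * (sm3_expand() is an illustrative helper, using rol32() from the comment
 * near the K constants):
 *
 *	// Derive W[16..67] from the 16 big-endian words of the block.
 *	static void sm3_expand(uint32_t w[68])
 *	{
 *		for (int i = 16; i < 68; i++) {
 *			uint32_t x = w[i - 16] ^ w[i - 9] ^ rol32(w[i - 3], 15);
 *			uint32_t p1 = x ^ rol32(x, 15) ^ rol32(x, 23);	// P1(x)
 *
 *			w[i] = p1 ^ rol32(w[i - 13], 7) ^ w[i - 6];
 *		}
 *	}
 */
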
/*
 * Transform nblocks*64 bytes (nblocks*16 32-bit words) at DATA.
 */

	movl state_h0(RSTATE), a;
	movl state_h7(RSTATE), h;

	leaq -1(RNBLKS), RNBLKS;
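
	/*
	 * RNBLKS holds the number of 64-byte blocks left; it is decremented
	 * once per block and the block loop repeats while it is non-zero.
	 * Roughly, in C (control flow only, not the exact label layout):
	 *
	 *	do {
	 *		nblocks--;
	 *		// rounds 0..63 for one block, then fold into the state
	 *		data += 64;
	 *	} while (nblocks != 0);
	 */
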
	/* Transform 0-3 + Load data part2. */
	R1(a, b, c, d, e, f, g, h, 0, 0, IW); LOAD_W_XMM_2();
	R1(d, a, b, c, h, e, f, g, 1, 1, IW);
	R1(c, d, a, b, g, h, e, f, 2, 2, IW);
	R1(b, c, d, a, f, g, h, e, 3, 3, IW); LOAD_W_XMM_3();

	/* Transform 4-7 + Precalc 12-14. */
	R1(a, b, c, d, e, f, g, h, 4, 0, IW);
	R1(d, a, b, c, h, e, f, g, 5, 1, IW);
	R1(c, d, a, b, g, h, e, f, 6, 2, IW); SCHED_W_0(12, W0, W1, W2, W3, W4, W5);
	R1(b, c, d, a, f, g, h, e, 7, 3, IW); SCHED_W_1(12, W0, W1, W2, W3, W4, W5);

	/* Transform 8-11 + Precalc 12-17. */
	R1(a, b, c, d, e, f, g, h, 8, 0, IW); SCHED_W_2(12, W0, W1, W2, W3, W4, W5);
	R1(d, a, b, c, h, e, f, g, 9, 1, IW); SCHED_W_0(15, W1, W2, W3, W4, W5, W0);
	R1(c, d, a, b, g, h, e, f, 10, 2, IW); SCHED_W_1(15, W1, W2, W3, W4, W5, W0);
	R1(b, c, d, a, f, g, h, e, 11, 3, IW); SCHED_W_2(15, W1, W2, W3, W4, W5, W0);

	/* Transform 12-14 + Precalc 18-20 */
	R1(a, b, c, d, e, f, g, h, 12, 0, XW); SCHED_W_0(18, W2, W3, W4, W5, W0, W1);
	R1(d, a, b, c, h, e, f, g, 13, 1, XW); SCHED_W_1(18, W2, W3, W4, W5, W0, W1);
	R1(c, d, a, b, g, h, e, f, 14, 2, XW); SCHED_W_2(18, W2, W3, W4, W5, W0, W1);

	/* Transform 15-17 + Precalc 21-23 */
	R1(b, c, d, a, f, g, h, e, 15, 0, XW); SCHED_W_0(21, W3, W4, W5, W0, W1, W2);
	R2(a, b, c, d, e, f, g, h, 16, 1, XW); SCHED_W_1(21, W3, W4, W5, W0, W1, W2);
	R2(d, a, b, c, h, e, f, g, 17, 2, XW); SCHED_W_2(21, W3, W4, W5, W0, W1, W2);

	/* Transform 18-20 + Precalc 24-26 */
	R2(c, d, a, b, g, h, e, f, 18, 0, XW); SCHED_W_0(24, W4, W5, W0, W1, W2, W3);
	R2(b, c, d, a, f, g, h, e, 19, 1, XW); SCHED_W_1(24, W4, W5, W0, W1, W2, W3);
	R2(a, b, c, d, e, f, g, h, 20, 2, XW); SCHED_W_2(24, W4, W5, W0, W1, W2, W3);

	/* Transform 21-23 + Precalc 27-29 */
	R2(d, a, b, c, h, e, f, g, 21, 0, XW); SCHED_W_0(27, W5, W0, W1, W2, W3, W4);
	R2(c, d, a, b, g, h, e, f, 22, 1, XW); SCHED_W_1(27, W5, W0, W1, W2, W3, W4);
	R2(b, c, d, a, f, g, h, e, 23, 2, XW); SCHED_W_2(27, W5, W0, W1, W2, W3, W4);

	/* Transform 24-26 + Precalc 30-32 */
	R2(a, b, c, d, e, f, g, h, 24, 0, XW); SCHED_W_0(30, W0, W1, W2, W3, W4, W5);
	R2(d, a, b, c, h, e, f, g, 25, 1, XW); SCHED_W_1(30, W0, W1, W2, W3, W4, W5);
	R2(c, d, a, b, g, h, e, f, 26, 2, XW); SCHED_W_2(30, W0, W1, W2, W3, W4, W5);

	/* Transform 27-29 + Precalc 33-35 */
	R2(b, c, d, a, f, g, h, e, 27, 0, XW); SCHED_W_0(33, W1, W2, W3, W4, W5, W0);
	R2(a, b, c, d, e, f, g, h, 28, 1, XW); SCHED_W_1(33, W1, W2, W3, W4, W5, W0);
	R2(d, a, b, c, h, e, f, g, 29, 2, XW); SCHED_W_2(33, W1, W2, W3, W4, W5, W0);

	/* Transform 30-32 + Precalc 36-38 */
	R2(c, d, a, b, g, h, e, f, 30, 0, XW); SCHED_W_0(36, W2, W3, W4, W5, W0, W1);
	R2(b, c, d, a, f, g, h, e, 31, 1, XW); SCHED_W_1(36, W2, W3, W4, W5, W0, W1);
	R2(a, b, c, d, e, f, g, h, 32, 2, XW); SCHED_W_2(36, W2, W3, W4, W5, W0, W1);

	/* Transform 33-35 + Precalc 39-41 */
	R2(d, a, b, c, h, e, f, g, 33, 0, XW); SCHED_W_0(39, W3, W4, W5, W0, W1, W2);
	R2(c, d, a, b, g, h, e, f, 34, 1, XW); SCHED_W_1(39, W3, W4, W5, W0, W1, W2);
	R2(b, c, d, a, f, g, h, e, 35, 2, XW); SCHED_W_2(39, W3, W4, W5, W0, W1, W2);

	/* Transform 36-38 + Precalc 42-44 */
	R2(a, b, c, d, e, f, g, h, 36, 0, XW); SCHED_W_0(42, W4, W5, W0, W1, W2, W3);
	R2(d, a, b, c, h, e, f, g, 37, 1, XW); SCHED_W_1(42, W4, W5, W0, W1, W2, W3);
	R2(c, d, a, b, g, h, e, f, 38, 2, XW); SCHED_W_2(42, W4, W5, W0, W1, W2, W3);

	/* Transform 39-41 + Precalc 45-47 */
	R2(b, c, d, a, f, g, h, e, 39, 0, XW); SCHED_W_0(45, W5, W0, W1, W2, W3, W4);
	R2(a, b, c, d, e, f, g, h, 40, 1, XW); SCHED_W_1(45, W5, W0, W1, W2, W3, W4);
	R2(d, a, b, c, h, e, f, g, 41, 2, XW); SCHED_W_2(45, W5, W0, W1, W2, W3, W4);

	/* Transform 42-44 + Precalc 48-50 */
	R2(c, d, a, b, g, h, e, f, 42, 0, XW); SCHED_W_0(48, W0, W1, W2, W3, W4, W5);
	R2(b, c, d, a, f, g, h, e, 43, 1, XW); SCHED_W_1(48, W0, W1, W2, W3, W4, W5);
	R2(a, b, c, d, e, f, g, h, 44, 2, XW); SCHED_W_2(48, W0, W1, W2, W3, W4, W5);

	/* Transform 45-47 + Precalc 51-53 */
	R2(d, a, b, c, h, e, f, g, 45, 0, XW); SCHED_W_0(51, W1, W2, W3, W4, W5, W0);
	R2(c, d, a, b, g, h, e, f, 46, 1, XW); SCHED_W_1(51, W1, W2, W3, W4, W5, W0);
	R2(b, c, d, a, f, g, h, e, 47, 2, XW); SCHED_W_2(51, W1, W2, W3, W4, W5, W0);

	/* Transform 48-50 + Precalc 54-56 */
	R2(a, b, c, d, e, f, g, h, 48, 0, XW); SCHED_W_0(54, W2, W3, W4, W5, W0, W1);
	R2(d, a, b, c, h, e, f, g, 49, 1, XW); SCHED_W_1(54, W2, W3, W4, W5, W0, W1);
	R2(c, d, a, b, g, h, e, f, 50, 2, XW); SCHED_W_2(54, W2, W3, W4, W5, W0, W1);

	/* Transform 51-53 + Precalc 57-59 */
	R2(b, c, d, a, f, g, h, e, 51, 0, XW); SCHED_W_0(57, W3, W4, W5, W0, W1, W2);
	R2(a, b, c, d, e, f, g, h, 52, 1, XW); SCHED_W_1(57, W3, W4, W5, W0, W1, W2);
	R2(d, a, b, c, h, e, f, g, 53, 2, XW); SCHED_W_2(57, W3, W4, W5, W0, W1, W2);

	/* Transform 54-56 + Precalc 60-62 */
	R2(c, d, a, b, g, h, e, f, 54, 0, XW); SCHED_W_0(60, W4, W5, W0, W1, W2, W3);
	R2(b, c, d, a, f, g, h, e, 55, 1, XW); SCHED_W_1(60, W4, W5, W0, W1, W2, W3);
	R2(a, b, c, d, e, f, g, h, 56, 2, XW); SCHED_W_2(60, W4, W5, W0, W1, W2, W3);

	/* Transform 57-59 + Precalc 63 */
	R2(d, a, b, c, h, e, f, g, 57, 0, XW); SCHED_W_0(63, W5, W0, W1, W2, W3, W4);
	R2(c, d, a, b, g, h, e, f, 58, 1, XW);
	R2(b, c, d, a, f, g, h, e, 59, 2, XW); SCHED_W_1(63, W5, W0, W1, W2, W3, W4);

	/* Transform 60-62 + Precalc 63 */
	R2(a, b, c, d, e, f, g, h, 60, 0, XW);
	R2(d, a, b, c, h, e, f, g, 61, 1, XW); SCHED_W_2(63, W5, W0, W1, W2, W3, W4);
	R2(c, d, a, b, g, h, e, f, 62, 2, XW);

	R2(b, c, d, a, f, g, h, e, 63, 0, XW);

	xorl state_h0(RSTATE), a;
	movl a, state_h0(RSTATE);
	xorl state_h7(RSTATE), h;
	movl h, state_h7(RSTATE);
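
	/*
	 * SM3 folds the working variables back into the chaining value with
	 * XOR (unlike SHA-2's additions); per word, the xorl/movl pairs above
	 * are equivalent to (a sketch, assuming the state is eight u32 words
	 * behind the state_h0..state_h7 offsets):
	 *
	 *	static void sm3_feedback(uint32_t state[8],
	 *				 const uint32_t abcdefgh[8])
	 *	{
	 *		for (int i = 0; i < 8; i++)
	 *			state[i] ^= abcdefgh[i];  // V(i+1) = ABCDEFGH ^ V(i)
	 *	}
	 */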