Lines matching refs:blksz (grouped by function):

swcr_encdec():
   106:  int blksz, resid;
   124:  blksz = exf->blocksize;
   126:  blksz = exf->native_blocksize;
   161:  for (resid = crp->crp_payload_length; resid >= blksz; resid -= todo) {
   172:  if (inlen < blksz) {
   173:  crypto_cursor_copydata(&cc_in, blksz, blk);
   175:  inlen = blksz;
   177:  if (outlen < blksz) {
   179:  outlen = blksz;
   182:  todo = rounddown2(MIN(resid, MIN(inlen, outlen)), blksz);
   198:  crypto_cursor_copyback(&cc_out, blksz, blk);
   212:  KASSERT(resid < blksz, ("%s: partial block too big", __func__));
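
These swcr_encdec() matches trace the generic block-cipher walk: the payload is consumed in blksz-sized units, as many whole blocks as the current input and output cursor segments can hold are processed in place (the rounddown2/MIN expression at line 182), and a segment shorter than one block is bounced through the stack buffer blk (lines 172-179 and 198). The KASSERT at line 212 checks that only a partial block can remain after the loop. Below is a minimal userspace sketch of the same loop shape, with flat buffers standing in for crypto cursors and a hypothetical XOR "cipher"; none of this is the kernel API.

    #include <stdio.h>
    #include <string.h>

    #define MIN(a, b)        ((a) < (b) ? (a) : (b))
    #define rounddown2(x, y) ((x) & ~((y) - 1))  /* y must be a power of 2 */

    /* Hypothetical stand-in for one call into the block cipher. */
    static void
    toy_cipher(unsigned char *out, const unsigned char *in, int len)
    {
            for (int i = 0; i < len; i++)
                    out[i] = in[i] ^ 0x5a;
    }

    int
    main(void)
    {
            unsigned char payload[37], out[37];
            const unsigned char *inp = payload;
            unsigned char *outp = out;
            const int blksz = 16;
            int resid, todo;

            memset(payload, 'x', sizeof(payload));
            for (resid = (int)sizeof(payload); resid >= blksz; resid -= todo) {
                    /*
                     * With flat buffers each "segment" is simply the rest of
                     * the buffer; cursor segments can be shorter, which is
                     * what forces the bounce through blk in the real code.
                     */
                    int inlen = resid, outlen = resid;

                    todo = rounddown2(MIN(resid, MIN(inlen, outlen)), blksz);
                    toy_cipher(outp, inp, todo);
                    inp += todo;
                    outp += todo;
            }
            /* Mirrors the KASSERT at line 212: at most a partial block left. */
            printf("leftover: %d bytes (< %d)\n", resid, blksz);
            return (0);
    }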
swcr_gmac():
   335:  int blksz, error, ivlen, resid;
   339:  blksz = GMAC_BLOCK_LEN;
   340:  KASSERT(axf->blocksize == blksz, ("%s: axf block size mismatch",
   357:  for (resid = crp->crp_payload_length; resid >= blksz; resid -= len) {
   359:  if (len >= blksz) {
   360:  len = rounddown(MIN(len, resid), blksz);
   363:  len = blksz;
   370:  memset(blk, 0, blksz);
   372:  axf->Update(&s.ctx, blk, blksz);
   376:  memset(blk, 0, blksz);
   379:  axf->Update(&s.ctx, blk, blksz);
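
In swcr_gmac() the MAC-only path has no output to scatter, so each sufficiently large segment is hashed directly (line 360 uses rounddown() where the sibling functions use rounddown2(); the two agree here since GMAC_BLOCK_LEN is a power of two), and the ragged tail is zero-padded to a full block before the final Update (lines 376-379), since GHASH only consumes whole 16-byte blocks. A sketch of that tail handling, with a toy XOR fold standing in for axf->Update():

    #include <stdio.h>
    #include <string.h>

    #define GMAC_BLOCK_LEN 16

    /* Hypothetical stand-in for axf->Update(): fold bytes into the state. */
    static void
    toy_update(unsigned char ctx[GMAC_BLOCK_LEN], const unsigned char *p,
        int len)
    {
            for (int i = 0; i < len; i++)
                    ctx[i % GMAC_BLOCK_LEN] ^= p[i];
    }

    int
    main(void)
    {
            unsigned char ctx[GMAC_BLOCK_LEN] = { 0 }, blk[GMAC_BLOCK_LEN];
            unsigned char data[21];
            const unsigned char *p = data;
            int blksz = GMAC_BLOCK_LEN, resid;

            memset(data, 'a', sizeof(data));
            /* Whole blocks first, as in the loop at line 357. */
            for (resid = (int)sizeof(data); resid >= blksz; resid -= blksz) {
                    toy_update(ctx, p, blksz);
                    p += blksz;
            }
            /*
             * Ragged tail: zero the block, copy what is left, and hash a
             * full block anyway (the memset/Update pair at lines 376-379).
             */
            if (resid > 0) {
                    memset(blk, 0, blksz);
                    memcpy(blk, p, resid);
                    toy_update(ctx, blk, blksz);
            }
            printf("hashed %zu bytes in whole blocks\n", sizeof(data));
            return (0);
    }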
swcr_gcm():
   416:  int blksz, error, ivlen, r, resid;
   421:  blksz = GMAC_BLOCK_LEN;
   422:  KASSERT(blksz == exf->native_blocksize,
   440:  inlen = rounddown2(crp->crp_aad_length, blksz);
   444:  memset(blk, 0, blksz);
   447:  exf->update(ctx, blk, blksz);
   452:  for (resid = crp->crp_aad_length; resid >= blksz;
   455:  if (inlen >= blksz) {
   456:  inlen = rounddown2(MIN(inlen, resid), blksz);
   459:  inlen = blksz;
   466:  memset(blk, 0, blksz);
   468:  exf->update(ctx, blk, blksz);
   482:  for (resid = crp->crp_payload_length; resid >= blksz; resid -= todo) {
   488:  if (inlen < blksz) {
   489:  crypto_cursor_copydata(&cc_in, blksz, blk);
   491:  inlen = blksz;
   495:  if (outlen < blksz) {
   497:  outlen = blksz;
   501:  blksz);
   507:  crypto_cursor_copyback(&cc_out, blksz, blk);
   515:  todo = rounddown2(MIN(resid, inlen), blksz);
   537:  memset(blk, 0, blksz);
   542:  exf->update(ctx, blk, blksz);
   563:  for (resid = crp->crp_payload_length; resid > blksz;
   569:  if (inlen < blksz) {
   570:  crypto_cursor_copydata(&cc_in, blksz, blk);
   572:  inlen = blksz;
   574:  if (outlen < blksz) {
   576:  outlen = blksz;
   580:  blksz);
   593:  crypto_cursor_copyback(&cc_out, blksz, blk);
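
swcr_gcm() wraps the same walk with two extra pieces: the AAD is hashed first, whole blocks taken directly and the remainder zero-padded (lines 440-447 for flat AAD, 452-468 for the cursor case), and the second payload loop at line 563 bounds on resid > blksz rather than >=, leaving the final block for the code after the loop. The memset/update pair at lines 537-542 most likely builds GCM's closing GHASH block, which per NIST SP 800-38D packs the AAD and ciphertext bit lengths as two big-endian 64-bit words. A sketch of that standard construction (the function name is mine, not the kernel's):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define GMAC_BLOCK_LEN 16

    /*
     * Build GCM's closing GHASH block: 64-bit big-endian *bit* lengths of
     * the AAD and the ciphertext, per NIST SP 800-38D.
     */
    static void
    gcm_length_block(unsigned char blk[GMAC_BLOCK_LEN],
        uint64_t aad_len, uint64_t ct_len)
    {
            memset(blk, 0, GMAC_BLOCK_LEN);
            for (int i = 0; i < 8; i++) {
                    blk[7 - i]  = (aad_len * 8) >> (i * 8);
                    blk[15 - i] = (ct_len * 8) >> (i * 8);
            }
    }

    int
    main(void)
    {
            unsigned char blk[GMAC_BLOCK_LEN];

            gcm_length_block(blk, 20, 64);  /* 20 AAD bytes, 64 ct bytes */
            for (int i = 0; i < GMAC_BLOCK_LEN; i++)
                    printf("%02x", blk[i]);
            printf("\n");
            return (0);
    }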
swcr_ccm():
   750:  int blksz, error, ivlen, r, resid;
   756:  blksz = AES_BLOCK_LEN;
   757:  KASSERT(blksz == exf->native_blocksize,
   812:  for (resid = crp->crp_payload_length; resid >= blksz; resid -= todo) {
   818:  if (inlen < blksz) {
   819:  crypto_cursor_copydata(&cc_in, blksz, blk);
   821:  inlen = blksz;
   825:  if (outlen < blksz) {
   827:  outlen = blksz;
   831:  blksz);
   837:  crypto_cursor_copyback(&cc_out, blksz, blk);
   853:  todo = blksz;
   898:  for (resid = crp->crp_payload_length; resid >= blksz;
   906:  if (inlen < blksz) {
   907:  crypto_cursor_copydata(&cc_in, blksz, blk);
   909:  inlen = blksz;
   911:  if (outlen < blksz) {
   913:  outlen = blksz;
   917:  blksz);
   930:  crypto_cursor_copyback(&cc_out, blksz, blk);
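
swcr_ccm() pins blksz to AES_BLOCK_LEN and asserts it matches the transform's native block size (lines 756-757). Its loops are the same cursor walk; the visible difference at line 853 is that when a segment cannot hold a whole block, exactly one block is gathered into blk, processed, and scattered back, so todo is simply blksz. The gathering itself is what crypto_cursor_copydata() does at lines 819 and 907. A standalone sketch of pulling one block across a segment boundary (struct seg is a made-up stand-in for the cursor's view of the buffer chain):

    #include <stdio.h>
    #include <string.h>

    #define AES_BLOCK_LEN 16

    /* Hypothetical scatter/gather segment. */
    struct seg {
            const unsigned char *p;
            int len;
    };

    /*
     * Gather exactly one AES block that may straddle segment boundaries,
     * the job crypto_cursor_copydata() does in the kernel.
     */
    static void
    gather_block(const struct seg *segs, int idx, int off,
        unsigned char blk[AES_BLOCK_LEN])
    {
            unsigned char *dst = blk;
            int need = AES_BLOCK_LEN;

            while (need > 0) {
                    int n = segs[idx].len - off;
                    if (n > need)
                            n = need;
                    memcpy(dst, segs[idx].p + off, n);
                    dst += n;
                    need -= n;
                    idx++;
                    off = 0;
            }
    }

    int
    main(void)
    {
            unsigned char a[10], b[22], blk[AES_BLOCK_LEN];
            struct seg segs[2] = { { a, sizeof(a) }, { b, sizeof(b) } };

            memset(a, '1', sizeof(a));
            memset(b, '2', sizeof(b));
            gather_block(segs, 0, 0, blk);  /* 10 bytes from a, 6 from b */
            printf("%.16s\n", (char *)blk); /* 1111111111222222 */
            return (0);
    }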
swcr_chacha20_poly1305():
   975:  int blksz, error, r, resid;
   980:  blksz = exf->native_blocksize;
   981:  KASSERT(blksz <= sizeof(s.blkbuf), ("%s: blocksize mismatch", __func__));
  1020:  for (resid = crp->crp_payload_length; resid >= blksz;
  1028:  if (inlen < blksz) {
  1029:  crypto_cursor_copydata(&cc_in, blksz, blk);
  1031:  inlen = blksz;
  1034:  if (outlen < blksz) {
  1036:  outlen = blksz;
  1040:  blksz);
  1054:  crypto_cursor_copyback(&cc_out, blksz, blk);
  1103:  for (resid = crp->crp_payload_length; resid > blksz;
  1110:  if (inlen < blksz) {
  1111:  crypto_cursor_copydata(&cc_in, blksz, blk);
  1113:  inlen = blksz;
  1115:  if (outlen < blksz) {
  1117:  outlen = blksz;
  1121:  blksz);
  1134:  crypto_cursor_copyback(&cc_out, blksz, blk);
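
swcr_chacha20_poly1305() is the one consumer here that takes blksz straight from exf->native_blocksize and guards it against the stack buffer rather than a constant (line 981): ChaCha20's native block is 64 bytes, four times that of the AES transforms. As in swcr_gcm(), the decrypt-side loop at line 1103 uses the strict resid > blksz bound, so the final block falls through to the tail path after the loop. A sketch of the size guard, using illustrative names (the real sizes live in the transform definition):

    #include <assert.h>
    #include <stdio.h>

    /* Illustrative: 64 is ChaCha20's native block size. */
    #define CHACHA20_NATIVE_BLOCK_LEN 64

    /* Made-up stand-in for the on-stack state holding blkbuf. */
    struct swcr_state {
            unsigned char blkbuf[CHACHA20_NATIVE_BLOCK_LEN];
    };

    int
    main(void)
    {
            struct swcr_state s;
            int blksz = CHACHA20_NATIVE_BLOCK_LEN;

            /* The runtime KASSERT's condition from line 981 ... */
            assert(blksz <= (int)sizeof(s.blkbuf));
            /* ... which can also be checked at compile time when the
             * transform is known statically. */
            _Static_assert(CHACHA20_NATIVE_BLOCK_LEN <= sizeof(s.blkbuf),
                "stack block buffer too small");
            printf("blkbuf holds a %d-byte native block\n", blksz);
            return (0);
    }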