Lines matching defs: walk
159 struct skcipher_walk walk;
163 err = skcipher_walk_virt(&walk, req, false);
165 while ((nbytes = walk.nbytes)) {
167 aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
171 err = skcipher_walk_done(&walk, nbytes);
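
The listing shows only the lines that mention walk; this first group (159-171) is the body of ecb_encrypt(), apparently from arch/x86/crypto/aesni-intel_glue.c. A minimal reconstruction of the full loop, assuming that file's aes_ctx() helper and its AES_BLOCK_MASK macro (~(AES_BLOCK_SIZE - 1)):

static int ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                /* The AES-NI assembly needs the FPU and whole blocks. */
                kernel_fpu_begin();
                aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                kernel_fpu_end();
                /* Hand any sub-block remainder back to the walk. */
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}

skcipher_walk_virt() maps each scatterlist span to a virtual address; skcipher_walk_done() unmaps it, advances, and refills walk.nbytes until the request is exhausted.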
181 struct skcipher_walk walk;
185 err = skcipher_walk_virt(&walk, req, false);
187 while ((nbytes = walk.nbytes)) {
189 aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
193 err = skcipher_walk_done(&walk, nbytes);
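
ecb_decrypt() (181-193) is line-for-line the same loop, except that it calls aesni_ecb_dec(); ECB has no IV, so neither direction touches walk.iv.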
203 struct skcipher_walk walk;
207 err = skcipher_walk_virt(&walk, req, false);
209 while ((nbytes = walk.nbytes)) {
211 aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
212 nbytes & AES_BLOCK_MASK, walk.iv);
215 err = skcipher_walk_done(&walk, nbytes);
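
cbc_encrypt() (203-215) follows the same pattern with one addition: walk.iv is passed to the assembly, which chains each block and leaves the last ciphertext block behind in walk.iv so chaining continues across walk steps. A sketch of the loop body, under the same assumptions as above:

        while ((nbytes = walk.nbytes)) {
                kernel_fpu_begin();
                /* walk.iv carries the CBC chaining value across steps. */
                aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                kernel_fpu_end();
                nbytes &= AES_BLOCK_SIZE - 1;
                err = skcipher_walk_done(&walk, nbytes);
        }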
225 struct skcipher_walk walk;
229 err = skcipher_walk_virt(&walk, req, false);
231 while ((nbytes = walk.nbytes)) {
233 aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
234 nbytes & AES_BLOCK_MASK, walk.iv);
237 err = skcipher_walk_done(&walk, nbytes);
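
cbc_decrypt() (225-237) mirrors it with aesni_cbc_dec(), which likewise keeps walk.iv updated with the running chaining value so in-place decryption stays correct across steps.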
251 struct skcipher_walk walk;
287 err = skcipher_walk_virt(&walk, &subreq, false);
292 aesni_cts_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
293 walk.nbytes, walk.iv);
296 return skcipher_walk_done(&walk, 0);
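
The CTS-CBC encrypt path (251-296) has a different shape: by line 287 the request has been rearranged into a local subreq so that the final one or two blocks land in a single walk step, letting ciphertext stealing happen in one assembly call. Because that step consumes everything, the walk finishes with skcipher_walk_done(&walk, 0). A hedged sketch of just the tail, assuming the earlier subreq setup:

        err = skcipher_walk_virt(&walk, &subreq, false);
        if (err)
                return err;

        kernel_fpu_begin();
        aesni_cts_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                          walk.nbytes, walk.iv);
        kernel_fpu_end();

        /* The single step covered everything: report 0 bytes left. */
        return skcipher_walk_done(&walk, 0);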
307 struct skcipher_walk walk;
343 err = skcipher_walk_virt(&walk, &subreq, false);
348 aesni_cts_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
349 walk.nbytes, walk.iv);
352 return skcipher_walk_done(&walk, 0);
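
cts_cbc_decrypt() (307-352) is the symmetric path, ending in aesni_cts_cbc_dec() and the same single-step skcipher_walk_done(&walk, 0).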
362 struct skcipher_walk walk;
366 err = skcipher_walk_virt(&walk, req, false);
368 while ((nbytes = walk.nbytes) > 0) {
371 aesni_ctr_enc(ctx, walk.dst.virt.addr,
372 walk.src.virt.addr,
373 nbytes & AES_BLOCK_MASK, walk.iv);
376 if (walk.nbytes == walk.total && nbytes > 0) {
377 aesni_enc(ctx, keystream, walk.iv);
378 crypto_xor_cpy(walk.dst.virt.addr + walk.nbytes - nbytes,
379 walk.src.virt.addr + walk.nbytes - nbytes,
381 crypto_inc(walk.iv, AES_BLOCK_SIZE);
385 err = skcipher_walk_done(&walk, nbytes);
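
ctr_crypt() (362-385) has two cases per step: whole blocks go to the bulk assembly routine, and a trailing partial block, which can only occur on the final step (walk.nbytes == walk.total), is handled by encrypting one keystream block and XORing just the leftover bytes. Line 380, the keystream/length argument pair of crypto_xor_cpy(), is elided by the matcher because it doesn't mention walk. A reconstruction under the same assumptions, where keystream is a local u8[AES_BLOCK_SIZE]:

        while ((nbytes = walk.nbytes) > 0) {
                kernel_fpu_begin();
                if (nbytes & AES_BLOCK_MASK)
                        /* Bulk of the data: whole counter blocks. */
                        aesni_ctr_enc(ctx, walk.dst.virt.addr,
                                      walk.src.virt.addr,
                                      nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;

                if (walk.nbytes == walk.total && nbytes > 0) {
                        /* Final partial block: one keystream block,
                         * XOR only the bytes actually present. */
                        aesni_enc(ctx, keystream, walk.iv);
                        crypto_xor_cpy(walk.dst.virt.addr + walk.nbytes - nbytes,
                                       walk.src.virt.addr + walk.nbytes - nbytes,
                                       keystream, nbytes);
                        crypto_inc(walk.iv, AES_BLOCK_SIZE);
                        nbytes = 0;
                }
                kernel_fpu_end();
                err = skcipher_walk_done(&walk, nbytes);
        }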
427 struct skcipher_walk walk;
448 err = skcipher_walk_virt(&walk, req, false);
450 while (walk.nbytes) {
453 walk.src.virt.addr, walk.dst.virt.addr,
454 walk.nbytes & ~(AES_BLOCK_SIZE - 1), req->iv);
456 err = skcipher_walk_done(&walk,
457 walk.nbytes & (AES_BLOCK_SIZE - 1));
472 err = skcipher_walk_virt(&walk, req, false);
477 (*crypt_func)(&ctx->crypt_ctx, walk.src.virt.addr, walk.dst.virt.addr,
478 walk.nbytes, req->iv);
481 return skcipher_walk_done(&walk, 0);
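
The XTS group is really two walks. Lines 448-457 are the bulk pass, which pushes whole blocks through the crypt_func pointer and returns the sub-block remainder to the walk each step. When the message length isn't block-aligned, lines 472-481 run a second, single-step walk over the last full block plus the partial block so that ciphertext stealing happens in one assembly call. A hedged sketch of that tail pass, assuming the request length was trimmed earlier and scatterwalk_ffwd() is used to skip to the tail:

        dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
        if (req->dst != req->src)
                dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
        skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
                                   req->iv);

        err = skcipher_walk_virt(&walk, req, false);
        if (err)
                return err;

        kernel_fpu_begin();
        (*crypt_func)(&ctx->crypt_ctx, walk.src.virt.addr, walk.dst.virt.addr,
                      walk.nbytes, req->iv);
        kernel_fpu_end();

        return skcipher_walk_done(&walk, 0);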
663 struct skcipher_walk walk;
671 err = skcipher_walk_virt(&walk, req, false);
673 while ((nbytes = walk.nbytes) != 0) {
674 if (nbytes < walk.total) {
687 (*ctr64_func)(key, walk.src.virt.addr,
688 walk.dst.virt.addr, nbytes, le_ctr);
698 (*ctr64_func)(key, walk.src.virt.addr,
699 walk.dst.virt.addr, p1_nbytes, le_ctr);
702 (*ctr64_func)(key, walk.src.virt.addr + p1_nbytes,
703 walk.dst.virt.addr + p1_nbytes,
709 err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
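
The ctr64 helper (663-709) copes with assembly that only increments the low 64 bits of the big-endian counter block. Each step stays block-aligned except the last (673-674), and if the low 64 bits would wrap mid-step, the work is split in two at the wrap point (698-703) with a carry into the high half in between. A hedged sketch of the loop body, assuming le_ctr[] is the {low, high} counter pair and ctr64 tracks the running low half:

        while ((nbytes = walk.nbytes) != 0) {
                if (nbytes < walk.total) {
                        /* Not the last step: stay block-aligned. */
                        nbytes = round_down(nbytes, AES_BLOCK_SIZE);
                        nblocks = nbytes / AES_BLOCK_SIZE;
                } else {
                        /* Last step: include any partial block. */
                        nblocks = DIV_ROUND_UP(nbytes, AES_BLOCK_SIZE);
                }
                ctr64 += nblocks;

                kernel_fpu_begin();
                if (likely(ctr64 >= nblocks)) {
                        /* The low 64 bits don't wrap: one call. */
                        (*ctr64_func)(key, walk.src.virt.addr,
                                      walk.dst.virt.addr, nbytes, le_ctr);
                } else {
                        /* Wrap mid-step: split at the wrap point. */
                        p1_nbytes = min_t(unsigned int, nbytes,
                                          (nblocks - ctr64) * AES_BLOCK_SIZE);
                        (*ctr64_func)(key, walk.src.virt.addr,
                                      walk.dst.virt.addr, p1_nbytes, le_ctr);
                        le_ctr[0] = 0;
                        le_ctr[1]++;    /* carry into the high 64 bits */
                        (*ctr64_func)(key, walk.src.virt.addr + p1_nbytes,
                                      walk.dst.virt.addr + p1_nbytes,
                                      nbytes - p1_nbytes, le_ctr);
                }
                kernel_fpu_end();
                le_ctr[0] = ctr64;
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }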
727 struct skcipher_walk walk;
732 err = skcipher_walk_virt(&walk, req, false);
733 while ((nbytes = walk.nbytes) != 0) {
734 if (nbytes < walk.total)
738 (*xctr_func)(key, walk.src.virt.addr, walk.dst.virt.addr,
743 err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
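
The XCTR variant (727-743) is simpler: XCTR (the HCTR2 counter mode) XORs a little-endian block counter into the IV, so there is no wraparound split; the glue rounds intermediate steps down to whole blocks and advances a plain u64 counter. A sketch under the same assumptions, with ctr starting at 1 per the XCTR construction; the elided argument line presumably passes the length, req->iv, and the counter:

        while ((nbytes = walk.nbytes) != 0) {
                if (nbytes < walk.total)
                        nbytes = round_down(nbytes, AES_BLOCK_SIZE);

                kernel_fpu_begin();
                (*xctr_func)(key, walk.src.virt.addr, walk.dst.virt.addr,
                             nbytes, req->iv, ctr);
                kernel_fpu_end();

                ctr += DIV_ROUND_UP(nbytes, AES_BLOCK_SIZE);
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }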
1281 struct scatter_walk walk;
1291 scatterwalk_start(&walk, sg_src);
1295 &walk, assoclen);
1298 const u8 *src = walk.addr;
1322 scatterwalk_done_src(&walk, orig_len_this_step);
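
The GCM associated data (1281-1322) is only hashed, never encrypted, so it uses the lower-level scatter_walk instead of a skcipher_walk: scatterwalk_start() points the walk at the source scatterlist, each step exposes a mapped span at walk.addr, and scatterwalk_done_src() unmaps and advances. A hedged, simplified sketch assuming the recent scatterwalk_next() API; the real loop also buffers partial blocks, which is why line 1322 passes orig_len_this_step:

        scatterwalk_start(&walk, sg_src);
        while (assoclen) {
                unsigned int len = scatterwalk_next(&walk, assoclen);
                const u8 *src = walk.addr;

                /* ... fold len bytes at src into the GHASH accumulator ... */

                scatterwalk_done_src(&walk, len);
                assoclen -= len;
        }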
1341 struct skcipher_walk walk;
1365 err = skcipher_walk_aead_encrypt(&walk, req, false);
1367 err = skcipher_walk_aead_decrypt(&walk, req, false);
1385 while (unlikely((nbytes = walk.nbytes) < walk.total)) {
1394 aes_gcm_update(key, le_ctr, ghash_acc, walk.src.virt.addr,
1395 walk.dst.virt.addr, nbytes, flags);
1398 err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
1404 aes_gcm_update(key, le_ctr, ghash_acc, walk.src.virt.addr,
1405 walk.dst.virt.addr, nbytes, flags);
1439 skcipher_walk_done(&walk, 0);
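
gcm_crypt() (1341-1439) walks the message with skcipher_walk_aead_encrypt() or skcipher_walk_aead_decrypt() depending on direction. Every step except the last (1385-1398) is rounded down to whole blocks before aes_gcm_update() en/decrypts and folds the data into the GHASH accumulator; the final step (1404-1405) runs outside the loop so the tag computation can follow it directly, and line 1439 releases the walk on the failure path. A hedged sketch of the walk skeleton, with the direction flag (enc) assumed and the tag handling elided:

        err = enc ? skcipher_walk_aead_encrypt(&walk, req, false)
                  : skcipher_walk_aead_decrypt(&walk, req, false);
        if (err)
                return err;

        /* All but the final step: whole blocks only. */
        while (unlikely((nbytes = walk.nbytes) < walk.total)) {
                nbytes = round_down(nbytes, AES_BLOCK_SIZE);
                kernel_fpu_begin();
                aes_gcm_update(key, le_ctr, ghash_acc, walk.src.virt.addr,
                               walk.dst.virt.addr, nbytes, flags);
                kernel_fpu_end();
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }

        /* Final step, possibly including a partial block. */
        kernel_fpu_begin();
        aes_gcm_update(key, le_ctr, ghash_acc, walk.src.virt.addr,
                       walk.dst.virt.addr, nbytes, flags);
        /* ... compute or verify the tag ... */
        kernel_fpu_end();
        err = skcipher_walk_done(&walk, 0);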