
Searched refs:walk (Results 1 – 25 of 162) sorted by relevance


/linux/include/crypto/
scatterwalk.h
29 static inline void scatterwalk_start(struct scatter_walk *walk, in scatterwalk_start() argument
32 walk->sg = sg; in scatterwalk_start()
33 walk->offset = sg->offset; in scatterwalk_start()
40 static inline void scatterwalk_start_at_pos(struct scatter_walk *walk, in scatterwalk_start_at_pos() argument
48 walk->sg = sg; in scatterwalk_start_at_pos()
49 walk->offset = sg->offset + pos; in scatterwalk_start_at_pos()
52 static inline unsigned int scatterwalk_clamp(struct scatter_walk *walk, in scatterwalk_clamp() argument
58 if (walk->offset >= walk->sg->offset + walk->sg->length) in scatterwalk_clamp()
59 scatterwalk_start(walk, sg_next(walk->sg)); in scatterwalk_clamp()
60 len_this_sg = walk->sg->offset + walk->sg->length - walk->offset; in scatterwalk_clamp()
[all …]
/linux/crypto/
skcipher.c
40 static int skcipher_walk_next(struct skcipher_walk *walk);
42 static inline gfp_t skcipher_walk_gfp(struct skcipher_walk *walk) in skcipher_walk_gfp() argument
44 return walk->flags & SKCIPHER_WALK_SLEEP ? GFP_KERNEL : GFP_ATOMIC; in skcipher_walk_gfp()
71 int skcipher_walk_done(struct skcipher_walk *walk, int res) in skcipher_walk_done() argument
73 unsigned int n = walk->nbytes; /* num bytes processed this step */ in skcipher_walk_done()
81 total = walk->total - n; in skcipher_walk_done()
84 if (likely(!(walk->flags & (SKCIPHER_WALK_SLOW | in skcipher_walk_done()
87 scatterwalk_advance(&walk->in, n); in skcipher_walk_done()
88 } else if (walk->flags & SKCIPHER_WALK_DIFF) { in skcipher_walk_done()
89 scatterwalk_done_src(&walk->in, n); in skcipher_walk_done()
[all …]
scatterwalk.c
18 void scatterwalk_skip(struct scatter_walk *walk, unsigned int nbytes) in scatterwalk_skip() argument
20 struct scatterlist *sg = walk->sg; in scatterwalk_skip()
22 nbytes += walk->offset - sg->offset; in scatterwalk_skip()
28 walk->sg = sg; in scatterwalk_skip()
29 walk->offset = sg->offset + nbytes; in scatterwalk_skip()
33 inline void memcpy_from_scatterwalk(void *buf, struct scatter_walk *walk, in memcpy_from_scatterwalk() argument
39 to_copy = scatterwalk_next(walk, nbytes); in memcpy_from_scatterwalk()
40 memcpy(buf, walk->addr, to_copy); in memcpy_from_scatterwalk()
41 scatterwalk_done_src(walk, to_copy); in memcpy_from_scatterwalk()
48 inline void memcpy_to_scatterwalk(struct scatter_walk *walk, const void *buf, in memcpy_to_scatterwalk() argument
[all …]
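
The scatterwalk snippets above (scatterwalk.h and scatterwalk.c) share one shape: scatterwalk_start() aims the walk at a scatterlist, scatterwalk_next() maps the next contiguous run and returns its length with the mapping exposed through walk.addr, and scatterwalk_done_src() unmaps and advances, exactly as memcpy_from_scatterwalk() does. A minimal sketch of a consumer built only from those calls (process() is a hypothetical callback; error handling omitted):

	/* needs <crypto/scatterwalk.h>; a sketch, not kernel code */
	static void sketch_consume(struct scatterlist *sg, unsigned int nbytes)
	{
		struct scatter_walk walk;

		scatterwalk_start(&walk, sg);
		while (nbytes) {
			/* map up to nbytes of the current sg entry */
			unsigned int n = scatterwalk_next(&walk, nbytes);

			process(walk.addr, n);		/* hypothetical consumer */
			scatterwalk_done_src(&walk, n);	/* unmap and advance */
			nbytes -= n;
		}
	}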
zstd.c
104 struct acomp_walk walk; in zstd_compress() local
114 ret = acomp_walk_virt(&walk, req, true); in zstd_compress()
125 dcur = acomp_walk_next_dst(&walk); in zstd_compress()
132 outbuf.dst = (u8 *)walk.dst.virt.addr; in zstd_compress()
136 scur = acomp_walk_next_src(&walk); in zstd_compress()
138 ret = zstd_compress_one(req, ctx, walk.src.virt.addr, in zstd_compress()
139 walk.dst.virt.addr, &total_out); in zstd_compress()
140 acomp_walk_done_src(&walk, scur); in zstd_compress()
141 acomp_walk_done_dst(&walk, dcur); in zstd_compress()
147 inbuf.src = walk.src.virt.addr; in zstd_compress()
[all …]
deflate.c
66 struct acomp_walk walk; in deflate_compress_one() local
69 ret = acomp_walk_virt(&walk, req, true); in deflate_compress_one()
76 dcur = acomp_walk_next_dst(&walk); in deflate_compress_one()
81 stream->next_out = walk.dst.virt.addr; in deflate_compress_one()
90 scur = acomp_walk_next_src(&walk); in deflate_compress_one()
92 if (acomp_walk_more_src(&walk, scur)) in deflate_compress_one()
95 stream->next_in = walk.src.virt.addr; in deflate_compress_one()
102 acomp_walk_done_src(&walk, scur); in deflate_compress_one()
106 acomp_walk_done_dst(&walk, dcur); in deflate_compress_one()
146 struct acomp_walk walk; in deflate_decompress_one() local
[all …]
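
zstd.c and deflate.c drive an acomp_walk the same way: acomp_walk_virt() primes the walk from the request, acomp_walk_next_src()/acomp_walk_next_dst() hand back the sizes of the next virtually mapped source and destination chunks (the data itself sits at walk.src.virt.addr and walk.dst.virt.addr), and the matching done calls retire them. A rough sketch inferred only from the two call sites above; compress_chunk() is hypothetical:

	struct acomp_walk walk;
	int ret = acomp_walk_virt(&walk, req, true);

	while (ret == 0) {
		unsigned int dcur = acomp_walk_next_dst(&walk);
		unsigned int scur = acomp_walk_next_src(&walk);

		if (!scur || !dcur)
			break;
		/* consume scur input bytes, emit at most dcur output bytes */
		ret = compress_chunk(walk.src.virt.addr, scur,
				     walk.dst.virt.addr, dcur);
		acomp_walk_done_src(&walk, scur);
		acomp_walk_done_dst(&walk, dcur);
	}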
/linux/mm/
pagewalk.c
31 unsigned long end, struct mm_walk *walk) in walk_pte_range_inner() argument
33 const struct mm_walk_ops *ops = walk->ops; in walk_pte_range_inner()
41 walk); in walk_pte_range_inner()
45 set_pte_at(walk->mm, addr, pte, new_pte); in walk_pte_range_inner()
47 if (!WARN_ON_ONCE(walk->no_vma)) in walk_pte_range_inner()
48 update_mmu_cache(walk->vma, addr, pte); in walk_pte_range_inner()
50 err = ops->pte_entry(pte, addr, addr + PAGE_SIZE, walk); in walk_pte_range_inner()
63 struct mm_walk *walk) in walk_pte_range() argument
69 if (walk->no_vma) { in walk_pte_range()
76 if (walk->mm == &init_mm || addr >= TASK_SIZE) in walk_pte_range()
[all …]
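
walk_pte_range_inner() shows the contract from the callback side: pagewalk.c locks and iterates the page tables itself and invokes ops->pte_entry once per PTE, so a caller only fills in an mm_walk_ops and calls walk_page_range(). A minimal sketch assuming a recent kernel (the counting callback is hypothetical; the walk must run under the mmap lock):

	static int count_present_pte(pte_t *pte, unsigned long addr,
				     unsigned long next, struct mm_walk *walk)
	{
		unsigned long *count = walk->private;	/* caller-supplied */

		if (pte_present(ptep_get(pte)))
			(*count)++;
		return 0;	/* non-zero aborts the walk */
	}

	static const struct mm_walk_ops count_ops = {
		.pte_entry = count_present_pte,
	};

	/* with mmap_read_lock(mm) held:
	 *	err = walk_page_range(mm, start, end, &count_ops, &count);
	 */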
mapping_dirty_helpers.c
35 struct mm_walk *walk) in wp_pte() argument
37 struct wp_walk *wpwalk = walk->private; in wp_pte()
41 pte_t old_pte = ptep_modify_prot_start(walk->vma, addr, pte); in wp_pte()
44 ptep_modify_prot_commit(walk->vma, addr, pte, old_pte, ptent); in wp_pte()
90 unsigned long end, struct mm_walk *walk) in clean_record_pte() argument
92 struct wp_walk *wpwalk = walk->private; in clean_record_pte()
97 pgoff_t pgoff = ((addr - walk->vma->vm_start) >> PAGE_SHIFT) + in clean_record_pte()
98 walk->vma->vm_pgoff - cwalk->bitmap_pgoff; in clean_record_pte()
99 pte_t old_pte = ptep_modify_prot_start(walk->vma, addr, pte); in clean_record_pte()
102 ptep_modify_prot_commit(walk->vma, addr, pte, old_pte, ptent); in clean_record_pte()
[all …]
ptdump.c
17 static inline int note_kasan_page_table(struct mm_walk *walk, in note_kasan_page_table() argument
20 struct ptdump_state *st = walk->private; in note_kasan_page_table()
24 walk->action = ACTION_CONTINUE; in note_kasan_page_table()
31 unsigned long next, struct mm_walk *walk) in ptdump_pgd_entry() argument
33 struct ptdump_state *st = walk->private; in ptdump_pgd_entry()
39 return note_kasan_page_table(walk, addr); in ptdump_pgd_entry()
47 walk->action = ACTION_CONTINUE; in ptdump_pgd_entry()
54 unsigned long next, struct mm_walk *walk) in ptdump_p4d_entry() argument
56 struct ptdump_state *st = walk->private; in ptdump_p4d_entry()
62 return note_kasan_page_table(walk, addr); in ptdump_p4d_entry()
[all …]
hmm.c
74 unsigned int required_fault, struct mm_walk *walk) in hmm_vma_fault() argument
76 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_fault()
77 struct vm_area_struct *vma = walk->vma; in hmm_vma_fault()
158 __always_unused int depth, struct mm_walk *walk) in hmm_vma_walk_hole() argument
160 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_walk_hole()
171 if (!walk->vma) { in hmm_vma_walk_hole()
177 return hmm_vma_fault(addr, end, required_fault, walk); in hmm_vma_walk_hole()
197 static int hmm_vma_handle_pmd(struct mm_walk *walk, unsigned long addr, in hmm_vma_handle_pmd() argument
201 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_handle_pmd()
212 return hmm_vma_fault(addr, end, required_fault, walk); in hmm_vma_handle_pmd()
[all …]
hugetlb_vmemmap.c
37 struct vmemmap_remap_walk *walk);
53 struct vmemmap_remap_walk *walk) in vmemmap_split_pmd() argument
88 if (!(walk->flags & VMEMMAP_SPLIT_NO_TLB_FLUSH)) in vmemmap_split_pmd()
99 unsigned long next, struct mm_walk *walk) in vmemmap_pmd_entry() argument
103 struct vmemmap_remap_walk *vmemmap_walk = walk->private; in vmemmap_pmd_entry()
107 walk->action = ACTION_CONTINUE; in vmemmap_pmd_entry()
140 unsigned long next, struct mm_walk *walk) in vmemmap_pte_entry() argument
142 struct vmemmap_remap_walk *vmemmap_walk = walk->private; in vmemmap_pte_entry()
163 struct vmemmap_remap_walk *walk) in vmemmap_remap_range() argument
171 NULL, walk); in vmemmap_remap_range()
[all …]
/linux/arch/arm64/crypto/
aes-glue.c
184 struct skcipher_walk walk; in ecb_encrypt() local
187 err = skcipher_walk_virt(&walk, req, false); in ecb_encrypt()
189 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
191 aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
193 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
203 struct skcipher_walk walk; in ecb_decrypt() local
206 err = skcipher_walk_virt(&walk, req, false); in ecb_decrypt()
208 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
210 aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_decrypt()
212 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
[all …]
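
ecb_encrypt()/ecb_decrypt() here are the caller side of the skcipher_walk_done() machinery from skcipher.c above, and the canonical loop repeated by most of the glue code below: skcipher_walk_virt() maps the first chunk, the body processes the whole AES blocks in walk.nbytes, and skcipher_walk_done() is passed the unprocessed remainder so the walk can advance (walk.nbytes reaches zero once the request is finished). A hedged sketch, with my_ecb_encrypt() standing in for the architecture-specific helper and the NEON begin/end calls omitted:

	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	while ((blocks = walk.nbytes / AES_BLOCK_SIZE)) {
		/* process the full blocks in this mapped chunk */
		my_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, blocks);
		/* hand back the leftover bytes; advances to the next chunk */
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;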
aes-neonbs-glue.c
100 struct skcipher_walk walk; in __ecb_crypt() local
103 err = skcipher_walk_virt(&walk, req, false); in __ecb_crypt()
105 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
106 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
108 if (walk.nbytes < walk.total) in __ecb_crypt()
110 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
113 fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk, in __ecb_crypt()
115 err = skcipher_walk_done(&walk, in __ecb_crypt()
116 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
158 struct skcipher_walk walk; in cbc_encrypt() local
[all …]
sm4-neon-glue.c
38 struct skcipher_walk walk; in sm4_ecb_do_crypt() local
42 err = skcipher_walk_virt(&walk, req, false); in sm4_ecb_do_crypt()
44 while ((nbytes = walk.nbytes) > 0) { in sm4_ecb_do_crypt()
45 const u8 *src = walk.src.virt.addr; in sm4_ecb_do_crypt()
46 u8 *dst = walk.dst.virt.addr; in sm4_ecb_do_crypt()
55 err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE); in sm4_ecb_do_crypt()
81 struct skcipher_walk walk; in sm4_cbc_encrypt() local
85 err = skcipher_walk_virt(&walk, req, false); in sm4_cbc_encrypt()
87 while ((nbytes = walk.nbytes) > 0) { in sm4_cbc_encrypt()
88 const u8 *iv = walk.iv; in sm4_cbc_encrypt()
[all …]
aes-ce-ccm-glue.c
138 struct scatter_walk walk; in ccm_calculate_auth_mac() local
154 scatterwalk_start(&walk, req->src); in ccm_calculate_auth_mac()
159 n = scatterwalk_next(&walk, len); in ccm_calculate_auth_mac()
160 macp = ce_aes_ccm_auth_data(mac, walk.addr, n, macp, in ccm_calculate_auth_mac()
162 scatterwalk_done_src(&walk, n); in ccm_calculate_auth_mac()
171 struct skcipher_walk walk; in ccm_encrypt() local
184 err = skcipher_walk_aead_encrypt(&walk, req, false); in ccm_encrypt()
193 u32 tail = walk.nbytes % AES_BLOCK_SIZE; in ccm_encrypt()
194 const u8 *src = walk.src.virt.addr; in ccm_encrypt()
195 u8 *dst = walk.dst.virt.addr; in ccm_encrypt()
[all …]
sm4-ce-glue.c
108 struct skcipher_walk walk; in sm4_ecb_do_crypt() local
112 err = skcipher_walk_virt(&walk, req, false); in sm4_ecb_do_crypt()
114 while ((nbytes = walk.nbytes) > 0) { in sm4_ecb_do_crypt()
115 const u8 *src = walk.src.virt.addr; in sm4_ecb_do_crypt()
116 u8 *dst = walk.dst.virt.addr; in sm4_ecb_do_crypt()
127 err = skcipher_walk_done(&walk, nbytes); in sm4_ecb_do_crypt()
152 struct skcipher_walk walk; in sm4_cbc_crypt() local
156 err = skcipher_walk_virt(&walk, req, false); in sm4_cbc_crypt()
160 while ((nbytes = walk.nbytes) > 0) { in sm4_cbc_crypt()
161 const u8 *src = walk.src.virt.addr; in sm4_cbc_crypt()
[all …]
sm4-ce-ccm-glue.c
96 struct scatter_walk walk; in ccm_calculate_auth_mac() local
111 scatterwalk_start(&walk, req->src); in ccm_calculate_auth_mac()
117 orig_n = scatterwalk_next(&walk, assoclen); in ccm_calculate_auth_mac()
118 p = walk.addr; in ccm_calculate_auth_mac()
151 scatterwalk_done_src(&walk, orig_n); in ccm_calculate_auth_mac()
156 static int ccm_crypt(struct aead_request *req, struct skcipher_walk *walk, in ccm_crypt() argument
166 memcpy(ctr0, walk->iv, SM4_BLOCK_SIZE); in ccm_crypt()
167 crypto_inc(walk->iv, SM4_BLOCK_SIZE); in ccm_crypt()
173 while (walk->nbytes) { in ccm_crypt()
174 unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE; in ccm_crypt()
[all …]
sm4-ce-gcm-glue.c
77 struct scatter_walk walk; in gcm_calculate_auth_mac() local
80 scatterwalk_start(&walk, req->src); in gcm_calculate_auth_mac()
86 orig_n = scatterwalk_next(&walk, assoclen); in gcm_calculate_auth_mac()
87 p = walk.addr; in gcm_calculate_auth_mac()
119 scatterwalk_done_src(&walk, orig_n); in gcm_calculate_auth_mac()
130 static int gcm_crypt(struct aead_request *req, struct skcipher_walk *walk, in gcm_crypt() argument
145 lengths.b = cpu_to_be64(walk->total * 8); in gcm_crypt()
155 unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE; in gcm_crypt()
156 const u8 *src = walk->src.virt.addr; in gcm_crypt()
157 u8 *dst = walk->dst.virt.addr; in gcm_crypt()
[all …]
/linux/drivers/gpu/drm/xe/
xe_pt_walk.c
28 const struct xe_pt_walk *walk) in xe_pt_addr_end() argument
30 u64 size = 1ull << walk->shifts[level]; in xe_pt_addr_end()
37 unsigned int level, const struct xe_pt_walk *walk) in xe_pt_next() argument
42 if (unlikely(walk->shared_pt_mode)) { in xe_pt_next()
43 unsigned int shift = walk->shifts[level]; in xe_pt_next()
74 u64 addr, u64 end, struct xe_pt_walk *walk) in xe_pt_walk_range() argument
76 pgoff_t offset = xe_pt_offset(addr, level, walk); in xe_pt_walk_range()
77 struct xe_ptw **entries = walk->staging ? (parent->staging ?: NULL) : in xe_pt_walk_range()
79 const struct xe_pt_walk_ops *ops = walk->ops; in xe_pt_walk_range()
86 next = xe_pt_addr_end(addr, end, level, walk); in xe_pt_walk_range()
[all …]
xe_pt_walk.h
67 struct xe_pt_walk *walk);
88 u64 addr, u64 end, struct xe_pt_walk *walk);
91 u64 addr, u64 end, struct xe_pt_walk *walk);
106 const struct xe_pt_walk *walk) in xe_pt_covers() argument
108 u64 pt_size = 1ull << walk->shifts[level]; in xe_pt_covers()
126 const struct xe_pt_walk *walk) in xe_pt_num_entries() argument
128 u64 pt_size = 1ull << walk->shifts[level]; in xe_pt_num_entries()
131 walk->shifts[level]; in xe_pt_num_entries()
144 xe_pt_offset(u64 addr, unsigned int level, const struct xe_pt_walk *walk) in xe_pt_offset() argument
146 if (level < walk->max_level) in xe_pt_offset()
[all …]
/linux/arch/riscv/crypto/
aes-riscv64-glue.c
139 struct skcipher_walk walk; in riscv64_aes_ecb_crypt() local
143 err = skcipher_walk_virt(&walk, req, false); in riscv64_aes_ecb_crypt()
144 while ((nbytes = walk.nbytes) != 0) { in riscv64_aes_ecb_crypt()
147 aes_ecb_encrypt_zvkned(ctx, walk.src.virt.addr, in riscv64_aes_ecb_crypt()
148 walk.dst.virt.addr, in riscv64_aes_ecb_crypt()
151 aes_ecb_decrypt_zvkned(ctx, walk.src.virt.addr, in riscv64_aes_ecb_crypt()
152 walk.dst.virt.addr, in riscv64_aes_ecb_crypt()
155 err = skcipher_walk_done(&walk, nbytes & (AES_BLOCK_SIZE - 1)); in riscv64_aes_ecb_crypt()
177 struct skcipher_walk walk; in riscv64_aes_cbc_crypt() local
181 err = skcipher_walk_virt(&walk, req, false); in riscv64_aes_cbc_crypt()
[all …]
/linux/arch/powerpc/crypto/
aes-spe-glue.c
184 struct skcipher_walk walk; in ppc_ecb_crypt() local
188 err = skcipher_walk_virt(&walk, req, false); in ppc_ecb_crypt()
190 while ((nbytes = walk.nbytes) != 0) { in ppc_ecb_crypt()
196 ppc_encrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr, in ppc_ecb_crypt()
199 ppc_decrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr, in ppc_ecb_crypt()
203 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in ppc_ecb_crypt()
223 struct skcipher_walk walk; in ppc_cbc_crypt() local
227 err = skcipher_walk_virt(&walk, req, false); in ppc_cbc_crypt()
229 while ((nbytes = walk.nbytes) != 0) { in ppc_cbc_crypt()
235 ppc_encrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr, in ppc_cbc_crypt()
[all …]
/linux/drivers/atm/
idt77105.c
86 struct idt77105_priv *walk; in idt77105_stats_timer_func() local
91 for (walk = idt77105_all; walk; walk = walk->next) { in idt77105_stats_timer_func()
92 dev = walk->dev; in idt77105_stats_timer_func()
94 stats = &walk->stats; in idt77105_stats_timer_func()
115 struct idt77105_priv *walk; in idt77105_restart_timer_func() local
120 for (walk = idt77105_all; walk; walk = walk->next) { in idt77105_restart_timer_func()
121 dev = walk->dev; in idt77105_restart_timer_func()
135 PUT( walk->old_mcr ,MCR); in idt77105_restart_timer_func()
323 struct idt77105_priv *walk, *prev; in idt77105_stop() local
331 for (prev = NULL, walk = idt77105_all ; in idt77105_stop()
[all …]
/linux/arch/x86/crypto/
aesni-intel_glue.c
159 struct skcipher_walk walk; in ecb_encrypt() local
163 err = skcipher_walk_virt(&walk, req, false); in ecb_encrypt()
165 while ((nbytes = walk.nbytes)) { in ecb_encrypt()
167 aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
171 err = skcipher_walk_done(&walk, nbytes); in ecb_encrypt()
181 struct skcipher_walk walk; in ecb_decrypt() local
185 err = skcipher_walk_virt(&walk, req, false); in ecb_decrypt()
187 while ((nbytes = walk.nbytes)) { in ecb_decrypt()
189 aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr, in ecb_decrypt()
193 err = skcipher_walk_done(&walk, nbytes); in ecb_decrypt()
[all …]
ecb_cbc_helpers.h
18 struct skcipher_walk walk; \
19 int err = skcipher_walk_virt(&walk, (req), false); \
20 while (walk.nbytes > 0) { \
21 unsigned int nbytes = walk.nbytes; \
24 const u8 *src = walk.src.virt.addr; \
25 u8 *dst = walk.dst.virt.addr; \
51 const u8 *__iv = walk.iv; \
58 memcpy(walk.iv, __iv, __bsize); \
72 crypto_xor(dst, walk.iv, __bsize); \
73 memcpy(walk.iv, __iv, __bsize); \
[all …]
/linux/arch/s390/crypto/
des_s390.c
93 struct skcipher_walk walk; in ecb_desall_crypt() local
97 ret = skcipher_walk_virt(&walk, req, false); in ecb_desall_crypt()
98 while ((nbytes = walk.nbytes) != 0) { in ecb_desall_crypt()
101 cpacf_km(fc, ctx->key, walk.dst.virt.addr, in ecb_desall_crypt()
102 walk.src.virt.addr, n); in ecb_desall_crypt()
103 ret = skcipher_walk_done(&walk, nbytes - n); in ecb_desall_crypt()
112 struct skcipher_walk walk; in cbc_desall_crypt() local
120 ret = skcipher_walk_virt(&walk, req, false); in cbc_desall_crypt()
123 memcpy(param.iv, walk.iv, DES_BLOCK_SIZE); in cbc_desall_crypt()
125 while ((nbytes = walk.nbytes) != 0) { in cbc_desall_crypt()
[all …]
