Searched refs:nbytes (Results 1 – 25 of 402) sorted by relevance

/linux/drivers/media/test-drivers/vidtv/
vidtv_ts.c
53 u32 nbytes = 0; in vidtv_ts_null_write_into() local
62 nbytes += vidtv_memcpy(args.dest_buf, in vidtv_ts_null_write_into()
63 args.dest_offset + nbytes, in vidtv_ts_null_write_into()
71 nbytes += vidtv_memset(args.dest_buf, in vidtv_ts_null_write_into()
72 args.dest_offset + nbytes, in vidtv_ts_null_write_into()
75 TS_PACKET_LEN - nbytes); in vidtv_ts_null_write_into()
78 if (nbytes != TS_PACKET_LEN) in vidtv_ts_null_write_into()
81 nbytes); in vidtv_ts_null_write_into()
83 return nbytes; in vidtv_ts_null_write_into()
88 u32 nbytes = 0; in vidtv_ts_pcr_write_into() local
[all …]
vidtv_pes.c
83 u32 nbytes = 0; /* the number of bytes written by this function */ in vidtv_pes_write_pts_dts() local
123 nbytes += vidtv_memcpy(args->dest_buf, in vidtv_pes_write_pts_dts()
124 args->dest_offset + nbytes, in vidtv_pes_write_pts_dts()
129 return nbytes; in vidtv_pes_write_pts_dts()
134 u32 nbytes = 0; /* the number of bytes written by this function */ in vidtv_pes_write_h() local
159 nbytes += vidtv_memcpy(args->dest_buf, in vidtv_pes_write_h()
160 args->dest_offset + nbytes, in vidtv_pes_write_h()
166 nbytes += vidtv_memcpy(args->dest_buf, in vidtv_pes_write_h()
167 args->dest_offset + nbytes, in vidtv_pes_write_h()
174 pts_dts_args.dest_offset = args->dest_offset + nbytes; in vidtv_pes_write_h()
[all …]
vidtv_psi.c
172 u32 nbytes = 0; in vidtv_psi_ts_psi_write_into() local
184 nbytes += vidtv_memset(args->dest_buf, in vidtv_psi_ts_psi_write_into()
185 args->dest_offset + nbytes, in vidtv_psi_ts_psi_write_into()
192 nbytes_past_boundary = (args->dest_offset + nbytes) % TS_PACKET_LEN; in vidtv_psi_ts_psi_write_into()
199 nbytes += vidtv_memcpy(args->dest_buf, in vidtv_psi_ts_psi_write_into()
200 args->dest_offset + nbytes, in vidtv_psi_ts_psi_write_into()
213 nbytes += vidtv_memset(args->dest_buf, in vidtv_psi_ts_psi_write_into()
214 args->dest_offset + nbytes, in vidtv_psi_ts_psi_write_into()
220 nbytes_past_boundary = (args->dest_offset + nbytes) % TS_PACKET_LEN; in vidtv_psi_ts_psi_write_into()
223 nbytes += vidtv_memcpy(args->dest_buf, in vidtv_psi_ts_psi_write_into()
[all …]
vidtv_mux.c
164 u32 nbytes; in vidtv_mux_push_si() local
214 nbytes = m->mux_buf_offset - initial_offset; in vidtv_mux_push_si()
218 return nbytes; in vidtv_mux_push_si()
225 u32 nbytes = 0; in vidtv_mux_push_pcr() local
236 nbytes += vidtv_ts_pcr_write_into(args); in vidtv_mux_push_pcr()
237 m->mux_buf_offset += nbytes; in vidtv_mux_push_pcr()
241 return nbytes; in vidtv_mux_push_pcr()
287 u32 nbytes = 0; in vidtv_mux_packetize_access_units() local
302 args.access_unit_len = au->nbytes; in vidtv_mux_packetize_access_units()
318 nbytes = m->mux_buf_offset - initial_offset; in vidtv_mux_packetize_access_units()
[all …]
/linux/lib/crypto/mpi/
mpicoder.c
35 MPI mpi_read_raw_data(const void *xbuffer, size_t nbytes) in mpi_read_raw_data() argument
43 while (nbytes > 0 && buffer[0] == 0) { in mpi_read_raw_data()
45 nbytes--; in mpi_read_raw_data()
48 nbits = nbytes * 8; in mpi_read_raw_data()
53 if (nbytes > 0) in mpi_read_raw_data()
56 nlimbs = DIV_ROUND_UP(nbytes, BYTES_PER_MPI_LIMB); in mpi_read_raw_data()
64 if (nbytes > 0) { in mpi_read_raw_data()
65 i = BYTES_PER_MPI_LIMB - nbytes % BYTES_PER_MPI_LIMB; in mpi_read_raw_data()
84 unsigned int nbits, nbytes; in mpi_read_from_buffer() local
96 nbytes = DIV_ROUND_UP(nbits, 8); in mpi_read_from_buffer()
[all …]
/linux/arch/x86/crypto/
sm4_aesni_avx_glue.c
42 unsigned int nbytes; in ecb_do_crypt() local
47 while ((nbytes = walk.nbytes) > 0) { in ecb_do_crypt()
52 while (nbytes >= SM4_CRYPT8_BLOCK_SIZE) { in ecb_do_crypt()
56 nbytes -= SM4_CRYPT8_BLOCK_SIZE; in ecb_do_crypt()
58 while (nbytes >= SM4_BLOCK_SIZE) { in ecb_do_crypt()
59 unsigned int nblocks = min(nbytes >> 4, 4u); in ecb_do_crypt()
63 nbytes -= nblocks * SM4_BLOCK_SIZE; in ecb_do_crypt()
67 err = skcipher_walk_done(&walk, nbytes); in ecb_do_crypt()
96 unsigned int nbytes; in sm4_cbc_encrypt() local
101 while ((nbytes = walk.nbytes) > 0) { in sm4_cbc_encrypt()
[all …]
des3_ede_glue.c
70 unsigned int nbytes; in ecb_crypt() local
75 while ((nbytes = walk.nbytes)) { in ecb_crypt()
80 if (nbytes >= bsize * 3) { in ecb_crypt()
87 nbytes -= bsize * 3; in ecb_crypt()
88 } while (nbytes >= bsize * 3); in ecb_crypt()
90 if (nbytes < bsize) in ecb_crypt()
100 nbytes -= bsize; in ecb_crypt()
101 } while (nbytes >= bsize); in ecb_crypt()
104 err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
130 unsigned int nbytes = walk->nbytes; in __cbc_encrypt() local
[all …]
aria_gfni_avx512_glue.c
81 unsigned int nbytes; in aria_avx512_ctr_encrypt() local
86 while ((nbytes = walk.nbytes) > 0) { in aria_avx512_ctr_encrypt()
90 while (nbytes >= ARIA_GFNI_AVX512_PARALLEL_BLOCK_SIZE) { in aria_avx512_ctr_encrypt()
98 nbytes -= ARIA_GFNI_AVX512_PARALLEL_BLOCK_SIZE; in aria_avx512_ctr_encrypt()
101 while (nbytes >= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE) { in aria_avx512_ctr_encrypt()
109 nbytes -= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE; in aria_avx512_ctr_encrypt()
112 while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) { in aria_avx512_ctr_encrypt()
120 nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE; in aria_avx512_ctr_encrypt()
123 while (nbytes >= ARIA_BLOCK_SIZE) { in aria_avx512_ctr_encrypt()
135 nbytes -= ARIA_BLOCK_SIZE; in aria_avx512_ctr_encrypt()
[all …]
/linux/crypto/
pcbc.c
25 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_segment() local
37 } while ((nbytes -= bsize) >= bsize); in crypto_pcbc_encrypt_segment()
39 return nbytes; in crypto_pcbc_encrypt_segment()
47 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_inplace() local
59 } while ((nbytes -= bsize) >= bsize); in crypto_pcbc_encrypt_inplace()
61 return nbytes; in crypto_pcbc_encrypt_inplace()
69 unsigned int nbytes; in crypto_pcbc_encrypt() local
74 while (walk.nbytes) { in crypto_pcbc_encrypt()
76 nbytes = crypto_pcbc_encrypt_inplace(req, &walk, in crypto_pcbc_encrypt()
79 nbytes = crypto_pcbc_encrypt_segment(req, &walk, in crypto_pcbc_encrypt()
[all …]
xctr.c
40 unsigned int nbytes = walk->nbytes; in crypto_xctr_crypt_final() local
45 crypto_xor_cpy(dst, keystream, src, nbytes); in crypto_xctr_crypt_final()
56 unsigned int nbytes = walk->nbytes; in crypto_xctr_crypt_segment() local
69 } while ((nbytes -= XCTR_BLOCKSIZE) >= XCTR_BLOCKSIZE); in crypto_xctr_crypt_segment()
71 return nbytes; in crypto_xctr_crypt_segment()
80 unsigned int nbytes = walk->nbytes; in crypto_xctr_crypt_inplace() local
95 } while ((nbytes -= XCTR_BLOCKSIZE) >= XCTR_BLOCKSIZE); in crypto_xctr_crypt_inplace()
97 return nbytes; in crypto_xctr_crypt_inplace()
105 unsigned int nbytes; in crypto_xctr_crypt() local
111 while (walk.nbytes >= XCTR_BLOCKSIZE) { in crypto_xctr_crypt()
[all …]
cbc.c
16 const u8 *src, u8 *dst, unsigned nbytes, in crypto_cbc_encrypt_segment() argument
21 for (; nbytes >= bsize; src += bsize, dst += bsize, nbytes -= bsize) { in crypto_cbc_encrypt_segment()
27 return nbytes; in crypto_cbc_encrypt_segment()
31 u8 *src, unsigned nbytes, u8 *oiv) in crypto_cbc_encrypt_inplace() argument
36 if (nbytes < bsize) in crypto_cbc_encrypt_inplace()
45 } while ((nbytes -= bsize) >= bsize); in crypto_cbc_encrypt_inplace()
50 return nbytes; in crypto_cbc_encrypt_inplace()
70 const u8 *src, u8 *dst, unsigned nbytes, in crypto_cbc_decrypt_segment() argument
76 if (nbytes < bsize) in crypto_cbc_decrypt_segment()
86 } while ((nbytes -= bsize) >= bsize); in crypto_cbc_decrypt_segment()
[all …]
scatterwalk.c
18 static inline void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out) in memcpy_dir() argument
23 memcpy(dst, src, nbytes); in memcpy_dir()
27 size_t nbytes, int out) in scatterwalk_copychunks() argument
33 if (len_this_page > nbytes) in scatterwalk_copychunks()
34 len_this_page = nbytes; in scatterwalk_copychunks()
44 if (nbytes == len_this_page) in scatterwalk_copychunks()
48 nbytes -= len_this_page; in scatterwalk_copychunks()
56 unsigned int start, unsigned int nbytes, int out) in scatterwalk_map_and_copy() argument
61 if (!nbytes) in scatterwalk_map_and_copy()
67 scatterwalk_copychunks(buf, &walk, nbytes, out); in scatterwalk_map_and_copy()
/linux/arch/arm64/crypto/
sm4-neon-glue.c
39 unsigned int nbytes; in sm4_ecb_do_crypt() local
44 while ((nbytes = walk.nbytes) > 0) { in sm4_ecb_do_crypt()
49 nblocks = nbytes / SM4_BLOCK_SIZE; in sm4_ecb_do_crypt()
58 err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE); in sm4_ecb_do_crypt()
85 unsigned int nbytes; in sm4_cbc_encrypt() local
90 while ((nbytes = walk.nbytes) > 0) { in sm4_cbc_encrypt()
95 while (nbytes >= SM4_BLOCK_SIZE) { in sm4_cbc_encrypt()
101 nbytes -= SM4_BLOCK_SIZE; in sm4_cbc_encrypt()
106 err = skcipher_walk_done(&walk, nbytes); in sm4_cbc_encrypt()
117 unsigned int nbytes; in sm4_cbc_decrypt() local
[all …]
aes-neonbs-glue.c
106 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
107 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
109 if (walk.nbytes < walk.total) in __ecb_crypt()
118 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
166 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_encrypt()
167 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_encrypt()
175 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
189 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
190 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_decrypt()
192 if (walk.nbytes < walk.total) in cbc_decrypt()
[all …]
aes-glue.c
189 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
194 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
209 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
214 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
227 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
232 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
256 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
261 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
328 ctx->key_enc, rounds, walk.nbytes, walk.iv); in cts_cbc_encrypt()
385 ctx->key_dec, rounds, walk.nbytes, walk.iv); in cts_cbc_decrypt()
[all …]
sm4-ce-glue.c
25 #define BYTES2BLKS(nbytes) ((nbytes) >> 4) argument
37 u8 *iv, unsigned int nbytes);
39 u8 *iv, unsigned int nbytes);
43 u8 *tweak, unsigned int nbytes,
46 u8 *tweak, unsigned int nbytes,
112 unsigned int nbytes; in sm4_ecb_do_crypt() local
117 while ((nbytes = walk.nbytes) > 0) { in sm4_ecb_do_crypt()
124 nblks = BYTES2BLKS(nbytes); in sm4_ecb_do_crypt()
127 nbytes -= nblks * SM4_BLOCK_SIZE; in sm4_ecb_do_crypt()
132 err = skcipher_walk_done(&walk, nbytes); in sm4_ecb_do_crypt()
[all …]
/linux/arch/powerpc/crypto/
aes-spe-glue.c
185 unsigned int nbytes; in ppc_ecb_crypt() local
190 while ((nbytes = walk.nbytes) != 0) { in ppc_ecb_crypt()
191 nbytes = min_t(unsigned int, nbytes, MAX_BYTES); in ppc_ecb_crypt()
192 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_ecb_crypt()
197 ctx->key_enc, ctx->rounds, nbytes); in ppc_ecb_crypt()
200 ctx->key_dec, ctx->rounds, nbytes); in ppc_ecb_crypt()
203 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in ppc_ecb_crypt()
224 unsigned int nbytes; in ppc_cbc_crypt() local
229 while ((nbytes = walk.nbytes) != 0) { in ppc_cbc_crypt()
230 nbytes = min_t(unsigned int, nbytes, MAX_BYTES); in ppc_cbc_crypt()
[all …]
/linux/drivers/comedi/
comedi_buf.c
340 unsigned int nbytes) in comedi_buf_write_alloc() argument
345 if (nbytes > unalloc) in comedi_buf_write_alloc()
346 nbytes = unalloc; in comedi_buf_write_alloc()
348 async->buf_write_alloc_count += nbytes; in comedi_buf_write_alloc()
356 return nbytes; in comedi_buf_write_alloc()
430 unsigned int nbytes) in comedi_buf_write_free() argument
435 if (nbytes > allocated) in comedi_buf_write_free()
436 nbytes = allocated; in comedi_buf_write_free()
438 async->buf_write_count += nbytes; in comedi_buf_write_free()
439 async->buf_write_ptr += nbytes; in comedi_buf_write_free()
[all …]
/linux/drivers/spi/
spi-mtk-nor.c
172 if (op->addr.nbytes == 4) { in mtk_nor_set_addr()
189 if (op->dummy.nbytes) in mtk_nor_match_read()
190 dummy = op->dummy.nbytes * BITS_PER_BYTE / op->dummy.buswidth; in mtk_nor_match_read()
217 tx_len = op->cmd.nbytes + op->addr.nbytes; in mtk_nor_match_prg()
221 tx_len += op->dummy.nbytes; in mtk_nor_match_prg()
229 if ((!op->addr.nbytes) && in mtk_nor_match_prg()
230 (tx_len + op->data.nbytes > MTK_NOR_REG_PRGDATA_MAX + 1)) in mtk_nor_match_prg()
236 rx_len = op->data.nbytes; in mtk_nor_match_prg()
237 prg_left = MTK_NOR_PRG_CNT_MAX / 8 - tx_len - op->dummy.nbytes; in mtk_nor_match_prg()
241 if (!op->addr.nbytes) in mtk_nor_match_prg()
[all …]
spi-mem.c
42 if (!op->data.nbytes) in spi_controller_dma_map_mem_op_data()
55 return spi_map_buf(ctlr, dmadev, sgt, op->data.buf.in, op->data.nbytes, in spi_controller_dma_map_mem_op_data()
88 if (!op->data.nbytes) in spi_controller_dma_unmap_mem_op_data()
148 if (op->addr.nbytes && in spi_mem_check_buswidth()
152 if (op->dummy.nbytes && in spi_mem_check_buswidth()
175 if (op->cmd.nbytes != 2) in spi_mem_default_supports_op()
178 if (op->cmd.nbytes != 1) in spi_mem_default_supports_op()
201 if (!op->cmd.buswidth || !op->cmd.nbytes) in spi_mem_check_op()
204 if ((op->addr.nbytes && !op->addr.buswidth) || in spi_mem_check_op()
205 (op->dummy.nbytes && !op->dummy.buswidth) || in spi_mem_check_op()
[all …]
/linux/arch/s390/crypto/
chacha-glue.c
22 unsigned int nbytes, const u32 *key, in chacha20_crypt_s390() argument
28 chacha20_vx(dst, src, nbytes, key, counter); in chacha20_crypt_s390()
31 *counter += round_up(nbytes, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE; in chacha20_crypt_s390()
40 unsigned int nbytes; in chacha20_s390() local
46 while (walk.nbytes > 0) { in chacha20_s390()
47 nbytes = walk.nbytes; in chacha20_s390()
48 if (nbytes < walk.total) in chacha20_s390()
49 nbytes = round_down(nbytes, walk.stride); in chacha20_s390()
51 if (nbytes <= CHACHA_BLOCK_SIZE) { in chacha20_s390()
53 walk.src.virt.addr, nbytes, in chacha20_s390()
[all …]
/linux/arch/riscv/crypto/
aes-riscv64-glue.c
140 unsigned int nbytes; in riscv64_aes_ecb_crypt() local
144 while ((nbytes = walk.nbytes) != 0) { in riscv64_aes_ecb_crypt()
149 nbytes & ~(AES_BLOCK_SIZE - 1)); in riscv64_aes_ecb_crypt()
153 nbytes & ~(AES_BLOCK_SIZE - 1)); in riscv64_aes_ecb_crypt()
155 err = skcipher_walk_done(&walk, nbytes & (AES_BLOCK_SIZE - 1)); in riscv64_aes_ecb_crypt()
178 unsigned int nbytes; in riscv64_aes_cbc_crypt() local
182 while ((nbytes = walk.nbytes) != 0) { in riscv64_aes_cbc_crypt()
187 nbytes & ~(AES_BLOCK_SIZE - 1), in riscv64_aes_cbc_crypt()
192 nbytes & ~(AES_BLOCK_SIZE - 1), in riscv64_aes_cbc_crypt()
195 err = skcipher_walk_done(&walk, nbytes & (AES_BLOCK_SIZE - 1)); in riscv64_aes_cbc_crypt()
[all …]
chacha-riscv64-glue.c
26 unsigned int nbytes; in riscv64_chacha20_crypt() local
36 while (walk.nbytes) { in riscv64_chacha20_crypt()
37 nbytes = walk.nbytes & ~(CHACHA_BLOCK_SIZE - 1); in riscv64_chacha20_crypt()
38 tail_bytes = walk.nbytes & (CHACHA_BLOCK_SIZE - 1); in riscv64_chacha20_crypt()
40 if (nbytes) { in riscv64_chacha20_crypt()
42 walk.dst.virt.addr, nbytes, iv); in riscv64_chacha20_crypt()
43 iv[0] += nbytes / CHACHA_BLOCK_SIZE; in riscv64_chacha20_crypt()
45 if (walk.nbytes == walk.total && tail_bytes > 0) { in riscv64_chacha20_crypt()
46 memcpy(block_buffer, walk.src.virt.addr + nbytes, in riscv64_chacha20_crypt()
50 memcpy(walk.dst.virt.addr + nbytes, block_buffer, in riscv64_chacha20_crypt()
/linux/drivers/infiniband/hw/hfi1/
pio_copy.c
184 unsigned int nbytes) in read_low_bytes() argument
187 jcopy(&pbuf->carry.val8[0], from, nbytes); in read_low_bytes()
188 pbuf->carry_bytes = nbytes; in read_low_bytes()
200 const void *from, unsigned int nbytes) in read_extra_bytes() argument
202 jcopy(&pbuf->carry.val8[pbuf->carry_bytes], from, nbytes); in read_extra_bytes()
203 pbuf->carry_bytes += nbytes; in read_extra_bytes()
263 const void *from, size_t nbytes) in seg_pio_copy_start() argument
273 dend = dest + ((nbytes >> 3) * sizeof(u64)); in seg_pio_copy_start()
338 read_low_bytes(pbuf, from, nbytes & 0x7); in seg_pio_copy_start()
340 pbuf->qw_written = 1 /*PBC*/ + (nbytes >> 3); in seg_pio_copy_start()
[all …]
/linux/lib/crypto/
poly1305.c
31 unsigned int nbytes) in poly1305_update_generic() argument
36 bytes = min(nbytes, POLY1305_BLOCK_SIZE - desc->buflen); in poly1305_update_generic()
39 nbytes -= bytes; in poly1305_update_generic()
49 if (likely(nbytes >= POLY1305_BLOCK_SIZE)) { in poly1305_update_generic()
51 nbytes / POLY1305_BLOCK_SIZE, 1); in poly1305_update_generic()
52 src += nbytes - (nbytes % POLY1305_BLOCK_SIZE); in poly1305_update_generic()
53 nbytes %= POLY1305_BLOCK_SIZE; in poly1305_update_generic()
56 if (unlikely(nbytes)) { in poly1305_update_generic()
57 desc->buflen = nbytes; in poly1305_update_generic()
58 memcpy(desc->buf, src, nbytes); in poly1305_update_generic()
