| /linux/drivers/comedi/ |
| comedi_buf.c |
    277  unsigned int nbytes) in _comedi_buf_write_alloc() argument
    282  if (nbytes > unalloc) in _comedi_buf_write_alloc()
    283  nbytes = unalloc; in _comedi_buf_write_alloc()
    285  async->buf_write_alloc_count += nbytes; in _comedi_buf_write_alloc()
    293  return nbytes; in _comedi_buf_write_alloc()
    308  unsigned int nbytes) in comedi_buf_write_alloc() argument
    311  nbytes = _comedi_buf_write_alloc(s, nbytes); in comedi_buf_write_alloc()
    314  nbytes = 0; in comedi_buf_write_alloc()
    316  return nbytes; in comedi_buf_write_alloc()
    378  unsigned int nbytes) in _comedi_buf_write_free() argument
    [all …]
|
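The comedi hits above show a ring-buffer allocator clamping the caller's request to the space that is still unallocated and then advancing a running counter. A minimal sketch of that clamp-and-account pattern, assuming a simplified structure (the real comedi_async bookkeeping has more counters):

```c
/* Hedged sketch of the clamp-and-account pattern in _comedi_buf_write_alloc();
 * the struct layout is illustrative, not comedi's real struct comedi_async. */
struct ring {
	unsigned int size;		/* total buffer size in bytes */
	unsigned int write_alloc_count;	/* bytes handed out to writers (free-running) */
	unsigned int read_count;	/* bytes consumed by readers (free-running) */
};

static unsigned int ring_write_alloc(struct ring *r, unsigned int nbytes)
{
	unsigned int unalloc = r->size - (r->write_alloc_count - r->read_count);

	if (nbytes > unalloc)
		nbytes = unalloc;	/* never hand out more than the free space */

	r->write_alloc_count += nbytes;
	return nbytes;			/* caller learns how much it actually got */
}
```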
| /linux/drivers/media/test-drivers/vidtv/ |
| vidtv_ts.c |
    53  u32 nbytes = 0; in vidtv_ts_null_write_into() local
    62  nbytes += vidtv_memcpy(args.dest_buf, in vidtv_ts_null_write_into()
    63  args.dest_offset + nbytes, in vidtv_ts_null_write_into()
    71  nbytes += vidtv_memset(args.dest_buf, in vidtv_ts_null_write_into()
    72  args.dest_offset + nbytes, in vidtv_ts_null_write_into()
    75  TS_PACKET_LEN - nbytes); in vidtv_ts_null_write_into()
    78  if (nbytes != TS_PACKET_LEN) in vidtv_ts_null_write_into()
    81  nbytes); in vidtv_ts_null_write_into()
    83  return nbytes; in vidtv_ts_null_write_into()
    88  u32 nbytes = 0; in vidtv_ts_pcr_write_into() local
    [all …]
|
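The vidtv matches accumulate `nbytes` across vidtv_memcpy()/vidtv_memset() calls so that every transport-stream write ends up exactly TS_PACKET_LEN (188) bytes long. A hedged sketch of that accumulate-then-pad shape, with plain memcpy/memset stand-ins for the driver's circular-buffer helpers:

```c
#include <linux/printk.h>
#include <linux/string.h>
#include <linux/types.h>

#define TS_PACKET_LEN 188	/* fixed MPEG-TS packet size */

/* Stand-ins for vidtv_memcpy()/vidtv_memset(), which additionally wrap a
 * circular mux buffer; both return the byte count so callers can accumulate. */
static u32 copy_into(u8 *dest, u32 off, const void *src, u32 len)
{
	memcpy(dest + off, src, len);
	return len;
}

static u32 fill_into(u8 *dest, u32 off, int c, u32 len)
{
	memset(dest + off, c, len);
	return len;
}

static u32 ts_null_write_into(u8 *dest, u32 offset, const u8 *header, u32 header_len)
{
	u32 nbytes = 0;

	nbytes += copy_into(dest, offset + nbytes, header, header_len);
	/* pad with stuffing bytes so the packet is exactly 188 bytes */
	nbytes += fill_into(dest, offset + nbytes, 0xff, TS_PACKET_LEN - nbytes);

	if (nbytes != TS_PACKET_LEN)
		pr_warn("ts: short null packet: %u bytes\n", nbytes);

	return nbytes;
}
```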
| vidtv_pes.c |
    83  u32 nbytes = 0; /* the number of bytes written by this function */ in vidtv_pes_write_pts_dts() local
    123  nbytes += vidtv_memcpy(args->dest_buf, in vidtv_pes_write_pts_dts()
    124  args->dest_offset + nbytes, in vidtv_pes_write_pts_dts()
    129  return nbytes; in vidtv_pes_write_pts_dts()
    134  u32 nbytes = 0; /* the number of bytes written by this function */ in vidtv_pes_write_h() local
    159  nbytes += vidtv_memcpy(args->dest_buf, in vidtv_pes_write_h()
    160  args->dest_offset + nbytes, in vidtv_pes_write_h()
    166  nbytes += vidtv_memcpy(args->dest_buf, in vidtv_pes_write_h()
    167  args->dest_offset + nbytes, in vidtv_pes_write_h()
    174  pts_dts_args.dest_offset = args->dest_offset + nbytes; in vidtv_pes_write_h()
    [all …]
|
| vidtv_psi.c |
    172  u32 nbytes = 0; in vidtv_psi_ts_psi_write_into() local
    184  nbytes += vidtv_memset(args->dest_buf, in vidtv_psi_ts_psi_write_into()
    185  args->dest_offset + nbytes, in vidtv_psi_ts_psi_write_into()
    192  nbytes_past_boundary = (args->dest_offset + nbytes) % TS_PACKET_LEN; in vidtv_psi_ts_psi_write_into()
    199  nbytes += vidtv_memcpy(args->dest_buf, in vidtv_psi_ts_psi_write_into()
    200  args->dest_offset + nbytes, in vidtv_psi_ts_psi_write_into()
    213  nbytes += vidtv_memset(args->dest_buf, in vidtv_psi_ts_psi_write_into()
    214  args->dest_offset + nbytes, in vidtv_psi_ts_psi_write_into()
    220  nbytes_past_boundary = (args->dest_offset + nbytes) % TS_PACKET_LEN; in vidtv_psi_ts_psi_write_into()
    223  nbytes += vidtv_memcpy(args->dest_buf, in vidtv_psi_ts_psi_write_into()
    [all …]
|
| vidtv_mux.c |
    164  u32 nbytes; in vidtv_mux_push_si() local
    214  nbytes = m->mux_buf_offset - initial_offset; in vidtv_mux_push_si()
    218  return nbytes; in vidtv_mux_push_si()
    225  u32 nbytes = 0; in vidtv_mux_push_pcr() local
    236  nbytes += vidtv_ts_pcr_write_into(args); in vidtv_mux_push_pcr()
    237  m->mux_buf_offset += nbytes; in vidtv_mux_push_pcr()
    241  return nbytes; in vidtv_mux_push_pcr()
    287  u32 nbytes = 0; in vidtv_mux_packetize_access_units() local
    302  args.access_unit_len = au->nbytes; in vidtv_mux_packetize_access_units()
    318  nbytes = m->mux_buf_offset - initial_offset; in vidtv_mux_packetize_access_units()
    [all …]
|
| /linux/lib/crypto/mpi/ |
| mpicoder.c |
    36  MPI mpi_read_raw_data(const void *xbuffer, size_t nbytes) in mpi_read_raw_data() argument
    44  while (nbytes > 0 && buffer[0] == 0) { in mpi_read_raw_data()
    46  nbytes--; in mpi_read_raw_data()
    49  nbits = nbytes * 8; in mpi_read_raw_data()
    54  if (nbytes > 0) in mpi_read_raw_data()
    57  nlimbs = DIV_ROUND_UP(nbytes, BYTES_PER_MPI_LIMB); in mpi_read_raw_data()
    65  if (nbytes > 0) { in mpi_read_raw_data()
    66  i = BYTES_PER_MPI_LIMB - nbytes % BYTES_PER_MPI_LIMB; in mpi_read_raw_data()
    85  unsigned int nbits, nbytes; in mpi_read_from_buffer() local
    97  nbytes = DIV_ROUND_UP(nbits, 8); in mpi_read_from_buffer()
    [all …]
|
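mpi_read_raw_data() first drops leading zero bytes, then derives the bit and limb counts from what is left. A small sketch of just that arithmetic (BYTES_PER_MPI_LIMB is the machine word size):

```c
#include <linux/kernel.h>	/* DIV_ROUND_UP() */

#define BYTES_PER_MPI_LIMB sizeof(unsigned long)

static unsigned int mpi_nlimbs(const unsigned char *buffer, size_t nbytes)
{
	/* drop leading zero bytes so the value is stored in canonical form */
	while (nbytes > 0 && buffer[0] == 0) {
		buffer++;
		nbytes--;
	}

	/* e.g. 33 significant bytes on a 64-bit host:
	 * nbits  = 33 * 8 = 264
	 * nlimbs = DIV_ROUND_UP(33, 8) = 5, with the top limb only one byte full */
	return DIV_ROUND_UP(nbytes, BYTES_PER_MPI_LIMB);
}
```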
| /linux/crypto/ |
| scatterwalk.c |
    18  void scatterwalk_skip(struct scatter_walk *walk, unsigned int nbytes) in scatterwalk_skip() argument
    22  nbytes += walk->offset - sg->offset; in scatterwalk_skip()
    24  while (nbytes > sg->length) { in scatterwalk_skip()
    25  nbytes -= sg->length; in scatterwalk_skip()
    29  walk->offset = sg->offset + nbytes; in scatterwalk_skip()
    34  unsigned int nbytes) in memcpy_from_scatterwalk() argument
    39  to_copy = scatterwalk_next(walk, nbytes); in memcpy_from_scatterwalk()
    43  nbytes -= to_copy; in memcpy_from_scatterwalk()
    44  } while (nbytes); in memcpy_from_scatterwalk()
    49  unsigned int nbytes) in memcpy_to_scatterwalk() argument
    [all …]
|
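These walk primitives are what the long-standing convenience helper scatterwalk_map_and_copy() is built on; a hedged usage example (the tag layout is made up) for copying a buffer into a scatterlist at an arbitrary offset:

```c
#include <crypto/scatterwalk.h>

/* Copy an authentication tag out to the end of a request's scatterlist:
 * start = length of the message data, out = 1 selects copy *into* the sglist. */
static void copy_tag_out(struct scatterlist *sg, unsigned int msg_len,
			 const u8 *tag, unsigned int taglen)
{
	scatterwalk_map_and_copy((void *)tag, sg, msg_len, taglen, 1);
}
```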
| /linux/include/crypto/ |
| scatterwalk.h |
    53  unsigned int nbytes) in scatterwalk_clamp() argument
    80  return min3(nbytes, len_this_sg, limit); in scatterwalk_clamp()
    144  unsigned int nbytes = scatterwalk_clamp(walk, total); in scatterwalk_next() local
    147  return nbytes; in scatterwalk_next()
    157  unsigned int nbytes) in scatterwalk_advance() argument
    159  walk->offset += nbytes; in scatterwalk_advance()
    171  unsigned int nbytes) in scatterwalk_done_src() argument
    174  scatterwalk_advance(walk, nbytes); in scatterwalk_done_src()
    187  unsigned int nbytes) in __scatterwalk_flush_dcache_pages() argument
    198  num_pages = nbytes / PAGE_SIZE; in __scatterwalk_flush_dcache_pages()
    [all …]
|
| /linux/arch/arm64/crypto/ |
| sm4-neon-glue.c |
    39  unsigned int nbytes; in sm4_ecb_do_crypt() local
    44  while ((nbytes = walk.nbytes) > 0) { in sm4_ecb_do_crypt()
    49  nblocks = nbytes / SM4_BLOCK_SIZE; in sm4_ecb_do_crypt()
    55  err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE); in sm4_ecb_do_crypt()
    82  unsigned int nbytes; in sm4_cbc_encrypt() local
    87  while ((nbytes = walk.nbytes) > 0) { in sm4_cbc_encrypt()
    92  while (nbytes >= SM4_BLOCK_SIZE) { in sm4_cbc_encrypt()
    98  nbytes -= SM4_BLOCK_SIZE; in sm4_cbc_encrypt()
    103  err = skcipher_walk_done(&walk, nbytes); in sm4_cbc_encrypt()
    114  unsigned int nbytes; in sm4_cbc_decrypt() local
    [all …]
|
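The arm64, riscv64, powerpc and x86 glue files in this listing all follow the same skcipher_walk loop: map a chunk, process whole blocks only, and hand the unprocessed remainder back to skcipher_walk_done() so it is re-presented with the next chunk. A hedged generic sketch (my_cipher_ecb() and MY_BLOCK_SIZE are stand-ins, and the SIMD begin/end calls differ per architecture):

```c
#include <crypto/internal/skcipher.h>

#define MY_BLOCK_SIZE 16

/* Hypothetical block-cipher backend: encrypts nblocks contiguous blocks. */
void my_cipher_ecb(const void *round_keys, u8 *dst, const u8 *src,
		   unsigned int nblocks);

static int ecb_do_crypt(struct skcipher_request *req, const void *round_keys)
{
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		unsigned int nblocks = nbytes / MY_BLOCK_SIZE;

		if (nblocks)	/* arm64 drivers wrap this in kernel_neon_begin()/end() */
			my_cipher_ecb(round_keys, walk.dst.virt.addr,
				      walk.src.virt.addr, nblocks);

		/* report the bytes NOT consumed; the walk carries them over */
		err = skcipher_walk_done(&walk, nbytes % MY_BLOCK_SIZE);
	}

	return err;
}
```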
| aes-neonbs-glue.c |
    105  while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
    106  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
    108  if (walk.nbytes < walk.total) in __ecb_crypt()
    116  walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
    163  while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_encrypt()
    164  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_encrypt()
    172  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
    186  while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
    187  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_decrypt()
    189  if (walk.nbytes < walk.total) in cbc_decrypt()
    [all …]
|
| ghash-ce-glue.c |
    320  int nbytes = walk.nbytes; in gcm_encrypt() local
    324  if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) { in gcm_encrypt()
    325  src = dst = memcpy(buf + sizeof(buf) - nbytes, in gcm_encrypt()
    326  src, nbytes); in gcm_encrypt()
    327  } else if (nbytes < walk.total) { in gcm_encrypt()
    328  nbytes &= ~(AES_BLOCK_SIZE - 1); in gcm_encrypt()
    333  pmull_gcm_encrypt(nbytes, dst, src, ctx->ghash_key.h, in gcm_encrypt()
    337  if (unlikely(!nbytes)) in gcm_encrypt()
    340  if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) in gcm_encrypt()
    342  buf + sizeof(buf) - nbytes, nbytes); in gcm_encrypt()
    [all …]
|
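gcm_encrypt() above stages a final fragment shorter than AES_BLOCK_SIZE at the end of a block-sized stack buffer so the assembly can keep using whole-block loads and stores, then copies only `nbytes` of output back. The CCM driver further down uses the same trick. A sketch of just that staging step, with the actual crypto hidden behind a hypothetical process_block() callback:

```c
#include <crypto/aes.h>		/* AES_BLOCK_SIZE */
#include <linux/string.h>
#include <linux/types.h>

static void encrypt_tail(u8 *dst, const u8 *src, unsigned int nbytes,
			 void (*process_block)(u8 *out, const u8 *in))
{
	u8 buf[AES_BLOCK_SIZE] = {};

	if (!nbytes)
		return;

	if (nbytes < AES_BLOCK_SIZE) {
		/* Stage the fragment at the END of a block-sized buffer: a
		 * 16-byte access ending where the data ends stays inside
		 * buf[], so the block routine never touches memory beyond
		 * the caller's short source or destination. */
		memcpy(buf + sizeof(buf) - nbytes, src, nbytes);
		process_block(buf, buf);
		memcpy(dst, buf + sizeof(buf) - nbytes, nbytes);
	} else {
		process_block(dst, src);
	}
}
```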
| aes-glue.c |
    189  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
    193  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
    208  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
    212  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
    225  while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
    229  err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
    253  while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
    257  err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
    324  ctx->key_enc, rounds, walk.nbytes, walk.iv); in cts_cbc_encrypt()
    380  ctx->key_dec, rounds, walk.nbytes, walk.iv); in cts_cbc_decrypt()
    [all …]
|
| sm4-ce-glue.c |
    24  #define BYTES2BLKS(nbytes) ((nbytes) >> 4) argument
    36  u8 *iv, unsigned int nbytes);
    38  u8 *iv, unsigned int nbytes);
    42  u8 *tweak, unsigned int nbytes,
    45  u8 *tweak, unsigned int nbytes,
    109  unsigned int nbytes; in sm4_ecb_do_crypt() local
    114  while ((nbytes = walk.nbytes) > 0) { in sm4_ecb_do_crypt()
    120  nblks = BYTES2BLKS(nbytes); in sm4_ecb_do_crypt()
    123  nbytes -= nblks * SM4_BLOCK_SIZE; in sm4_ecb_do_crypt()
    127  err = skcipher_walk_done(&walk, nbytes); in sm4_ecb_do_crypt()
    [all …]
|
| aes-ce-ccm-glue.c |
    193  u32 tail = walk.nbytes % AES_BLOCK_SIZE; in ccm_encrypt()
    199  if (walk.nbytes == walk.total) { in ccm_encrypt()
    204  if (unlikely(walk.nbytes < AES_BLOCK_SIZE)) in ccm_encrypt()
    205  src = dst = memcpy(&buf[sizeof(buf) - walk.nbytes], in ccm_encrypt()
    206  src, walk.nbytes); in ccm_encrypt()
    208  ce_aes_ccm_encrypt(dst, src, walk.nbytes - tail, in ccm_encrypt()
    212  if (unlikely(walk.nbytes < AES_BLOCK_SIZE)) in ccm_encrypt()
    213  memcpy(walk.dst.virt.addr, dst, walk.nbytes); in ccm_encrypt()
    215  if (walk.nbytes) { in ccm_encrypt()
    218  } while (walk.nbytes); in ccm_encrypt()
    [all …]
|
| /linux/arch/powerpc/crypto/ |
| aes-spe-glue.c |
    185  unsigned int nbytes; in ppc_ecb_crypt() local
    190  while ((nbytes = walk.nbytes) != 0) { in ppc_ecb_crypt()
    191  nbytes = min_t(unsigned int, nbytes, MAX_BYTES); in ppc_ecb_crypt()
    192  nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_ecb_crypt()
    197  ctx->key_enc, ctx->rounds, nbytes); in ppc_ecb_crypt()
    200  ctx->key_dec, ctx->rounds, nbytes); in ppc_ecb_crypt()
    203  err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in ppc_ecb_crypt()
    224  unsigned int nbytes; in ppc_cbc_crypt() local
    229  while ((nbytes = walk.nbytes) != 0) { in ppc_cbc_crypt()
    230  nbytes = min_t(unsigned int, nbytes, MAX_BYTES); in ppc_cbc_crypt()
    [all …]
|
| /linux/drivers/spi/ |
| spi-mtk-nor.c |
    172  if (op->addr.nbytes == 4) { in mtk_nor_set_addr()
    189  if (op->dummy.nbytes) in mtk_nor_match_read()
    190  dummy = op->dummy.nbytes * BITS_PER_BYTE / op->dummy.buswidth; in mtk_nor_match_read()
    217  tx_len = op->cmd.nbytes + op->addr.nbytes; in mtk_nor_match_prg()
    221  tx_len += op->dummy.nbytes; in mtk_nor_match_prg()
    229  if ((!op->addr.nbytes) && in mtk_nor_match_prg()
    230  (tx_len + op->data.nbytes > MTK_NOR_REG_PRGDATA_MAX + 1)) in mtk_nor_match_prg()
    236  rx_len = op->data.nbytes; in mtk_nor_match_prg()
    237  prg_left = MTK_NOR_PRG_CNT_MAX / 8 - tx_len - op->dummy.nbytes; in mtk_nor_match_prg()
    241  if (!op->addr.nbytes) in mtk_nor_match_prg()
    [all …]
|
| spi-wpcm-fiu.c |
    78  static void wpcm_fiu_set_data(struct wpcm_fiu_spi *fiu, const u8 *data, unsigned int nbytes) in wpcm_fiu_set_data() argument
    82  for (i = 0; i < nbytes; i++) in wpcm_fiu_set_data()
    86  static void wpcm_fiu_get_data(struct wpcm_fiu_spi *fiu, u8 *data, unsigned int nbytes) in wpcm_fiu_get_data() argument
    90  for (i = 0; i < nbytes; i++) in wpcm_fiu_get_data()
    146  return (op->addr.nbytes == 0 || op->addr.nbytes == 3) && in wpcm_fiu_normal_match()
    147  op->dummy.nbytes == 0 && op->data.nbytes <= 4; in wpcm_fiu_normal_match()
    158  wpcm_fiu_set_data(fiu, op->data.buf.out, op->data.nbytes); in wpcm_fiu_normal_exec()
    160  ret = wpcm_fiu_do_uma(fiu, spi_get_chipselect(mem->spi, 0), op->addr.nbytes == 3, in wpcm_fiu_normal_exec()
    161  op->data.dir == SPI_MEM_DATA_OUT, op->data.nbytes); in wpcm_fiu_normal_exec()
    164  wpcm_fiu_get_data(fiu, op->data.buf.in, op->data.nbytes); in wpcm_fiu_normal_exec()
    [all …]
|
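Both spi-mem controller drivers above decide what they can execute from op->cmd.nbytes, op->addr.nbytes, op->dummy.nbytes and op->data.nbytes. A hedged example of the spi_mem_op those fields belong to, built with the SPI_MEM_OP helpers (opcode and geometry are illustrative, not tied to a specific flash chip):

```c
#include <linux/spi/spi-mem.h>

/* 3-byte-address fast read (opcode 0x0b) with one dummy byte on a single line */
static int fast_read(struct spi_mem *mem, u32 addr, void *buf, size_t len)
{
	struct spi_mem_op op =
		SPI_MEM_OP(SPI_MEM_OP_CMD(0x0b, 1),	/* op.cmd.nbytes  == 1 */
			   SPI_MEM_OP_ADDR(3, addr, 1),	/* op.addr.nbytes == 3 */
			   SPI_MEM_OP_DUMMY(1, 1),	/* op.dummy.nbytes == 1 */
			   SPI_MEM_OP_DATA_IN(len, buf, 1));

	/* the core routes this to the controller's supports_op()/exec_op(),
	 * where checks such as "addr.nbytes == 0 || addr.nbytes == 3" live */
	return spi_mem_exec_op(mem, &op);
}
```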
| /linux/arch/riscv/crypto/ |
| aes-riscv64-glue.c |
    140  unsigned int nbytes; in riscv64_aes_ecb_crypt() local
    144  while ((nbytes = walk.nbytes) != 0) { in riscv64_aes_ecb_crypt()
    149  nbytes & ~(AES_BLOCK_SIZE - 1)); in riscv64_aes_ecb_crypt()
    153  nbytes & ~(AES_BLOCK_SIZE - 1)); in riscv64_aes_ecb_crypt()
    155  err = skcipher_walk_done(&walk, nbytes & (AES_BLOCK_SIZE - 1)); in riscv64_aes_ecb_crypt()
    178  unsigned int nbytes; in riscv64_aes_cbc_crypt() local
    182  while ((nbytes = walk.nbytes) != 0) { in riscv64_aes_cbc_crypt()
    187  nbytes & ~(AES_BLOCK_SIZE - 1), in riscv64_aes_cbc_crypt()
    192  nbytes & ~(AES_BLOCK_SIZE - 1), in riscv64_aes_cbc_crypt()
    195  err = skcipher_walk_done(&walk, nbytes & (AES_BLOCK_SIZE - 1)); in riscv64_aes_cbc_crypt()
    [all …]
|
| /linux/drivers/infiniband/hw/hfi1/ |
| pio_copy.c |
    184  unsigned int nbytes) in read_low_bytes() argument
    187  jcopy(&pbuf->carry.val8[0], from, nbytes); in read_low_bytes()
    188  pbuf->carry_bytes = nbytes; in read_low_bytes()
    200  const void *from, unsigned int nbytes) in read_extra_bytes() argument
    202  jcopy(&pbuf->carry.val8[pbuf->carry_bytes], from, nbytes); in read_extra_bytes()
    203  pbuf->carry_bytes += nbytes; in read_extra_bytes()
    263  const void *from, size_t nbytes) in seg_pio_copy_start() argument
    273  dend = dest + ((nbytes >> 3) * sizeof(u64)); in seg_pio_copy_start()
    338  read_low_bytes(pbuf, from, nbytes & 0x7); in seg_pio_copy_start()
    340  pbuf->qw_written = 1 /*PBC*/ + (nbytes >> 3); in seg_pio_copy_start()
    [all …]
|
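The hfi1 PIO path assembles device writes in whole 64-bit quadwords; bytes that do not fill a quadword are parked in a small carry and prepended to the next chunk, which is what read_low_bytes()/read_extra_bytes() track. A hedged sketch of that carry bookkeeping (the struct is a simplified stand-in for the driver's pio_buf):

```c
#include <linux/string.h>
#include <linux/types.h>

struct carry_state {
	union {
		u64 val64;
		u8 val8[8];
	} carry;
	unsigned int carry_bytes;	/* how many of val8[] are valid */
};

/* start a fresh carry with the sub-quadword tail of a chunk */
static void carry_low_bytes(struct carry_state *s, const void *from,
			    unsigned int nbytes)
{
	memcpy(&s->carry.val8[0], from, nbytes);
	s->carry_bytes = nbytes;
}

/* append to an existing carry; the caller keeps the total at 8 bytes or less */
static void carry_extra_bytes(struct carry_state *s, const void *from,
			      unsigned int nbytes)
{
	memcpy(&s->carry.val8[s->carry_bytes], from, nbytes);
	s->carry_bytes += nbytes;
}
```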
| /linux/drivers/i3c/ |
| internals.h |
    34  int nbytes) in i3c_writel_fifo() argument
    36  writesl(addr, buf, nbytes / 4); in i3c_writel_fifo()
    37  if (nbytes & 3) { in i3c_writel_fifo()
    40  memcpy(&tmp, buf + (nbytes & ~3), nbytes & 3); in i3c_writel_fifo()
    56  int nbytes) in i3c_readl_fifo() argument
    58  readsl(addr, buf, nbytes / 4); in i3c_readl_fifo()
    59  if (nbytes & 3) { in i3c_readl_fifo()
    67  memcpy(buf + (nbytes & ~3), &tmp, nbytes & 3); in i3c_readl_fifo()
|
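The two i3c FIFO helpers above split a transfer into whole 32-bit words plus a 0-3 byte tail handled through a temporary u32. A self-contained rendering of that pattern; the access used for the final partial word is reconstructed here and may differ in detail from the driver:

```c
#include <linux/io.h>
#include <linux/string.h>
#include <linux/types.h>

static inline void fifo_write(void __iomem *addr, const void *buf, int nbytes)
{
	writesl(addr, buf, nbytes / 4);			/* whole 32-bit words */
	if (nbytes & 3) {
		u32 tmp = 0;

		memcpy(&tmp, buf + (nbytes & ~3), nbytes & 3);
		writesl(addr, &tmp, 1);			/* zero-padded final word */
	}
}

static inline void fifo_read(void __iomem *addr, void *buf, int nbytes)
{
	readsl(addr, buf, nbytes / 4);
	if (nbytes & 3) {
		u32 tmp;

		readsl(addr, &tmp, 1);
		memcpy(buf + (nbytes & ~3), &tmp, nbytes & 3);	/* keep only the tail */
	}
}
```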
| /linux/arch/x86/crypto/ |
| aria_aesni_avx2_glue.c |
    92  unsigned int nbytes; in aria_avx2_ctr_encrypt() local
    97  while ((nbytes = walk.nbytes) > 0) { in aria_avx2_ctr_encrypt()
    101  while (nbytes >= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE) { in aria_avx2_ctr_encrypt()
    109  nbytes -= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE; in aria_avx2_ctr_encrypt()
    112  while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) { in aria_avx2_ctr_encrypt()
    120  nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE; in aria_avx2_ctr_encrypt()
    123  while (nbytes >= ARIA_BLOCK_SIZE) { in aria_avx2_ctr_encrypt()
    134  nbytes -= ARIA_BLOCK_SIZE; in aria_avx2_ctr_encrypt()
    137  if (walk.nbytes == walk.total && nbytes > 0) { in aria_avx2_ctr_encrypt()
    146  nbytes); in aria_avx2_ctr_encrypt()
    [all …]
|
| aesni-intel_glue.c |
    160  unsigned int nbytes; in ecb_encrypt() local
    165  while ((nbytes = walk.nbytes)) { in ecb_encrypt()
    168  nbytes & AES_BLOCK_MASK); in ecb_encrypt()
    170  nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt()
    171  err = skcipher_walk_done(&walk, nbytes); in ecb_encrypt()
    182  unsigned int nbytes; in ecb_decrypt() local
    187  while ((nbytes = walk.nbytes)) { in ecb_decrypt()
    190  nbytes & AES_BLOCK_MASK); in ecb_decrypt()
    192  nbytes &= AES_BLOCK_SIZE - 1; in ecb_decrypt()
    193  err = skcipher_walk_done(&walk, nbytes); in ecb_decrypt()
    [all …]
|
| aria_aesni_avx_glue.c |
    90  unsigned int nbytes; in aria_avx_ctr_encrypt() local
    95  while ((nbytes = walk.nbytes) > 0) { in aria_avx_ctr_encrypt()
    99  while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) { in aria_avx_ctr_encrypt()
    107  nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE; in aria_avx_ctr_encrypt()
    110  while (nbytes >= ARIA_BLOCK_SIZE) { in aria_avx_ctr_encrypt()
    121  nbytes -= ARIA_BLOCK_SIZE; in aria_avx_ctr_encrypt()
    124  if (walk.nbytes == walk.total && nbytes > 0) { in aria_avx_ctr_encrypt()
    133  nbytes); in aria_avx_ctr_encrypt()
    134  dst += nbytes; in aria_avx_ctr_encrypt()
    135  src += nbytes; in aria_avx_ctr_encrypt()
    [all …]
|
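Both ARIA CTR glue files finish an uneven request by encrypting the counter one last time into a keystream buffer and XORing only the bytes that remain. A hedged sketch of that final-partial-block step (encrypt_block() is a stand-in for the cipher's single-block routine):

```c
#include <crypto/algapi.h>	/* crypto_xor_cpy() */
#include <linux/types.h>

static void ctr_final(const void *key, const u8 *ctr, u8 *dst, const u8 *src,
		      unsigned int nbytes,
		      void (*encrypt_block)(const void *key, u8 *out, const u8 *in))
{
	u8 keystream[16];

	if (!nbytes)
		return;

	encrypt_block(key, keystream, ctr);		/* keystream = E_k(counter) */
	crypto_xor_cpy(dst, src, keystream, nbytes);	/* use only nbytes of it */
}
```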
| ecb_cbc_helpers.h |
    20  while (walk.nbytes > 0) { \
    21  unsigned int nbytes = walk.nbytes; \
    23  nbytes >= __fpu_blocks * __bsize; \
    35  nbytes -= (blocks) * __bsize; \
    44  while (nbytes >= __blocks * __bsize) { \
    52  while (nbytes >= __bsize) { \
    67  while (nbytes >= __blocks * __bsize) { \
    80  err = skcipher_walk_done(&walk, nbytes); \
|
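ecb_cbc_helpers.h wraps the walk loop shown earlier in macros so each x86 driver only names its SIMD batch width; expanded by hand the inner part is roughly the nested loop below. This is a hedged sketch: ecb_enc_8way()/ecb_enc_1way() are hypothetical helpers and the kernel_fpu_begin()/end() placement is simplified.

```c
#include <asm/fpu/api.h>	/* kernel_fpu_begin()/kernel_fpu_end() */
#include <linux/types.h>

#define BSIZE 16		/* one cipher block */

/* hypothetical backends: 8-block SIMD batch and scalar single block */
void ecb_enc_8way(const void *key, u8 *dst, const u8 *src);
void ecb_enc_1way(const void *key, u8 *dst, const u8 *src);

static void ecb_encrypt_chunk(const void *key, u8 *dst, const u8 *src,
			      unsigned int nbytes)
{
	kernel_fpu_begin();

	while (nbytes >= 8 * BSIZE) {		/* widest SIMD batch first */
		ecb_enc_8way(key, dst, src);
		src += 8 * BSIZE;
		dst += 8 * BSIZE;
		nbytes -= 8 * BSIZE;
	}

	while (nbytes >= BSIZE) {		/* then one block at a time */
		ecb_enc_1way(key, dst, src);
		src += BSIZE;
		dst += BSIZE;
		nbytes -= BSIZE;
	}

	kernel_fpu_end();

	/* any remaining nbytes < BSIZE goes back to skcipher_walk_done() */
}
```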
| /linux/fs/coda/ |
| psdev.c |
    92  size_t nbytes, loff_t *off) in coda_psdev_write() argument
    103  if (nbytes < (2 * sizeof(u_int32_t))) in coda_psdev_write()
    114  if ( nbytes < sizeof(struct coda_out_hdr) ) { in coda_psdev_write()
    117  count = nbytes; in coda_psdev_write()
    120  if ( nbytes > size ) { in coda_psdev_write()
    123  nbytes = size; in coda_psdev_write()
    126  dcbuf = vmemdup_user(buf, nbytes); in coda_psdev_write()
    133  error = coda_downcall(vcp, hdr.opcode, dcbuf, nbytes); in coda_psdev_write()
    142  count = nbytes; in coda_psdev_write()
    166  if (req->uc_outSize < nbytes) { in coda_psdev_write()
    [all …]
|
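coda_psdev_write() treats the userspace-supplied `nbytes` as untrusted: too-short writes are rejected before the header is read, and the copy into the waiting request is clamped to that request's reply size. A hedged sketch of the same validation order with simplified structures:

```c
#include <linux/types.h>
#include <linux/uaccess.h>

struct reply_hdr {		/* simplified stand-in for struct coda_out_hdr */
	u32 opcode;
	u32 unique;
};

static ssize_t validate_reply(const char __user *buf, size_t nbytes,
			      void *reply_buf, size_t reply_size)
{
	struct reply_hdr hdr;
	size_t count = nbytes;

	if (nbytes < 2 * sizeof(u32))		/* must at least carry the header */
		return -EINVAL;

	if (copy_from_user(&hdr, buf, 2 * sizeof(u32)))
		return -EFAULT;

	if (count > reply_size)			/* never overrun the waiting request */
		count = reply_size;

	if (copy_from_user(reply_buf, buf, count))
		return -EFAULT;

	return count;
}
```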