| /linux/drivers/s390/cio/ |
| H A D | airq.c | 130 struct airq_iv *iv; in airq_iv_create() local 133 iv = kzalloc(sizeof(*iv), GFP_KERNEL); in airq_iv_create() 134 if (!iv) in airq_iv_create() 136 iv->bits = bits; in airq_iv_create() 137 iv->flags = flags; in airq_iv_create() 145 iv->vector = dma_pool_zalloc(airq_iv_cache, GFP_KERNEL, in airq_iv_create() 146 &iv->vector_dma); in airq_iv_create() 147 if (!iv->vector) in airq_iv_create() 150 iv->vector = vec; in airq_iv_create() 152 iv->vector = cio_dma_zalloc(size); in airq_iv_create() [all …]
|
| /linux/arch/s390/include/asm/ |
| H A D | airq.h | 53 void airq_iv_release(struct airq_iv *iv); 54 unsigned long airq_iv_alloc(struct airq_iv *iv, unsigned long num); 55 void airq_iv_free(struct airq_iv *iv, unsigned long bit, unsigned long num); 56 unsigned long airq_iv_scan(struct airq_iv *iv, unsigned long start, 59 static inline unsigned long airq_iv_alloc_bit(struct airq_iv *iv) in airq_iv_alloc_bit() argument 61 return airq_iv_alloc(iv, 1); in airq_iv_alloc_bit() 64 static inline void airq_iv_free_bit(struct airq_iv *iv, unsigned long bit) in airq_iv_free_bit() argument 66 airq_iv_free(iv, bit, 1); in airq_iv_free_bit() 69 static inline unsigned long airq_iv_end(struct airq_iv *iv) in airq_iv_end() argument 71 return iv->end; in airq_iv_end() [all …]
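The airq.h prototypes above form s390's adapter-interrupt bit-vector API. Below is a minimal sketch of claiming and releasing one indicator bit; the airq_iv_create() argument list (64 bits, AIRQ_IV_ALLOC, NULL vector) and the -1UL failure sentinel are assumptions inferred from the airq.c excerpt, not a verified call sequence.

#include <linux/errno.h>
#include <asm/airq.h>

/* Sketch only: the flag choice, the NULL vector argument and the -1UL
 * failure value are assumptions, not verified against the header. */
static int example_claim_indicator(void)
{
        struct airq_iv *iv;
        unsigned long bit;

        iv = airq_iv_create(64, AIRQ_IV_ALLOC, NULL);
        if (!iv)
                return -ENOMEM;

        bit = airq_iv_alloc_bit(iv);    /* same as airq_iv_alloc(iv, 1) */
        if (bit == -1UL) {
                airq_iv_release(iv);
                return -ENOSPC;
        }

        /* ... program the adapter to use 'bit' ... */

        airq_iv_free_bit(iv, bit);
        airq_iv_release(iv);
        return 0;
}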
|
| /linux/crypto/ |
| H A D | testmgr.h | 67 const char *iv; member 107 const char *iv; member 9235 .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10", 9247 .iv = "\x12\x34\x56\x78\x90\xab\xcd\xef", 9255 .iv = "\xe5\xc7\xcd\xde\x87\x2b\xf2\x7c", 9263 .iv = "\x43\xe9\x34\x00\x8c\x38\x9c\x0f", 9271 .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47", 9343 .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD", 9411 .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47", 9643 .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42", [all …]
|
| /linux/lib/crypto/ |
| H A D | aescfb.c | 42 int len, const u8 iv[AES_BLOCK_SIZE]) in aescfb_encrypt() 45 const u8 *v = iv; in aescfb_encrypt() 71 int len, const u8 iv[AES_BLOCK_SIZE]) in aescfb_decrypt() 75 aescfb_encrypt_block(ctx, ks[0], iv); in aescfb_decrypt() 112 u8 iv[AES_BLOCK_SIZE] __nonstring; member 121 .iv = "\x00\x01\x02\x03\x04\x05\x06\x07" 145 .iv = "\x00\x01\x02\x03\x04\x05\x06\x07" 170 .iv = "\x00\x01\x02\x03\x04\x05\x06\x07" 193 .iv = "\x00\x01\x02\x03\x04\x05\x06\x07" 206 .iv = "\x00\x01\x02\x03\x04\x05\x06\x07" [all …]
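lib/crypto/aescfb.c is a small synchronous AES-CFB library. A hedged usage sketch follows, assuming the aescfb_encrypt()/aescfb_decrypt() prototypes visible above, the <crypto/aescfb.h> header, and the standard aes_expandkey() key-schedule helper; it round-trips a buffer in place with AES-128.

#include <crypto/aes.h>
#include <crypto/aescfb.h>
#include <linux/string.h>

/* Sketch: encrypt and then decrypt a buffer in place with AES-128-CFB. */
static int example_aescfb_roundtrip(u8 *buf, int len,
                                    const u8 key[AES_KEYSIZE_128],
                                    const u8 iv[AES_BLOCK_SIZE])
{
        struct crypto_aes_ctx ctx;
        int err;

        err = aes_expandkey(&ctx, key, AES_KEYSIZE_128);
        if (err)
                return err;

        aescfb_encrypt(&ctx, buf, buf, len, iv);
        aescfb_decrypt(&ctx, buf, buf, len, iv);

        memzero_explicit(&ctx, sizeof(ctx));    /* scrub the key schedule */
        return 0;
}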
|
| H A D | chacha20poly1305.c | 37 u8 iv[CHACHA_IV_SIZE]; in xchacha_init() local 39 memset(iv, 0, 8); in xchacha_init() 40 memcpy(iv + 8, nonce + 16, 8); in xchacha_init() 48 chacha_init(chacha_state, k, iv); in xchacha_init() 51 memzero_explicit(iv, sizeof(iv)); in xchacha_init() 96 __le64 iv[2]; in chacha20poly1305_encrypt() local 100 iv[0] = 0; in chacha20poly1305_encrypt() 101 iv[1] = cpu_to_le64(nonce); in chacha20poly1305_encrypt() 103 chacha_init(&chacha_state, k, (u8 *)iv); in chacha20poly1305_encrypt() 107 memzero_explicit(iv, sizeof(iv)); in chacha20poly1305_encrypt() [all …]
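The xchacha_init()/chacha20poly1305_encrypt() internals above back the one-shot library interface declared in <crypto/chacha20poly1305.h>. A hedged sketch of sealing and then opening a buffer in place with the 64-bit-nonce variant (the nonce value is illustrative and must be unique per key):

#include <crypto/chacha20poly1305.h>
#include <linux/types.h>

/* Sketch: seal, then open, a message in place (64-bit nonce variant). */
static bool example_seal_open(u8 *buf, size_t plain_len,
                              const u8 key[CHACHA20POLY1305_KEY_SIZE])
{
        const u64 nonce = 1;    /* illustrative; must never repeat per key */

        /* buf must have room for plain_len + CHACHA20POLY1305_AUTHTAG_SIZE */
        chacha20poly1305_encrypt(buf, buf, plain_len, NULL, 0, nonce, key);

        return chacha20poly1305_decrypt(buf, buf,
                                        plain_len + CHACHA20POLY1305_AUTHTAG_SIZE,
                                        NULL, 0, nonce, key);
}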
|
| H A D | aesgcm.c | 149 const u8 iv[GCM_AES_IV_SIZE], u8 *authtag) in aesgcm_encrypt() 153 memcpy(ctr, iv, GCM_AES_IV_SIZE); in aesgcm_encrypt() 179 int assoc_len, const u8 iv[GCM_AES_IV_SIZE], in aesgcm_decrypt() 185 memcpy(ctr, iv, GCM_AES_IV_SIZE); in aesgcm_decrypt() 562 u8 iv[GCM_AES_IV_SIZE] __nonstring; member 584 .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" 594 .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" 619 .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" 641 .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" 653 .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad" [all …]
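aesgcm.c implements a synchronous one-shot AES-GCM library. The sketch below assumes the aesgcm_expandkey()/aesgcm_encrypt()/aesgcm_decrypt() prototypes partially visible above and the <crypto/aesgcm.h>/<crypto/gcm.h> headers; it seals and then opens a buffer with no associated data.

#include <crypto/aesgcm.h>
#include <crypto/gcm.h>
#include <linux/types.h>

/* Sketch: one-shot AES-GCM seal followed by open, no associated data. */
static bool example_aesgcm_roundtrip(struct aesgcm_ctx *ctx,
                                     u8 *buf, int len,
                                     const u8 *key, unsigned int keylen,
                                     const u8 iv[GCM_AES_IV_SIZE])
{
        u8 tag[16];

        if (aesgcm_expandkey(ctx, key, keylen, sizeof(tag)))
                return false;

        aesgcm_encrypt(ctx, buf, buf, len, NULL, 0, iv, tag);
        return aesgcm_decrypt(ctx, buf, buf, len, NULL, 0, iv, tag);
}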
|
| /linux/arch/arm64/crypto/ |
| H A D | sm4-neon-glue.c | 24 u8 *iv, unsigned int nblocks); 26 u8 *iv, unsigned int nblocks); 88 const u8 *iv = walk.iv; in sm4_cbc_encrypt() local 93 crypto_xor_cpy(dst, src, iv, SM4_BLOCK_SIZE); in sm4_cbc_encrypt() 95 iv = dst; in sm4_cbc_encrypt() 100 if (iv != walk.iv) in sm4_cbc_encrypt() 101 memcpy(walk.iv, iv, SM4_BLOCK_SIZE); in sm4_cbc_encrypt() 128 walk.iv, nblocks); in sm4_cbc_decrypt() 156 walk.iv, nblocks); in sm4_ctr_crypt() 167 sm4_crypt_block(ctx->rkey_enc, keystream, walk.iv); in sm4_ctr_crypt() [all …]
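sm4_cbc_encrypt() above shows the standard software CBC chaining loop: XOR the plaintext block with the running IV, encrypt, and let the ciphertext become the next IV, copying the final block back so the chain can continue across calls. A hedged, generic restatement using the portable sm4_crypt_block() helper (the crypto_xor_cpy() include split between <crypto/algapi.h> and <crypto/utils.h> varies by kernel version):

#include <crypto/algapi.h>      /* crypto_xor_cpy(); <crypto/utils.h> on newer trees */
#include <crypto/sm4.h>
#include <linux/string.h>

/* Sketch: generic CBC-encrypt chaining; the last ciphertext block is
 * written back into 'iv' so the caller can continue the chain. */
static void example_cbc_encrypt(const struct sm4_ctx *ctx, u8 *dst,
                                const u8 *src, unsigned int nblocks,
                                u8 iv[SM4_BLOCK_SIZE])
{
        const u8 *prev = iv;

        while (nblocks--) {
                crypto_xor_cpy(dst, src, prev, SM4_BLOCK_SIZE);
                sm4_crypt_block(ctx->rkey_enc, dst, dst);
                prev = dst;
                src += SM4_BLOCK_SIZE;
                dst += SM4_BLOCK_SIZE;
        }

        if (prev != iv)
                memcpy(iv, prev, SM4_BLOCK_SIZE);
}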
|
| H A D | ghash-ce-glue.c | 295 static int gcm_encrypt(struct aead_request *req, char *iv, int assoclen) in gcm_encrypt() argument 313 put_unaligned_be32(2, iv + GCM_AES_IV_SIZE); in gcm_encrypt() 334 dg, iv, ctx->aes_key.key_enc, nrounds, in gcm_encrypt() 357 static int gcm_decrypt(struct aead_request *req, char *iv, int assoclen) in gcm_decrypt() argument 378 put_unaligned_be32(2, iv + GCM_AES_IV_SIZE); in gcm_decrypt() 404 dg, iv, ctx->aes_key.key_enc, in gcm_decrypt() 425 u8 iv[AES_BLOCK_SIZE]; in gcm_aes_encrypt() local 427 memcpy(iv, req->iv, GCM_AES_IV_SIZE); in gcm_aes_encrypt() 428 return gcm_encrypt(req, iv, req->assoclen); in gcm_aes_encrypt() 433 u8 iv[AES_BLOCK_SIZE]; in gcm_aes_decrypt() local [all …]
|
| H A D | aes-neonbs-glue.c | 35 int rounds, int blocks, u8 iv[]); 38 int rounds, int blocks, u8 iv[]); 41 int rounds, int blocks, u8 iv[]); 43 int rounds, int blocks, u8 iv[]); 49 int rounds, int blocks, u8 iv[]); 54 u32 const rk2[], u8 iv[], int first); 57 u32 const rk2[], u8 iv[], int first); 171 walk.iv); in cbc_encrypt() 196 walk.iv); in cbc_decrypt() 223 walk.iv); in ctr_encrypt() [all …]
|
| H A D | sm4-ce-ccm-glue.c | 24 u8 *iv, unsigned int nbytes, u8 *mac); 26 u8 *iv, unsigned int nbytes, u8 *mac); 27 asmlinkage void sm4_ce_ccm_final(const u32 *rkey_enc, u8 *iv, u8 *mac); 56 unsigned int l = req->iv[0] + 1; in ccm_format_input() 66 memset(&req->iv[SM4_BLOCK_SIZE - l], 0, l); in ccm_format_input() 68 memcpy(info, req->iv, SM4_BLOCK_SIZE); in ccm_format_input() 159 const u8 *src, u8 *iv, in ccm_crypt() argument 166 memcpy(ctr0, walk->iv, SM4_BLOCK_SIZE); in ccm_crypt() 167 crypto_inc(walk->iv, SM4_BLOCK_SIZE); in ccm_crypt() 180 walk->src.virt.addr, walk->iv, in ccm_crypt()
|
| /linux/net/mac80211/ |
| H A D | wep.c | 32 static inline bool ieee80211_wep_weak_iv(u32 iv, int keylen) in ieee80211_wep_weak_iv() argument 39 if ((iv & 0xff00) == 0xff00) { in ieee80211_wep_weak_iv() 40 u8 B = (iv >> 16) & 0xff; in ieee80211_wep_weak_iv() 49 int keylen, int keyidx, u8 *iv) in ieee80211_wep_get_iv() argument 55 if (!iv) in ieee80211_wep_get_iv() 58 *iv++ = (local->wep_iv >> 16) & 0xff; in ieee80211_wep_get_iv() 59 *iv++ = (local->wep_iv >> 8) & 0xff; in ieee80211_wep_get_iv() 60 *iv++ = local->wep_iv & 0xff; in ieee80211_wep_get_iv() 61 *iv++ = keyidx << 6; in ieee80211_wep_get_iv() 136 u8 *iv; in ieee80211_wep_encrypt() local [all …]
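ieee80211_wep_weak_iv() classifies a 24-bit WEP IV as one of the FMS weak IVs of the form (B + 3, 0xff, x). A hedged restatement of that test; the exact bounds of the key-byte range check are an assumption, since they fall outside the excerpt:

#include <linux/types.h>

/* Sketch: an IV whose middle byte is 0xff and whose high byte B indexes
 * a key byte (3 <= B < 3 + keylen) leaks key material under the FMS
 * attack; the range bounds here are an assumption. */
static bool example_wep_weak_iv(u32 iv, int keylen)
{
        if ((iv & 0xff00) == 0xff00) {
                u8 B = (iv >> 16) & 0xff;

                if (B >= 3 && B < 3 + keylen)
                        return true;
        }
        return false;
}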
|
| H A D | aes_gmac.c | 21 u8 *zero, *__aad, iv[AES_BLOCK_SIZE]; in ieee80211_aes_gmac() local 57 memcpy(iv, nonce, GMAC_NONCE_LEN); in ieee80211_aes_gmac() 58 memset(iv + GMAC_NONCE_LEN, 0, sizeof(iv) - GMAC_NONCE_LEN); in ieee80211_aes_gmac() 59 iv[AES_BLOCK_SIZE - 1] = 0x01; in ieee80211_aes_gmac() 62 aead_request_set_crypt(aead_req, sg, sg, 0, iv); in ieee80211_aes_gmac()
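ieee80211_aes_gmac() derives the 16-byte AES-GCM IV from the 12-byte GMAC nonce by zero-padding and setting the last byte to 0x01 (the initial counter). A hedged sketch of just that construction; EXAMPLE_GMAC_NONCE_LEN is a local stand-in for the driver's GMAC_NONCE_LEN:

#include <crypto/aes.h>
#include <linux/string.h>

#define EXAMPLE_GMAC_NONCE_LEN  12      /* stand-in for GMAC_NONCE_LEN */

/* Sketch: build the 16-byte AES-GCM IV (J0) from a 12-byte GMAC nonce. */
static void example_gmac_iv(u8 iv[AES_BLOCK_SIZE],
                            const u8 nonce[EXAMPLE_GMAC_NONCE_LEN])
{
        memcpy(iv, nonce, EXAMPLE_GMAC_NONCE_LEN);
        memset(iv + EXAMPLE_GMAC_NONCE_LEN, 0,
               AES_BLOCK_SIZE - EXAMPLE_GMAC_NONCE_LEN);
        iv[AES_BLOCK_SIZE - 1] = 0x01;  /* counter starts at 1 */
}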
|
| /linux/net/qrtr/ |
| H A D | ns.c | 114 struct kvec iv; in service_announce_new() local 119 iv.iov_base = &pkt; in service_announce_new() 120 iv.iov_len = sizeof(pkt); in service_announce_new() 132 return kernel_sendmsg(qrtr_ns.sock, &msg, &iv, 1, sizeof(pkt)); in service_announce_new() 140 struct kvec iv; in service_announce_del() local 146 iv.iov_base = &pkt; in service_announce_del() 147 iv.iov_len = sizeof(pkt); in service_announce_del() 159 ret = kernel_sendmsg(qrtr_ns.sock, &msg, &iv, 1, sizeof(pkt)); in service_announce_del() 171 struct kvec iv; in lookup_notify() local 303 struct kvec iv; in say_hello() local 343 struct kvec iv; in ctrl_cmd_bye() local 396 struct kvec iv; in ctrl_cmd_del_client() local 597 struct kvec iv; in qrtr_ns_worker() local [all …]
|
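The qrtr name service sends each control packet by pointing a struct kvec at an on-stack structure and passing it to kernel_sendmsg(). A hedged sketch of the same pattern; struct example_pkt and its fields are hypothetical, only the kvec/msghdr plumbing mirrors the excerpt:

#include <linux/net.h>
#include <linux/socket.h>
#include <linux/types.h>
#include <linux/uio.h>
#include <asm/byteorder.h>

/* Hypothetical wire format; the kvec/msghdr plumbing is the point. */
struct example_pkt {
        __le32 cmd;
        __le32 arg;
};

static int example_send_pkt(struct socket *sock, struct sockaddr *addr,
                            int addrlen, u32 cmd, u32 arg)
{
        struct example_pkt pkt = {
                .cmd = cpu_to_le32(cmd),
                .arg = cpu_to_le32(arg),
        };
        struct msghdr msg = {
                .msg_name = addr,
                .msg_namelen = addrlen,
        };
        struct kvec iv = {
                .iov_base = &pkt,
                .iov_len = sizeof(pkt),
        };

        return kernel_sendmsg(sock, &msg, &iv, 1, sizeof(pkt));
}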
| /linux/tools/testing/crypto/chacha20-s390/ |
| H A D | test-cipher.c | 54 u8 iv[16], key[32]; in test_lib_chacha() local 58 memset(iv, 'I', sizeof(iv)); in test_lib_chacha() 64 print_hex_dump(KERN_INFO, "iv: ", DUMP_PREFIX_OFFSET, in test_lib_chacha() 65 16, 1, iv, 16, 1); in test_lib_chacha() 69 chacha_init(&chacha_state, (u32 *)key, iv); in test_lib_chacha() 84 chacha_init(&chacha_state, (u32 *)key, iv); in test_lib_chacha() 131 u8 iv[16], key[32]; in test_skcipher() local 153 memset(iv, 'I', sizeof(iv)); in test_skcipher() [all …]
|
| /linux/include/uapi/linux/ |
| H A D | tls.h | 128 unsigned char iv[TLS_CIPHER_AES_GCM_128_IV_SIZE]; member 136 unsigned char iv[TLS_CIPHER_AES_GCM_256_IV_SIZE]; member 144 unsigned char iv[TLS_CIPHER_AES_CCM_128_IV_SIZE]; member 152 unsigned char iv[TLS_CIPHER_CHACHA20_POLY1305_IV_SIZE]; member 160 unsigned char iv[TLS_CIPHER_SM4_GCM_IV_SIZE]; member 168 unsigned char iv[TLS_CIPHER_SM4_CCM_IV_SIZE]; member 176 unsigned char iv[TLS_CIPHER_ARIA_GCM_128_IV_SIZE]; member 184 unsigned char iv[TLS_CIPHER_ARIA_GCM_256_IV_SIZE]; member
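These UAPI structures carry per-cipher key material to kernel TLS via setsockopt(). A hedged userspace sketch for AES-128-GCM transmit offload follows; the SOL_TLS fallback define uses the kernel's documented value, and all key/IV/salt/record-sequence contents are caller-supplied placeholders:

#include <string.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netinet/tcp.h>
#include <linux/tls.h>

#ifndef SOL_TLS
#define SOL_TLS 282             /* value from include/linux/socket.h */
#endif

/* Sketch: enable kTLS transmit offload for AES-128-GCM on a connected
 * TCP socket; the key material arguments are placeholders. */
static int example_enable_ktls_tx(int fd,
                const unsigned char key[TLS_CIPHER_AES_GCM_128_KEY_SIZE],
                const unsigned char iv[TLS_CIPHER_AES_GCM_128_IV_SIZE],
                const unsigned char salt[TLS_CIPHER_AES_GCM_128_SALT_SIZE],
                const unsigned char rec_seq[TLS_CIPHER_AES_GCM_128_REC_SEQ_SIZE])
{
        struct tls12_crypto_info_aes_gcm_128 ci = {
                .info.version = TLS_1_2_VERSION,
                .info.cipher_type = TLS_CIPHER_AES_GCM_128,
        };

        memcpy(ci.key, key, sizeof(ci.key));
        memcpy(ci.iv, iv, sizeof(ci.iv));
        memcpy(ci.salt, salt, sizeof(ci.salt));
        memcpy(ci.rec_seq, rec_seq, sizeof(ci.rec_seq));

        if (setsockopt(fd, IPPROTO_TCP, TCP_ULP, "tls", sizeof("tls")))
                return -1;
        return setsockopt(fd, SOL_TLS, TLS_TX, &ci, sizeof(ci));
}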
|
| /linux/tools/testing/selftests/bpf/progs/ |
| H A D | linked_list_fail.c | 11 struct map_value *v, *v2, *iv, *iv2; \ 25 iv = bpf_map_lookup_elem(map, &(int){ 0 }); \ 26 if (!iv) \ 66 CHECK(inner_map, pop_front, &iv->head); 67 CHECK(inner_map, pop_back, &iv->head); 89 CHECK(inner_map, push_front, &iv->head, &f->node2); 90 CHECK(inner_map, push_back, &iv->head, &f->node2); 109 CHECK(kptr_inner_map, op, &f1->lock, &iv->head); \ 114 CHECK(global_inner_map, op, &glock, &iv->head); \ 119 CHECK(map_inner_map, op, &v->lock, &iv->head); \ [all …]
|
| /linux/io_uring/ |
| H A D | rsrc.h | 70 int io_prep_reg_iovec(struct io_kiocb *req, struct iou_vec *iv, 134 void io_vec_free(struct iou_vec *iv); 135 int io_vec_realloc(struct iou_vec *iv, unsigned nr_entries); 137 static inline void io_vec_reset_iovec(struct iou_vec *iv, in io_vec_reset_iovec() argument 140 io_vec_free(iv); in io_vec_reset_iovec() 141 iv->iovec = iovec; in io_vec_reset_iovec() 142 iv->nr = nr; in io_vec_reset_iovec() 145 static inline void io_alloc_cache_vec_kasan(struct iou_vec *iv) in io_alloc_cache_vec_kasan() argument 148 io_vec_free(iv); in io_alloc_cache_vec_kasan() [all …]
|
| /linux/arch/riscv/crypto/ |
| H A D | aes-riscv64-glue.c | 40 u8 iv[AES_BLOCK_SIZE]); 43 u8 iv[AES_BLOCK_SIZE]); 47 const u8 iv[AES_BLOCK_SIZE], bool enc); 51 u8 iv[AES_BLOCK_SIZE]); 188 walk.iv); in riscv64_aes_cbc_crypt() 193 walk.iv); in riscv64_aes_cbc_crypt() 245 cbc_len, req->iv); in riscv64_aes_cbc_cts_crypt() 253 req->cryptlen - cbc_len, req->iv); in riscv64_aes_cbc_cts_crypt() 260 walk.nbytes, req->iv, enc); in riscv64_aes_cbc_cts_crypt() 287 ctr32 = get_unaligned_be32(req->iv + 12); in riscv64_aes_ctr_crypt() [all …]
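The riscv64 CTR glue reads the low 32 bits of the counter block with get_unaligned_be32(req->iv + 12) so it can detect 32-bit counter wrap. A hedged helper sketch of that bookkeeping; the unaligned-access header location differs between kernel versions:

#include <crypto/aes.h>
#include <linux/types.h>
#include <linux/unaligned.h>    /* <asm/unaligned.h> on older kernels */

/* Sketch: bump the low 32-bit big-endian counter of an AES-CTR IV by
 * nblocks and report whether it wrapped around. */
static bool example_ctr32_advance(u8 iv[AES_BLOCK_SIZE], u32 nblocks)
{
        u32 ctr = get_unaligned_be32(iv + AES_BLOCK_SIZE - 4);
        bool wrapped = (u32)(ctr + nblocks) < ctr;

        put_unaligned_be32(ctr + nblocks, iv + AES_BLOCK_SIZE - 4);
        return wrapped;
}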
|
| /linux/arch/x86/crypto/ |
| H A D | aria-avx.h | 22 u8 *keystream, u8 *iv); 29 u8 *keystream, u8 *iv); 37 u8 *keystream, u8 *iv); 44 u8 *keystream, u8 *iv); 50 u8 *keystream, u8 *iv); 54 u8 *keystream, u8 *iv); 58 u8 *keystream, u8 *iv);
|
| /linux/drivers/crypto/ |
| H A D | padlock-aes.c | 203 u8 *iv, struct cword *control_word, int count) in rep_xcrypt_cbc() argument 206 : "+S" (input), "+D" (output), "+a" (iv) in rep_xcrypt_cbc() 208 return iv; in rep_xcrypt_cbc() 226 u8 *iv, struct cword *cword, int count) in cbc_crypt_copy() argument 236 return rep_xcrypt_cbc(tmp, out, key, iv, cword, count); in cbc_crypt_copy() 254 u8 *iv, struct cword *cword, int count) in cbc_crypt() argument 258 return cbc_crypt_copy(in, out, key, iv, cword, count); in cbc_crypt() 260 return rep_xcrypt_cbc(in, out, key, iv, cword, count); in cbc_crypt() 286 u8 *iv, void *control_word, u32 count) in padlock_xcrypt_cbc() argument 291 return cbc_crypt(input, output, key, iv, control_word, count); in padlock_xcrypt_cbc() [all …]
|
| /linux/net/sunrpc/auth_gss/ |
| H A D | gss_krb5_crypto.c | 105 void * iv, in krb5_encrypt() argument 124 if (iv) in krb5_encrypt() 125 memcpy(local_iv, iv, crypto_sync_skcipher_ivsize(tfm)); in krb5_encrypt() 225 u8 iv[GSS_KRB5_MAX_BLOCKSIZE]; member 278 thislen, desc->iv); in encryptor() 302 u8 iv[GSS_KRB5_MAX_BLOCKSIZE]; member 335 thislen, desc->iv); in decryptor() 394 u32 offset, u8 *iv, struct page **pages, int encrypt) in gss_krb5_cts_crypt() argument 429 skcipher_request_set_crypt(req, sg, sg, len, iv); in gss_krb5_cts_crypt() 450 memcpy(iv, data, crypto_sync_skcipher_ivsize(cipher)); in gss_krb5_cts_crypt() [all …]
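krb5_encrypt() drives a synchronous skcipher with an on-stack request and a bounce copy of the caller's IV. A hedged sketch of the same pattern for a caller that already owns a crypto_sync_skcipher handle; the 16-byte IV bound is an assumption standing in for GSS_KRB5_MAX_BLOCKSIZE:

#include <crypto/skcipher.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

/* Sketch: in-place encryption of a linear buffer with a sync skcipher,
 * mirroring the krb5_encrypt() shape; iv may be NULL for an all-zero IV. */
static int example_sync_encrypt(struct crypto_sync_skcipher *tfm,
                                void *buf, unsigned int len, const u8 *iv)
{
        SYNC_SKCIPHER_REQUEST_ON_STACK(req, tfm);
        u8 local_iv[16] = { 0 };        /* assumed upper bound on ivsize */
        struct scatterlist sg[1];
        int ret;

        if (crypto_sync_skcipher_ivsize(tfm) > sizeof(local_iv))
                return -EINVAL;
        if (iv)
                memcpy(local_iv, iv, crypto_sync_skcipher_ivsize(tfm));

        sg_init_one(sg, buf, len);

        skcipher_request_set_sync_tfm(req, tfm);
        skcipher_request_set_callback(req, 0, NULL, NULL);
        skcipher_request_set_crypt(req, sg, sg, len, local_iv);

        ret = crypto_skcipher_encrypt(req);
        skcipher_request_zero(req);
        return ret;
}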
|
| /linux/drivers/ssb/ |
| H A D | host_soc.c | 177 struct ssb_init_invariants *iv) in ssb_host_soc_get_invariants() argument 183 memset(&iv->boardinfo, 0, sizeof(struct ssb_boardinfo)); in ssb_host_soc_get_invariants() 187 err = kstrtou16(strim(buf), 0, &iv->boardinfo.vendor); in ssb_host_soc_get_invariants() 192 if (!iv->boardinfo.vendor) in ssb_host_soc_get_invariants() 193 iv->boardinfo.vendor = SSB_BOARDVENDOR_BCM; in ssb_host_soc_get_invariants() 197 err = kstrtou16(strim(buf), 0, &iv->boardinfo.type); in ssb_host_soc_get_invariants() 203 memset(&iv->sprom, 0, sizeof(struct ssb_sprom)); in ssb_host_soc_get_invariants() 204 ssb_fill_sprom_with_fallback(bus, &iv->sprom); in ssb_host_soc_get_invariants() 207 iv->has_cardbus_slot = !!simple_strtoul(buf, NULL, 10); in ssb_host_soc_get_invariants()
|
| /linux/net/rxrpc/ |
| H A D | rxkad.c | 197 struct rxrpc_crypt iv; in rxkad_prime_packet_security() local 217 memcpy(&iv, token->kad->session_key, sizeof(iv)); in rxkad_prime_packet_security() 227 skcipher_request_set_crypt(req, &sg, &sg, tmpsize, iv.x); in rxkad_prime_packet_security() 263 struct rxrpc_crypt iv; in rxkad_secure_packet_auth() local 283 memset(&iv, 0, sizeof(iv)); in rxkad_secure_packet_auth() 288 skcipher_request_set_crypt(req, &sg, &sg, 8, iv.x); in rxkad_secure_packet_auth() 305 struct rxrpc_crypt iv; in rxkad_secure_packet_encrypt() local 326 memcpy(&iv, token->kad->session_key, sizeof(iv)); in rxkad_secure_packet_encrypt() 341 struct rxrpc_crypt iv; in rxkad_secure_packet() local 428 struct rxrpc_crypt iv; in rxkad_verify_packet_1() local 492 struct rxrpc_crypt iv; in rxkad_verify_packet_2() local 570 struct rxrpc_crypt iv; in rxkad_verify_packet() local 779 struct rxrpc_crypt iv; in rxkad_encrypt_response() local 882 struct rxrpc_crypt iv, key; in rxkad_decrypt_ticket() local 1008 struct rxrpc_crypt iv; in rxkad_decrypt_response() local [all …]
|
| /linux/block/ |
| H A D | t10-pi.c | 136 struct bio_vec iv; in t10_pi_type1_prepare() local 143 bip_for_each_vec(iv, bip, iter) { in t10_pi_type1_prepare() 147 p = bvec_kmap_local(&iv); in t10_pi_type1_prepare() 148 for (j = 0; j < iv.bv_len; j += tuple_sz) { in t10_pi_type1_prepare() 188 struct bio_vec iv; in t10_pi_type1_complete() local 191 bip_for_each_vec(iv, bip, iter) { in t10_pi_type1_complete() 195 p = bvec_kmap_local(&iv); in t10_pi_type1_complete() 196 for (j = 0; j < iv.bv_len && intervals; j += tuple_sz) { in t10_pi_type1_complete() 310 struct bio_vec iv; in ext_pi_type1_prepare() local 317 bip_for_each_vec(iv, bip, iter) { in ext_pi_type1_prepare() [all …]
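t10_pi_type1_prepare()/_complete() walk the bio's integrity payload one bio_vec at a time, mapping each segment and visiting every protection-information tuple. A hedged sketch of that iteration with the per-tuple work stubbed out; tuple_sz is assumed to come from the device's integrity profile, and the header split between <linux/bio.h> and <linux/blk-integrity.h> varies by kernel version:

#include <linux/bio.h>
#include <linux/blk-integrity.h>
#include <linux/highmem.h>

/* Sketch: visit every protection-information tuple attached to a bio. */
static void example_walk_pi(struct bio *bio, unsigned int tuple_sz)
{
        struct bio_integrity_payload *bip = bio_integrity(bio);
        struct bvec_iter iter;
        struct bio_vec iv;

        if (!bip)
                return;

        bip_for_each_vec(iv, bip, iter) {
                void *p = bvec_kmap_local(&iv);
                unsigned int j;

                for (j = 0; j < iv.bv_len; j += tuple_sz) {
                        /* e.g. struct t10_pi_tuple *pi = p + j; remap tags */
                }
                kunmap_local(p);
        }
}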
|
| /linux/arch/powerpc/crypto/ |
| H A D | aes-spe-glue.c | 61 u32 bytes, u8 *iv); 63 u32 bytes, u8 *iv); 65 u32 bytes, u8 *iv); 67 u32 bytes, u8 *iv, u32 *key_twk); 69 u32 bytes, u8 *iv, u32 *key_twk); 237 walk.iv); in ppc_cbc_crypt() 241 walk.iv); in ppc_cbc_crypt() 277 ctx->key_enc, ctx->rounds, nbytes, walk.iv); in ppc_ctr_crypt() 306 walk.iv, twk); in ppc_xts_crypt() 310 walk.iv, twk); in ppc_xts_crypt() [all …]
|