Lines Matching refs:ctx_p

66 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_key_type() local
68 return ctx_p->key_type; in cc_key_type()
71 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size) in validate_keys_sizes() argument
73 switch (ctx_p->flow_mode) { in validate_keys_sizes()
78 if (ctx_p->cipher_mode != DRV_CIPHER_XTS) in validate_keys_sizes()
85 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || in validate_keys_sizes()
86 ctx_p->cipher_mode == DRV_CIPHER_ESSIV) in validate_keys_sizes()
107 static int validate_data_size(struct cc_cipher_ctx *ctx_p, in validate_data_size() argument
110 switch (ctx_p->flow_mode) { in validate_data_size()
112 switch (ctx_p->cipher_mode) { in validate_data_size()
136 switch (ctx_p->cipher_mode) { in validate_data_size()
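
The two validators above share one dispatch shape: an outer switch on the context's flow_mode, then checks against cipher_mode; validate_data_size() applies the same switch to request lengths instead of key lengths. A self-contained sketch of the key-size variant follows. The mode names and the XTS/ESSIV doubled-key rule come from the listing; the exact size sets are illustrative assumptions, not the driver's tables.

#include <linux/errno.h>

/* Illustrative sketch of the validate_keys_sizes() dispatch shape. */
enum flow_mode   { S_DIN_to_AES, S_DIN_to_DES, S_DIN_to_SM4 };
enum cipher_mode { DRV_CIPHER_ECB, DRV_CIPHER_CBC, DRV_CIPHER_CTR,
		   DRV_CIPHER_XTS, DRV_CIPHER_ESSIV };

struct ctx_sketch {
	enum flow_mode flow_mode;
	enum cipher_mode cipher_mode;
};

static int validate_keys_sizes_sketch(const struct ctx_sketch *ctx_p,
				      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case 16:
		case 24:
			/* single-key sizes: valid for anything but XTS */
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS)
				return 0;
			break;
		case 48:
		case 64:
			/* doubled sizes carry key1||key2: XTS/ESSIV only */
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == 24)	/* 3DES */
			return 0;
		break;
	case S_DIN_to_SM4:
		if (size == 16)
			return 0;
		break;
	}
	return -EINVAL;
}
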
156 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_init() local
164 dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p, in cc_cipher_init()
167 ctx_p->cipher_mode = cc_alg->cipher_mode; in cc_cipher_init()
168 ctx_p->flow_mode = cc_alg->flow_mode; in cc_cipher_init()
169 ctx_p->drvdata = cc_alg->drvdata; in cc_cipher_init()
171 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_init()
175 ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0); in cc_cipher_init()
176 if (IS_ERR(ctx_p->shash_tfm)) { in cc_cipher_init()
178 return PTR_ERR(ctx_p->shash_tfm); in cc_cipher_init()
183 ctx_p->fallback_tfm = in cc_cipher_init()
186 if (IS_ERR(ctx_p->fallback_tfm)) { in cc_cipher_init()
192 ctx_p->fallback_tfm = NULL; in cc_cipher_init()
194 fallback_req_size = crypto_skcipher_reqsize(ctx_p->fallback_tfm); in cc_cipher_init()
202 ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL); in cc_cipher_init()
203 if (!ctx_p->user.key) in cc_cipher_init()
207 ctx_p->user.key); in cc_cipher_init()
210 ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key, in cc_cipher_init()
213 if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) { in cc_cipher_init()
215 max_key_buf_size, ctx_p->user.key); in cc_cipher_init()
219 max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr); in cc_cipher_init()
224 kfree(ctx_p->user.key); in cc_cipher_init()
226 crypto_free_skcipher(ctx_p->fallback_tfm); in cc_cipher_init()
227 crypto_free_shash(ctx_p->shash_tfm); in cc_cipher_init()
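
cc_cipher_init() above acquires its resources in order: a sha256 shash for ESSIV key derivation, a software fallback skcipher, and a DMA-mapped key staging buffer, unwinding in reverse on failure. A hedged sketch of that chain (fallback tfm omitted for brevity); the field names follow the listing, but the function signature, struct layout, and error codes here are assumptions, not the driver's exact code.

#include <crypto/hash.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/slab.h>

static int cc_cipher_init_sketch(struct device *dev,
				 struct cc_cipher_ctx *ctx_p,
				 size_t max_key_buf_size, bool is_essiv)
{
	if (is_essiv) {
		/* ESSIV derives its tweak key as sha256(key), so keep a shash */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm))
			return PTR_ERR(ctx_p->shash_tfm);
	}

	ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		goto free_shash;

	/* The key buffer is handed to the engine by address, so it is
	 * DMA-mapped once here and only sync'd around each setkey. */
	ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr))
		goto free_key;

	return 0;

free_key:
	kfree(ctx_p->user.key);
free_shash:
	if (is_essiv)
		crypto_free_shash(ctx_p->shash_tfm);
	return -ENOMEM;
}
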
239 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_exit() local
240 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_exit()
245 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_exit()
247 crypto_free_shash(ctx_p->shash_tfm); in cc_cipher_exit()
248 ctx_p->shash_tfm = NULL; in cc_cipher_exit()
249 crypto_free_skcipher(ctx_p->fallback_tfm); in cc_cipher_exit()
250 ctx_p->fallback_tfm = NULL; in cc_cipher_exit()
254 dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size, in cc_cipher_exit()
257 &ctx_p->user.key_dma_addr); in cc_cipher_exit()
260 dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key); in cc_cipher_exit()
261 kfree_sensitive(ctx_p->user.key); in cc_cipher_exit()
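
The exit path above releases everything init acquired, in reverse order; the noteworthy choice is kfree_sensitive(), which zeroizes the staging buffer before freeing it because it held raw key material. A matching sketch under the same assumptions as the init sketch:

#include <crypto/skcipher.h>

static void cc_cipher_exit_sketch(struct device *dev,
				  struct cc_cipher_ctx *ctx_p,
				  size_t max_key_buf_size, bool is_essiv)
{
	if (is_essiv) {
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
		crypto_free_skcipher(ctx_p->fallback_tfm);
		ctx_p->fallback_tfm = NULL;
	}

	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	/* wipe before free: the buffer held key bytes */
	kfree_sensitive(ctx_p->user.key);
}
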
299 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_sethkey() local
300 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_sethkey()
304 ctx_p, crypto_tfm_alg_name(tfm), keylen); in cc_cipher_sethkey()
322 if (validate_keys_sizes(ctx_p, keylen)) { in cc_cipher_sethkey()
327 ctx_p->keylen = keylen; in cc_cipher_sethkey()
328 ctx_p->fallback_on = false; in cc_cipher_sethkey()
332 if (ctx_p->flow_mode == S_DIN_to_SM4) { in cc_cipher_sethkey()
337 ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1); in cc_cipher_sethkey()
338 if (ctx_p->hw.key1_slot == END_OF_KEYS) { in cc_cipher_sethkey()
344 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || in cc_cipher_sethkey()
345 ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_sethkey()
352 ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2); in cc_cipher_sethkey()
353 if (ctx_p->hw.key2_slot == END_OF_KEYS) { in cc_cipher_sethkey()
360 ctx_p->key_type = CC_HW_PROTECTED_KEY; in cc_cipher_sethkey()
362 ctx_p->hw.key1_slot, ctx_p->hw.key2_slot); in cc_cipher_sethkey()
366 if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) { in cc_cipher_sethkey()
371 if (ctx_p->cipher_mode != DRV_CIPHER_CBC && in cc_cipher_sethkey()
372 ctx_p->cipher_mode != DRV_CIPHER_CTR) { in cc_cipher_sethkey()
377 ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1); in cc_cipher_sethkey()
378 if (ctx_p->flow_mode == S_DIN_to_AES) in cc_cipher_sethkey()
379 ctx_p->cpp.alg = CC_CPP_AES; in cc_cipher_sethkey()
381 ctx_p->cpp.alg = CC_CPP_SM4; in cc_cipher_sethkey()
382 ctx_p->key_type = CC_POLICY_PROTECTED_KEY; in cc_cipher_sethkey()
384 ctx_p->cpp.alg, ctx_p->cpp.slot); in cc_cipher_sethkey()
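
cc_cipher_sethkey() never copies key bytes: the setkey payload is a token naming preloaded hardware slots (CC_HW_PROTECTED_KEY), or, on CC713+ hardware with CBC/CTR, a CPP policy slot (CC_POLICY_PROTECTED_KEY). A toy sketch of the slot-resolution shape; the token struct and slot table below are hypothetical stand-ins for the driver's hki parsing and cc_slot_to_hw_key().

#include <linux/errno.h>
#include <linux/kernel.h>

#define END_OF_KEYS	(-1)

struct hw_key_token {		/* hypothetical token layout */
	int hw_key1;
	int hw_key2;		/* only meaningful for XTS/ESSIV */
};

static int slot_to_hw_key(int token)	/* hypothetical mapping */
{
	static const int slots[] = { 0, 1, 2, 3 };

	if (token < 0 || token >= (int)ARRAY_SIZE(slots))
		return END_OF_KEYS;
	return slots[token];
}

static int resolve_hw_key_slots(const struct hw_key_token *hki,
				bool two_keys, int *slot1, int *slot2)
{
	*slot1 = slot_to_hw_key(hki->hw_key1);
	if (*slot1 == END_OF_KEYS)
		return -EINVAL;

	if (two_keys) {		/* XTS and ESSIV name a second slot */
		*slot2 = slot_to_hw_key(hki->hw_key2);
		if (*slot2 == END_OF_KEYS)
			return -EINVAL;
	}
	return 0;
}
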
399 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_setkey() local
400 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_setkey()
407 ctx_p, crypto_tfm_alg_name(tfm), keylen); in cc_cipher_setkey()
412 if (validate_keys_sizes(ctx_p, keylen)) { in cc_cipher_setkey()
417 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_setkey()
423 if (likely(ctx_p->fallback_tfm)) { in cc_cipher_setkey()
424 ctx_p->fallback_on = true; in cc_cipher_setkey()
425 crypto_skcipher_clear_flags(ctx_p->fallback_tfm, in cc_cipher_setkey()
427 crypto_skcipher_clear_flags(ctx_p->fallback_tfm, flags); in cc_cipher_setkey()
428 return crypto_skcipher_setkey(ctx_p->fallback_tfm, key, keylen); in cc_cipher_setkey()
439 ctx_p->fallback_on = false; in cc_cipher_setkey()
440 ctx_p->key_type = CC_UNPROTECTED_KEY; in cc_cipher_setkey()
447 if (ctx_p->flow_mode == S_DIN_to_DES) { in cc_cipher_setkey()
456 if (ctx_p->cipher_mode == DRV_CIPHER_XTS && in cc_cipher_setkey()
463 dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr, in cc_cipher_setkey()
466 memcpy(ctx_p->user.key, key, keylen); in cc_cipher_setkey()
468 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_setkey()
472 err = crypto_shash_tfm_digest(ctx_p->shash_tfm, in cc_cipher_setkey()
473 ctx_p->user.key, keylen, in cc_cipher_setkey()
474 ctx_p->user.key + keylen); in cc_cipher_setkey()
482 dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr, in cc_cipher_setkey()
484 ctx_p->keylen = keylen; in cc_cipher_setkey()
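
Two details in cc_cipher_setkey() deserve a closer look: keys the engine cannot handle are re-keyed into the software fallback (fallback_on = true), and ESSIV derives its second key as sha256 of the first, written directly behind the user key in the DMA-mapped buffer. A hedged sketch of the ESSIV branch, bracketing the CPU writes with DMA syncs as the listing shows; the function signature is an assumption.

#include <crypto/hash.h>
#include <linux/dma-mapping.h>
#include <linux/string.h>

static int cc_set_essiv_key_sketch(struct device *dev,
				   struct cc_cipher_ctx *ctx_p,
				   const u8 *key, unsigned int keylen,
				   size_t max_key_buf_size)
{
	int err;

	/* hand the buffer to the CPU for the duration of the update */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);
	memcpy(ctx_p->user.key, key, keylen);

	/* key2 = sha256(key1), stored right after key1 in the buffer */
	err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
				      ctx_p->user.key, keylen,
				      ctx_p->user.key + keylen);
	if (err)
		return err;

	/* give the buffer back to the device */
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;
	return 0;
}
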
490 static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p) in cc_out_setup_mode() argument
492 switch (ctx_p->flow_mode) { in cc_out_setup_mode()
500 return ctx_p->flow_mode; in cc_out_setup_mode()
509 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_readiv_desc() local
510 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_readiv_desc()
511 int cipher_mode = ctx_p->cipher_mode; in cc_setup_readiv_desc()
512 int flow_mode = cc_out_setup_mode(ctx_p); in cc_setup_readiv_desc()
516 if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) in cc_setup_readiv_desc()
538 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_readiv_desc()
551 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_readiv_desc()
566 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_state_desc() local
567 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_state_desc()
568 int cipher_mode = ctx_p->cipher_mode; in cc_setup_state_desc()
569 int flow_mode = ctx_p->flow_mode; in cc_setup_state_desc()
610 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_xex_state_desc() local
611 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_xex_state_desc()
612 int cipher_mode = ctx_p->cipher_mode; in cc_setup_xex_state_desc()
613 int flow_mode = ctx_p->flow_mode; in cc_setup_xex_state_desc()
615 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; in cc_setup_xex_state_desc()
616 unsigned int key_len = (ctx_p->keylen / 2); in cc_setup_xex_state_desc()
640 ctx_p->hw.key2_slot); in cc_setup_xex_state_desc()
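
cc_setup_xex_state_desc() above halves keylen: XTS and ESSIV keep key1||key2 concatenated in the one DMA buffer, so the tweak-key half is addressed by offset. In sketch form (field names from the listing, helper name hypothetical):

static void xts_key_halves_sketch(const struct cc_cipher_ctx *ctx_p,
				  dma_addr_t *key1_dma, dma_addr_t *key2_dma)
{
	unsigned int key_len = ctx_p->keylen / 2;

	*key1_dma = ctx_p->user.key_dma_addr;		 /* data key  */
	*key2_dma = ctx_p->user.key_dma_addr + key_len;	 /* tweak key */
}
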
668 static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p) in cc_out_flow_mode() argument
670 switch (ctx_p->flow_mode) { in cc_out_flow_mode()
678 return ctx_p->flow_mode; in cc_out_flow_mode()
687 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_key_desc() local
688 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_key_desc()
689 int cipher_mode = ctx_p->cipher_mode; in cc_setup_key_desc()
690 int flow_mode = ctx_p->flow_mode; in cc_setup_key_desc()
692 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; in cc_setup_key_desc()
693 unsigned int key_len = ctx_p->keylen; in cc_setup_key_desc()
710 set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot); in cc_setup_key_desc()
711 flow_mode = cc_out_flow_mode(ctx_p); in cc_setup_key_desc()
716 ctx_p->hw.key1_slot); in cc_setup_key_desc()
749 ctx_p->hw.key1_slot); in cc_setup_key_desc()
770 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_mlli_desc() local
771 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_mlli_desc()
778 ctx_p->drvdata->mlli_sram_addr); in cc_setup_mlli_desc()
784 ctx_p->drvdata->mlli_sram_addr, in cc_setup_mlli_desc()
797 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_flow_desc() local
798 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_flow_desc()
799 unsigned int flow_mode = cc_out_flow_mode(ctx_p); in cc_setup_flow_desc()
800 bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY || in cc_setup_flow_desc()
801 ctx_p->cipher_mode == DRV_CIPHER_ECB); in cc_setup_flow_desc()
815 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_flow_desc()
822 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
826 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
827 ctx_p->drvdata->mlli_sram_addr); in cc_setup_flow_desc()
829 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
834 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
835 ctx_p->drvdata->mlli_sram_addr + in cc_setup_flow_desc()
838 (ctx_p->drvdata->mlli_sram_addr + in cc_setup_flow_desc()
845 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_flow_desc()
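
Every cc_setup_*() helper above follows one convention: append hardware descriptors into a caller-owned array while bumping *seq_size, with set_queue_last_ind() tagging whichever descriptor closes the sequence. A toy sketch of that convention; struct hw_desc_sketch and its fields are hypothetical stand-ins for the driver's struct cc_hw_desc and its setters, and only the sequencing pattern is the point.

#include <linux/types.h>

struct hw_desc_sketch {
	unsigned int opcode;
	u64 din;
	u64 dout;
	bool queue_last;	/* stands in for set_queue_last_ind() */
};

static void push_desc(struct hw_desc_sketch *desc, unsigned int *seq_size,
		      unsigned int opcode, u64 din, u64 dout)
{
	struct hw_desc_sketch *d = &desc[*seq_size];

	d->opcode = opcode;
	d->din = din;
	d->dout = dout;
	d->queue_last = false;
	(*seq_size)++;
}

static void mark_queue_last(struct hw_desc_sketch *desc,
			    unsigned int seq_size)
{
	/* the driver tags the final descriptor so the queue knows where
	 * the request's sequence ends */
	if (seq_size)
		desc[seq_size - 1].queue_last = true;
}
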
882 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_process() local
883 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_process()
896 if (validate_data_size(ctx_p, nbytes)) { in cc_cipher_process()
907 if (ctx_p->fallback_on) { in cc_cipher_process()
911 skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm); in cc_cipher_process()
932 if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) { in cc_cipher_process()
934 cc_req.cpp.alg = ctx_p->cpp.alg; in cc_cipher_process()
935 cc_req.cpp.slot = ctx_p->cpp.slot; in cc_cipher_process()
943 rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, in cc_cipher_process()
967 rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len, in cc_cipher_process()
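
When fallback_on is set (from the setkey path above), cc_cipher_process() re-targets the request at the software skcipher instead of mapping buffers and queueing descriptors. A hedged sketch of that dispatch using the standard skcipher sub-request idiom; the subreq placement and helper signature here are assumptions, and the subreq storage is presumed reserved via crypto_skcipher_reqsize() at init time.

#include <crypto/skcipher.h>

static int cc_fallback_sketch(struct skcipher_request *req,
			      struct skcipher_request *subreq,
			      struct cc_cipher_ctx *ctx_p, bool encrypt)
{
	/* point the sub-request at the software tfm and forward the
	 * original src/dst/iv unchanged */
	skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, req->iv);

	return encrypt ? crypto_skcipher_encrypt(subreq)
		       : crypto_skcipher_decrypt(subreq);
}
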