Lines matching refs: ctx_p (drivers/crypto/ccree/cc_cipher.c)
66 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_key_type() local
68 return ctx_p->key_type; in cc_key_type()
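
Note: every function in this listing opens with the same pattern: the per-transform driver state (struct cc_cipher_ctx) lives in the crypto_tfm private area and is recovered with crypto_tfm_ctx(). The full accessor, reconstructed from the two matched lines above (the return type is assumed from the driver's conventions):

    static enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
    {
            /* per-tfm context stored in the tfm's private area */
            struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

            return ctx_p->key_type;
    }
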
71 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size) in validate_keys_sizes() argument
73 switch (ctx_p->flow_mode) { in validate_keys_sizes()
78 if (ctx_p->cipher_mode != DRV_CIPHER_XTS) in validate_keys_sizes()
85 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || in validate_keys_sizes()
86 ctx_p->cipher_mode == DRV_CIPHER_ESSIV) in validate_keys_sizes()
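
Note: the matched lines show the shape of key validation: a switch on flow_mode, with XTS rejecting single-length AES keys and XTS/ESSIV accepting double-length ones. A sketch of the AES branch, with the size constants assumed from the driver's usual naming:

    switch (ctx_p->flow_mode) {
    case S_DIN_to_AES:
            switch (size) {
            case CC_AES_128_BIT_KEY_SIZE:
            case CC_AES_192_BIT_KEY_SIZE:
                    /* XTS needs a double-length key; reject singles */
                    if (ctx_p->cipher_mode != DRV_CIPHER_XTS)
                            return 0;
                    break;
            case CC_AES_256_BIT_KEY_SIZE:
                    return 0;
            case (CC_AES_192_BIT_KEY_SIZE * 2):
            case (CC_AES_256_BIT_KEY_SIZE * 2):
                    /* two concatenated keys: cipher key + tweak/ESSIV key */
                    if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
                        ctx_p->cipher_mode == DRV_CIPHER_ESSIV)
                            return 0;
                    break;
            default:
                    break;
            }
            break;
    default:
            break;
    }
    return -EINVAL;
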
107 static int validate_data_size(struct cc_cipher_ctx *ctx_p, in validate_data_size() argument
110 switch (ctx_p->flow_mode) { in validate_data_size()
112 switch (ctx_p->cipher_mode) { in validate_data_size()
136 switch (ctx_p->cipher_mode) { in validate_data_size()
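
Note: validate_data_size() applies the same two-level switch to the request length: stream-like modes take any size, block modes require block alignment, and ciphertext-stealing modes need at least one block. A hedged sketch of the AES branch (exact case list assumed):

    switch (ctx_p->flow_mode) {
    case S_DIN_to_AES:
            switch (ctx_p->cipher_mode) {
            case DRV_CIPHER_XTS:
                    /* ciphertext stealing: anything >= one block */
                    if (size >= AES_BLOCK_SIZE)
                            return 0;
                    break;
            case DRV_CIPHER_CTR:
                    return 0;       /* stream-like: any length */
            case DRV_CIPHER_ECB:
            case DRV_CIPHER_CBC:
            case DRV_CIPHER_ESSIV:
                    if (IS_ALIGNED(size, AES_BLOCK_SIZE))
                            return 0;
                    break;
            default:
                    break;
            }
            break;
    default:
            break;
    }
    return -EINVAL;
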
156 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_init() local
164 dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p, in cc_cipher_init()
167 ctx_p->cipher_mode = cc_alg->cipher_mode; in cc_cipher_init()
168 ctx_p->flow_mode = cc_alg->flow_mode; in cc_cipher_init()
169 ctx_p->drvdata = cc_alg->drvdata; in cc_cipher_init()
171 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_init()
175 ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0); in cc_cipher_init()
176 if (IS_ERR(ctx_p->shash_tfm)) { in cc_cipher_init()
178 return PTR_ERR(ctx_p->shash_tfm); in cc_cipher_init()
183 ctx_p->fallback_tfm = in cc_cipher_init()
186 if (IS_ERR(ctx_p->fallback_tfm)) { in cc_cipher_init()
192 ctx_p->fallback_tfm = NULL; in cc_cipher_init()
194 fallback_req_size = crypto_skcipher_reqsize(ctx_p->fallback_tfm); in cc_cipher_init()
202 ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL); in cc_cipher_init()
203 if (!ctx_p->user.key) in cc_cipher_init()
207 ctx_p->user.key); in cc_cipher_init()
210 ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key, in cc_cipher_init()
213 if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) { in cc_cipher_init()
215 max_key_buf_size, ctx_p->user.key); in cc_cipher_init()
219 max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr); in cc_cipher_init()
224 kfree(ctx_p->user.key); in cc_cipher_init()
226 crypto_free_skcipher(ctx_p->fallback_tfm); in cc_cipher_init()
227 crypto_free_shash(ctx_p->shash_tfm); in cc_cipher_init()
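
Note: the init lines above outline a key staging buffer that is allocated once and DMA-mapped for the lifetime of the tfm, with a reverse-order unwind on failure. A condensed sketch of that allocate/map/unwind sequence (label names assumed):

            /* zeroed staging buffer big enough for the largest key */
            ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL);
            if (!ctx_p->user.key)
                    goto free_fallback;

            /* map once; setkey later only syncs, never remaps */
            ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
                                                      max_key_buf_size,
                                                      DMA_TO_DEVICE);
            if (dma_mapping_error(dev, ctx_p->user.key_dma_addr))
                    goto free_key;
            return 0;

    free_key:
            kfree(ctx_p->user.key);
    free_fallback:
            crypto_free_skcipher(ctx_p->fallback_tfm);
            crypto_free_shash(ctx_p->shash_tfm);
            return -ENOMEM;
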
239 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_exit() local
240 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_exit()
245 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_exit()
247 crypto_free_shash(ctx_p->shash_tfm); in cc_cipher_exit()
248 ctx_p->shash_tfm = NULL; in cc_cipher_exit()
249 crypto_free_skcipher(ctx_p->fallback_tfm); in cc_cipher_exit()
250 ctx_p->fallback_tfm = NULL; in cc_cipher_exit()
254 dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size, in cc_cipher_exit()
257 &ctx_p->user.key_dma_addr); in cc_cipher_exit()
260 dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key); in cc_cipher_exit()
261 kfree_sensitive(ctx_p->user.key); in cc_cipher_exit()
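
Note: teardown mirrors init in reverse, and the key buffer is released with kfree_sensitive() so key material is zeroed before the memory returns to the allocator. A sketch assembled from the matched lines:

            if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                    /* ESSIV allocated helper tfms in init; drop them first */
                    crypto_free_shash(ctx_p->shash_tfm);
                    ctx_p->shash_tfm = NULL;
                    crypto_free_skcipher(ctx_p->fallback_tfm);
                    ctx_p->fallback_tfm = NULL;
            }

            dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
                             DMA_TO_DEVICE);
            /* zeroes the buffer before freeing: it held key material */
            kfree_sensitive(ctx_p->user.key);
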
305 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_sethkey() local
306 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_sethkey()
310 ctx_p, crypto_tfm_alg_name(tfm), keylen); in cc_cipher_sethkey()
328 if (validate_keys_sizes(ctx_p, keylen)) { in cc_cipher_sethkey()
333 ctx_p->keylen = keylen; in cc_cipher_sethkey()
334 ctx_p->fallback_on = false; in cc_cipher_sethkey()
338 if (ctx_p->flow_mode == S_DIN_to_SM4) { in cc_cipher_sethkey()
343 ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1); in cc_cipher_sethkey()
344 if (ctx_p->hw.key1_slot == END_OF_KEYS) { in cc_cipher_sethkey()
350 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || in cc_cipher_sethkey()
351 ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_sethkey()
358 ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2); in cc_cipher_sethkey()
359 if (ctx_p->hw.key2_slot == END_OF_KEYS) { in cc_cipher_sethkey()
366 ctx_p->key_type = CC_HW_PROTECTED_KEY; in cc_cipher_sethkey()
368 ctx_p->hw.key1_slot, ctx_p->hw.key2_slot); in cc_cipher_sethkey()
372 if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) { in cc_cipher_sethkey()
377 if (ctx_p->cipher_mode != DRV_CIPHER_CBC && in cc_cipher_sethkey()
378 ctx_p->cipher_mode != DRV_CIPHER_CTR) { in cc_cipher_sethkey()
383 ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1); in cc_cipher_sethkey()
384 if (ctx_p->flow_mode == S_DIN_to_AES) in cc_cipher_sethkey()
385 ctx_p->cpp.alg = CC_CPP_AES; in cc_cipher_sethkey()
387 ctx_p->cpp.alg = CC_CPP_SM4; in cc_cipher_sethkey()
388 ctx_p->key_type = CC_POLICY_PROTECTED_KEY; in cc_cipher_sethkey()
390 ctx_p->cpp.alg, ctx_p->cpp.slot); in cc_cipher_sethkey()
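
Note: cc_cipher_sethkey() never receives key material; the "key" is a token naming slots inside the engine, translated by cc_slot_to_hw_key()/cc_slot_to_cpp_key(). The second branch (visible at the CC_HW_REV_713 check) registers policy-protected CPP keys, which require that hardware revision and CBC or CTR mode. A hedged sketch of the HW-protected branch, assuming the token layout matches the driver's cc_hkey_info:

            struct cc_hkey_info hki;

            /* the token carries slot numbers, not secret bytes */
            memcpy(&hki, key, keylen);

            ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
            if (ctx_p->hw.key1_slot == END_OF_KEYS)
                    return -EINVAL;

            /* XTS and ESSIV need a second key, hence a second slot */
            if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
                ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                    ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
                    if (ctx_p->hw.key2_slot == END_OF_KEYS)
                            return -EINVAL;
            }

            ctx_p->key_type = CC_HW_PROTECTED_KEY;
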
405 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_setkey() local
406 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_setkey()
413 ctx_p, crypto_tfm_alg_name(tfm), keylen); in cc_cipher_setkey()
418 if (validate_keys_sizes(ctx_p, keylen)) { in cc_cipher_setkey()
423 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_setkey()
429 if (likely(ctx_p->fallback_tfm)) { in cc_cipher_setkey()
430 ctx_p->fallback_on = true; in cc_cipher_setkey()
431 crypto_skcipher_clear_flags(ctx_p->fallback_tfm, in cc_cipher_setkey()
433 crypto_skcipher_clear_flags(ctx_p->fallback_tfm, flags); in cc_cipher_setkey()
434 return crypto_skcipher_setkey(ctx_p->fallback_tfm, key, keylen); in cc_cipher_setkey()
445 ctx_p->fallback_on = false; in cc_cipher_setkey()
446 ctx_p->key_type = CC_UNPROTECTED_KEY; in cc_cipher_setkey()
453 if (ctx_p->flow_mode == S_DIN_to_DES) { in cc_cipher_setkey()
462 if (ctx_p->cipher_mode == DRV_CIPHER_XTS && in cc_cipher_setkey()
469 dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr, in cc_cipher_setkey()
472 memcpy(ctx_p->user.key, key, keylen); in cc_cipher_setkey()
474 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_setkey()
478 err = crypto_shash_tfm_digest(ctx_p->shash_tfm, in cc_cipher_setkey()
479 ctx_p->user.key, keylen, in cc_cipher_setkey()
480 ctx_p->user.key + keylen); in cc_cipher_setkey()
488 dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr, in cc_cipher_setkey()
490 ctx_p->keylen = keylen; in cc_cipher_setkey()
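
Note: for normal keys the buffer mapped at init time is reused: sync it back to the CPU, copy the key in, optionally append the ESSIV second key (sha256 of the first), then hand it back to the device. A sketch of that sequence from the matched lines:

            dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
                                    max_key_buf_size, DMA_TO_DEVICE);
            memcpy(ctx_p->user.key, key, keylen);

            if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                    /* key2 = sha256(key1), stored right after key1 */
                    err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
                                                  ctx_p->user.key, keylen,
                                                  ctx_p->user.key + keylen);
                    if (err)
                            return err;
            }

            dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
                                       max_key_buf_size, DMA_TO_DEVICE);
            ctx_p->keylen = keylen;
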
496 static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p) in cc_out_setup_mode() argument
498 switch (ctx_p->flow_mode) { in cc_out_setup_mode()
506 return ctx_p->flow_mode; in cc_out_setup_mode()
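
Note: cc_out_setup_mode() (and cc_out_flow_mode() further down, which has the same shape) translates the inbound flow constant into its outbound counterpart so read-back descriptors route engine output to DOUT. A sketch, with the constant names assumed from the ccree queue definitions:

    static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
    {
            switch (ctx_p->flow_mode) {
            case S_DIN_to_AES:
                    return S_AES_to_DOUT;
            case S_DIN_to_DES:
                    return S_DES_to_DOUT;
            case S_DIN_to_SM4:
                    return S_SM4_to_DOUT;
            default:
                    return ctx_p->flow_mode;
            }
    }
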
515 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_readiv_desc() local
516 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_readiv_desc()
517 int cipher_mode = ctx_p->cipher_mode; in cc_setup_readiv_desc()
518 int flow_mode = cc_out_setup_mode(ctx_p); in cc_setup_readiv_desc()
522 if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) in cc_setup_readiv_desc()
544 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_readiv_desc()
557 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_readiv_desc()
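
Note: the two set_queue_last_ind() hits show the read-IV descriptor closes the sequence on both of its branches: it flushes the updated cipher state back to the request's IV buffer so a chained request sees the next IV. A hedged sketch of one such descriptor using the ccree hw-descriptor helpers (setup-mode choice and fields are assumptions):

            hw_desc_init(&desc[*seq_size]);
            set_cipher_mode(&desc[*seq_size], cipher_mode);
            /* write engine STATE0 (the running IV/counter) back to memory */
            set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
            set_dout_dlli(&desc[*seq_size], req_ctx->gen_ctx.iv_dma_addr,
                          ivsize, NS_BIT, 1);
            set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
            set_flow_mode(&desc[*seq_size], flow_mode);
            (*seq_size)++;
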
572 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_state_desc() local
573 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_state_desc()
574 int cipher_mode = ctx_p->cipher_mode; in cc_setup_state_desc()
575 int flow_mode = ctx_p->flow_mode; in cc_setup_state_desc()
616 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_xex_state_desc() local
617 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_xex_state_desc()
618 int cipher_mode = ctx_p->cipher_mode; in cc_setup_xex_state_desc()
619 int flow_mode = ctx_p->flow_mode; in cc_setup_xex_state_desc()
621 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; in cc_setup_xex_state_desc()
622 unsigned int key_len = (ctx_p->keylen / 2); in cc_setup_xex_state_desc()
646 ctx_p->hw.key2_slot); in cc_setup_xex_state_desc()
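
Note: for XTS/ESSIV the stored key holds two halves, so key_len is ctx_p->keylen / 2 and the tweak half is loaded either from its hardware slot or from the upper half of the mapped buffer. A hedged sketch of that descriptor:

            unsigned int key_len = ctx_p->keylen / 2;

            hw_desc_init(&desc[*seq_size]);
            set_cipher_mode(&desc[*seq_size], cipher_mode);
            if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
                    /* tweak key never leaves the engine */
                    set_hw_crypto_key(&desc[*seq_size], ctx_p->hw.key2_slot);
            } else {
                    /* tweak key is the second half of the staged buffer */
                    set_din_type(&desc[*seq_size], DMA_DLLI,
                                 key_dma_addr + key_len, key_len, NS_BIT);
            }
            set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
            set_flow_mode(&desc[*seq_size], flow_mode);
            (*seq_size)++;
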
674 static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p) in cc_out_flow_mode() argument
676 switch (ctx_p->flow_mode) { in cc_out_flow_mode()
684 return ctx_p->flow_mode; in cc_out_flow_mode()
693 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_key_desc() local
694 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_key_desc()
695 int cipher_mode = ctx_p->cipher_mode; in cc_setup_key_desc()
696 int flow_mode = ctx_p->flow_mode; in cc_setup_key_desc()
698 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; in cc_setup_key_desc()
699 unsigned int key_len = ctx_p->keylen; in cc_setup_key_desc()
716 set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot); in cc_setup_key_desc()
717 flow_mode = cc_out_flow_mode(ctx_p); in cc_setup_key_desc()
722 ctx_p->hw.key1_slot); in cc_setup_key_desc()
755 ctx_p->hw.key1_slot); in cc_setup_key_desc()
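
Note: cc_setup_key_desc() picks one of three key sources, matching the three key_type values armed by the two setkey paths above. A hedged sketch of the dispatch:

            switch (cc_key_type(tfm)) {
            case CC_POLICY_PROTECTED_KEY:
                    /* CPP key: referenced by slot; output flow flips to DOUT */
                    hw_desc_init(&desc[*seq_size]);
                    set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
                    flow_mode = cc_out_flow_mode(ctx_p);
                    break;
            case CC_HW_PROTECTED_KEY:
                    /* HW key: loaded from its slot inside the engine */
                    hw_desc_init(&desc[*seq_size]);
                    set_hw_crypto_key(&desc[*seq_size], ctx_p->hw.key1_slot);
                    set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
                    break;
            case CC_UNPROTECTED_KEY:
                    /* plain key: DMA from the buffer staged in setkey */
                    hw_desc_init(&desc[*seq_size]);
                    set_din_type(&desc[*seq_size], DMA_DLLI,
                                 key_dma_addr, key_len, NS_BIT);
                    set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
                    break;
            }
            set_flow_mode(&desc[*seq_size], flow_mode);
            (*seq_size)++;
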
776 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_mlli_desc() local
777 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_mlli_desc()
784 ctx_p->drvdata->mlli_sram_addr); in cc_setup_mlli_desc()
790 ctx_p->drvdata->mlli_sram_addr, in cc_setup_mlli_desc()
803 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_flow_desc() local
804 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_flow_desc()
805 unsigned int flow_mode = cc_out_flow_mode(ctx_p); in cc_setup_flow_desc()
806 bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY || in cc_setup_flow_desc()
807 ctx_p->cipher_mode == DRV_CIPHER_ECB); in cc_setup_flow_desc()
821 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_flow_desc()
828 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
832 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
833 ctx_p->drvdata->mlli_sram_addr); in cc_setup_flow_desc()
835 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
840 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
841 ctx_p->drvdata->mlli_sram_addr + in cc_setup_flow_desc()
844 (ctx_p->drvdata->mlli_sram_addr + in cc_setup_flow_desc()
851 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_flow_desc()
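
Note: the data-movement descriptors come in two shapes: DLLI for physically contiguous buffers, and MLLI when source or destination is a scatterlist, in which case the gather tables are first copied into SRAM at mlli_sram_addr (the cc_setup_mlli_desc() hits above). A hedged sketch of the simple DLLI case (parameter names assumed):

            /* contiguous source and destination: one in/out descriptor */
            hw_desc_init(&desc[*seq_size]);
            set_din_type(&desc[*seq_size], DMA_DLLI,
                         sg_dma_address(src), nbytes, NS_BIT);
            set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
                          nbytes, NS_BIT, last_desc);
            if (last_desc)
                    /* protected-key and ECB flows end here: no read-IV pass */
                    set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
            set_flow_mode(&desc[*seq_size], flow_mode);
            (*seq_size)++;
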
888 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_process() local
889 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_process()
902 if (validate_data_size(ctx_p, nbytes)) { in cc_cipher_process()
913 if (ctx_p->fallback_on) { in cc_cipher_process()
917 skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm); in cc_cipher_process()
938 if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) { in cc_cipher_process()
940 cc_req.cpp.alg = ctx_p->cpp.alg; in cc_cipher_process()
941 cc_req.cpp.slot = ctx_p->cpp.slot; in cc_cipher_process()
949 rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, in cc_cipher_process()
973 rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len, in cc_cipher_process()
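
Note: cc_cipher_process() ties the listing together: validate the length, divert to the software fallback if setkey armed it, otherwise copy the CPP slot/alg into the cc_req for policy-protected keys, map the request, build the descriptor sequence, and queue it with cc_send_request(). The fallback detour, reconstructed from the matched lines (the subrequest is assumed to live in the request context):

            if (ctx_p->fallback_on) {
                    struct skcipher_request *subreq = skcipher_request_ctx(req);

                    /* reuse the caller's request, retargeted at the sw tfm */
                    *subreq = *req;
                    skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm);

                    return direction == DRV_CRYPTO_DIRECTION_ENCRYPT ?
                           crypto_skcipher_encrypt(subreq) :
                           crypto_skcipher_decrypt(subreq);
            }
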