Lines matching refs: ctx_p
Each entry below gives the source line number, the matching line, and the enclosing function; "local" marks the line declaring ctx_p inside that function, "argument" marks it as a parameter.
53 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_is_hw_key() local
55 return ctx_p->hw_key; in cc_is_hw_key()
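
The two matches at 53 and 55 are the entire body of this helper; a minimal reconstruction, assuming the usual static-inline predicate shape:

    static inline bool cc_is_hw_key(struct crypto_tfm *tfm)
    {
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

        /* true when the key lives in a protected HW slot rather than memory */
        return ctx_p->hw_key;
    }
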
58 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size) in validate_keys_sizes() argument
60 switch (ctx_p->flow_mode) { in validate_keys_sizes()
65 if (ctx_p->cipher_mode != DRV_CIPHER_XTS && in validate_keys_sizes()
66 ctx_p->cipher_mode != DRV_CIPHER_ESSIV && in validate_keys_sizes()
67 ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER) in validate_keys_sizes()
74 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || in validate_keys_sizes()
75 ctx_p->cipher_mode == DRV_CIPHER_ESSIV || in validate_keys_sizes()
76 ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) in validate_keys_sizes()
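
Lines 58-76 show validate_keys_sizes() switching on the flow mode and treating XTS, ESSIV and BitLocker as the two-key modes. A sketch of how the matched checks plausibly fit together; the key-size constants and the DES leg are assumptions, not part of the match:

    static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
    {
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
            switch (size) {
            case CC_AES_128_BIT_KEY_SIZE:
            case CC_AES_192_BIT_KEY_SIZE:
                /* single-length keys: valid only for one-key modes */
                if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
                    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
                    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
                    return 0;
                break;
            case CC_AES_256_BIT_KEY_SIZE:
                return 0;
            case (CC_AES_192_BIT_KEY_SIZE * 2):
            case (CC_AES_256_BIT_KEY_SIZE * 2):
                /* double-length keys: only the two-key modes */
                if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
                    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
                    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
                    return 0;
                break;
            default:
                break;
            }
            break;
        case S_DIN_to_DES:
            if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
                return 0;
            break;
        default:
            break;
        }
        return -EINVAL;
    }
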
92 static int validate_data_size(struct cc_cipher_ctx *ctx_p, in validate_data_size() argument
95 switch (ctx_p->flow_mode) { in validate_data_size()
97 switch (ctx_p->cipher_mode) { in validate_data_size()
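
Lines 92-97 only reveal the nesting: validate_data_size() dispatches on flow mode, then on cipher mode. An illustrative skeleton of what each leg typically enforces; the per-mode rules shown here are assumptions:

    static int validate_data_size(struct cc_cipher_ctx *ctx_p,
                                  unsigned int size)
    {
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
            switch (ctx_p->cipher_mode) {
            case DRV_CIPHER_CTR:
                return 0;                       /* stream-like: any length */
            case DRV_CIPHER_XTS:
            case DRV_CIPHER_ESSIV:
            case DRV_CIPHER_BITLOCKER:
                if (size >= AES_BLOCK_SIZE)     /* at least one full block */
                    return 0;
                break;
            default:                            /* ECB/CBC: whole blocks only */
                if (IS_ALIGNED(size, AES_BLOCK_SIZE))
                    return 0;
                break;
            }
            break;
        case S_DIN_to_DES:
            if (IS_ALIGNED(size, DES3_EDE_BLOCK_SIZE))
                return 0;
            break;
        default:
            break;
        }
        return -EINVAL;
    }
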
133 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_init() local
141 dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p, in cc_cipher_init()
147 ctx_p->cipher_mode = cc_alg->cipher_mode; in cc_cipher_init()
148 ctx_p->flow_mode = cc_alg->flow_mode; in cc_cipher_init()
149 ctx_p->drvdata = cc_alg->drvdata; in cc_cipher_init()
152 ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL); in cc_cipher_init()
153 if (!ctx_p->user.key) in cc_cipher_init()
157 ctx_p->user.key); in cc_cipher_init()
160 ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key, in cc_cipher_init()
163 if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) { in cc_cipher_init()
165 max_key_buf_size, ctx_p->user.key); in cc_cipher_init()
169 max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr); in cc_cipher_init()
171 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_init()
173 ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0); in cc_cipher_init()
174 if (IS_ERR(ctx_p->shash_tfm)) { in cc_cipher_init()
176 return PTR_ERR(ctx_p->shash_tfm); in cc_cipher_init()
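
Lines 133-176 outline the init path: copy the per-algorithm modes into the context, allocate a key staging buffer, DMA-map it once for the lifetime of the transform, and grab a software sha256 for ESSIV key derivation. A condensed sketch, with the error-path logging dropped and the alg bookkeeping assumed:

    static int cc_cipher_init(struct crypto_tfm *tfm)
    {
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct cc_crypto_alg *cc_alg =
            container_of(tfm->__crt_alg, struct cc_crypto_alg,
                         skcipher_alg.base);
        struct device *dev = drvdata_to_dev(cc_alg->drvdata);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

        ctx_p->cipher_mode = cc_alg->cipher_mode;
        ctx_p->flow_mode = cc_alg->flow_mode;
        ctx_p->drvdata = cc_alg->drvdata;

        /* Staging buffer for key material, sized for the largest key */
        ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
        if (!ctx_p->user.key)
            return -ENOMEM;

        /* Mapped once here; setkey only syncs, it never remaps */
        ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
                                                  max_key_buf_size,
                                                  DMA_TO_DEVICE);
        if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
            kfree(ctx_p->user.key);
            return -ENOMEM;
        }

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
            /* ESSIV derives its IV-tweak key as sha256(key); use a SW hash */
            ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
            if (IS_ERR(ctx_p->shash_tfm))
                return PTR_ERR(ctx_p->shash_tfm);
        }

        return 0;
    }
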
190 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_exit() local
191 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_exit()
196 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_exit()
198 crypto_free_shash(ctx_p->shash_tfm); in cc_cipher_exit()
199 ctx_p->shash_tfm = NULL; in cc_cipher_exit()
203 dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size, in cc_cipher_exit()
206 &ctx_p->user.key_dma_addr); in cc_cipher_exit()
209 kzfree(ctx_p->user.key); in cc_cipher_exit()
210 dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key); in cc_cipher_exit()
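
Lines 190-210 mirror init exactly: drop the ESSIV hash handle, unmap the key buffer, and zeroize the key copy with kzfree() so key material never lingers in freed memory. A sketch, with the max_key_buf_size derivation elided as it is in the listing:

    static void cc_cipher_exit(struct crypto_tfm *tfm)
    {
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
            /* Release the sha256 handle taken in cc_cipher_init() */
            crypto_free_shash(ctx_p->shash_tfm);
            ctx_p->shash_tfm = NULL;
        }

        /* Undo the lifetime mapping, then zeroize and free the key copy */
        dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
                         DMA_TO_DEVICE);
        kzfree(ctx_p->user.key);
    }
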
238 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_sethkey() local
239 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_sethkey()
243 ctx_p, crypto_tfm_alg_name(tfm), keylen); in cc_cipher_sethkey()
255 if (ctx_p->flow_mode != S_DIN_to_AES) { in cc_cipher_sethkey()
267 if (validate_keys_sizes(ctx_p, keylen)) { in cc_cipher_sethkey()
273 ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1); in cc_cipher_sethkey()
274 if (ctx_p->hw.key1_slot == END_OF_KEYS) { in cc_cipher_sethkey()
279 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || in cc_cipher_sethkey()
280 ctx_p->cipher_mode == DRV_CIPHER_ESSIV || in cc_cipher_sethkey()
281 ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) { in cc_cipher_sethkey()
287 ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2); in cc_cipher_sethkey()
288 if (ctx_p->hw.key2_slot == END_OF_KEYS) { in cc_cipher_sethkey()
295 ctx_p->keylen = keylen; in cc_cipher_sethkey()
296 ctx_p->hw_key = true; in cc_cipher_sethkey()
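
Lines 238-296 are the protected ("HW") key path of cc_cipher_sethkey(): the incoming blob carries slot indices rather than key bytes, cc_slot_to_hw_key() translates them, END_OF_KEYS rejects an invalid slot, and the two-key modes demand a second slot. A condensed fragment; the cc_hkey_info layout is inferred from the matched names:

    struct cc_hkey_info hki;

    if (ctx_p->flow_mode != S_DIN_to_AES)
        return -EINVAL;                 /* HW keys: AES flows only */

    /* The "key" blob carries slot numbers, not key material */
    memcpy(&hki, key, keylen);
    keylen = hki.keylen;

    if (validate_keys_sizes(ctx_p, keylen))
        return -EINVAL;

    ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
    if (ctx_p->hw.key1_slot == END_OF_KEYS)
        return -EINVAL;

    if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
        ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
        ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
        /* Two-key modes need a second slot as well */
        ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
        if (ctx_p->hw.key2_slot == END_OF_KEYS)
            return -EINVAL;
    }

    ctx_p->keylen = keylen;
    ctx_p->hw_key = true;               /* later tested via cc_is_hw_key() */
    return 0;
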
306 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_setkey() local
307 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_setkey()
315 ctx_p, crypto_tfm_alg_name(tfm), keylen); in cc_cipher_setkey()
320 if (validate_keys_sizes(ctx_p, keylen)) { in cc_cipher_setkey()
326 ctx_p->hw_key = false; in cc_cipher_setkey()
333 if (ctx_p->flow_mode == S_DIN_to_DES) { in cc_cipher_setkey()
347 if (ctx_p->cipher_mode == DRV_CIPHER_XTS && in cc_cipher_setkey()
354 dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr, in cc_cipher_setkey()
357 memcpy(ctx_p->user.key, key, keylen); in cc_cipher_setkey()
359 memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24); in cc_cipher_setkey()
361 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_setkey()
366 SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm); in cc_cipher_setkey()
368 desc->tfm = ctx_p->shash_tfm; in cc_cipher_setkey()
370 err = crypto_shash_digest(desc, ctx_p->user.key, key_len, in cc_cipher_setkey()
371 ctx_p->user.key + key_len); in cc_cipher_setkey()
377 dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr, in cc_cipher_setkey()
379 ctx_p->keylen = keylen; in cc_cipher_setkey()
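
Lines 306-379 show the ordinary setkey. Because the key buffer stays DMA-mapped, CPU writes are bracketed by dma_sync_single_for_cpu()/dma_sync_single_for_device(); for ESSIV the upper half of the buffer is overwritten with sha256 of the lower half. A sketch of that middle section:

    dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
                            max_key_buf_size, DMA_TO_DEVICE);
    memcpy(ctx_p->user.key, key, keylen);
    if (keylen == 24)   /* pad 192-bit AES keys out to the full buffer */
        memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

    if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
        /* Derive key2 = sha256(key1) in place in the upper half */
        unsigned int key_len = keylen >> 1;
        int err;

        SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
        desc->tfm = ctx_p->shash_tfm;

        err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
                                  ctx_p->user.key + key_len);
        if (err)
            return err;
    }

    /* Hand the buffer back to the device before any request uses it */
    dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
                               max_key_buf_size, DMA_TO_DEVICE);
    ctx_p->keylen = keylen;
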
391 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_cipher_desc() local
392 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_cipher_desc()
393 int cipher_mode = ctx_p->cipher_mode; in cc_setup_cipher_desc()
394 int flow_mode = ctx_p->flow_mode; in cc_setup_cipher_desc()
396 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; in cc_setup_cipher_desc()
397 unsigned int key_len = ctx_p->keylen; in cc_setup_cipher_desc()
436 ctx_p->hw.key1_slot); in cc_setup_cipher_desc()
463 ctx_p->hw.key1_slot); in cc_setup_cipher_desc()
479 ctx_p->hw.key2_slot); in cc_setup_cipher_desc()
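
Lines 391-479 build the key/IV setup descriptors. The matched slots (hw.key1_slot, hw.key2_slot) only come into play on the protected-key path; otherwise the engine DMAs the key from user.key_dma_addr. An illustrative fragment, with helper names following the driver's cc_hw_queue_defs.h conventions (assumed, not shown in the match):

    hw_desc_init(&desc[*seq_size]);
    set_cipher_mode(&desc[*seq_size], cipher_mode);
    set_flow_mode(&desc[*seq_size], flow_mode);
    if (cc_is_hw_key(tfm)) {
        /* Protected key: program the slot index, bytes stay in HW */
        set_hw_crypto_key(&desc[*seq_size], ctx_p->hw.key1_slot);
    } else {
        /* Plain key: point the engine at the mapped staging buffer */
        set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
                     key_len, NS_BIT);
    }
    set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
    (*seq_size)++;
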
514 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_cipher_data() local
515 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_cipher_data()
516 unsigned int flow_mode = ctx_p->flow_mode; in cc_setup_cipher_data()
518 switch (ctx_p->flow_mode) { in cc_setup_cipher_data()
541 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_cipher_data()
550 (unsigned int)ctx_p->drvdata->mlli_sram_addr); in cc_setup_cipher_data()
556 ctx_p->drvdata->mlli_sram_addr, in cc_setup_cipher_data()
563 ctx_p->drvdata->mlli_sram_addr, in cc_setup_cipher_data()
567 (unsigned int)ctx_p->drvdata->mlli_sram_addr, in cc_setup_cipher_data()
568 (unsigned int)ctx_p->drvdata->mlli_sram_addr); in cc_setup_cipher_data()
570 ctx_p->drvdata->mlli_sram_addr, in cc_setup_cipher_data()
575 (unsigned int)ctx_p->drvdata->mlli_sram_addr, in cc_setup_cipher_data()
576 (unsigned int)ctx_p->drvdata->mlli_sram_addr + in cc_setup_cipher_data()
579 (ctx_p->drvdata->mlli_sram_addr + in cc_setup_cipher_data()
586 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_cipher_data()
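
Lines 514-586 choose between the two data paths: a direct (DLLI) descriptor when source and destination each map contiguously, or staging the scatter/gather table (MLLI) into engine SRAM at drvdata->mlli_sram_addr first; set_queue_last_ind() flags the final descriptor of the sequence either way. A simplified fragment; the req_ctx field names are assumptions:

    if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
        /* Contiguous mapping: feed src/dst straight to the engine */
        set_din_type(&desc[*seq_size], DMA_DLLI,
                     sg_dma_address(src), nbytes, NS_BIT);
        set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
                      nbytes, NS_BIT, 0);
    } else {
        /* Scattered mapping: copy the MLLI table into SRAM first,
         * then let the engine walk it */
        set_din_type(&desc[*seq_size], DMA_DLLI,
                     req_ctx->mlli_params.mlli_dma_addr,
                     req_ctx->mlli_params.mlli_len, NS_BIT);
        set_dout_sram(&desc[*seq_size],
                      ctx_p->drvdata->mlli_sram_addr,
                      req_ctx->mlli_params.mlli_len);
    }
    set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
    (*seq_size)++;
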
633 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_complete() local
637 switch (ctx_p->cipher_mode) { in cc_cipher_complete()
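
Lines 633-637: the completion handler dispatches on cipher mode because the skcipher API requires req->iv to hold the last ciphertext block on return, so CBC requests can be chained. A hypothetical fragment (backup_info is an assumed field, saved on the decrypt path sketched at the end of this listing):

    switch (ctx_p->cipher_mode) {
    case DRV_CIPHER_CBC:
        /*
         * CBC chaining: on decrypt the last ciphertext block was saved
         * before the HW could overwrite it in place; publish it as the
         * next IV. (On encrypt it would be read back from req->dst.)
         */
        memcpy(req->iv, req_ctx->backup_info, ivsize);
        kzfree(req_ctx->backup_info);
        break;
    default:
        break;
    }
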
683 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_process() local
684 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_process()
698 if (validate_data_size(ctx_p, nbytes)) { in cc_cipher_process()
728 rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, in cc_cipher_process()
745 rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len, in cc_cipher_process()
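
Lines 683-745 trace the request path end to end: validate the length, DMA-map the request, build the descriptor sequence, and queue it to the hardware. The simplified control flow, with error unwinding omitted and helper signatures reconstructed rather than verbatim:

    if (validate_data_size(ctx_p, nbytes)) {
        dev_err(dev, "Unsupported data size %d.\n", nbytes);
        return -EINVAL;
    }

    rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
                               req->iv, req->src, req->dst, flags);
    if (rc)
        return rc;

    /* Key/IV setup descriptors first, then the data movement */
    cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
    cc_setup_cipher_data(tfm, req_ctx, req->dst, req->src, nbytes, req,
                         desc, &seq_len);

    /* Queue to the HW; completion lands in cc_cipher_complete() */
    rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
                         &req->base);
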
776 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_decrypt() local
784 if (ctx_p->cipher_mode == DRV_CIPHER_CBC) { in cc_cipher_decrypt()
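
Line 784: CBC decrypt is special because in-place operation destroys the ciphertext block the next IV must come from, so that block is copied aside before the request runs. A sketch under that assumption (backup_info as above):

    if (ctx_p->cipher_mode == DRV_CIPHER_CBC &&
        req->cryptlen >= ivsize) {
        /* Save the last ciphertext block before in-place decryption
         * clobbers it; cc_cipher_complete() copies it into req->iv */
        req_ctx->backup_info = kzalloc(ivsize, flags);
        if (!req_ctx->backup_info)
            return -ENOMEM;

        scatterwalk_map_and_copy(req_ctx->backup_info, req->src,
                                 req->cryptlen - ivsize, ivsize, 0);
    }
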