/* SPDX-License-Identifier: GPL-2.0 */
#ifndef ASM_X86_SERPENT_SSE2_H
#define ASM_X86_SERPENT_SSE2_H

#include <linux/crypto.h>
#include <crypto/serpent.h>

#ifdef CONFIG_X86_32

10 #define SERPENT_PARALLEL_BLOCKS 4
11
12 asmlinkage void __serpent_enc_blk_4way(struct serpent_ctx *ctx, u8 *dst,
13 const u8 *src, bool xor);
14 asmlinkage void serpent_dec_blk_4way(struct serpent_ctx *ctx, u8 *dst,
15 const u8 *src);
16
serpent_enc_blk_xway(struct serpent_ctx * ctx,u8 * dst,const u8 * src)17 static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst,
18 const u8 *src)
19 {
20 __serpent_enc_blk_4way(ctx, dst, src, false);
21 }
22
serpent_enc_blk_xway_xor(struct serpent_ctx * ctx,u8 * dst,const u8 * src)23 static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst,
24 const u8 *src)
25 {
26 __serpent_enc_blk_4way(ctx, dst, src, true);
27 }
28
serpent_dec_blk_xway(struct serpent_ctx * ctx,u8 * dst,const u8 * src)29 static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst,
30 const u8 *src)
31 {
32 serpent_dec_blk_4way(ctx, dst, src);
33 }

#else

37 #define SERPENT_PARALLEL_BLOCKS 8
38
39 asmlinkage void __serpent_enc_blk_8way(struct serpent_ctx *ctx, u8 *dst,
40 const u8 *src, bool xor);
41 asmlinkage void serpent_dec_blk_8way(struct serpent_ctx *ctx, u8 *dst,
42 const u8 *src);
43
serpent_enc_blk_xway(struct serpent_ctx * ctx,u8 * dst,const u8 * src)44 static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst,
45 const u8 *src)
46 {
47 __serpent_enc_blk_8way(ctx, dst, src, false);
48 }
49
serpent_enc_blk_xway_xor(struct serpent_ctx * ctx,u8 * dst,const u8 * src)50 static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst,
51 const u8 *src)
52 {
53 __serpent_enc_blk_8way(ctx, dst, src, true);
54 }
55
serpent_dec_blk_xway(struct serpent_ctx * ctx,u8 * dst,const u8 * src)56 static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst,
57 const u8 *src)
58 {
59 serpent_dec_blk_8way(ctx, dst, src);
60 }

#endif

#endif