/* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0-only) */
/* Copyright(c) 2014 - 2020 Intel Corporation */
#ifndef _ICP_QAT_HW_H_
#define _ICP_QAT_HW_H_

#include <linux/bits.h>
#include <linux/types.h>

enum icp_qat_hw_ae_id {
	ICP_QAT_HW_AE_0 = 0,
	ICP_QAT_HW_AE_1 = 1,
	ICP_QAT_HW_AE_2 = 2,
	ICP_QAT_HW_AE_3 = 3,
	ICP_QAT_HW_AE_4 = 4,
	ICP_QAT_HW_AE_5 = 5,
	ICP_QAT_HW_AE_6 = 6,
	ICP_QAT_HW_AE_7 = 7,
	ICP_QAT_HW_AE_8 = 8,
	ICP_QAT_HW_AE_9 = 9,
	ICP_QAT_HW_AE_10 = 10,
	ICP_QAT_HW_AE_11 = 11,
	ICP_QAT_HW_AE_DELIMITER = 12
};

enum icp_qat_hw_qat_id {
	ICP_QAT_HW_QAT_0 = 0,
	ICP_QAT_HW_QAT_1 = 1,
	ICP_QAT_HW_QAT_2 = 2,
	ICP_QAT_HW_QAT_3 = 3,
	ICP_QAT_HW_QAT_4 = 4,
	ICP_QAT_HW_QAT_5 = 5,
	ICP_QAT_HW_QAT_DELIMITER = 6
};

enum icp_qat_hw_auth_algo {
	ICP_QAT_HW_AUTH_ALGO_NULL = 0,
	ICP_QAT_HW_AUTH_ALGO_SHA1 = 1,
	ICP_QAT_HW_AUTH_ALGO_MD5 = 2,
	ICP_QAT_HW_AUTH_ALGO_SHA224 = 3,
	ICP_QAT_HW_AUTH_ALGO_SHA256 = 4,
	ICP_QAT_HW_AUTH_ALGO_SHA384 = 5,
	ICP_QAT_HW_AUTH_ALGO_SHA512 = 6,
	ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC = 7,
	ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC = 8,
	ICP_QAT_HW_AUTH_ALGO_AES_F9 = 9,
	ICP_QAT_HW_AUTH_ALGO_GALOIS_128 = 10,
	ICP_QAT_HW_AUTH_ALGO_GALOIS_64 = 11,
	ICP_QAT_HW_AUTH_ALGO_KASUMI_F9 = 12,
	ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 = 13,
	ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 = 14,
	ICP_QAT_HW_AUTH_RESERVED_1 = 15,
	ICP_QAT_HW_AUTH_RESERVED_2 = 16,
	ICP_QAT_HW_AUTH_ALGO_SHA3_256 = 17,
	ICP_QAT_HW_AUTH_RESERVED_3 = 18,
	ICP_QAT_HW_AUTH_ALGO_SHA3_512 = 19,
	ICP_QAT_HW_AUTH_ALGO_DELIMITER = 20
};

enum icp_qat_hw_auth_mode {
	ICP_QAT_HW_AUTH_MODE0 = 0,
	ICP_QAT_HW_AUTH_MODE1 = 1,
	ICP_QAT_HW_AUTH_MODE2 = 2,
	ICP_QAT_HW_AUTH_MODE_DELIMITER = 3
};

struct icp_qat_hw_auth_config {
	__u32 config;
	__u32 reserved;
};

struct icp_qat_hw_ucs_cipher_config {
	__u32 val;
	__u32 reserved[3];
};

enum icp_qat_slice_mask {
	ICP_ACCEL_MASK_CIPHER_SLICE = BIT(0),
	ICP_ACCEL_MASK_AUTH_SLICE = BIT(1),
	ICP_ACCEL_MASK_PKE_SLICE = BIT(2),
	ICP_ACCEL_MASK_COMPRESS_SLICE = BIT(3),
	ICP_ACCEL_MASK_LZS_SLICE = BIT(4),
	ICP_ACCEL_MASK_EIA3_SLICE = BIT(5),
	ICP_ACCEL_MASK_SHA3_SLICE = BIT(6),
};

enum icp_qat_capabilities_mask {
	ICP_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC = BIT(0),
	ICP_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC = BIT(1),
	ICP_ACCEL_CAPABILITIES_CIPHER = BIT(2),
	ICP_ACCEL_CAPABILITIES_AUTHENTICATION = BIT(3),
	ICP_ACCEL_CAPABILITIES_RESERVED_1 = BIT(4),
	ICP_ACCEL_CAPABILITIES_COMPRESSION = BIT(5),
	ICP_ACCEL_CAPABILITIES_LZS_COMPRESSION = BIT(6),
	ICP_ACCEL_CAPABILITIES_RAND = BIT(7),
	ICP_ACCEL_CAPABILITIES_ZUC = BIT(8),
	ICP_ACCEL_CAPABILITIES_SHA3 = BIT(9),
	/* Bits 10-11 are currently reserved */
	ICP_ACCEL_CAPABILITIES_HKDF = BIT(12),
	ICP_ACCEL_CAPABILITIES_ECEDMONT = BIT(13),
	/* Bit 14 is currently reserved */
	ICP_ACCEL_CAPABILITIES_SHA3_EXT = BIT(15),
	ICP_ACCEL_CAPABILITIES_AESGCM_SPC = BIT(16),
	ICP_ACCEL_CAPABILITIES_CHACHA_POLY = BIT(17),
	/* Bits 18-21 are currently reserved */
	ICP_ACCEL_CAPABILITIES_CNV_INTEGRITY = BIT(22),
	ICP_ACCEL_CAPABILITIES_CNV_INTEGRITY64 = BIT(23),
	ICP_ACCEL_CAPABILITIES_LZ4_COMPRESSION = BIT(24),
	ICP_ACCEL_CAPABILITIES_LZ4S_COMPRESSION = BIT(25),
	ICP_ACCEL_CAPABILITIES_AES_V2 = BIT(26)
};
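
/*
 * Editorial example (not part of the original header): the slice and
 * capability enums above are plain bit masks, so an availability check
 * reduces to a bitwise AND. The hw_caps variable and helper below are
 * illustrative only.
 *
 *	u32 hw_caps = ICP_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC |
 *		      ICP_ACCEL_CAPABILITIES_COMPRESSION;
 *
 *	if (hw_caps & ICP_ACCEL_CAPABILITIES_COMPRESSION)
 *		enable_compression_service();	// hypothetical helper
 */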

#define QAT_AUTH_MODE_BITPOS 4
#define QAT_AUTH_MODE_MASK 0xF
#define QAT_AUTH_ALGO_BITPOS 0
#define QAT_AUTH_ALGO_MASK 0xF
#define QAT_AUTH_CMP_BITPOS 8
#define QAT_AUTH_CMP_MASK 0x7F
#define QAT_AUTH_SHA3_PADDING_BITPOS 16
#define QAT_AUTH_SHA3_PADDING_MASK 0x1
#define QAT_AUTH_ALGO_SHA3_BITPOS 22
#define QAT_AUTH_ALGO_SHA3_MASK 0x3
#define ICP_QAT_HW_AUTH_CONFIG_BUILD(mode, algo, cmp_len) \
	((((mode) & QAT_AUTH_MODE_MASK) << QAT_AUTH_MODE_BITPOS) | \
	(((algo) & QAT_AUTH_ALGO_MASK) << QAT_AUTH_ALGO_BITPOS) | \
	((((algo) >> 4) & QAT_AUTH_ALGO_SHA3_MASK) << \
	 QAT_AUTH_ALGO_SHA3_BITPOS) | \
	 ((((((algo) == ICP_QAT_HW_AUTH_ALGO_SHA3_256) || \
	((algo) == ICP_QAT_HW_AUTH_ALGO_SHA3_512)) ? 1 : 0) \
	& QAT_AUTH_SHA3_PADDING_MASK) << QAT_AUTH_SHA3_PADDING_BITPOS) | \
	(((cmp_len) & QAT_AUTH_CMP_MASK) << QAT_AUTH_CMP_BITPOS))
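
/*
 * Editorial worked example (not part of the original header), assuming a
 * MODE1 SHA-256 setup with a 32-byte comparison length:
 *
 *	ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
 *				     ICP_QAT_HW_AUTH_ALGO_SHA256, 32)
 *
 * places mode 1 in bits [7:4], algo 4 in bits [3:0] and the comparison
 * length 32 in bits [14:8], giving a config word of 0x2014; the SHA-3
 * padding and algo-extension bits stay clear for non-SHA-3 algorithms.
 */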

struct icp_qat_hw_auth_counter {
	__be32 counter;
	__u32 reserved;
};

#define QAT_AUTH_COUNT_MASK 0xFFFFFFFF
#define QAT_AUTH_COUNT_BITPOS 0
#define ICP_QAT_HW_AUTH_COUNT_BUILD(val) \
	(((val) & QAT_AUTH_COUNT_MASK) << QAT_AUTH_COUNT_BITPOS)

struct icp_qat_hw_auth_setup {
	struct icp_qat_hw_auth_config auth_config;
	struct icp_qat_hw_auth_counter auth_counter;
};

#define QAT_HW_DEFAULT_ALIGNMENT 8
#define QAT_HW_ROUND_UP(val, n) (((val) + ((n) - 1)) & (~((n) - 1)))
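/*
 * Editorial example (not part of the original header): QAT_HW_ROUND_UP
 * rounds val up to the next multiple of n, where n must be a power of two.
 * For instance QAT_HW_ROUND_UP(20, 8) evaluates to 24, while an already
 * aligned value is returned unchanged: QAT_HW_ROUND_UP(16, 8) == 16.
 */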
#define ICP_QAT_HW_NULL_STATE1_SZ 32
#define ICP_QAT_HW_MD5_STATE1_SZ 16
#define ICP_QAT_HW_SHA1_STATE1_SZ 20
#define ICP_QAT_HW_SHA224_STATE1_SZ 32
#define ICP_QAT_HW_SHA256_STATE1_SZ 32
#define ICP_QAT_HW_SHA3_256_STATE1_SZ 32
#define ICP_QAT_HW_SHA384_STATE1_SZ 64
#define ICP_QAT_HW_SHA512_STATE1_SZ 64
#define ICP_QAT_HW_SHA3_512_STATE1_SZ 64
#define ICP_QAT_HW_SHA3_224_STATE1_SZ 28
#define ICP_QAT_HW_SHA3_384_STATE1_SZ 48
#define ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ 16
#define ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ 16
#define ICP_QAT_HW_AES_F9_STATE1_SZ 32
#define ICP_QAT_HW_KASUMI_F9_STATE1_SZ 16
#define ICP_QAT_HW_GALOIS_128_STATE1_SZ 16
#define ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ 8
#define ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ 8
#define ICP_QAT_HW_NULL_STATE2_SZ 32
#define ICP_QAT_HW_MD5_STATE2_SZ 16
#define ICP_QAT_HW_SHA1_STATE2_SZ 20
#define ICP_QAT_HW_SHA224_STATE2_SZ 32
#define ICP_QAT_HW_SHA256_STATE2_SZ 32
#define ICP_QAT_HW_SHA3_256_STATE2_SZ 0
#define ICP_QAT_HW_SHA384_STATE2_SZ 64
#define ICP_QAT_HW_SHA512_STATE2_SZ 64
#define ICP_QAT_HW_SHA3_512_STATE2_SZ 0
#define ICP_QAT_HW_SHA3_224_STATE2_SZ 0
#define ICP_QAT_HW_SHA3_384_STATE2_SZ 0
#define ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ 16
#define ICP_QAT_HW_AES_CBC_MAC_KEY_SZ 16
#define ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ 16
#define ICP_QAT_HW_F9_IK_SZ 16
#define ICP_QAT_HW_F9_FK_SZ 16
#define ICP_QAT_HW_KASUMI_F9_STATE2_SZ (ICP_QAT_HW_F9_IK_SZ + \
	ICP_QAT_HW_F9_FK_SZ)
#define ICP_QAT_HW_AES_F9_STATE2_SZ ICP_QAT_HW_KASUMI_F9_STATE2_SZ
#define ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ 24
#define ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ 32
#define ICP_QAT_HW_GALOIS_H_SZ 16
#define ICP_QAT_HW_GALOIS_LEN_A_SZ 8
#define ICP_QAT_HW_GALOIS_E_CTR0_SZ 16

struct icp_qat_hw_auth_sha512 {
	struct icp_qat_hw_auth_setup inner_setup;
	__u8 state1[ICP_QAT_HW_SHA512_STATE1_SZ];
	struct icp_qat_hw_auth_setup outer_setup;
	__u8 state2[ICP_QAT_HW_SHA512_STATE2_SZ];
};

struct icp_qat_hw_auth_algo_blk {
	struct icp_qat_hw_auth_sha512 sha;
};

#define ICP_QAT_HW_GALOIS_LEN_A_BITPOS 0
#define ICP_QAT_HW_GALOIS_LEN_A_MASK 0xFFFFFFFF

enum icp_qat_hw_cipher_algo {
	ICP_QAT_HW_CIPHER_ALGO_NULL = 0,
	ICP_QAT_HW_CIPHER_ALGO_DES = 1,
	ICP_QAT_HW_CIPHER_ALGO_3DES = 2,
	ICP_QAT_HW_CIPHER_ALGO_AES128 = 3,
	ICP_QAT_HW_CIPHER_ALGO_AES192 = 4,
	ICP_QAT_HW_CIPHER_ALGO_AES256 = 5,
	ICP_QAT_HW_CIPHER_ALGO_ARC4 = 6,
	ICP_QAT_HW_CIPHER_ALGO_KASUMI = 7,
	ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 = 8,
	ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3 = 9,
	ICP_QAT_HW_CIPHER_DELIMITER = 10
};

enum icp_qat_hw_cipher_mode {
	ICP_QAT_HW_CIPHER_ECB_MODE = 0,
	ICP_QAT_HW_CIPHER_CBC_MODE = 1,
	ICP_QAT_HW_CIPHER_CTR_MODE = 2,
	ICP_QAT_HW_CIPHER_F8_MODE = 3,
	ICP_QAT_HW_CIPHER_XTS_MODE = 6,
	ICP_QAT_HW_CIPHER_MODE_DELIMITER = 7
};

struct icp_qat_hw_cipher_config {
	__u32 val;
	__u32 reserved;
};

enum icp_qat_hw_cipher_dir {
	ICP_QAT_HW_CIPHER_ENCRYPT = 0,
	ICP_QAT_HW_CIPHER_DECRYPT = 1,
};

enum icp_qat_hw_cipher_convert {
	ICP_QAT_HW_CIPHER_NO_CONVERT = 0,
	ICP_QAT_HW_CIPHER_KEY_CONVERT = 1,
};

#define QAT_CIPHER_MODE_BITPOS 4
#define QAT_CIPHER_MODE_MASK 0xF
#define QAT_CIPHER_ALGO_BITPOS 0
#define QAT_CIPHER_ALGO_MASK 0xF
#define QAT_CIPHER_CONVERT_BITPOS 9
#define QAT_CIPHER_CONVERT_MASK 0x1
#define QAT_CIPHER_DIR_BITPOS 8
#define QAT_CIPHER_DIR_MASK 0x1
#define QAT_CIPHER_MODE_F8_KEY_SZ_MULT 2
#define QAT_CIPHER_MODE_XTS_KEY_SZ_MULT 2
#define ICP_QAT_HW_CIPHER_CONFIG_BUILD(mode, algo, convert, dir) \
	((((mode) & QAT_CIPHER_MODE_MASK) << QAT_CIPHER_MODE_BITPOS) | \
	(((algo) & QAT_CIPHER_ALGO_MASK) << QAT_CIPHER_ALGO_BITPOS) | \
	(((convert) & QAT_CIPHER_CONVERT_MASK) << QAT_CIPHER_CONVERT_BITPOS) | \
	(((dir) & QAT_CIPHER_DIR_MASK) << QAT_CIPHER_DIR_BITPOS))
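/*
 * Editorial worked example (not part of the original header), assuming an
 * AES-256 CBC encrypt configuration without key conversion:
 *
 *	ICP_QAT_HW_CIPHER_CONFIG_BUILD(ICP_QAT_HW_CIPHER_CBC_MODE,
 *				       ICP_QAT_HW_CIPHER_ALGO_AES256,
 *				       ICP_QAT_HW_CIPHER_NO_CONVERT,
 *				       ICP_QAT_HW_CIPHER_ENCRYPT)
 *
 * packs mode 1 into bits [7:4] and algo 5 into bits [3:0], with the
 * direction (bit 8) and key-convert (bit 9) flags left clear, giving 0x15.
 */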
#define ICP_QAT_HW_DES_BLK_SZ 8
#define ICP_QAT_HW_3DES_BLK_SZ 8
#define ICP_QAT_HW_NULL_BLK_SZ 8
#define ICP_QAT_HW_AES_BLK_SZ 16
#define ICP_QAT_HW_KASUMI_BLK_SZ 8
#define ICP_QAT_HW_SNOW_3G_BLK_SZ 8
#define ICP_QAT_HW_ZUC_3G_BLK_SZ 8
#define ICP_QAT_HW_NULL_KEY_SZ 256
#define ICP_QAT_HW_DES_KEY_SZ 8
#define ICP_QAT_HW_3DES_KEY_SZ 24
#define ICP_QAT_HW_AES_128_KEY_SZ 16
#define ICP_QAT_HW_AES_192_KEY_SZ 24
#define ICP_QAT_HW_AES_256_KEY_SZ 32
#define ICP_QAT_HW_AES_128_F8_KEY_SZ (ICP_QAT_HW_AES_128_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_192_F8_KEY_SZ (ICP_QAT_HW_AES_192_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_256_F8_KEY_SZ (ICP_QAT_HW_AES_256_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_128_XTS_KEY_SZ (ICP_QAT_HW_AES_128_KEY_SZ * \
	QAT_CIPHER_MODE_XTS_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_256_XTS_KEY_SZ (ICP_QAT_HW_AES_256_KEY_SZ * \
	QAT_CIPHER_MODE_XTS_KEY_SZ_MULT)
#define ICP_QAT_HW_KASUMI_KEY_SZ 16
#define ICP_QAT_HW_KASUMI_F8_KEY_SZ (ICP_QAT_HW_KASUMI_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_ARC4_KEY_SZ 256
#define ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ 16
#define ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ 16
#define ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ 16
#define ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ 16
#define ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR 2
#define INIT_SHRAM_CONSTANTS_TABLE_SZ 1024

struct icp_qat_hw_cipher_aes256_f8 {
	struct icp_qat_hw_cipher_config cipher_config;
	__u8 key[ICP_QAT_HW_AES_256_F8_KEY_SZ];
};

struct icp_qat_hw_ucs_cipher_aes256_f8 {
	struct icp_qat_hw_ucs_cipher_config cipher_config;
	__u8 key[ICP_QAT_HW_AES_256_F8_KEY_SZ];
};

struct icp_qat_hw_cipher_algo_blk {
	union {
		struct icp_qat_hw_cipher_aes256_f8 aes;
		struct icp_qat_hw_ucs_cipher_aes256_f8 ucs_aes;
	};
} __aligned(64);

enum icp_qat_hw_compression_direction {
	ICP_QAT_HW_COMPRESSION_DIR_COMPRESS = 0,
	ICP_QAT_HW_COMPRESSION_DIR_DECOMPRESS = 1,
	ICP_QAT_HW_COMPRESSION_DIR_DELIMITER = 2
};

enum icp_qat_hw_compression_delayed_match {
	ICP_QAT_HW_COMPRESSION_DELAYED_MATCH_DISABLED = 0,
	ICP_QAT_HW_COMPRESSION_DELAYED_MATCH_ENABLED = 1,
	ICP_QAT_HW_COMPRESSION_DELAYED_MATCH_DELIMITER = 2
};

enum icp_qat_hw_compression_algo {
	ICP_QAT_HW_COMPRESSION_ALGO_DEFLATE = 0,
	ICP_QAT_HW_COMPRESSION_ALGO_LZS = 1,
	ICP_QAT_HW_COMPRESSION_ALGO_DELIMITER = 2
};

enum icp_qat_hw_compression_depth {
	ICP_QAT_HW_COMPRESSION_DEPTH_1 = 0,
	ICP_QAT_HW_COMPRESSION_DEPTH_4 = 1,
	ICP_QAT_HW_COMPRESSION_DEPTH_8 = 2,
	ICP_QAT_HW_COMPRESSION_DEPTH_16 = 3,
	ICP_QAT_HW_COMPRESSION_DEPTH_128 = 4,
	ICP_QAT_HW_COMPRESSION_DEPTH_DELIMITER = 5
};

enum icp_qat_hw_compression_file_type {
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_0 = 0,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_1 = 1,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_2 = 2,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_3 = 3,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_4 = 4,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_DELIMITER = 5
};

struct icp_qat_hw_compression_config {
	__u32 lower_val;
	__u32 upper_val;
};

#define QAT_COMPRESSION_DIR_BITPOS 4
#define QAT_COMPRESSION_DIR_MASK 0x7
#define QAT_COMPRESSION_DELAYED_MATCH_BITPOS 16
#define QAT_COMPRESSION_DELAYED_MATCH_MASK 0x1
#define QAT_COMPRESSION_ALGO_BITPOS 31
#define QAT_COMPRESSION_ALGO_MASK 0x1
#define QAT_COMPRESSION_DEPTH_BITPOS 28
#define QAT_COMPRESSION_DEPTH_MASK 0x7
#define QAT_COMPRESSION_FILE_TYPE_BITPOS 24
#define QAT_COMPRESSION_FILE_TYPE_MASK 0xF

#define ICP_QAT_HW_COMPRESSION_CONFIG_BUILD(dir, delayed, \
	algo, depth, filetype) \
	((((dir) & QAT_COMPRESSION_DIR_MASK) << \
	QAT_COMPRESSION_DIR_BITPOS) | \
	(((delayed) & QAT_COMPRESSION_DELAYED_MATCH_MASK) << \
	QAT_COMPRESSION_DELAYED_MATCH_BITPOS) | \
	(((algo) & QAT_COMPRESSION_ALGO_MASK) << \
	QAT_COMPRESSION_ALGO_BITPOS) | \
	(((depth) & QAT_COMPRESSION_DEPTH_MASK) << \
	QAT_COMPRESSION_DEPTH_BITPOS) | \
	(((filetype) & QAT_COMPRESSION_FILE_TYPE_MASK) << \
	QAT_COMPRESSION_FILE_TYPE_BITPOS))
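
/*
 * Editorial worked example (not part of the original header), assuming a
 * DEFLATE compress direction with delayed match enabled, search depth 8 and
 * file type 0:
 *
 *	ICP_QAT_HW_COMPRESSION_CONFIG_BUILD(
 *		ICP_QAT_HW_COMPRESSION_DIR_COMPRESS,
 *		ICP_QAT_HW_COMPRESSION_DELAYED_MATCH_ENABLED,
 *		ICP_QAT_HW_COMPRESSION_ALGO_DEFLATE,
 *		ICP_QAT_HW_COMPRESSION_DEPTH_8,
 *		ICP_QAT_HW_COMPRESSION_FILE_TYPE_0)
 *
 * sets bit 16 (delayed match) and writes depth value 2 into bits [30:28],
 * leaving the direction, algorithm and file-type fields at zero, for a
 * config word of 0x20010000.
 */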

#endif