xref: /linux/drivers/crypto/intel/qat/qat_common/icp_qat_hw.h (revision 3bf3e21c15d4386a5f15118ec39bbc1b67ea5759)
1 /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0-only) */
2 /* Copyright(c) 2014 - 2020 Intel Corporation */
3 #ifndef _ICP_QAT_HW_H_
4 #define _ICP_QAT_HW_H_
5 
6 #include <linux/bits.h>
7 
/* Identifiers for the QAT acceleration engines (AEs). */
enum icp_qat_hw_ae_id {
	ICP_QAT_HW_AE_0 = 0,
	ICP_QAT_HW_AE_1 = 1,
	ICP_QAT_HW_AE_2 = 2,
	ICP_QAT_HW_AE_3 = 3,
	ICP_QAT_HW_AE_4 = 4,
	ICP_QAT_HW_AE_5 = 5,
	ICP_QAT_HW_AE_6 = 6,
	ICP_QAT_HW_AE_7 = 7,
	ICP_QAT_HW_AE_8 = 8,
	ICP_QAT_HW_AE_9 = 9,
	ICP_QAT_HW_AE_10 = 10,
	ICP_QAT_HW_AE_11 = 11,
	ICP_QAT_HW_AE_DELIMITER = 12	/* one past the last valid AE id */
};
23 
/* Identifiers for the QAT accelerator instances. */
enum icp_qat_hw_qat_id {
	ICP_QAT_HW_QAT_0 = 0,
	ICP_QAT_HW_QAT_1 = 1,
	ICP_QAT_HW_QAT_2 = 2,
	ICP_QAT_HW_QAT_3 = 3,
	ICP_QAT_HW_QAT_4 = 4,
	ICP_QAT_HW_QAT_5 = 5,
	ICP_QAT_HW_QAT_DELIMITER = 6	/* one past the last valid id */
};
33 
/*
 * Hardware authentication algorithm selectors.  The value is packed into
 * the auth config word by ICP_QAT_HW_AUTH_CONFIG_BUILD(): the low 4 bits
 * go into the algo field and bits 4+ into the separate SHA3 algo field,
 * so values above 15 (the SHA3 variants) are still representable.
 */
enum icp_qat_hw_auth_algo {
	ICP_QAT_HW_AUTH_ALGO_NULL = 0,
	ICP_QAT_HW_AUTH_ALGO_SHA1 = 1,
	ICP_QAT_HW_AUTH_ALGO_MD5 = 2,
	ICP_QAT_HW_AUTH_ALGO_SHA224 = 3,
	ICP_QAT_HW_AUTH_ALGO_SHA256 = 4,
	ICP_QAT_HW_AUTH_ALGO_SHA384 = 5,
	ICP_QAT_HW_AUTH_ALGO_SHA512 = 6,
	ICP_QAT_HW_AUTH_ALGO_AES_XCBC_MAC = 7,
	ICP_QAT_HW_AUTH_ALGO_AES_CBC_MAC = 8,
	ICP_QAT_HW_AUTH_ALGO_AES_F9 = 9,
	ICP_QAT_HW_AUTH_ALGO_GALOIS_128 = 10,
	ICP_QAT_HW_AUTH_ALGO_GALOIS_64 = 11,
	ICP_QAT_HW_AUTH_ALGO_KASUMI_F9 = 12,
	ICP_QAT_HW_AUTH_ALGO_SNOW_3G_UIA2 = 13,
	ICP_QAT_HW_AUTH_ALGO_ZUC_3G_128_EIA3 = 14,
	ICP_QAT_HW_AUTH_RESERVED_1 = 15,
	ICP_QAT_HW_AUTH_RESERVED_2 = 16,
	ICP_QAT_HW_AUTH_ALGO_SHA3_256 = 17,
	ICP_QAT_HW_AUTH_RESERVED_3 = 18,
	ICP_QAT_HW_AUTH_ALGO_SHA3_512 = 19,
	ICP_QAT_HW_AUTH_ALGO_DELIMITER = 20	/* one past the last valid value */
};
57 
/* Auth mode selector, written into the mode field of the auth config word. */
enum icp_qat_hw_auth_mode {
	ICP_QAT_HW_AUTH_MODE0 = 0,
	ICP_QAT_HW_AUTH_MODE1 = 1,
	ICP_QAT_HW_AUTH_MODE2 = 2,
	ICP_QAT_HW_AUTH_MODE_DELIMITER = 3	/* one past the last valid mode */
};
64 
/*
 * Hardware auth slice configuration.
 * @config:   packed mode/algo/cmp-len word, built with
 *            ICP_QAT_HW_AUTH_CONFIG_BUILD()
 * @reserved: reserved, not used by the driver
 */
struct icp_qat_hw_auth_config {
	__u32 config;
	__u32 reserved;
};
69 
/*
 * UCS cipher slice configuration: one config word plus reserved padding
 * (four 32-bit words total, vs two for the legacy cipher config).
 */
struct icp_qat_hw_ucs_cipher_config {
	__u32 val;
	__u32 reserved[3];
};
74 
/* Bit masks identifying the individual accelerator slices. */
enum icp_qat_slice_mask {
	ICP_ACCEL_MASK_CIPHER_SLICE = BIT(0),
	ICP_ACCEL_MASK_AUTH_SLICE = BIT(1),
	ICP_ACCEL_MASK_PKE_SLICE = BIT(2),
	ICP_ACCEL_MASK_COMPRESS_SLICE = BIT(3),
	ICP_ACCEL_MASK_LZS_SLICE = BIT(4),
	ICP_ACCEL_MASK_EIA3_SLICE = BIT(5),
	ICP_ACCEL_MASK_SHA3_SLICE = BIT(6),
};
84 
/*
 * Device capability flags.  Bit positions are part of the firmware/driver
 * interface and must not be renumbered; gaps are explicitly reserved.
 */
enum icp_qat_capabilities_mask {
	ICP_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC = BIT(0),
	ICP_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC = BIT(1),
	ICP_ACCEL_CAPABILITIES_CIPHER = BIT(2),
	ICP_ACCEL_CAPABILITIES_AUTHENTICATION = BIT(3),
	ICP_ACCEL_CAPABILITIES_RESERVED_1 = BIT(4),
	ICP_ACCEL_CAPABILITIES_COMPRESSION = BIT(5),
	/* Bits 6-7 are currently reserved */
	ICP_ACCEL_CAPABILITIES_ZUC = BIT(8),
	ICP_ACCEL_CAPABILITIES_SHA3 = BIT(9),
	/* Bits 10-11 are currently reserved */
	ICP_ACCEL_CAPABILITIES_HKDF = BIT(12),
	ICP_ACCEL_CAPABILITIES_ECEDMONT = BIT(13),
	/* Bit 14 is currently reserved */
	ICP_ACCEL_CAPABILITIES_SHA3_EXT = BIT(15),
	ICP_ACCEL_CAPABILITIES_AESGCM_SPC = BIT(16),
	ICP_ACCEL_CAPABILITIES_CHACHA_POLY = BIT(17),
	ICP_ACCEL_CAPABILITIES_SM2 = BIT(18),
	ICP_ACCEL_CAPABILITIES_SM3 = BIT(19),
	ICP_ACCEL_CAPABILITIES_SM4 = BIT(20),
	/* Bit 21 is currently reserved */
	ICP_ACCEL_CAPABILITIES_CNV_INTEGRITY = BIT(22),
	ICP_ACCEL_CAPABILITIES_CNV_INTEGRITY64 = BIT(23),
	ICP_ACCEL_CAPABILITIES_LZ4_COMPRESSION = BIT(24),
	ICP_ACCEL_CAPABILITIES_LZ4S_COMPRESSION = BIT(25),
	ICP_ACCEL_CAPABILITIES_AES_V2 = BIT(26)
};
112 
/* Field layout of the auth config word (struct icp_qat_hw_auth_config.config) */
#define QAT_AUTH_MODE_BITPOS 4
#define QAT_AUTH_MODE_MASK 0xF
#define QAT_AUTH_ALGO_BITPOS 0
#define QAT_AUTH_ALGO_MASK 0xF
#define QAT_AUTH_CMP_BITPOS 8
#define QAT_AUTH_CMP_MASK 0x7F
#define QAT_AUTH_SHA3_PADDING_BITPOS 16
#define QAT_AUTH_SHA3_PADDING_MASK 0x1
#define QAT_AUTH_ALGO_SHA3_BITPOS 22
#define QAT_AUTH_ALGO_SHA3_MASK 0x3
/*
 * Build the auth config word from mode, algorithm and comparator length.
 * SHA3 algorithms (enum values > 15) carry their upper bits in the SHA3
 * algo field and additionally set the SHA3 padding bit.
 * All macro parameters are fully parenthesized so caller expressions with
 * low-precedence operators expand correctly.
 */
#define ICP_QAT_HW_AUTH_CONFIG_BUILD(mode, algo, cmp_len) \
	((((mode) & QAT_AUTH_MODE_MASK) << QAT_AUTH_MODE_BITPOS) | \
	(((algo) & QAT_AUTH_ALGO_MASK) << QAT_AUTH_ALGO_BITPOS) | \
	((((algo) >> 4) & QAT_AUTH_ALGO_SHA3_MASK) << \
	 QAT_AUTH_ALGO_SHA3_BITPOS) | \
	 ((((((algo) == ICP_QAT_HW_AUTH_ALGO_SHA3_256) || \
	((algo) == ICP_QAT_HW_AUTH_ALGO_SHA3_512)) ? 1 : 0) \
	& QAT_AUTH_SHA3_PADDING_MASK) << QAT_AUTH_SHA3_PADDING_BITPOS) | \
	(((cmp_len) & QAT_AUTH_CMP_MASK) << QAT_AUTH_CMP_BITPOS))
132 
/*
 * Hardware auth counter.
 * @counter:  32-bit big-endian count value
 * @reserved: reserved, not used by the driver
 */
struct icp_qat_hw_auth_counter {
	__be32 counter;
	__u32 reserved;
};
137 
/* The auth count occupies the full 32-bit counter field. */
#define QAT_AUTH_COUNT_MASK 0xFFFFFFFF
#define QAT_AUTH_COUNT_BITPOS 0
/* Mask a count value into the 32-bit hardware counter field. */
#define ICP_QAT_HW_AUTH_COUNT_BUILD(val) \
	(((val) & QAT_AUTH_COUNT_MASK) << QAT_AUTH_COUNT_BITPOS)
142 
/* Auth slice setup: the config word pair followed by the counter pair. */
struct icp_qat_hw_auth_setup {
	struct icp_qat_hw_auth_config auth_config;
	struct icp_qat_hw_auth_counter auth_counter;
};
147 
#define QAT_HW_DEFAULT_ALIGNMENT 8
/*
 * Round val up to the next multiple of n; n must be a power of two.
 * Both parameters are fully parenthesized — the previous form (~(n - 1))
 * mis-expanded for arguments containing operators with precedence lower
 * than '-' (e.g. '|' or '?:').
 */
#define QAT_HW_ROUND_UP(val, n) (((val) + ((n) - 1)) & (~((n) - 1)))
/* State1 (inner hash state) sizes, in bytes, per auth algorithm */
#define ICP_QAT_HW_NULL_STATE1_SZ 32
#define ICP_QAT_HW_MD5_STATE1_SZ 16
#define ICP_QAT_HW_SHA1_STATE1_SZ 20
#define ICP_QAT_HW_SHA224_STATE1_SZ 32
#define ICP_QAT_HW_SHA256_STATE1_SZ 32
#define ICP_QAT_HW_SHA3_256_STATE1_SZ 32
#define ICP_QAT_HW_SHA384_STATE1_SZ 64
#define ICP_QAT_HW_SHA512_STATE1_SZ 64
#define ICP_QAT_HW_SHA3_512_STATE1_SZ 64
#define ICP_QAT_HW_SHA3_224_STATE1_SZ 28
#define ICP_QAT_HW_SHA3_384_STATE1_SZ 48
#define ICP_QAT_HW_AES_XCBC_MAC_STATE1_SZ 16
#define ICP_QAT_HW_AES_CBC_MAC_STATE1_SZ 16
#define ICP_QAT_HW_AES_F9_STATE1_SZ 32
#define ICP_QAT_HW_KASUMI_F9_STATE1_SZ 16
#define ICP_QAT_HW_GALOIS_128_STATE1_SZ 16
#define ICP_QAT_HW_SNOW_3G_UIA2_STATE1_SZ 8
#define ICP_QAT_HW_ZUC_3G_EIA3_STATE1_SZ 8
/* State2 (outer/auxiliary state) sizes, in bytes, per auth algorithm */
#define ICP_QAT_HW_NULL_STATE2_SZ 32
#define ICP_QAT_HW_MD5_STATE2_SZ 16
#define ICP_QAT_HW_SHA1_STATE2_SZ 20
#define ICP_QAT_HW_SHA224_STATE2_SZ 32
#define ICP_QAT_HW_SHA256_STATE2_SZ 32
#define ICP_QAT_HW_SHA3_256_STATE2_SZ 0
#define ICP_QAT_HW_SHA384_STATE2_SZ 64
#define ICP_QAT_HW_SHA512_STATE2_SZ 64
#define ICP_QAT_HW_SHA3_512_STATE2_SZ 0
#define ICP_QAT_HW_SHA3_224_STATE2_SZ 0
#define ICP_QAT_HW_SHA3_384_STATE2_SZ 0
#define ICP_QAT_HW_AES_XCBC_MAC_KEY_SZ 16
#define ICP_QAT_HW_AES_CBC_MAC_KEY_SZ 16
#define ICP_QAT_HW_AES_CCM_CBC_E_CTR0_SZ 16
/* F9 state2 holds the integrity key (IK) followed by the modified key (FK) */
#define ICP_QAT_HW_F9_IK_SZ 16
#define ICP_QAT_HW_F9_FK_SZ 16
#define ICP_QAT_HW_KASUMI_F9_STATE2_SZ (ICP_QAT_HW_F9_IK_SZ + \
	ICP_QAT_HW_F9_FK_SZ)
#define ICP_QAT_HW_AES_F9_STATE2_SZ ICP_QAT_HW_KASUMI_F9_STATE2_SZ
#define ICP_QAT_HW_SNOW_3G_UIA2_STATE2_SZ 24
#define ICP_QAT_HW_ZUC_3G_EIA3_STATE2_SZ 32
#define ICP_QAT_HW_GALOIS_H_SZ 16
#define ICP_QAT_HW_GALOIS_LEN_A_SZ 8
#define ICP_QAT_HW_GALOIS_E_CTR0_SZ 16
192 
/*
 * Auth state block sized for the largest (SHA-512) state: inner and outer
 * setup words, each followed by the corresponding saved hash state.
 */
struct icp_qat_hw_auth_sha512 {
	struct icp_qat_hw_auth_setup inner_setup;
	__u8 state1[ICP_QAT_HW_SHA512_STATE1_SZ];
	struct icp_qat_hw_auth_setup outer_setup;
	__u8 state2[ICP_QAT_HW_SHA512_STATE2_SZ];
};
199 
/* Generic auth algorithm block, sized for the largest (SHA-512) layout. */
struct icp_qat_hw_auth_algo_blk {
	struct icp_qat_hw_auth_sha512 sha;
};
203 
/* Position and mask of the Galois (GCM) len(A) value within its field. */
#define ICP_QAT_HW_GALOIS_LEN_A_BITPOS 0
#define ICP_QAT_HW_GALOIS_LEN_A_MASK 0xFFFFFFFF
206 
/* Hardware cipher algorithm selectors (algo field of the cipher config word). */
enum icp_qat_hw_cipher_algo {
	ICP_QAT_HW_CIPHER_ALGO_NULL = 0,
	ICP_QAT_HW_CIPHER_ALGO_DES = 1,
	ICP_QAT_HW_CIPHER_ALGO_3DES = 2,
	ICP_QAT_HW_CIPHER_ALGO_AES128 = 3,
	ICP_QAT_HW_CIPHER_ALGO_AES192 = 4,
	ICP_QAT_HW_CIPHER_ALGO_AES256 = 5,
	ICP_QAT_HW_CIPHER_ALGO_ARC4 = 6,
	ICP_QAT_HW_CIPHER_ALGO_KASUMI = 7,
	ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2 = 8,
	ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3 = 9,
	ICP_QAT_HW_CIPHER_DELIMITER = 10	/* one past the last valid value */
};
220 
/* Cipher mode selectors (mode field of the cipher config word). */
enum icp_qat_hw_cipher_mode {
	ICP_QAT_HW_CIPHER_ECB_MODE = 0,
	ICP_QAT_HW_CIPHER_CBC_MODE = 1,
	ICP_QAT_HW_CIPHER_CTR_MODE = 2,
	ICP_QAT_HW_CIPHER_F8_MODE = 3,
	/* values 4 and 5 are not used */
	ICP_QAT_HW_CIPHER_XTS_MODE = 6,
	ICP_QAT_HW_CIPHER_MODE_DELIMITER = 7	/* one past the last valid mode */
};
229 
/*
 * Legacy cipher slice configuration.
 * @val:      packed mode/algo/convert/dir word, built with
 *            ICP_QAT_HW_CIPHER_CONFIG_BUILD()
 * @reserved: reserved, not used by the driver
 */
struct icp_qat_hw_cipher_config {
	__u32 val;
	__u32 reserved;
};
234 
/* Cipher direction (dir field of the cipher config word). */
enum icp_qat_hw_cipher_dir {
	ICP_QAT_HW_CIPHER_ENCRYPT = 0,
	ICP_QAT_HW_CIPHER_DECRYPT = 1,
};
239 
/* Key-convert flag (convert field of the cipher config word). */
enum icp_qat_hw_cipher_convert {
	ICP_QAT_HW_CIPHER_NO_CONVERT = 0,
	ICP_QAT_HW_CIPHER_KEY_CONVERT = 1,
};
244 
/* Field layout of the cipher config word (struct icp_qat_hw_cipher_config.val) */
#define QAT_CIPHER_MODE_BITPOS 4
#define QAT_CIPHER_MODE_MASK 0xF
#define QAT_CIPHER_ALGO_BITPOS 0
#define QAT_CIPHER_ALGO_MASK 0xF
#define QAT_CIPHER_CONVERT_BITPOS 9
#define QAT_CIPHER_CONVERT_MASK 0x1
#define QAT_CIPHER_DIR_BITPOS 8
#define QAT_CIPHER_DIR_MASK 0x1
/* F8 and XTS modes both take a double-length key */
#define QAT_CIPHER_MODE_F8_KEY_SZ_MULT 2
#define QAT_CIPHER_MODE_XTS_KEY_SZ_MULT 2
/*
 * Build the cipher config word from mode, algorithm, key-convert flag and
 * direction.  All macro parameters are fully parenthesized so caller
 * expressions with low-precedence operators expand correctly.
 */
#define ICP_QAT_HW_CIPHER_CONFIG_BUILD(mode, algo, convert, dir) \
	((((mode) & QAT_CIPHER_MODE_MASK) << QAT_CIPHER_MODE_BITPOS) | \
	(((algo) & QAT_CIPHER_ALGO_MASK) << QAT_CIPHER_ALGO_BITPOS) | \
	(((convert) & QAT_CIPHER_CONVERT_MASK) << QAT_CIPHER_CONVERT_BITPOS) | \
	(((dir) & QAT_CIPHER_DIR_MASK) << QAT_CIPHER_DIR_BITPOS))
/* Cipher block sizes, in bytes */
#define ICP_QAT_HW_DES_BLK_SZ 8
#define ICP_QAT_HW_3DES_BLK_SZ 8
#define ICP_QAT_HW_NULL_BLK_SZ 8
#define ICP_QAT_HW_AES_BLK_SZ 16
#define ICP_QAT_HW_KASUMI_BLK_SZ 8
#define ICP_QAT_HW_SNOW_3G_BLK_SZ 8
#define ICP_QAT_HW_ZUC_3G_BLK_SZ 8
/* Cipher key and IV sizes, in bytes */
#define ICP_QAT_HW_NULL_KEY_SZ 256
#define ICP_QAT_HW_DES_KEY_SZ 8
#define ICP_QAT_HW_3DES_KEY_SZ 24
#define ICP_QAT_HW_AES_128_KEY_SZ 16
#define ICP_QAT_HW_AES_192_KEY_SZ 24
#define ICP_QAT_HW_AES_256_KEY_SZ 32
#define ICP_QAT_HW_AES_128_F8_KEY_SZ (ICP_QAT_HW_AES_128_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_192_F8_KEY_SZ (ICP_QAT_HW_AES_192_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_256_F8_KEY_SZ (ICP_QAT_HW_AES_256_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_128_XTS_KEY_SZ (ICP_QAT_HW_AES_128_KEY_SZ * \
	QAT_CIPHER_MODE_XTS_KEY_SZ_MULT)
#define ICP_QAT_HW_AES_256_XTS_KEY_SZ (ICP_QAT_HW_AES_256_KEY_SZ * \
	QAT_CIPHER_MODE_XTS_KEY_SZ_MULT)
#define ICP_QAT_HW_KASUMI_KEY_SZ 16
#define ICP_QAT_HW_KASUMI_F8_KEY_SZ (ICP_QAT_HW_KASUMI_KEY_SZ * \
	QAT_CIPHER_MODE_F8_KEY_SZ_MULT)
#define ICP_QAT_HW_ARC4_KEY_SZ 256
#define ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ 16
#define ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ 16
#define ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ 16
#define ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ 16
#define ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR 2
#define INIT_SHRAM_CONSTANTS_TABLE_SZ 1024
297 
/*
 * Legacy cipher block sized for the largest key material
 * (AES-256 in F8 mode, i.e. a double-length key).
 */
struct icp_qat_hw_cipher_aes256_f8 {
	struct icp_qat_hw_cipher_config cipher_config;
	__u8 key[ICP_QAT_HW_AES_256_F8_KEY_SZ];
};
302 
/* UCS variant of the AES-256 F8 cipher block (wider config header). */
struct icp_qat_hw_ucs_cipher_aes256_f8 {
	struct icp_qat_hw_ucs_cipher_config cipher_config;
	__u8 key[ICP_QAT_HW_AES_256_F8_KEY_SZ];
};
307 
/*
 * Generic cipher algorithm block: overlays the legacy and UCS layouts,
 * 64-byte aligned as required by the hardware descriptor format
 * (alignment per the __aligned attribute; TODO confirm exact HW rule).
 */
struct icp_qat_hw_cipher_algo_blk {
	union {
		struct icp_qat_hw_cipher_aes256_f8 aes;
		struct icp_qat_hw_ucs_cipher_aes256_f8 ucs_aes;
	};
} __aligned(64);
314 
/* Compression direction (dir field of the compression config word). */
enum icp_qat_hw_compression_direction {
	ICP_QAT_HW_COMPRESSION_DIR_COMPRESS = 0,
	ICP_QAT_HW_COMPRESSION_DIR_DECOMPRESS = 1,
	ICP_QAT_HW_COMPRESSION_DIR_DELIMITER = 2	/* one past the last value */
};
320 
/* Delayed-match enable flag for the compression config word. */
enum icp_qat_hw_compression_delayed_match {
	ICP_QAT_HW_COMPRESSION_DELAYED_MATCH_DISABLED = 0,
	ICP_QAT_HW_COMPRESSION_DELAYED_MATCH_ENABLED = 1,
	ICP_QAT_HW_COMPRESSION_DELAYED_MATCH_DELIMITER = 2	/* one past the last value */
};
326 
/* Compression algorithm selector (algo field of the compression config word). */
enum icp_qat_hw_compression_algo {
	ICP_QAT_HW_COMPRESSION_ALGO_DEFLATE = 0,
	ICP_QAT_HW_COMPRESSION_ALGO_LZS = 1,
	ICP_QAT_HW_COMPRESSION_ALGO_DELIMITER = 2	/* one past the last value */
};
332 
/*
 * Compression search depth selector.  The numeric suffix is the depth
 * (1, 4, 8, 16, 128); the enum value is the hardware encoding.
 */
enum icp_qat_hw_compression_depth {
	ICP_QAT_HW_COMPRESSION_DEPTH_1 = 0,
	ICP_QAT_HW_COMPRESSION_DEPTH_4 = 1,
	ICP_QAT_HW_COMPRESSION_DEPTH_8 = 2,
	ICP_QAT_HW_COMPRESSION_DEPTH_16 = 3,
	ICP_QAT_HW_COMPRESSION_DEPTH_128 = 4,
	ICP_QAT_HW_COMPRESSION_DEPTH_DELIMITER = 5	/* one past the last value */
};
341 
/* File-type selector for the compression config word. */
enum icp_qat_hw_compression_file_type {
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_0 = 0,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_1 = 1,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_2 = 2,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_3 = 3,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_4 = 4,
	ICP_QAT_HW_COMPRESSION_FILE_TYPE_DELIMITER = 5	/* one past the last value */
};
350 
/*
 * Compression slice configuration.
 * @lower_val: packed dir/delayed/algo/depth/filetype word, built with
 *             ICP_QAT_HW_COMPRESSION_CONFIG_BUILD()
 * @upper_val: upper configuration word
 */
struct icp_qat_hw_compression_config {
	__u32 lower_val;
	__u32 upper_val;
};
355 
/* Field layout of the lower compression config word */
#define QAT_COMPRESSION_DIR_BITPOS 4
#define QAT_COMPRESSION_DIR_MASK 0x7
#define QAT_COMPRESSION_DELAYED_MATCH_BITPOS 16
#define QAT_COMPRESSION_DELAYED_MATCH_MASK 0x1
#define QAT_COMPRESSION_ALGO_BITPOS 31
#define QAT_COMPRESSION_ALGO_MASK 0x1
#define QAT_COMPRESSION_DEPTH_BITPOS 28
#define QAT_COMPRESSION_DEPTH_MASK 0x7
#define QAT_COMPRESSION_FILE_TYPE_BITPOS 24
#define QAT_COMPRESSION_FILE_TYPE_MASK 0xF

/*
 * Build the lower compression config word from direction, delayed-match
 * flag, algorithm, search depth and file type.
 */
#define ICP_QAT_HW_COMPRESSION_CONFIG_BUILD(dir, delayed, \
	algo, depth, filetype) \
	((((dir) & QAT_COMPRESSION_DIR_MASK) << \
	QAT_COMPRESSION_DIR_BITPOS) | \
	(((delayed) & QAT_COMPRESSION_DELAYED_MATCH_MASK) << \
	QAT_COMPRESSION_DELAYED_MATCH_BITPOS) | \
	(((algo) & QAT_COMPRESSION_ALGO_MASK) << \
	QAT_COMPRESSION_ALGO_BITPOS) | \
	(((depth) & QAT_COMPRESSION_DEPTH_MASK) << \
	QAT_COMPRESSION_DEPTH_BITPOS) | \
	(((filetype) & QAT_COMPRESSION_FILE_TYPE_MASK) << \
	QAT_COMPRESSION_FILE_TYPE_BITPOS))
379 
380 #endif
381