/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * IOMMU API for ARM architected SMMU implementations.
 *
 * Copyright (C) 2013 ARM Limited
 *
 * Author: Will Deacon <will.deacon@arm.com>
 */

#ifndef _ARM_SMMU_H
#define _ARM_SMMU_H

#include <linux/atomic.h>
#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/clk.h>
#include <linux/device.h>
#include <linux/io-64-nonatomic-hi-lo.h>
#include <linux/io-pgtable.h>
#include <linux/iommu.h>
#include <linux/irqreturn.h>
#include <linux/mutex.h>
#include <linux/spinlock.h>
#include <linux/types.h>

/* Configuration registers */
#define ARM_SMMU_GR0_sCR0		0x0
#define ARM_SMMU_sCR0_VMID16EN		BIT(31)
#define ARM_SMMU_sCR0_BSU		GENMASK(15, 14)
#define ARM_SMMU_sCR0_FB		BIT(13)
#define ARM_SMMU_sCR0_PTM		BIT(12)
#define ARM_SMMU_sCR0_VMIDPNE		BIT(11)
#define ARM_SMMU_sCR0_USFCFG		BIT(10)
#define ARM_SMMU_sCR0_GCFGFIE		BIT(5)
#define ARM_SMMU_sCR0_GCFGFRE		BIT(4)
#define ARM_SMMU_sCR0_EXIDENABLE	BIT(3)
#define ARM_SMMU_sCR0_GFIE		BIT(2)
#define ARM_SMMU_sCR0_GFRE		BIT(1)
#define ARM_SMMU_sCR0_CLIENTPD		BIT(0)

/* Auxiliary Configuration register */
#define ARM_SMMU_GR0_sACR		0x10

/* Identification registers */
#define ARM_SMMU_GR0_ID0		0x20
#define ARM_SMMU_ID0_S1TS		BIT(30)
#define ARM_SMMU_ID0_S2TS		BIT(29)
#define ARM_SMMU_ID0_NTS		BIT(28)
#define ARM_SMMU_ID0_SMS		BIT(27)
#define ARM_SMMU_ID0_ATOSNS		BIT(26)
#define ARM_SMMU_ID0_PTFS_NO_AARCH32	BIT(25)
#define ARM_SMMU_ID0_PTFS_NO_AARCH32S	BIT(24)
#define ARM_SMMU_ID0_NUMIRPT		GENMASK(23, 16)
#define ARM_SMMU_ID0_CTTW		BIT(14)
#define ARM_SMMU_ID0_NUMSIDB		GENMASK(12, 9)
#define ARM_SMMU_ID0_EXIDS		BIT(8)
#define ARM_SMMU_ID0_NUMSMRG		GENMASK(7, 0)

#define ARM_SMMU_GR0_ID1		0x24
#define ARM_SMMU_ID1_PAGESIZE		BIT(31)
#define ARM_SMMU_ID1_NUMPAGENDXB	GENMASK(30, 28)
#define ARM_SMMU_ID1_NUMS2CB		GENMASK(23, 16)
#define ARM_SMMU_ID1_NUMCB		GENMASK(7, 0)

#define ARM_SMMU_GR0_ID2		0x28
#define ARM_SMMU_ID2_VMID16		BIT(15)
#define ARM_SMMU_ID2_PTFS_64K		BIT(14)
#define ARM_SMMU_ID2_PTFS_16K		BIT(13)
#define ARM_SMMU_ID2_PTFS_4K		BIT(12)
#define ARM_SMMU_ID2_UBS		GENMASK(11, 8)
#define ARM_SMMU_ID2_OAS		GENMASK(7, 4)
#define ARM_SMMU_ID2_IAS		GENMASK(3, 0)

#define ARM_SMMU_GR0_ID3		0x2c
#define ARM_SMMU_GR0_ID4		0x30
#define ARM_SMMU_GR0_ID5		0x34
#define ARM_SMMU_GR0_ID6		0x38

#define ARM_SMMU_GR0_ID7		0x3c
#define ARM_SMMU_ID7_MAJOR		GENMASK(7, 4)
#define ARM_SMMU_ID7_MINOR		GENMASK(3, 0)

#define ARM_SMMU_GR0_sGFSR		0x48
#define ARM_SMMU_sGFSR_USF		BIT(1)

#define ARM_SMMU_GR0_sGFSYNR0		0x50
#define ARM_SMMU_GR0_sGFSYNR1		0x54
#define ARM_SMMU_GR0_sGFSYNR2		0x58

/* Global TLB invalidation */
#define ARM_SMMU_GR0_TLBIVMID		0x64
#define ARM_SMMU_GR0_TLBIALLNSNH	0x68
#define ARM_SMMU_GR0_TLBIALLH		0x6c
#define ARM_SMMU_GR0_sTLBGSYNC		0x70

#define ARM_SMMU_GR0_sTLBGSTATUS	0x74
#define ARM_SMMU_sTLBGSTATUS_GSACTIVE	BIT(0)

/* Stream mapping registers */
#define ARM_SMMU_GR0_SMR(n)		(0x800 + ((n) << 2))
#define ARM_SMMU_SMR_VALID		BIT(31)
#define ARM_SMMU_SMR_MASK		GENMASK(31, 16)
#define ARM_SMMU_SMR_ID			GENMASK(15, 0)

#define ARM_SMMU_GR0_S2CR(n)		(0xc00 + ((n) << 2))
#define ARM_SMMU_S2CR_PRIVCFG		GENMASK(25, 24)
enum arm_smmu_s2cr_privcfg {
	S2CR_PRIVCFG_DEFAULT,
	S2CR_PRIVCFG_DIPAN,
	S2CR_PRIVCFG_UNPRIV,
	S2CR_PRIVCFG_PRIV,
};
#define ARM_SMMU_S2CR_TYPE		GENMASK(17, 16)
enum arm_smmu_s2cr_type {
	S2CR_TYPE_TRANS,
	S2CR_TYPE_BYPASS,
	S2CR_TYPE_FAULT,
};
#define ARM_SMMU_S2CR_EXIDVALID		BIT(10)
#define ARM_SMMU_S2CR_CBNDX		GENMASK(7, 0)
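/*
 * Note: S2CR.TYPE determines what happens to transactions that match the SMR
 * at the same index: TRANS sends them to the context bank selected by CBNDX,
 * BYPASS passes them through untranslated, and FAULT terminates them.
 */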

/* Context bank attribute registers */
#define ARM_SMMU_GR1_CBAR(n)		(0x0 + ((n) << 2))
#define ARM_SMMU_CBAR_IRPTNDX		GENMASK(31, 24)
#define ARM_SMMU_CBAR_TYPE		GENMASK(17, 16)
enum arm_smmu_cbar_type {
	CBAR_TYPE_S2_TRANS,
	CBAR_TYPE_S1_TRANS_S2_BYPASS,
	CBAR_TYPE_S1_TRANS_S2_FAULT,
	CBAR_TYPE_S1_TRANS_S2_TRANS,
};
#define ARM_SMMU_CBAR_S1_MEMATTR	GENMASK(15, 12)
#define ARM_SMMU_CBAR_S1_MEMATTR_WB	0xf
#define ARM_SMMU_CBAR_S1_BPSHCFG	GENMASK(9, 8)
#define ARM_SMMU_CBAR_S1_BPSHCFG_NSH	3
#define ARM_SMMU_CBAR_VMID		GENMASK(7, 0)

#define ARM_SMMU_GR1_CBFRSYNRA(n)	(0x400 + ((n) << 2))
#define ARM_SMMU_CBFRSYNRA_SID		GENMASK(15, 0)

#define ARM_SMMU_GR1_CBA2R(n)		(0x800 + ((n) << 2))
#define ARM_SMMU_CBA2R_VMID16		GENMASK(31, 16)
#define ARM_SMMU_CBA2R_VA64		BIT(0)

#define ARM_SMMU_CB_SCTLR		0x0
#define ARM_SMMU_SCTLR_S1_ASIDPNE	BIT(12)
#define ARM_SMMU_SCTLR_CFCFG		BIT(7)
#define ARM_SMMU_SCTLR_HUPCF		BIT(8)
#define ARM_SMMU_SCTLR_CFIE		BIT(6)
#define ARM_SMMU_SCTLR_CFRE		BIT(5)
#define ARM_SMMU_SCTLR_E		BIT(4)
#define ARM_SMMU_SCTLR_AFE		BIT(2)
#define ARM_SMMU_SCTLR_TRE		BIT(1)
#define ARM_SMMU_SCTLR_M		BIT(0)

#define ARM_SMMU_CB_ACTLR		0x4

#define ARM_SMMU_CB_RESUME		0x8
#define ARM_SMMU_RESUME_TERMINATE	BIT(0)

#define ARM_SMMU_CB_TCR2		0x10
#define ARM_SMMU_TCR2_SEP		GENMASK(17, 15)
#define ARM_SMMU_TCR2_SEP_UPSTREAM	0x7
#define ARM_SMMU_TCR2_AS		BIT(4)
#define ARM_SMMU_TCR2_PASIZE		GENMASK(3, 0)

#define ARM_SMMU_CB_TTBR0		0x20
#define ARM_SMMU_CB_TTBR1		0x28
#define ARM_SMMU_TTBRn_ASID		GENMASK_ULL(63, 48)

#define ARM_SMMU_CB_TCR			0x30
#define ARM_SMMU_TCR_EAE		BIT(31)
#define ARM_SMMU_TCR_EPD1		BIT(23)
#define ARM_SMMU_TCR_A1			BIT(22)
#define ARM_SMMU_TCR_TG0		GENMASK(15, 14)
#define ARM_SMMU_TCR_SH0		GENMASK(13, 12)
#define ARM_SMMU_TCR_ORGN0		GENMASK(11, 10)
#define ARM_SMMU_TCR_IRGN0		GENMASK(9, 8)
#define ARM_SMMU_TCR_EPD0		BIT(7)
#define ARM_SMMU_TCR_T0SZ		GENMASK(5, 0)

#define ARM_SMMU_VTCR_RES1		BIT(31)
#define ARM_SMMU_VTCR_PS		GENMASK(18, 16)
#define ARM_SMMU_VTCR_TG0		ARM_SMMU_TCR_TG0
#define ARM_SMMU_VTCR_SH0		ARM_SMMU_TCR_SH0
#define ARM_SMMU_VTCR_ORGN0		ARM_SMMU_TCR_ORGN0
#define ARM_SMMU_VTCR_IRGN0		ARM_SMMU_TCR_IRGN0
#define ARM_SMMU_VTCR_SL0		GENMASK(7, 6)
#define ARM_SMMU_VTCR_T0SZ		ARM_SMMU_TCR_T0SZ

#define ARM_SMMU_CB_CONTEXTIDR		0x34
#define ARM_SMMU_CB_S1_MAIR0		0x38
#define ARM_SMMU_CB_S1_MAIR1		0x3c

#define ARM_SMMU_CB_PAR			0x50
#define ARM_SMMU_CB_PAR_F		BIT(0)

#define ARM_SMMU_CB_FSR			0x58
#define ARM_SMMU_CB_FSR_MULTI		BIT(31)
#define ARM_SMMU_CB_FSR_SS		BIT(30)
#define ARM_SMMU_CB_FSR_FORMAT		GENMASK(10, 9)
#define ARM_SMMU_CB_FSR_UUT		BIT(8)
#define ARM_SMMU_CB_FSR_ASF		BIT(7)
#define ARM_SMMU_CB_FSR_TLBLKF		BIT(6)
#define ARM_SMMU_CB_FSR_TLBMCF		BIT(5)
#define ARM_SMMU_CB_FSR_EF		BIT(4)
#define ARM_SMMU_CB_FSR_PF		BIT(3)
#define ARM_SMMU_CB_FSR_AFF		BIT(2)
#define ARM_SMMU_CB_FSR_TF		BIT(1)

#define ARM_SMMU_CB_FSR_IGN		(ARM_SMMU_CB_FSR_AFF |		\
					 ARM_SMMU_CB_FSR_ASF |		\
					 ARM_SMMU_CB_FSR_TLBMCF |	\
					 ARM_SMMU_CB_FSR_TLBLKF)

#define ARM_SMMU_CB_FSR_FAULT		(ARM_SMMU_CB_FSR_MULTI |	\
					 ARM_SMMU_CB_FSR_SS |		\
					 ARM_SMMU_CB_FSR_UUT |		\
					 ARM_SMMU_CB_FSR_EF |		\
					 ARM_SMMU_CB_FSR_PF |		\
					 ARM_SMMU_CB_FSR_TF |		\
					 ARM_SMMU_CB_FSR_IGN)

#define ARM_SMMU_CB_FAR			0x60

#define ARM_SMMU_CB_FSYNR0		0x68
#define ARM_SMMU_CB_FSYNR0_PLVL		GENMASK(1, 0)
#define ARM_SMMU_CB_FSYNR0_WNR		BIT(4)
#define ARM_SMMU_CB_FSYNR0_PNU		BIT(5)
#define ARM_SMMU_CB_FSYNR0_IND		BIT(6)
#define ARM_SMMU_CB_FSYNR0_NSATTR	BIT(8)
#define ARM_SMMU_CB_FSYNR0_PTWF		BIT(10)
#define ARM_SMMU_CB_FSYNR0_AFR		BIT(11)
#define ARM_SMMU_CB_FSYNR0_S1CBNDX	GENMASK(23, 16)

#define ARM_SMMU_CB_FSYNR1		0x6c

#define ARM_SMMU_CB_S1_TLBIVA		0x600
#define ARM_SMMU_CB_S1_TLBIASID		0x610
#define ARM_SMMU_CB_S1_TLBIVAL		0x620
#define ARM_SMMU_CB_S2_TLBIIPAS2	0x630
#define ARM_SMMU_CB_S2_TLBIIPAS2L	0x638
#define ARM_SMMU_CB_TLBSYNC		0x7f0
#define ARM_SMMU_CB_TLBSTATUS		0x7f4
#define ARM_SMMU_CB_ATS1PR		0x800

#define ARM_SMMU_CB_ATSR		0x8f0
#define ARM_SMMU_CB_ATSR_ACTIVE		BIT(0)

/* Maximum number of context banks per SMMU */
#define ARM_SMMU_MAX_CBS		128

#define TLB_LOOP_TIMEOUT		1000000	/* 1s! */
#define TLB_SPIN_COUNT			10

/* Shared driver definitions */
enum arm_smmu_arch_version {
	ARM_SMMU_V1,
	ARM_SMMU_V1_64K,
	ARM_SMMU_V2,
};

enum arm_smmu_implementation {
	GENERIC_SMMU,
	ARM_MMU500,
	CAVIUM_SMMUV2,
	QCOM_SMMUV2,
};

struct arm_smmu_s2cr {
	struct iommu_group		*group;
	int				count;
	enum arm_smmu_s2cr_type		type;
	enum arm_smmu_s2cr_privcfg	privcfg;
	u8				cbndx;
};

struct arm_smmu_smr {
	u16				mask;
	u16				id;
	bool				valid;
	bool				pinned;
};
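/*
 * Note on the flags above: "valid" tracks whether the entry is programmed
 * into a hardware SMR; entries marked "pinned" were reserved by
 * implementation-specific probe code (typically stream IDs handed over from
 * boot firmware or described as reserved regions) and are not reallocated
 * or torn down by the driver.
 */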

struct arm_smmu_device {
	struct device			*dev;

	void __iomem			*base;
	phys_addr_t			ioaddr;
	unsigned int			numpage;
	unsigned int			pgshift;

#define ARM_SMMU_FEAT_COHERENT_WALK	(1 << 0)
#define ARM_SMMU_FEAT_STREAM_MATCH	(1 << 1)
#define ARM_SMMU_FEAT_TRANS_S1		(1 << 2)
#define ARM_SMMU_FEAT_TRANS_S2		(1 << 3)
#define ARM_SMMU_FEAT_TRANS_NESTED	(1 << 4)
#define ARM_SMMU_FEAT_TRANS_OPS		(1 << 5)
#define ARM_SMMU_FEAT_VMID16		(1 << 6)
#define ARM_SMMU_FEAT_FMT_AARCH64_4K	(1 << 7)
#define ARM_SMMU_FEAT_FMT_AARCH64_16K	(1 << 8)
#define ARM_SMMU_FEAT_FMT_AARCH64_64K	(1 << 9)
#define ARM_SMMU_FEAT_FMT_AARCH32_L	(1 << 10)
#define ARM_SMMU_FEAT_FMT_AARCH32_S	(1 << 11)
#define ARM_SMMU_FEAT_EXIDS		(1 << 12)
	u32				features;

	enum arm_smmu_arch_version	version;
	enum arm_smmu_implementation	model;
	const struct arm_smmu_impl	*impl;

	u32				num_context_banks;
	u32				num_s2_context_banks;
	DECLARE_BITMAP(context_map, ARM_SMMU_MAX_CBS);
	struct arm_smmu_cb		*cbs;
	atomic_t			irptndx;

	u32				num_mapping_groups;
	u16				streamid_mask;
	u16				smr_mask_mask;
	struct arm_smmu_smr		*smrs;
	struct arm_smmu_s2cr		*s2crs;
	struct mutex			stream_map_mutex;

	unsigned long			va_size;
	unsigned long			ipa_size;
	unsigned long			pa_size;
	unsigned long			pgsize_bitmap;

	int				num_context_irqs;
	int				num_clks;
	unsigned int			*irqs;
	struct clk_bulk_data		*clks;

	spinlock_t			global_sync_lock;

	/* IOMMU core code handle */
	struct iommu_device		iommu;
};

enum arm_smmu_context_fmt {
	ARM_SMMU_CTX_FMT_NONE,
	ARM_SMMU_CTX_FMT_AARCH64,
	ARM_SMMU_CTX_FMT_AARCH32_L,
	ARM_SMMU_CTX_FMT_AARCH32_S,
};

struct arm_smmu_cfg {
	u8				cbndx;
	u8				irptndx;
	union {
		u16			asid;
		u16			vmid;
	};
	enum arm_smmu_cbar_type		cbar;
	enum arm_smmu_context_fmt	fmt;
	bool				flush_walk_prefer_tlbiasid;
};
#define ARM_SMMU_INVALID_IRPTNDX	0xff

struct arm_smmu_cb {
	u64				ttbr[2];
	u32				tcr[2];
	u32				mair[2];
	struct arm_smmu_cfg		*cfg;
};

enum arm_smmu_domain_stage {
	ARM_SMMU_DOMAIN_S1 = 0,
	ARM_SMMU_DOMAIN_S2,
	ARM_SMMU_DOMAIN_NESTED,
};

struct arm_smmu_domain {
	struct arm_smmu_device		*smmu;
	struct io_pgtable_ops		*pgtbl_ops;
	unsigned long			pgtbl_quirks;
	const struct iommu_flush_ops	*flush_ops;
	struct arm_smmu_cfg		cfg;
	enum arm_smmu_domain_stage	stage;
	struct mutex			init_mutex; /* Protects smmu pointer */
	spinlock_t			cb_lock; /* Serialises ATS1* ops and TLB syncs */
	struct iommu_domain		domain;
};

struct arm_smmu_master_cfg {
	struct arm_smmu_device		*smmu;
	s16				smendx[];
};

static inline u32 arm_smmu_lpae_tcr(const struct io_pgtable_cfg *cfg)
{
	u32 tcr = FIELD_PREP(ARM_SMMU_TCR_TG0, cfg->arm_lpae_s1_cfg.tcr.tg) |
		FIELD_PREP(ARM_SMMU_TCR_SH0, cfg->arm_lpae_s1_cfg.tcr.sh) |
		FIELD_PREP(ARM_SMMU_TCR_ORGN0, cfg->arm_lpae_s1_cfg.tcr.orgn) |
		FIELD_PREP(ARM_SMMU_TCR_IRGN0, cfg->arm_lpae_s1_cfg.tcr.irgn) |
		FIELD_PREP(ARM_SMMU_TCR_T0SZ, cfg->arm_lpae_s1_cfg.tcr.tsz);

	/*
	 * When TTBR1 is selected shift the TCR fields by 16 bits and disable
	 * translation in TTBR0
	 */
	if (cfg->quirks & IO_PGTABLE_QUIRK_ARM_TTBR1) {
		tcr = (tcr << 16) & ~ARM_SMMU_TCR_A1;
		tcr |= ARM_SMMU_TCR_EPD0;
	} else
		tcr |= ARM_SMMU_TCR_EPD1;

	return tcr;
}

static inline u32 arm_smmu_lpae_tcr2(const struct io_pgtable_cfg *cfg)
{
	return FIELD_PREP(ARM_SMMU_TCR2_PASIZE, cfg->arm_lpae_s1_cfg.tcr.ips) |
	       FIELD_PREP(ARM_SMMU_TCR2_SEP, ARM_SMMU_TCR2_SEP_UPSTREAM);
}

static inline u32 arm_smmu_lpae_vtcr(const struct io_pgtable_cfg *cfg)
{
	return ARM_SMMU_VTCR_RES1 |
	       FIELD_PREP(ARM_SMMU_VTCR_PS, cfg->arm_lpae_s2_cfg.vtcr.ps) |
	       FIELD_PREP(ARM_SMMU_VTCR_TG0, cfg->arm_lpae_s2_cfg.vtcr.tg) |
	       FIELD_PREP(ARM_SMMU_VTCR_SH0, cfg->arm_lpae_s2_cfg.vtcr.sh) |
	       FIELD_PREP(ARM_SMMU_VTCR_ORGN0, cfg->arm_lpae_s2_cfg.vtcr.orgn) |
	       FIELD_PREP(ARM_SMMU_VTCR_IRGN0, cfg->arm_lpae_s2_cfg.vtcr.irgn) |
	       FIELD_PREP(ARM_SMMU_VTCR_SL0, cfg->arm_lpae_s2_cfg.vtcr.sl) |
	       FIELD_PREP(ARM_SMMU_VTCR_T0SZ, cfg->arm_lpae_s2_cfg.vtcr.tsz);
}
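
/*
 * Illustrative usage (a hedged sketch of how the core driver consumes these
 * helpers when filling in a context bank from an io-pgtable config; see
 * arm_smmu_init_context_bank() in arm-smmu.c):
 *
 *	if (stage1) {
 *		cb->tcr[0] = arm_smmu_lpae_tcr(pgtbl_cfg);
 *		cb->tcr[1] = arm_smmu_lpae_tcr2(pgtbl_cfg);
 *	} else {
 *		cb->tcr[0] = arm_smmu_lpae_vtcr(pgtbl_cfg);
 *	}
 */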

/* Implementation details, yay! */
struct arm_smmu_impl {
	u32 (*read_reg)(struct arm_smmu_device *smmu, int page, int offset);
	void (*write_reg)(struct arm_smmu_device *smmu, int page, int offset,
			  u32 val);
	u64 (*read_reg64)(struct arm_smmu_device *smmu, int page, int offset);
	void (*write_reg64)(struct arm_smmu_device *smmu, int page, int offset,
			    u64 val);
	int (*cfg_probe)(struct arm_smmu_device *smmu);
	int (*reset)(struct arm_smmu_device *smmu);
	int (*init_context)(struct arm_smmu_domain *smmu_domain,
			struct io_pgtable_cfg *cfg, struct device *dev);
	void (*tlb_sync)(struct arm_smmu_device *smmu, int page, int sync,
			 int status);
	int (*def_domain_type)(struct device *dev);
	irqreturn_t (*global_fault)(int irq, void *dev);
	irqreturn_t (*context_fault)(int irq, void *dev);
	bool context_fault_needs_threaded_irq;
	int (*alloc_context_bank)(struct arm_smmu_domain *smmu_domain,
				  struct arm_smmu_device *smmu,
				  struct device *dev, int start);
	void (*write_s2cr)(struct arm_smmu_device *smmu, int idx);
	void (*write_sctlr)(struct arm_smmu_device *smmu, int idx, u32 reg);
	void (*probe_finalize)(struct arm_smmu_device *smmu, struct device *dev);
};
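/*
 * These hooks let implementation-specific code (arm-smmu-impl.c,
 * arm-smmu-nvidia.c, arm-smmu-qcom.c) override register accesses, reset,
 * context bank allocation and fault handling for quirky hardware; any hook
 * left NULL falls back to the generic behaviour in arm-smmu.c.
 */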

#define INVALID_SMENDX			-1
#define cfg_smendx(cfg, fw, i) \
	(i >= fw->num_ids ? INVALID_SMENDX : cfg->smendx[i])
#define for_each_cfg_sme(cfg, fw, i, idx) \
	for (i = 0; idx = cfg_smendx(cfg, fw, i), i < fw->num_ids; ++i)
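
/*
 * Illustrative usage (a sketch, not part of the original header), assuming a
 * struct arm_smmu_master_cfg *cfg and struct iommu_fwspec *fwspec are in
 * scope, much as arm-smmu.c does when programming or tearing down SMRs/S2CRs:
 *
 *	int i, idx;
 *
 *	for_each_cfg_sme(cfg, fwspec, i, idx) {
 *		if (idx == INVALID_SMENDX)
 *			continue;
 *		... operate on smmu->smrs[idx] and smmu->s2crs[idx] ...
 *	}
 */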

static inline int __arm_smmu_alloc_bitmap(unsigned long *map, int start, int end)
{
	int idx;

	do {
		idx = find_next_zero_bit(map, end, start);
		if (idx == end)
			return -ENOSPC;
	} while (test_and_set_bit(idx, map));

	return idx;
}
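
/*
 * __arm_smmu_alloc_bitmap() atomically claims the first clear bit in
 * [start, end), retrying if another CPU races to set the same bit, and
 * returns -ENOSPC when the range is full. A hedged example of how the driver
 * uses it to grab a free context bank:
 *
 *	ret = __arm_smmu_alloc_bitmap(smmu->context_map, start,
 *				      smmu->num_context_banks);
 */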

static inline void __iomem *arm_smmu_page(struct arm_smmu_device *smmu, int n)
{
	return smmu->base + (n << smmu->pgshift);
}

static inline u32 arm_smmu_readl(struct arm_smmu_device *smmu, int page, int offset)
{
	if (smmu->impl && unlikely(smmu->impl->read_reg))
		return smmu->impl->read_reg(smmu, page, offset);
	return readl_relaxed(arm_smmu_page(smmu, page) + offset);
}

static inline void arm_smmu_writel(struct arm_smmu_device *smmu, int page,
				   int offset, u32 val)
{
	if (smmu->impl && unlikely(smmu->impl->write_reg))
		smmu->impl->write_reg(smmu, page, offset, val);
	else
		writel_relaxed(val, arm_smmu_page(smmu, page) + offset);
}

static inline u64 arm_smmu_readq(struct arm_smmu_device *smmu, int page, int offset)
{
	if (smmu->impl && unlikely(smmu->impl->read_reg64))
		return smmu->impl->read_reg64(smmu, page, offset);
	return readq_relaxed(arm_smmu_page(smmu, page) + offset);
}

static inline void arm_smmu_writeq(struct arm_smmu_device *smmu, int page,
				   int offset, u64 val)
{
	if (smmu->impl && unlikely(smmu->impl->write_reg64))
		smmu->impl->write_reg64(smmu, page, offset, val);
	else
		writeq_relaxed(val, arm_smmu_page(smmu, page) + offset);
}

#define ARM_SMMU_GR0		0
#define ARM_SMMU_GR1		1
#define ARM_SMMU_CB(s, n)	((s)->numpage + (n))
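/*
 * Register accesses are expressed as a (page, offset) pair: page 0 is global
 * register space 0, page 1 is global register space 1, and context bank n
 * lives at page (numpage + n), i.e. the context banks sit immediately after
 * the global register space.
 */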

#define arm_smmu_gr0_read(s, o)		\
	arm_smmu_readl((s), ARM_SMMU_GR0, (o))
#define arm_smmu_gr0_write(s, o, v)	\
	arm_smmu_writel((s), ARM_SMMU_GR0, (o), (v))

#define arm_smmu_gr1_read(s, o)		\
	arm_smmu_readl((s), ARM_SMMU_GR1, (o))
#define arm_smmu_gr1_write(s, o, v)	\
	arm_smmu_writel((s), ARM_SMMU_GR1, (o), (v))

#define arm_smmu_cb_read(s, n, o)	\
	arm_smmu_readl((s), ARM_SMMU_CB((s), (n)), (o))
#define arm_smmu_cb_write(s, n, o, v)	\
	arm_smmu_writel((s), ARM_SMMU_CB((s), (n)), (o), (v))
#define arm_smmu_cb_readq(s, n, o)	\
	arm_smmu_readq((s), ARM_SMMU_CB((s), (n)), (o))
#define arm_smmu_cb_writeq(s, n, o, v)	\
	arm_smmu_writeq((s), ARM_SMMU_CB((s), (n)), (o), (v))
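
/*
 * Illustrative usage (a hedged sketch of the context fault path in
 * arm-smmu.c): the fault status bits are write-one-to-clear, so the handler
 * reads FSR and writes the value straight back to acknowledge the fault:
 *
 *	u32 fsr = arm_smmu_cb_read(smmu, idx, ARM_SMMU_CB_FSR);
 *
 *	if (fsr & ARM_SMMU_CB_FSR_FAULT)
 *		arm_smmu_cb_write(smmu, idx, ARM_SMMU_CB_FSR, fsr);
 */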

struct arm_smmu_device *arm_smmu_impl_init(struct arm_smmu_device *smmu);
struct arm_smmu_device *nvidia_smmu_impl_init(struct arm_smmu_device *smmu);
struct arm_smmu_device *qcom_smmu_impl_init(struct arm_smmu_device *smmu);

void arm_smmu_write_context_bank(struct arm_smmu_device *smmu, int idx);
int arm_mmu500_reset(struct arm_smmu_device *smmu);

struct arm_smmu_context_fault_info {
	unsigned long iova;
	u32 fsr;
	u32 fsynr;
	u32 cbfrsynra;
};

void arm_smmu_read_context_fault_info(struct arm_smmu_device *smmu, int idx,
				      struct arm_smmu_context_fault_info *cfi);

void arm_smmu_print_context_fault_info(struct arm_smmu_device *smmu, int idx,
				       const struct arm_smmu_context_fault_info *cfi);

#endif /* _ARM_SMMU_H */