/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2019 Intel Corporation
 */

#ifndef __INTEL_DE_H__
#define __INTEL_DE_H__

#include "intel_display_conversion.h"
#include "intel_display_core.h"
#include "intel_dmc_wl.h"
#include "intel_dsb.h"
#include "intel_uncore.h"
#include "intel_uncore_trace.h"

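/*
 * Display engine ("de") register accessors.
 *
 * These helpers wrap the intel_uncore MMIO accessors for display registers.
 * The regular variants take a DMC wakelock reference for the register
 * (intel_dmc_wl_get()/intel_dmc_wl_put()) for the duration of the access;
 * see intel_dmc_wl.h. The intel_de_*() wrapper macros go through
 * __to_intel_display(), so callers may pass any pointer type that converts
 * to struct intel_display *.
 */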
static inline struct intel_uncore *__to_uncore(struct intel_display *display)
{
	return to_intel_uncore(display->drm);
}

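/*
 * Register reads (32-bit, 8-bit, and 64-bit split across two 32-bit
 * registers), each performed with the DMC wakelock held for the
 * register(s) being read.
 */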
static inline u32
__intel_de_read(struct intel_display *display, i915_reg_t reg)
{
	u32 val;

	intel_dmc_wl_get(display, reg);

	val = intel_uncore_read(__to_uncore(display), reg);

	intel_dmc_wl_put(display, reg);

	return val;
}
#define intel_de_read(p,...) __intel_de_read(__to_intel_display(p), __VA_ARGS__)

static inline u8
intel_de_read8(struct intel_display *display, i915_reg_t reg)
{
	u8 val;

	intel_dmc_wl_get(display, reg);

	val = intel_uncore_read8(__to_uncore(display), reg);

	intel_dmc_wl_put(display, reg);

	return val;
}

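/*
 * Read a 64-bit quantity exposed as a lower/upper pair of 32-bit registers;
 * wakelock references are taken for both halves.
 */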
static inline u64
intel_de_read64_2x32(struct intel_display *display,
		     i915_reg_t lower_reg, i915_reg_t upper_reg)
{
	u64 val;

	intel_dmc_wl_get(display, lower_reg);
	intel_dmc_wl_get(display, upper_reg);

	val = intel_uncore_read64_2x32(__to_uncore(display), lower_reg,
				       upper_reg);

	intel_dmc_wl_put(display, upper_reg);
	intel_dmc_wl_put(display, lower_reg);

	return val;
}

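/*
 * Posting read: read the register back only to flush out preceding writes;
 * the value read is discarded.
 */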
static inline void
__intel_de_posting_read(struct intel_display *display, i915_reg_t reg)
{
	intel_dmc_wl_get(display, reg);

	intel_uncore_posting_read(__to_uncore(display), reg);

	intel_dmc_wl_put(display, reg);
}
#define intel_de_posting_read(p,...) __intel_de_posting_read(__to_intel_display(p), __VA_ARGS__)

static inline void
__intel_de_write(struct intel_display *display, i915_reg_t reg, u32 val)
{
	intel_dmc_wl_get(display, reg);

	intel_uncore_write(__to_uncore(display), reg, val);

	intel_dmc_wl_put(display, reg);
}
#define intel_de_write(p,...) __intel_de_write(__to_intel_display(p), __VA_ARGS__)

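/*
 * Read-modify-write: clear the bits in @clear, set the bits in @set, and
 * return the value read before modification. The _nowl variant skips the
 * DMC wakelock handling.
 */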
static inline u32
__intel_de_rmw_nowl(struct intel_display *display, i915_reg_t reg,
		    u32 clear, u32 set)
{
	return intel_uncore_rmw(__to_uncore(display), reg, clear, set);
}

static inline u32
__intel_de_rmw(struct intel_display *display, i915_reg_t reg, u32 clear,
	       u32 set)
{
	u32 val;

	intel_dmc_wl_get(display, reg);

	val = __intel_de_rmw_nowl(display, reg, clear, set);

	intel_dmc_wl_put(display, reg);

	return val;
}
#define intel_de_rmw(p,...) __intel_de_rmw(__to_intel_display(p), __VA_ARGS__)

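/*
 * Register wait helpers: poll @reg until (read & @mask) == @value or the
 * timeout expires. The _nowl variants do not touch the DMC wakelock, and the
 * _atomic_nowl variant limits itself to the microsecond busy-wait phase, as
 * its name suggests.
 */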
static inline int
__intel_de_wait_for_register_nowl(struct intel_display *display,
				  i915_reg_t reg,
				  u32 mask, u32 value, unsigned int timeout)
{
	return intel_wait_for_register(__to_uncore(display), reg, mask,
				       value, timeout);
}

static inline int
__intel_de_wait_for_register_atomic_nowl(struct intel_display *display,
					 i915_reg_t reg,
					 u32 mask, u32 value,
					 unsigned int fast_timeout_us)
{
	return __intel_wait_for_register(__to_uncore(display), reg, mask,
					 value, fast_timeout_us, 0, NULL);
}

static inline int
intel_de_wait(struct intel_display *display, i915_reg_t reg,
	      u32 mask, u32 value, unsigned int timeout)
{
	int ret;

	intel_dmc_wl_get(display, reg);

	ret = __intel_de_wait_for_register_nowl(display, reg, mask, value,
						timeout);

	intel_dmc_wl_put(display, reg);

	return ret;
}

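/*
 * Like intel_de_wait(), but polls via intel_wait_for_register_fw(), the raw
 * uncore wait (presumably leaving forcewake handling to the caller); the DMC
 * wakelock is still taken.
 */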
static inline int
intel_de_wait_fw(struct intel_display *display, i915_reg_t reg,
		 u32 mask, u32 value, unsigned int timeout)
{
	int ret;

	intel_dmc_wl_get(display, reg);

	ret = intel_wait_for_register_fw(__to_uncore(display), reg, mask,
					 value, timeout);

	intel_dmc_wl_put(display, reg);

	return ret;
}

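/*
 * Full-control wait: separate fast (microsecond) and slow (millisecond)
 * timeout phases, with the register value observed at completion optionally
 * returned via @out_value.
 */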
static inline int
intel_de_wait_custom(struct intel_display *display, i915_reg_t reg,
		     u32 mask, u32 value,
		     unsigned int fast_timeout_us,
		     unsigned int slow_timeout_ms, u32 *out_value)
{
	int ret;

	intel_dmc_wl_get(display, reg);

	ret = __intel_wait_for_register(__to_uncore(display), reg, mask,
					value,
					fast_timeout_us, slow_timeout_ms, out_value);

	intel_dmc_wl_put(display, reg);

	return ret;
}

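/*
 * Convenience wrappers around intel_de_wait() for waiting on bits in @mask
 * to become set or to clear.
 */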
static inline int
intel_de_wait_for_set(struct intel_display *display, i915_reg_t reg,
		      u32 mask, unsigned int timeout)
{
	return intel_de_wait(display, reg, mask, mask, timeout);
}

static inline int
intel_de_wait_for_clear(struct intel_display *display, i915_reg_t reg,
			u32 mask, unsigned int timeout)
{
	return intel_de_wait(display, reg, mask, 0, timeout);
}

/*
 * Unlocked MMIO accessors; think carefully before using these.
 *
 * Certain architectures will die if the same cacheline is concurrently
 * accessed by different clients (e.g. on Ivybridge). Access to registers
 * should therefore generally be serialised, either by dev_priv->uncore.lock
 * or by a more localised lock guarding all access to that bank of registers.
 */
static inline u32
intel_de_read_fw(struct intel_display *display, i915_reg_t reg)
{
	u32 val;

	val = intel_uncore_read_fw(__to_uncore(display), reg);
	trace_i915_reg_rw(false, reg, val, sizeof(val), true);

	return val;
}

static inline void
intel_de_write_fw(struct intel_display *display, i915_reg_t reg, u32 val)
{
	trace_i915_reg_rw(true, reg, val, sizeof(val), true);
	intel_uncore_write_fw(__to_uncore(display), reg, val);
}

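/*
 * _notrace variants: plain uncore accesses without the i915_reg_rw
 * tracepoint and without the DMC wakelock.
 */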
static inline u32
intel_de_read_notrace(struct intel_display *display, i915_reg_t reg)
{
	return intel_uncore_read_notrace(__to_uncore(display), reg);
}

static inline void
intel_de_write_notrace(struct intel_display *display, i915_reg_t reg, u32 val)
{
	intel_uncore_write_notrace(__to_uncore(display), reg, val);
}

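/*
 * Write a register either by emitting the write into a DSB (Display State
 * Buffer) command buffer when @dsb is provided, or immediately via an
 * unlocked MMIO write otherwise.
 */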
static __always_inline void
intel_de_write_dsb(struct intel_display *display, struct intel_dsb *dsb,
		   i915_reg_t reg, u32 val)
{
	if (dsb)
		intel_dsb_reg_write(dsb, reg, val);
	else
		intel_de_write_fw(display, reg, val);
}

#endif /* __INTEL_DE_H__ */