xref: /linux/drivers/gpu/drm/i915/display/intel_de.h (revision 3027ce13e04eee76539ca65c2cb1028a01c8c508)
1 /* SPDX-License-Identifier: MIT */
2 /*
3  * Copyright © 2019 Intel Corporation
4  */
5 
6 #ifndef __INTEL_DE_H__
7 #define __INTEL_DE_H__
8 
#include "i915_drv.h"
#include "i915_trace.h"
#include "intel_dmc_wl.h"
#include "intel_uncore.h"
12 
13 static inline u32
14 intel_de_read(struct drm_i915_private *i915, i915_reg_t reg)
15 {
16 	u32 val;
17 
18 	intel_dmc_wl_get(i915, reg);
19 
20 	val = intel_uncore_read(&i915->uncore, reg);
21 
22 	intel_dmc_wl_put(i915, reg);
23 
24 	return val;
25 }
26 
27 static inline u8
28 intel_de_read8(struct drm_i915_private *i915, i915_reg_t reg)
29 {
30 	u8 val;
31 
32 	intel_dmc_wl_get(i915, reg);
33 
34 	val = intel_uncore_read8(&i915->uncore, reg);
35 
36 	intel_dmc_wl_put(i915, reg);
37 
38 	return val;
39 }
40 
41 static inline u64
42 intel_de_read64_2x32(struct drm_i915_private *i915,
43 		     i915_reg_t lower_reg, i915_reg_t upper_reg)
44 {
45 	u64 val;
46 
47 	intel_dmc_wl_get(i915, lower_reg);
48 	intel_dmc_wl_get(i915, upper_reg);
49 
50 	val = intel_uncore_read64_2x32(&i915->uncore, lower_reg, upper_reg);
51 
52 	intel_dmc_wl_put(i915, upper_reg);
53 	intel_dmc_wl_put(i915, lower_reg);
54 
55 	return val;
56 }
57 
/*
 * Read @reg purely for its side effect of flushing previously posted
 * writes to the hardware; the value read is discarded.
 */
static inline void
intel_de_posting_read(struct drm_i915_private *i915, i915_reg_t reg)
{
	/* Hold a DMC wakelock reference across the posting read. */
	intel_dmc_wl_get(i915, reg);

	intel_uncore_posting_read(&i915->uncore, reg);

	intel_dmc_wl_put(i915, reg);
}
67 
/* Write @val to display engine register @reg. */
static inline void
intel_de_write(struct drm_i915_private *i915, i915_reg_t reg, u32 val)
{
	/* Hold a DMC wakelock reference across the MMIO write. */
	intel_dmc_wl_get(i915, reg);

	intel_uncore_write(&i915->uncore, reg, val);

	intel_dmc_wl_put(i915, reg);
}
77 
/*
 * Read-modify-write of @reg without the DMC wakelock bracketing
 * ("nowl"); returns the old value, like intel_uncore_rmw(). Callers
 * are responsible for any wakelock handling themselves.
 */
static inline u32
__intel_de_rmw_nowl(struct drm_i915_private *i915, i915_reg_t reg,
		    u32 clear, u32 set)
{
	return intel_uncore_rmw(&i915->uncore, reg, clear, set);
}
84 
85 static inline u32
86 intel_de_rmw(struct drm_i915_private *i915, i915_reg_t reg, u32 clear, u32 set)
87 {
88 	u32 val;
89 
90 	intel_dmc_wl_get(i915, reg);
91 
92 	val = __intel_de_rmw_nowl(i915, reg, clear, set);
93 
94 	intel_dmc_wl_put(i915, reg);
95 
96 	return val;
97 }
98 
/*
 * Poll @reg until (value & mask) == @value, without the DMC wakelock
 * bracketing ("nowl"). Thin forwarder to intel_wait_for_register().
 */
static inline int
__intel_wait_for_register_nowl(struct drm_i915_private *i915, i915_reg_t reg,
			       u32 mask, u32 value, unsigned int timeout)
{
	return intel_wait_for_register(&i915->uncore, reg, mask,
				       value, timeout);
}
106 
107 static inline int
108 intel_de_wait(struct drm_i915_private *i915, i915_reg_t reg,
109 	      u32 mask, u32 value, unsigned int timeout)
110 {
111 	int ret;
112 
113 	intel_dmc_wl_get(i915, reg);
114 
115 	ret = __intel_wait_for_register_nowl(i915, reg, mask, value, timeout);
116 
117 	intel_dmc_wl_put(i915, reg);
118 
119 	return ret;
120 }
121 
122 static inline int
123 intel_de_wait_fw(struct drm_i915_private *i915, i915_reg_t reg,
124 		 u32 mask, u32 value, unsigned int timeout)
125 {
126 	int ret;
127 
128 	intel_dmc_wl_get(i915, reg);
129 
130 	ret = intel_wait_for_register_fw(&i915->uncore, reg, mask, value, timeout);
131 
132 	intel_dmc_wl_put(i915, reg);
133 
134 	return ret;
135 }
136 
137 static inline int
138 intel_de_wait_custom(struct drm_i915_private *i915, i915_reg_t reg,
139 		     u32 mask, u32 value,
140 		     unsigned int fast_timeout_us,
141 		     unsigned int slow_timeout_ms, u32 *out_value)
142 {
143 	int ret;
144 
145 	intel_dmc_wl_get(i915, reg);
146 
147 	ret = __intel_wait_for_register(&i915->uncore, reg, mask, value,
148 					fast_timeout_us, slow_timeout_ms, out_value);
149 
150 	intel_dmc_wl_put(i915, reg);
151 
152 	return ret;
153 }
154 
/* Wait until all bits in @mask read back as set in @reg. */
static inline int
intel_de_wait_for_set(struct drm_i915_private *i915, i915_reg_t reg,
		      u32 mask, unsigned int timeout)
{
	return intel_de_wait(i915, reg, mask, mask, timeout);
}
161 
/* Wait until all bits in @mask read back as cleared in @reg. */
static inline int
intel_de_wait_for_clear(struct drm_i915_private *i915, i915_reg_t reg,
			u32 mask, unsigned int timeout)
{
	return intel_de_wait(i915, reg, mask, 0, timeout);
}
168 
169 /*
170  * Unlocked mmio-accessors, think carefully before using these.
171  *
172  * Certain architectures will die if the same cacheline is concurrently accessed
173  * by different clients (e.g. on Ivybridge). Access to registers should
174  * therefore generally be serialised, by either the dev_priv->uncore.lock or
175  * a more localised lock guarding all access to that bank of registers.
176  */
177 static inline u32
178 intel_de_read_fw(struct drm_i915_private *i915, i915_reg_t reg)
179 {
180 	u32 val;
181 
182 	val = intel_uncore_read_fw(&i915->uncore, reg);
183 	trace_i915_reg_rw(false, reg, val, sizeof(val), true);
184 
185 	return val;
186 }
187 
/* Unlocked ("_fw") write of @val to @reg; see the locking note above. */
static inline void
intel_de_write_fw(struct drm_i915_private *i915, i915_reg_t reg, u32 val)
{
	/* Trace before issuing the write, mirroring intel_de_read_fw(). */
	trace_i915_reg_rw(true, reg, val, sizeof(val), true);
	intel_uncore_write_fw(&i915->uncore, reg, val);
}
194 
/*
 * Read @reg via the notrace uncore accessor, without the DMC wakelock
 * bracketing that intel_de_read() performs.
 */
static inline u32
intel_de_read_notrace(struct drm_i915_private *i915, i915_reg_t reg)
{
	return intel_uncore_read_notrace(&i915->uncore, reg);
}
200 
/*
 * Write @val to @reg via the notrace uncore accessor, without the DMC
 * wakelock bracketing that intel_de_write() performs.
 */
static inline void
intel_de_write_notrace(struct drm_i915_private *i915, i915_reg_t reg, u32 val)
{
	intel_uncore_write_notrace(&i915->uncore, reg, val);
}
206 
207 #endif /* __INTEL_DE_H__ */
208