/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2019 Intel Corporation
 */
5
6 #ifndef __INTEL_DE_H__
7 #define __INTEL_DE_H__
8
9 #include "intel_display_core.h"
10 #include "intel_dmc_wl.h"
11 #include "intel_dsb.h"
12 #include "intel_uncore.h"
13 #include "intel_uncore_trace.h"
14
__to_uncore(struct intel_display * display)15 static inline struct intel_uncore *__to_uncore(struct intel_display *display)
16 {
17 return to_intel_uncore(display->drm);
18 }
19
20 static inline u32
intel_de_read(struct intel_display * display,i915_reg_t reg)21 intel_de_read(struct intel_display *display, i915_reg_t reg)
22 {
23 u32 val;
24
25 intel_dmc_wl_get(display, reg);
26
27 val = intel_uncore_read(__to_uncore(display), reg);
28
29 intel_dmc_wl_put(display, reg);
30
31 return val;
32 }
33
34 static inline u8
intel_de_read8(struct intel_display * display,i915_reg_t reg)35 intel_de_read8(struct intel_display *display, i915_reg_t reg)
36 {
37 u8 val;
38
39 intel_dmc_wl_get(display, reg);
40
41 val = intel_uncore_read8(__to_uncore(display), reg);
42
43 intel_dmc_wl_put(display, reg);
44
45 return val;
46 }
47
48 static inline u64
intel_de_read64_2x32(struct intel_display * display,i915_reg_t lower_reg,i915_reg_t upper_reg)49 intel_de_read64_2x32(struct intel_display *display,
50 i915_reg_t lower_reg, i915_reg_t upper_reg)
51 {
52 u64 val;
53
54 intel_dmc_wl_get(display, lower_reg);
55 intel_dmc_wl_get(display, upper_reg);
56
57 val = intel_uncore_read64_2x32(__to_uncore(display), lower_reg,
58 upper_reg);
59
60 intel_dmc_wl_put(display, upper_reg);
61 intel_dmc_wl_put(display, lower_reg);
62
63 return val;
64 }
65
66 static inline void
intel_de_posting_read(struct intel_display * display,i915_reg_t reg)67 intel_de_posting_read(struct intel_display *display, i915_reg_t reg)
68 {
69 intel_dmc_wl_get(display, reg);
70
71 intel_uncore_posting_read(__to_uncore(display), reg);
72
73 intel_dmc_wl_put(display, reg);
74 }
75
76 static inline void
intel_de_write(struct intel_display * display,i915_reg_t reg,u32 val)77 intel_de_write(struct intel_display *display, i915_reg_t reg, u32 val)
78 {
79 intel_dmc_wl_get(display, reg);
80
81 intel_uncore_write(__to_uncore(display), reg, val);
82
83 intel_dmc_wl_put(display, reg);
84 }
85
86 static inline u32
__intel_de_rmw_nowl(struct intel_display * display,i915_reg_t reg,u32 clear,u32 set)87 __intel_de_rmw_nowl(struct intel_display *display, i915_reg_t reg,
88 u32 clear, u32 set)
89 {
90 return intel_uncore_rmw(__to_uncore(display), reg, clear, set);
91 }
92
93 static inline u32
intel_de_rmw(struct intel_display * display,i915_reg_t reg,u32 clear,u32 set)94 intel_de_rmw(struct intel_display *display, i915_reg_t reg, u32 clear, u32 set)
95 {
96 u32 val;
97
98 intel_dmc_wl_get(display, reg);
99
100 val = __intel_de_rmw_nowl(display, reg, clear, set);
101
102 intel_dmc_wl_put(display, reg);
103
104 return val;
105 }
106
107 static inline int
__intel_de_wait_for_register_nowl(struct intel_display * display,i915_reg_t reg,u32 mask,u32 value,unsigned int timeout_ms)108 __intel_de_wait_for_register_nowl(struct intel_display *display,
109 i915_reg_t reg,
110 u32 mask, u32 value, unsigned int timeout_ms)
111 {
112 return intel_wait_for_register(__to_uncore(display), reg, mask,
113 value, timeout_ms);
114 }
115
116 static inline int
__intel_de_wait_for_register_atomic_nowl(struct intel_display * display,i915_reg_t reg,u32 mask,u32 value,unsigned int fast_timeout_us)117 __intel_de_wait_for_register_atomic_nowl(struct intel_display *display,
118 i915_reg_t reg,
119 u32 mask, u32 value,
120 unsigned int fast_timeout_us)
121 {
122 return __intel_wait_for_register(__to_uncore(display), reg, mask,
123 value, fast_timeout_us, 0, NULL);
124 }
125
126 static inline int
intel_de_wait(struct intel_display * display,i915_reg_t reg,u32 mask,u32 value,unsigned int timeout_ms)127 intel_de_wait(struct intel_display *display, i915_reg_t reg,
128 u32 mask, u32 value, unsigned int timeout_ms)
129 {
130 int ret;
131
132 intel_dmc_wl_get(display, reg);
133
134 ret = __intel_de_wait_for_register_nowl(display, reg, mask, value,
135 timeout_ms);
136
137 intel_dmc_wl_put(display, reg);
138
139 return ret;
140 }
141
142 static inline int
intel_de_wait_fw(struct intel_display * display,i915_reg_t reg,u32 mask,u32 value,unsigned int timeout_ms,u32 * out_value)143 intel_de_wait_fw(struct intel_display *display, i915_reg_t reg,
144 u32 mask, u32 value, unsigned int timeout_ms, u32 *out_value)
145 {
146 int ret;
147
148 intel_dmc_wl_get(display, reg);
149
150 ret = intel_wait_for_register_fw(__to_uncore(display), reg, mask,
151 value, timeout_ms, out_value);
152
153 intel_dmc_wl_put(display, reg);
154
155 return ret;
156 }
157
158 static inline int
intel_de_wait_custom(struct intel_display * display,i915_reg_t reg,u32 mask,u32 value,unsigned int fast_timeout_us,unsigned int slow_timeout_ms,u32 * out_value)159 intel_de_wait_custom(struct intel_display *display, i915_reg_t reg,
160 u32 mask, u32 value,
161 unsigned int fast_timeout_us,
162 unsigned int slow_timeout_ms, u32 *out_value)
163 {
164 int ret;
165
166 intel_dmc_wl_get(display, reg);
167
168 ret = __intel_wait_for_register(__to_uncore(display), reg, mask,
169 value,
170 fast_timeout_us, slow_timeout_ms, out_value);
171
172 intel_dmc_wl_put(display, reg);
173
174 return ret;
175 }
176
177 static inline int
intel_de_wait_for_set(struct intel_display * display,i915_reg_t reg,u32 mask,unsigned int timeout_ms)178 intel_de_wait_for_set(struct intel_display *display, i915_reg_t reg,
179 u32 mask, unsigned int timeout_ms)
180 {
181 return intel_de_wait(display, reg, mask, mask, timeout_ms);
182 }
183
184 static inline int
intel_de_wait_for_clear(struct intel_display * display,i915_reg_t reg,u32 mask,unsigned int timeout_ms)185 intel_de_wait_for_clear(struct intel_display *display, i915_reg_t reg,
186 u32 mask, unsigned int timeout_ms)
187 {
188 return intel_de_wait(display, reg, mask, 0, timeout_ms);
189 }
190
/*
 * Unlocked mmio-accessors, think carefully before using these.
 *
 * Certain architectures will die if the same cacheline is concurrently
 * accessed by different clients (e.g. on Ivybridge). Access to registers
 * should therefore generally be serialised, by either the
 * dev_priv->uncore.lock or a more localised lock guarding all access to
 * that bank of registers.
 */
199 static inline u32
intel_de_read_fw(struct intel_display * display,i915_reg_t reg)200 intel_de_read_fw(struct intel_display *display, i915_reg_t reg)
201 {
202 u32 val;
203
204 val = intel_uncore_read_fw(__to_uncore(display), reg);
205 trace_i915_reg_rw(false, reg, val, sizeof(val), true);
206
207 return val;
208 }
209
210 static inline void
intel_de_write_fw(struct intel_display * display,i915_reg_t reg,u32 val)211 intel_de_write_fw(struct intel_display *display, i915_reg_t reg, u32 val)
212 {
213 trace_i915_reg_rw(true, reg, val, sizeof(val), true);
214 intel_uncore_write_fw(__to_uncore(display), reg, val);
215 }
216
217 static inline u32
intel_de_read_notrace(struct intel_display * display,i915_reg_t reg)218 intel_de_read_notrace(struct intel_display *display, i915_reg_t reg)
219 {
220 return intel_uncore_read_notrace(__to_uncore(display), reg);
221 }
222
223 static inline void
intel_de_write_notrace(struct intel_display * display,i915_reg_t reg,u32 val)224 intel_de_write_notrace(struct intel_display *display, i915_reg_t reg, u32 val)
225 {
226 intel_uncore_write_notrace(__to_uncore(display), reg, val);
227 }
228
229 static __always_inline void
intel_de_write_dsb(struct intel_display * display,struct intel_dsb * dsb,i915_reg_t reg,u32 val)230 intel_de_write_dsb(struct intel_display *display, struct intel_dsb *dsb,
231 i915_reg_t reg, u32 val)
232 {
233 if (dsb)
234 intel_dsb_reg_write(dsb, reg, val);
235 else
236 intel_de_write_fw(display, reg, val);
237 }
238
239 #endif /* __INTEL_DE_H__ */
240