1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32#include <drm/drmP.h>
33#include <drm/drm_atomic.h>
34#include <drm/drm_atomic_helper.h>
35#include <drm/drm_plane_helper.h>
36#include "intel_drv.h"
37
38
39
40
41
42
43
44
45
46
47
48
49int
50intel_connector_atomic_get_property(struct drm_connector *connector,
51 const struct drm_connector_state *state,
52 struct drm_property *property,
53 uint64_t *val)
54{
55 int i;
56
57
58
59
60
61
62
63
64
65
66 for (i = 0; i < connector->base.properties->count; i++) {
67 if (connector->base.properties->properties[i] == property) {
68 *val = connector->base.properties->values[i];
69 return 0;
70 }
71 }
72
73 return -EINVAL;
74}
75
76
77
78
79
80
81
82
83
84
85struct drm_crtc_state *
86intel_crtc_duplicate_state(struct drm_crtc *crtc)
87{
88 struct intel_crtc_state *crtc_state;
89
90 crtc_state = kmemdup(crtc->state, sizeof(*crtc_state), GFP_KERNEL);
91 if (!crtc_state)
92 return NULL;
93
94 __drm_atomic_helper_crtc_duplicate_state(crtc, &crtc_state->base);
95
96 crtc_state->update_pipe = false;
97 crtc_state->disable_lp_wm = false;
98 crtc_state->disable_cxsr = false;
99 crtc_state->wm_changed = false;
100 crtc_state->fb_changed = false;
101
102 return &crtc_state->base;
103}
104
105
106
107
108
109
110
111
/**
 * intel_crtc_destroy_state - destroy crtc state
 * @crtc: drm crtc
 * @state: the state to destroy
 *
 * Thin wrapper: delegates all teardown of @state to the common atomic
 * helper. Kept as a separate entry point so the crtc funcs table has an
 * i915-named hook (and a place to add driver-specific teardown later).
 */
void
intel_crtc_destroy_state(struct drm_crtc *crtc,
			 struct drm_crtc_state *state)
{
	drm_atomic_helper_crtc_destroy_state(crtc, state);
}
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
/**
 * intel_atomic_setup_scalers - setup scalers for crtc per staged requests
 * @dev: drm device
 * @intel_crtc: intel crtc
 * @crtc_state: incoming crtc_state to validate and setup scalers
 *
 * This function sets up scalers based on staged scaling requests for
 * a @crtc and its planes. It is called from the crtc-level check path.
 * If request is within the available scalers count, it assigns scalers.
 * Function also makes sure that any state which became stale as part of
 * scaler assignment (e.g. a plane that needs to be pulled into the
 * atomic state) is added to @crtc_state->base.state.
 *
 * Returns:
 *         0 - scalers were set up successfully
 *         error code - otherwise
 */
int intel_atomic_setup_scalers(struct drm_device *dev,
	struct intel_crtc *intel_crtc,
	struct intel_crtc_state *crtc_state)
{
	struct drm_plane *plane = NULL;
	struct intel_plane *intel_plane;
	struct intel_plane_state *plane_state = NULL;
	struct intel_crtc_scaler_state *scaler_state =
		&crtc_state->scaler_state;
	struct drm_atomic_state *drm_state = crtc_state->base.state;
	int num_scalers_need;
	int i, j;

	/* One bit per requesting user (crtc + planes) in scaler_users. */
	num_scalers_need = hweight32(scaler_state->scaler_users);

	/* Fail if more scalers are requested than this pipe provides. */
	if (num_scalers_need > intel_crtc->num_scalers){
		DRM_DEBUG_KMS("Too many scaling requests %d > %d\n",
			num_scalers_need, intel_crtc->num_scalers);
		return -EINVAL;
	}

	/* walk through each scaler_users bit and assign a scaler to each
	 * requesting user */
	for (i = 0; i < sizeof(scaler_state->scaler_users) * 8; i++) {
		int *scaler_id;
		const char *name;
		int idx;

		/* skip if scaler not required */
		if (!(scaler_state->scaler_users & (1 << i)))
			continue;

		if (i == SKL_CRTC_INDEX) {
			/* The crtc itself (panel fitting) is the requester. */
			name = "CRTC";
			idx = intel_crtc->base.base.id;

			scaler_id = &scaler_state->scaler_id;
		} else {
			name = "PLANE";

			/* For planes, the bit index doubles as the plane
			 * index into the atomic state. */
			plane = drm_state->planes[i];

			/* Plane not yet in this atomic state: it was staged
			 * earlier (e.g. during a previous check) but dropped.
			 * Pull it in now so its scaler gets (re)assigned. */
			if (!plane) {
				struct drm_plane_state *state;
				plane = drm_plane_from_index(dev, i);
				state = drm_atomic_get_plane_state(drm_state, plane);
				if (IS_ERR(state)) {
					DRM_DEBUG_KMS("Failed to add [PLANE:%d] to drm_state\n",
						plane->base.id);
					return PTR_ERR(state);
				}

				/* The freshly added plane state makes the
				 * commit a plane update. */
				crtc_state->base.planes_changed = true;
			}

			intel_plane = to_intel_plane(plane);
			idx = plane->base.id;

			/* A plane on another pipe must not have set a
			 * scaler_users bit on this crtc; skip it if so. */
			if (WARN_ON(intel_plane->pipe != intel_crtc->pipe)) {
				continue;
			}

			plane_state = to_intel_plane_state(drm_state->plane_states[i]);
			scaler_id = &plane_state->scaler_id;
		}

		if (*scaler_id < 0) {
			/* find a free scaler and claim it */
			for (j = 0; j < intel_crtc->num_scalers; j++) {
				if (!scaler_state->scalers[j].in_use) {
					scaler_state->scalers[j].in_use = 1;
					*scaler_id = j;
					DRM_DEBUG_KMS("Attached scaler id %u.%u to %s:%d\n",
						intel_crtc->pipe, *scaler_id, name, idx);
					break;
				}
			}
		}

		/* Should be unreachable: the num_scalers_need check above
		 * guarantees a free scaler exists for every requester. */
		if (WARN_ON(*scaler_id < 0)) {
			DRM_DEBUG_KMS("Cannot find scaler for %s:%d\n", name, idx);
			continue;
		}

		/* With a single scaler in use, force scaler 0 in HQ mode
		 * (except on pipe C — presumably a hardware restriction,
		 * TODO confirm against Bspec); otherwise run the assigned
		 * scaler in dynamic mode. */
		if (num_scalers_need == 1 && intel_crtc->pipe != PIPE_C) {
			*scaler_id = 0;
			scaler_state->scalers[0].in_use = 1;
			scaler_state->scalers[0].mode = PS_SCALER_MODE_HQ;
			scaler_state->scalers[1].in_use = 0;
		} else {
			scaler_state->scalers[*scaler_id].mode = PS_SCALER_MODE_DYN;
		}
	}

	return 0;
}
263
264static void
265intel_atomic_duplicate_dpll_state(struct drm_i915_private *dev_priv,
266 struct intel_shared_dpll_config *shared_dpll)
267{
268 enum intel_dpll_id i;
269
270
271 for (i = 0; i < dev_priv->num_shared_dpll; i++) {
272 struct intel_shared_dpll *pll = &dev_priv->shared_dplls[i];
273
274 shared_dpll[i] = pll->config;
275 }
276}
277
278struct intel_shared_dpll_config *
279intel_atomic_get_shared_dpll_state(struct drm_atomic_state *s)
280{
281 struct intel_atomic_state *state = to_intel_atomic_state(s);
282
283 WARN_ON(!drm_modeset_is_locked(&s->dev->mode_config.connection_mutex));
284
285 if (!state->dpll_set) {
286 state->dpll_set = true;
287
288 intel_atomic_duplicate_dpll_state(to_i915(s->dev),
289 state->shared_dpll);
290 }
291
292 return state->shared_dpll;
293}
294
295struct drm_atomic_state *
296intel_atomic_state_alloc(struct drm_device *dev)
297{
298 struct intel_atomic_state *state = kzalloc(sizeof(*state), GFP_KERNEL);
299
300 if (!state || drm_atomic_state_init(dev, &state->base) < 0) {
301 kfree(state);
302 return NULL;
303 }
304
305 return &state->base;
306}
307
308void intel_atomic_state_clear(struct drm_atomic_state *s)
309{
310 struct intel_atomic_state *state = to_intel_atomic_state(s);
311 drm_atomic_state_default_clear(&state->base);
312 state->dpll_set = state->modeset = false;
313}
314