#include <linux/firmware.h>
#include "i915_drv.h"
#include "i915_reg.h"

/**
 * DOC: csr support for dmc
 *
 * Display Context Save and Restore (CSR) firmware support added from gen9
 * onwards to drive a newly added DMC (Display MicroController) in the display
 * engine. The DMC saves and restores display engine state around low-power
 * (DC) states, and its firmware must be loaded by the driver before those
 * states can be used.
 */

#define GEN12_CSR_MAX_FW_SIZE		ICL_CSR_MAX_FW_SIZE

#define ICL_CSR_PATH			"i915/icl_dmc_ver1_07.bin"
#define ICL_CSR_VERSION_REQUIRED	CSR_VERSION(1, 7)
#define ICL_CSR_MAX_FW_SIZE		0x6000
MODULE_FIRMWARE(ICL_CSR_PATH);

#define CNL_CSR_PATH			"i915/cnl_dmc_ver1_07.bin"
#define CNL_CSR_VERSION_REQUIRED	CSR_VERSION(1, 7)
#define CNL_CSR_MAX_FW_SIZE		GLK_CSR_MAX_FW_SIZE
MODULE_FIRMWARE(CNL_CSR_PATH);

#define GLK_CSR_PATH			"i915/glk_dmc_ver1_04.bin"
#define GLK_CSR_VERSION_REQUIRED	CSR_VERSION(1, 4)
#define GLK_CSR_MAX_FW_SIZE		0x4000
MODULE_FIRMWARE(GLK_CSR_PATH);

#define KBL_CSR_PATH			"i915/kbl_dmc_ver1_04.bin"
#define KBL_CSR_VERSION_REQUIRED	CSR_VERSION(1, 4)
#define KBL_CSR_MAX_FW_SIZE		BXT_CSR_MAX_FW_SIZE
MODULE_FIRMWARE(KBL_CSR_PATH);

#define SKL_CSR_PATH			"i915/skl_dmc_ver1_27.bin"
#define SKL_CSR_VERSION_REQUIRED	CSR_VERSION(1, 27)
#define SKL_CSR_MAX_FW_SIZE		BXT_CSR_MAX_FW_SIZE
MODULE_FIRMWARE(SKL_CSR_PATH);

#define BXT_CSR_PATH			"i915/bxt_dmc_ver1_07.bin"
#define BXT_CSR_VERSION_REQUIRED	CSR_VERSION(1, 7)
#define BXT_CSR_MAX_FW_SIZE		0x3000
MODULE_FIRMWARE(BXT_CSR_PATH);

#define CSR_DEFAULT_FW_OFFSET		0xFFFFFFFF

struct intel_css_header {
	/* Firmware module type; not checked by the driver */
	u32 module_type;

	/* CSS header length in dwords */
	u32 header_len;

	/* CSS header version; not used by the driver */
	u32 header_ver;

	/* Not used by the driver */
	u32 module_id;

	/* Not used by the driver */
	u32 module_vendor;

	/* Firmware build date; not used by the driver */
	u32 date;

	/* Total firmware image size; not used by the driver */
	u32 size;

	/* Not used by the driver */
	u32 key_size;

	/* Not used by the driver */
	u32 modulus_size;

	/* Not used by the driver */
	u32 exponent_size;

	u32 reserved1[12];

	/* Firmware version, encoded as CSR_VERSION(major, minor) */
	u32 version;

	u32 reserved2[8];

	/* Not used by the driver */
	u32 kernel_header_info;
} __packed;

struct intel_fw_info {
	u16 reserved1;

	/* Stepping (A, B, C, ..., or '*' as a wildcard) */
	char stepping;

	/* Sub-stepping (0, 1, ..., or '*' as a wildcard) */
	char substepping;

	/* DMC payload offset in dwords, relative to the end of the package header */
	u32 offset;
	u32 reserved2;
} __packed;

struct intel_package_header {
	/* Package header length in dwords */
	unsigned char header_len;

	/* Package header version */
	unsigned char header_ver;

	unsigned char reserved[10];

	/* Number of valid entries in the fw_info array below */
	u32 num_entries;

	struct intel_fw_info fw_info[20];
} __packed;

struct intel_dmc_header {
	/* DMC firmware signature */
	u32 signature;

	/* DMC header length in bytes */
	unsigned char header_len;

	/* DMC header version */
	unsigned char header_ver;

	/* Not used by the driver */
	u16 dmcc_ver;

	/* Not used by the driver */
	u32 project;

	/* Firmware program size (excluding the header) in dwords */
	u32 fw_size;

	/* Not used by the driver */
	u32 fw_version;

	/* Number of valid entries in the mmioaddr/mmiodata arrays below */
	u32 mmio_count;

	/* MMIO addresses to program after the firmware payload is written */
	u32 mmioaddr[8];

	/* Data to write to the corresponding mmioaddr registers */
	u32 mmiodata[8];

	/* Firmware file name; not used by the driver */
	unsigned char dfile[32];

	u32 reserved1[2];
} __packed;

struct stepping_info {
	char stepping;
	char substepping;
};

static const struct stepping_info skl_stepping_info[] = {
	{'A', '0'}, {'B', '0'}, {'C', '0'},
	{'D', '0'}, {'E', '0'}, {'F', '0'},
	{'G', '0'}, {'H', '0'}, {'I', '0'},
	{'J', '0'}, {'K', '0'}
};

static const struct stepping_info bxt_stepping_info[] = {
	{'A', '0'}, {'A', '1'}, {'A', '2'},
	{'B', '0'}, {'B', '1'}, {'B', '2'}
};

static const struct stepping_info icl_stepping_info[] = {
	{'A', '0'}, {'A', '1'}, {'A', '2'},
	{'B', '0'}, {'B', '2'},
	{'C', '0'}
};

static const struct stepping_info no_stepping_info = { '*', '*' };
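
/*
 * Map the device revision (INTEL_REVID) to a stepping/substepping pair for
 * matching against the firmware's fw_info table; revisions beyond the table
 * (or unknown platforms) fall back to the '*'/'*' wildcard entry.
 */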
static const struct stepping_info *
intel_get_stepping_info(struct drm_i915_private *dev_priv)
{
	const struct stepping_info *si;
	unsigned int size;

	if (IS_ICELAKE(dev_priv)) {
		size = ARRAY_SIZE(icl_stepping_info);
		si = icl_stepping_info;
	} else if (IS_SKYLAKE(dev_priv)) {
		size = ARRAY_SIZE(skl_stepping_info);
		si = skl_stepping_info;
	} else if (IS_BROXTON(dev_priv)) {
		size = ARRAY_SIZE(bxt_stepping_info);
		si = bxt_stepping_info;
	} else {
		size = 0;
		si = NULL;
	}

	if (INTEL_REVID(dev_priv) < size)
		return si + INTEL_REVID(dev_priv);

	return &no_stepping_info;
}
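
/*
 * Enable the DC-state debug mask bits required on gen9 platforms; they never
 * need to be cleared afterwards, so only write the register when bits are
 * missing.
 */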
static void gen9_set_dc_state_debugmask(struct drm_i915_private *dev_priv)
{
	u32 val, mask;

	mask = DC_STATE_DEBUG_MASK_MEMORY_UP;

	if (IS_GEN9_LP(dev_priv))
		mask |= DC_STATE_DEBUG_MASK_CORES;

	/* These bits never need to be cleared afterwards */
	val = I915_READ(DC_STATE_DEBUG);
	if ((val & mask) != mask) {
		val |= mask;
		I915_WRITE(DC_STATE_DEBUG, val);
		POSTING_READ(DC_STATE_DEBUG);
	}
}
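
/**
 * intel_csr_load_program() - write the firmware from memory to the DMC.
 * @dev_priv: i915 drm device.
 *
 * The CSR firmware is read from the .bin file once and kept in internal
 * memory. Every time the display comes back from a low power state this
 * function is called to copy the cached payload into the CSR program
 * registers.
 */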
void intel_csr_load_program(struct drm_i915_private *dev_priv)
{
	u32 *payload = dev_priv->csr.dmc_payload;
	u32 i, fw_size;

	if (!HAS_CSR(dev_priv)) {
		DRM_ERROR("No CSR support available for this platform\n");
		return;
	}

	if (!dev_priv->csr.dmc_payload) {
		DRM_ERROR("Tried to program CSR with empty payload\n");
		return;
	}

	fw_size = dev_priv->csr.dmc_fw_size;
	assert_rpm_wakelock_held(dev_priv);

	preempt_disable();

	for (i = 0; i < fw_size; i++)
		I915_WRITE_FW(CSR_PROGRAM(i), payload[i]);

	preempt_enable();

	for (i = 0; i < dev_priv->csr.mmio_count; i++) {
		I915_WRITE(dev_priv->csr.mmioaddr[i],
			   dev_priv->csr.mmiodata[i]);
	}

	dev_priv->csr.dc_state = 0;

	gen9_set_dc_state_debugmask(dev_priv);
}
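
/*
 * Parse the firmware blob: validate the CSS, package and DMC headers, pick
 * the payload entry matching the current stepping and return a freshly
 * allocated copy of the DMC payload, or NULL on any failure.
 */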
static u32 *parse_csr_fw(struct drm_i915_private *dev_priv,
			 const struct firmware *fw)
{
	struct intel_css_header *css_header;
	struct intel_package_header *package_header;
	struct intel_dmc_header *dmc_header;
	struct intel_csr *csr = &dev_priv->csr;
	const struct stepping_info *si = intel_get_stepping_info(dev_priv);
	u32 dmc_offset = CSR_DEFAULT_FW_OFFSET, readcount = 0, nbytes;
	u32 i;
	u32 *dmc_payload;

	if (!fw)
		return NULL;

	/* Extract CSS header information. */
	css_header = (struct intel_css_header *)fw->data;
	if (sizeof(struct intel_css_header) !=
	    (css_header->header_len * 4)) {
		DRM_ERROR("DMC firmware has wrong CSS header length "
			  "(%u bytes)\n",
			  (css_header->header_len * 4));
		return NULL;
	}

	if (csr->required_version &&
	    css_header->version != csr->required_version) {
		DRM_INFO("Refusing to load DMC firmware v%u.%u,"
			 " please use v%u.%u\n",
			 CSR_VERSION_MAJOR(css_header->version),
			 CSR_VERSION_MINOR(css_header->version),
			 CSR_VERSION_MAJOR(csr->required_version),
			 CSR_VERSION_MINOR(csr->required_version));
		return NULL;
	}

	csr->version = css_header->version;

	readcount += sizeof(struct intel_css_header);

	/* Extract package header information. */
	package_header = (struct intel_package_header *)
		&fw->data[readcount];
	if (sizeof(struct intel_package_header) !=
	    (package_header->header_len * 4)) {
		DRM_ERROR("DMC firmware has wrong package header length "
			  "(%u bytes)\n",
			  (package_header->header_len * 4));
		return NULL;
	}
	readcount += sizeof(struct intel_package_header);

	/* Search for the dmc_offset of the payload matching this stepping. */
	for (i = 0; i < package_header->num_entries; i++) {
		if (package_header->fw_info[i].substepping == '*' &&
		    si->stepping == package_header->fw_info[i].stepping) {
			dmc_offset = package_header->fw_info[i].offset;
			break;
		} else if (si->stepping == package_header->fw_info[i].stepping &&
			   si->substepping == package_header->fw_info[i].substepping) {
			dmc_offset = package_header->fw_info[i].offset;
			break;
		} else if (package_header->fw_info[i].stepping == '*' &&
			   package_header->fw_info[i].substepping == '*')
			dmc_offset = package_header->fw_info[i].offset;
	}
	if (dmc_offset == CSR_DEFAULT_FW_OFFSET) {
		DRM_ERROR("DMC firmware not supported for %c stepping\n",
			  si->stepping);
		return NULL;
	}

	/* Convert dmc_offset from dwords into bytes. */
	dmc_offset *= 4;
	readcount += dmc_offset;

	/* Extract dmc_header information. */
	dmc_header = (struct intel_dmc_header *)&fw->data[readcount];
	if (sizeof(struct intel_dmc_header) != (dmc_header->header_len)) {
		DRM_ERROR("DMC firmware has wrong dmc header length "
			  "(%u bytes)\n",
			  (dmc_header->header_len));
		return NULL;
	}
	readcount += sizeof(struct intel_dmc_header);

	/* Cache the dmc header info. */
	if (dmc_header->mmio_count > ARRAY_SIZE(csr->mmioaddr)) {
		DRM_ERROR("DMC firmware has wrong mmio count %u\n",
			  dmc_header->mmio_count);
		return NULL;
	}
	csr->mmio_count = dmc_header->mmio_count;
	for (i = 0; i < dmc_header->mmio_count; i++) {
		if (dmc_header->mmioaddr[i] < CSR_MMIO_START_RANGE ||
		    dmc_header->mmioaddr[i] > CSR_MMIO_END_RANGE) {
			DRM_ERROR("DMC firmware has wrong mmio address 0x%x\n",
				  dmc_header->mmioaddr[i]);
			return NULL;
		}
		csr->mmioaddr[i] = _MMIO(dmc_header->mmioaddr[i]);
		csr->mmiodata[i] = dmc_header->mmiodata[i];
	}

	/* fw_size is in dwords, so multiply by 4 to convert to bytes. */
	nbytes = dmc_header->fw_size * 4;
	if (nbytes > csr->max_fw_size) {
		DRM_ERROR("DMC FW too big (%u bytes)\n", nbytes);
		return NULL;
	}
	csr->dmc_fw_size = dmc_header->fw_size;

	dmc_payload = kmalloc(nbytes, GFP_KERNEL);
	if (!dmc_payload) {
		DRM_ERROR("Memory allocation failed for dmc payload\n");
		return NULL;
	}

	return memcpy(dmc_payload, &fw->data[readcount], nbytes);
}
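
/*
 * While no DMC payload is loaded the driver must not runtime suspend, so a
 * POWER_DOMAIN_INIT wakeref is held from init until the firmware has been
 * programmed, and re-taken if the payload goes missing across suspend/resume.
 */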
static void intel_csr_runtime_pm_get(struct drm_i915_private *dev_priv)
{
	WARN_ON(dev_priv->csr.wakeref);
	dev_priv->csr.wakeref =
		intel_display_power_get(dev_priv, POWER_DOMAIN_INIT);
}

static void intel_csr_runtime_pm_put(struct drm_i915_private *dev_priv)
{
	intel_wakeref_t wakeref __maybe_unused =
		fetch_and_zero(&dev_priv->csr.wakeref);

	intel_display_power_put(dev_priv, POWER_DOMAIN_INIT, wakeref);
}
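
/* Worker that requests the firmware blob, parses it and programs the DMC. */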
static void csr_load_work_fn(struct work_struct *work)
{
	struct drm_i915_private *dev_priv;
	struct intel_csr *csr;
	const struct firmware *fw = NULL;

	dev_priv = container_of(work, typeof(*dev_priv), csr.work);
	csr = &dev_priv->csr;

	request_firmware(&fw, dev_priv->csr.fw_path, &dev_priv->drm.pdev->dev);
	if (fw)
		dev_priv->csr.dmc_payload = parse_csr_fw(dev_priv, fw);

	if (dev_priv->csr.dmc_payload) {
		intel_csr_load_program(dev_priv);
		intel_csr_runtime_pm_put(dev_priv);

		DRM_INFO("Finished loading DMC firmware %s (v%u.%u)\n",
			 dev_priv->csr.fw_path,
			 CSR_VERSION_MAJOR(csr->version),
			 CSR_VERSION_MINOR(csr->version));
	} else {
		dev_notice(dev_priv->drm.dev,
			   "Failed to load DMC firmware %s."
			   " Disabling runtime power management.\n",
			   csr->fw_path);
		dev_notice(dev_priv->drm.dev, "DMC firmware homepage: %s",
			   INTEL_UC_FIRMWARE_URL);
	}

	release_firmware(fw);
}
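
/**
 * intel_csr_ucode_init() - initialize the firmware loading.
 * @dev_priv: i915 drm device.
 *
 * Called during driver load; selects the firmware to use for the platform and
 * schedules a worker to read it from the .bin file and cache it in memory.
 */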
void intel_csr_ucode_init(struct drm_i915_private *dev_priv)
{
	struct intel_csr *csr = &dev_priv->csr;

	INIT_WORK(&dev_priv->csr.work, csr_load_work_fn);

	if (!HAS_CSR(dev_priv))
		return;

	/*
	 * Obtain a runtime pm reference, until CSR is loaded, to avoid
	 * entering runtime-suspend.
	 *
	 * On error we return with the rpm wakeref held to prevent runtime
	 * suspend, as runtime suspend requires a working CSR.
	 */
	intel_csr_runtime_pm_get(dev_priv);

	if (INTEL_GEN(dev_priv) >= 12) {
		/*
		 * No known firmware yet; allow loading via the module
		 * parameter using the last known maximum size.
		 */
		csr->max_fw_size = GEN12_CSR_MAX_FW_SIZE;
	} else if (IS_ICELAKE(dev_priv)) {
		csr->fw_path = ICL_CSR_PATH;
		csr->required_version = ICL_CSR_VERSION_REQUIRED;
		csr->max_fw_size = ICL_CSR_MAX_FW_SIZE;
	} else if (IS_CANNONLAKE(dev_priv)) {
		csr->fw_path = CNL_CSR_PATH;
		csr->required_version = CNL_CSR_VERSION_REQUIRED;
		csr->max_fw_size = CNL_CSR_MAX_FW_SIZE;
	} else if (IS_GEMINILAKE(dev_priv)) {
		csr->fw_path = GLK_CSR_PATH;
		csr->required_version = GLK_CSR_VERSION_REQUIRED;
		csr->max_fw_size = GLK_CSR_MAX_FW_SIZE;
	} else if (IS_KABYLAKE(dev_priv) || IS_COFFEELAKE(dev_priv)) {
		csr->fw_path = KBL_CSR_PATH;
		csr->required_version = KBL_CSR_VERSION_REQUIRED;
		csr->max_fw_size = KBL_CSR_MAX_FW_SIZE;
	} else if (IS_SKYLAKE(dev_priv)) {
		csr->fw_path = SKL_CSR_PATH;
		csr->required_version = SKL_CSR_VERSION_REQUIRED;
		csr->max_fw_size = SKL_CSR_MAX_FW_SIZE;
	} else if (IS_BROXTON(dev_priv)) {
		csr->fw_path = BXT_CSR_PATH;
		csr->required_version = BXT_CSR_VERSION_REQUIRED;
		csr->max_fw_size = BXT_CSR_MAX_FW_SIZE;
	}

	if (i915_modparams.dmc_firmware_path) {
		if (strlen(i915_modparams.dmc_firmware_path) == 0) {
			csr->fw_path = NULL;
			DRM_INFO("Disabling CSR firmware and runtime PM\n");
			return;
		}

		csr->fw_path = i915_modparams.dmc_firmware_path;
		/* Bypass the version check for a firmware override. */
		csr->required_version = 0;
	}

	if (csr->fw_path == NULL) {
		DRM_DEBUG_KMS("No known CSR firmware for platform, disabling runtime PM\n");
		WARN_ON(!IS_ALPHA_SUPPORT(INTEL_INFO(dev_priv)));

		return;
	}

	DRM_DEBUG_KMS("Loading %s\n", csr->fw_path);
	schedule_work(&dev_priv->csr.work);
}
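
/**
 * intel_csr_ucode_suspend() - prepare CSR firmware before system suspend.
 * @dev_priv: i915 drm device.
 *
 * Prepare the DMC firmware before entering system suspend: flush any pending
 * load work and, if no payload was loaded, drop the wakeref taken at init so
 * it can be re-taken on resume.
 */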
void intel_csr_ucode_suspend(struct drm_i915_private *dev_priv)
{
	if (!HAS_CSR(dev_priv))
		return;

	flush_work(&dev_priv->csr.work);

	/* Drop the reference held in case DMC isn't loaded. */
	if (!dev_priv->csr.dmc_payload)
		intel_csr_runtime_pm_put(dev_priv);
}
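
/**
 * intel_csr_ucode_resume() - init CSR firmware during system resume.
 * @dev_priv: i915 drm device.
 *
 * Reinitialize the DMC firmware state during system resume, reacquiring any
 * reference released in intel_csr_ucode_suspend().
 */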
void intel_csr_ucode_resume(struct drm_i915_private *dev_priv)
{
	if (!HAS_CSR(dev_priv))
		return;

	/*
	 * Reacquire the reference to keep RPM disabled in case DMC isn't
	 * loaded.
	 */
	if (!dev_priv->csr.dmc_payload)
		intel_csr_runtime_pm_get(dev_priv);
}
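
/**
 * intel_csr_ucode_fini() - unload the CSR firmware.
 * @dev_priv: i915 drm device.
 *
 * Firmware unloading includes flushing any pending load work and freeing the
 * cached payload copy.
 */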
void intel_csr_ucode_fini(struct drm_i915_private *dev_priv)
{
	if (!HAS_CSR(dev_priv))
		return;

	intel_csr_ucode_suspend(dev_priv);
	WARN_ON(dev_priv->csr.wakeref);

	kfree(dev_priv->csr.dmc_payload);
}