1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24#include <linux/firmware.h>
25#include "i915_drv.h"
26#include "intel_uc.h"
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
/*
 * Expected HuC firmware major/minor versions and build numbers, per
 * platform. These feed both the runtime version check (via
 * *_ver_wanted in intel_huc_select_fw()) and the firmware filename.
 *
 * NOTE: the two-digit literals (01, 07, 00, ...) are deliberate — they
 * are pasted verbatim into the firmware path by __stringify(), so the
 * leading zero is part of the filename. Beware that in C these are
 * octal constants (harmless for the digits used here, but 08/09 would
 * fail to compile).
 */
#define BXT_HUC_FW_MAJOR 01
#define BXT_HUC_FW_MINOR 07
#define BXT_BLD_NUM 1398

#define SKL_HUC_FW_MAJOR 01
#define SKL_HUC_FW_MINOR 07
#define SKL_BLD_NUM 1398

#define KBL_HUC_FW_MAJOR 02
#define KBL_HUC_FW_MINOR 00
#define KBL_BLD_NUM 1810

#define GLK_HUC_FW_MAJOR 02
#define GLK_HUC_FW_MINOR 00
#define GLK_BLD_NUM 1748

/* Builds "i915/<platform>_huc_ver<major>_<minor>_<bld_num>.bin". */
#define HUC_FW_PATH(platform, major, minor, bld_num) \
	"i915/" __stringify(platform) "_huc_ver" __stringify(major) "_" \
	__stringify(minor) "_" __stringify(bld_num) ".bin"

#define I915_SKL_HUC_UCODE HUC_FW_PATH(skl, SKL_HUC_FW_MAJOR, \
	SKL_HUC_FW_MINOR, SKL_BLD_NUM)
MODULE_FIRMWARE(I915_SKL_HUC_UCODE);

#define I915_BXT_HUC_UCODE HUC_FW_PATH(bxt, BXT_HUC_FW_MAJOR, \
	BXT_HUC_FW_MINOR, BXT_BLD_NUM)
MODULE_FIRMWARE(I915_BXT_HUC_UCODE);

#define I915_KBL_HUC_UCODE HUC_FW_PATH(kbl, KBL_HUC_FW_MAJOR, \
	KBL_HUC_FW_MINOR, KBL_BLD_NUM)
MODULE_FIRMWARE(I915_KBL_HUC_UCODE);

#define I915_GLK_HUC_UCODE HUC_FW_PATH(glk, GLK_HUC_FW_MAJOR, \
	GLK_HUC_FW_MINOR, GLK_BLD_NUM)
/*
 * NOTE(review): unlike the other platforms there is no MODULE_FIRMWARE()
 * entry for the GLK blob — confirm this is intentional (e.g. firmware not
 * yet published) rather than an omission.
 */
77
78
79
80
81
82
83
84
85
/*
 * huc_ucode_xfer() - DMA the HuC firmware image into the hardware.
 * @dev_priv: the drm_i915_private device
 *
 * Moves the already-fetched HuC firmware object (dev_priv->huc.fw) to the
 * hardware by programming the DMA registers and kicking off the transfer,
 * then polls for completion. Register writes are performed under an
 * all-domain forcewake reference.
 *
 * Return: 0 on success, negative errno on set-domain/pin failure, or the
 * wait_for() error if the DMA did not complete within the timeout.
 */
static int huc_ucode_xfer(struct drm_i915_private *dev_priv)
{
	struct intel_uc_fw *huc_fw = &dev_priv->huc.fw;
	struct i915_vma *vma;
	unsigned long offset = 0;
	u32 size;
	int ret;

	ret = i915_gem_object_set_to_gtt_domain(huc_fw->obj, false);
	if (ret) {
		DRM_DEBUG_DRIVER("set-domain failed %d\n", ret);
		return ret;
	}

	/* Pin biased above GUC_WOPCM_TOP so the GGTT offset is GuC-usable. */
	vma = i915_gem_object_ggtt_pin(huc_fw->obj, NULL, 0, 0,
				PIN_OFFSET_BIAS | GUC_WOPCM_TOP);
	if (IS_ERR(vma)) {
		DRM_DEBUG_DRIVER("pin failed %d\n", (int)PTR_ERR(vma));
		return PTR_ERR(vma);
	}

	intel_uncore_forcewake_get(dev_priv, FORCEWAKE_ALL);

	/* Set the source address for the uCode; starts at header_offset. */
	offset = guc_ggtt_offset(vma) + huc_fw->header_offset;
	I915_WRITE(DMA_ADDR_0_LOW, lower_32_bits(offset));
	I915_WRITE(DMA_ADDR_0_HIGH, upper_32_bits(offset) & 0xFFFF);

	/* Destination is WOPCM address space, offset 0. */
	I915_WRITE(DMA_ADDR_1_LOW, 0);
	I915_WRITE(DMA_ADDR_1_HIGH, DMA_ADDRESS_SPACE_WOPCM);

	/* Transfer size covers the header plus the ucode payload. */
	size = huc_fw->header_size + huc_fw->ucode_size;
	I915_WRITE(DMA_COPY_SIZE, size);

	/* Start the DMA, flagging the payload as a HuC microkernel. */
	I915_WRITE(DMA_CTRL, _MASKED_BIT_ENABLE(HUC_UKERNEL | START_DMA));

	/* Hardware clears START_DMA when the transfer finishes; 100ms cap. */
	ret = wait_for((I915_READ(DMA_CTRL) & START_DMA) == 0, 100);

	DRM_DEBUG_DRIVER("HuC DMA transfer wait over with ret %d\n", ret);

	/* Disable the HuC-ukernel mode bit now that the DMA is over. */
	I915_WRITE(DMA_CTRL, _MASKED_BIT_DISABLE(HUC_UKERNEL));

	intel_uncore_forcewake_put(dev_priv, FORCEWAKE_ALL);

	/*
	 * The vma is only needed while the DMA engine reads from it, so it
	 * can be unpinned here regardless of success or failure.
	 */
	i915_vma_unpin(vma);

	return ret;
}
144
145
146
147
148
149void intel_huc_select_fw(struct intel_huc *huc)
150{
151 struct drm_i915_private *dev_priv = huc_to_i915(huc);
152
153 huc->fw.path = NULL;
154 huc->fw.fetch_status = INTEL_UC_FIRMWARE_NONE;
155 huc->fw.load_status = INTEL_UC_FIRMWARE_NONE;
156 huc->fw.type = INTEL_UC_FW_TYPE_HUC;
157
158 if (i915.huc_firmware_path) {
159 huc->fw.path = i915.huc_firmware_path;
160 huc->fw.major_ver_wanted = 0;
161 huc->fw.minor_ver_wanted = 0;
162 } else if (IS_SKYLAKE(dev_priv)) {
163 huc->fw.path = I915_SKL_HUC_UCODE;
164 huc->fw.major_ver_wanted = SKL_HUC_FW_MAJOR;
165 huc->fw.minor_ver_wanted = SKL_HUC_FW_MINOR;
166 } else if (IS_BROXTON(dev_priv)) {
167 huc->fw.path = I915_BXT_HUC_UCODE;
168 huc->fw.major_ver_wanted = BXT_HUC_FW_MAJOR;
169 huc->fw.minor_ver_wanted = BXT_HUC_FW_MINOR;
170 } else if (IS_KABYLAKE(dev_priv) || IS_COFFEELAKE(dev_priv)) {
171 huc->fw.path = I915_KBL_HUC_UCODE;
172 huc->fw.major_ver_wanted = KBL_HUC_FW_MAJOR;
173 huc->fw.minor_ver_wanted = KBL_HUC_FW_MINOR;
174 } else if (IS_GEMINILAKE(dev_priv)) {
175 huc->fw.path = I915_GLK_HUC_UCODE;
176 huc->fw.major_ver_wanted = GLK_HUC_FW_MAJOR;
177 huc->fw.minor_ver_wanted = GLK_HUC_FW_MINOR;
178 } else {
179 DRM_ERROR("No HuC firmware known for platform with HuC!\n");
180 return;
181 }
182}
183
184
185
186
187
188
189
190
191
192
193
194
195
196void intel_huc_init_hw(struct intel_huc *huc)
197{
198 struct drm_i915_private *dev_priv = huc_to_i915(huc);
199 int err;
200
201 DRM_DEBUG_DRIVER("%s fw status: fetch %s, load %s\n",
202 huc->fw.path,
203 intel_uc_fw_status_repr(huc->fw.fetch_status),
204 intel_uc_fw_status_repr(huc->fw.load_status));
205
206 if (huc->fw.fetch_status != INTEL_UC_FIRMWARE_SUCCESS)
207 return;
208
209 huc->fw.load_status = INTEL_UC_FIRMWARE_PENDING;
210
211 err = huc_ucode_xfer(dev_priv);
212
213 huc->fw.load_status = err ?
214 INTEL_UC_FIRMWARE_FAIL : INTEL_UC_FIRMWARE_SUCCESS;
215
216 DRM_DEBUG_DRIVER("%s fw status: fetch %s, load %s\n",
217 huc->fw.path,
218 intel_uc_fw_status_repr(huc->fw.fetch_status),
219 intel_uc_fw_status_repr(huc->fw.load_status));
220
221 if (huc->fw.load_status != INTEL_UC_FIRMWARE_SUCCESS)
222 DRM_ERROR("Failed to complete HuC uCode load with ret %d\n", err);
223
224 return;
225}
226
227
228
229
230
231
232
233
234void intel_guc_auth_huc(struct drm_i915_private *dev_priv)
235{
236 struct intel_guc *guc = &dev_priv->guc;
237 struct intel_huc *huc = &dev_priv->huc;
238 struct i915_vma *vma;
239 int ret;
240 u32 data[2];
241
242 if (huc->fw.load_status != INTEL_UC_FIRMWARE_SUCCESS)
243 return;
244
245 vma = i915_gem_object_ggtt_pin(huc->fw.obj, NULL, 0, 0,
246 PIN_OFFSET_BIAS | GUC_WOPCM_TOP);
247 if (IS_ERR(vma)) {
248 DRM_ERROR("failed to pin huc fw object %d\n",
249 (int)PTR_ERR(vma));
250 return;
251 }
252
253
254 data[0] = INTEL_GUC_ACTION_AUTHENTICATE_HUC;
255 data[1] = guc_ggtt_offset(vma) + huc->fw.rsa_offset;
256
257 ret = intel_guc_send(guc, data, ARRAY_SIZE(data));
258 if (ret) {
259 DRM_ERROR("HuC: GuC did not ack Auth request %d\n", ret);
260 goto out;
261 }
262
263
264 ret = intel_wait_for_register(dev_priv,
265 HUC_STATUS2,
266 HUC_FW_VERIFIED,
267 HUC_FW_VERIFIED,
268 50);
269
270 if (ret) {
271 DRM_ERROR("HuC: Authentication failed %d\n", ret);
272 goto out;
273 }
274
275out:
276 i915_vma_unpin(vma);
277}
278
279