1
2
3#include "adf_common_drv.h"
4#include "adf_gen2_hw_data.h"
5#include "icp_qat_hw.h"
6#include <linux/pci.h>
7
8u32 adf_gen2_get_num_accels(struct adf_hw_device_data *self)
9{
10 if (!self || !self->accel_mask)
11 return 0;
12
13 return hweight16(self->accel_mask);
14}
15EXPORT_SYMBOL_GPL(adf_gen2_get_num_accels);
16
17u32 adf_gen2_get_num_aes(struct adf_hw_device_data *self)
18{
19 if (!self || !self->ae_mask)
20 return 0;
21
22 return hweight32(self->ae_mask);
23}
24EXPORT_SYMBOL_GPL(adf_gen2_get_num_aes);
25
/**
 * adf_gen2_enable_error_correction() - enable error reporting/correction CSRs
 * @accel_dev: pointer to the acceleration device
 *
 * Sets the ECC error and ECC parity-correction enable bits (per the macro
 * names) in each enabled acceleration engine's CTX_ENABLES and MISC_CONTROL
 * registers, then sets the error-reporting enable bit in each enabled
 * accelerator's UERRSSMSH (uncorrectable) and CERRSSMSH (correctable)
 * shared-memory registers. All updates are read-modify-write through the
 * PMISC BAR.
 */
void adf_gen2_enable_error_correction(struct adf_accel_dev *accel_dev)
{
	struct adf_hw_device_data *hw_data = accel_dev->hw_device;
	void __iomem *pmisc_addr = adf_get_pmisc_base(accel_dev);
	unsigned long accel_mask = hw_data->accel_mask;
	unsigned long ae_mask = hw_data->ae_mask;
	unsigned int val, i;

	/* Enable Accel Engine error detection & correction */
	for_each_set_bit(i, &ae_mask, hw_data->num_engines) {
		val = ADF_CSR_RD(pmisc_addr, ADF_GEN2_AE_CTX_ENABLES(i));
		val |= ADF_GEN2_ENABLE_AE_ECC_ERR;
		ADF_CSR_WR(pmisc_addr, ADF_GEN2_AE_CTX_ENABLES(i), val);
		val = ADF_CSR_RD(pmisc_addr, ADF_GEN2_AE_MISC_CONTROL(i));
		val |= ADF_GEN2_ENABLE_AE_ECC_PARITY_CORR;
		ADF_CSR_WR(pmisc_addr, ADF_GEN2_AE_MISC_CONTROL(i), val);
	}

	/* Enable shared memory error detection & correction */
	for_each_set_bit(i, &accel_mask, hw_data->num_accel) {
		val = ADF_CSR_RD(pmisc_addr, ADF_GEN2_UERRSSMSH(i));
		val |= ADF_GEN2_ERRSSMSH_EN;
		ADF_CSR_WR(pmisc_addr, ADF_GEN2_UERRSSMSH(i), val);
		val = ADF_CSR_RD(pmisc_addr, ADF_GEN2_CERRSSMSH(i));
		val |= ADF_GEN2_ERRSSMSH_EN;
		ADF_CSR_WR(pmisc_addr, ADF_GEN2_CERRSSMSH(i), val);
	}
}
EXPORT_SYMBOL_GPL(adf_gen2_enable_error_correction);
55
56void adf_gen2_cfg_iov_thds(struct adf_accel_dev *accel_dev, bool enable,
57 int num_a_regs, int num_b_regs)
58{
59 void __iomem *pmisc_addr = adf_get_pmisc_base(accel_dev);
60 u32 reg;
61 int i;
62
63
64 for (i = 0; i < num_a_regs; i++) {
65 reg = READ_CSR_AE2FUNCTION_MAP_A(pmisc_addr, i);
66 if (enable)
67 reg |= AE2FUNCTION_MAP_VALID;
68 else
69 reg &= ~AE2FUNCTION_MAP_VALID;
70 WRITE_CSR_AE2FUNCTION_MAP_A(pmisc_addr, i, reg);
71 }
72
73
74 for (i = 0; i < num_b_regs; i++) {
75 reg = READ_CSR_AE2FUNCTION_MAP_B(pmisc_addr, i);
76 if (enable)
77 reg |= AE2FUNCTION_MAP_VALID;
78 else
79 reg &= ~AE2FUNCTION_MAP_VALID;
80 WRITE_CSR_AE2FUNCTION_MAP_B(pmisc_addr, i, reg);
81 }
82}
83EXPORT_SYMBOL_GPL(adf_gen2_cfg_iov_thds);
84
85void adf_gen2_get_admin_info(struct admin_info *admin_csrs_info)
86{
87 admin_csrs_info->mailbox_offset = ADF_MAILBOX_BASE_OFFSET;
88 admin_csrs_info->admin_msg_ur = ADF_ADMINMSGUR_OFFSET;
89 admin_csrs_info->admin_msg_lr = ADF_ADMINMSGLR_OFFSET;
90}
91EXPORT_SYMBOL_GPL(adf_gen2_get_admin_info);
92
93void adf_gen2_get_arb_info(struct arb_info *arb_info)
94{
95 arb_info->arb_cfg = ADF_ARB_CONFIG;
96 arb_info->arb_offset = ADF_ARB_OFFSET;
97 arb_info->wt2sam_offset = ADF_ARB_WRK_2_SER_MAP_OFFSET;
98}
99EXPORT_SYMBOL_GPL(adf_gen2_get_arb_info);
100
/* Compose the 64-bit ring-base CSR value from a DMA address and ring size */
static u64 build_csr_ring_base_addr(dma_addr_t addr, u32 size)
{
	return BUILD_RING_BASE_ADDR(addr, size);
}
105
/* Read the head CSR of @ring in @bank */
static u32 read_csr_ring_head(void __iomem *csr_base_addr, u32 bank, u32 ring)
{
	return READ_CSR_RING_HEAD(csr_base_addr, bank, ring);
}
110
/* Write @value to the head CSR of @ring in @bank */
static void write_csr_ring_head(void __iomem *csr_base_addr, u32 bank, u32 ring,
				u32 value)
{
	WRITE_CSR_RING_HEAD(csr_base_addr, bank, ring, value);
}
116
/* Read the tail CSR of @ring in @bank */
static u32 read_csr_ring_tail(void __iomem *csr_base_addr, u32 bank, u32 ring)
{
	return READ_CSR_RING_TAIL(csr_base_addr, bank, ring);
}
121
/* Write @value to the tail CSR of @ring in @bank */
static void write_csr_ring_tail(void __iomem *csr_base_addr, u32 bank, u32 ring,
				u32 value)
{
	WRITE_CSR_RING_TAIL(csr_base_addr, bank, ring, value);
}
127
/* Read the empty-stat CSR of @bank */
static u32 read_csr_e_stat(void __iomem *csr_base_addr, u32 bank)
{
	return READ_CSR_E_STAT(csr_base_addr, bank);
}
132
/* Write @value to the configuration CSR of @ring in @bank */
static void write_csr_ring_config(void __iomem *csr_base_addr, u32 bank,
				  u32 ring, u32 value)
{
	WRITE_CSR_RING_CONFIG(csr_base_addr, bank, ring, value);
}
138
/* Program the DMA base address of @ring in @bank */
static void write_csr_ring_base(void __iomem *csr_base_addr, u32 bank, u32 ring,
				dma_addr_t addr)
{
	WRITE_CSR_RING_BASE(csr_base_addr, bank, ring, addr);
}
144
/* Write @value to the interrupt-flag CSR of @bank */
static void write_csr_int_flag(void __iomem *csr_base_addr, u32 bank, u32 value)
{
	WRITE_CSR_INT_FLAG(csr_base_addr, bank, value);
}
149
/* Program the interrupt source-select CSR of @bank (fixed value, per macro) */
static void write_csr_int_srcsel(void __iomem *csr_base_addr, u32 bank)
{
	WRITE_CSR_INT_SRCSEL(csr_base_addr, bank);
}
154
/* Write @value to the interrupt-coalescing enable CSR of @bank */
static void write_csr_int_col_en(void __iomem *csr_base_addr, u32 bank,
				 u32 value)
{
	WRITE_CSR_INT_COL_EN(csr_base_addr, bank, value);
}
160
/* Write @value to the interrupt-coalescing control CSR of @bank */
static void write_csr_int_col_ctl(void __iomem *csr_base_addr, u32 bank,
				  u32 value)
{
	WRITE_CSR_INT_COL_CTL(csr_base_addr, bank, value);
}
166
/* Write @value to the combined interrupt flag-and-coalescing CSR of @bank */
static void write_csr_int_flag_and_col(void __iomem *csr_base_addr, u32 bank,
				       u32 value)
{
	WRITE_CSR_INT_FLAG_AND_COL(csr_base_addr, bank, value);
}
172
/* Write @value to the ring service-arbiter enable CSR of @bank */
static void write_csr_ring_srv_arb_en(void __iomem *csr_base_addr, u32 bank,
				      u32 value)
{
	WRITE_CSR_RING_SRV_ARB_EN(csr_base_addr, bank, value);
}
178
179void adf_gen2_init_hw_csr_ops(struct adf_hw_csr_ops *csr_ops)
180{
181 csr_ops->build_csr_ring_base_addr = build_csr_ring_base_addr;
182 csr_ops->read_csr_ring_head = read_csr_ring_head;
183 csr_ops->write_csr_ring_head = write_csr_ring_head;
184 csr_ops->read_csr_ring_tail = read_csr_ring_tail;
185 csr_ops->write_csr_ring_tail = write_csr_ring_tail;
186 csr_ops->read_csr_e_stat = read_csr_e_stat;
187 csr_ops->write_csr_ring_config = write_csr_ring_config;
188 csr_ops->write_csr_ring_base = write_csr_ring_base;
189 csr_ops->write_csr_int_flag = write_csr_int_flag;
190 csr_ops->write_csr_int_srcsel = write_csr_int_srcsel;
191 csr_ops->write_csr_int_col_en = write_csr_int_col_en;
192 csr_ops->write_csr_int_col_ctl = write_csr_int_col_ctl;
193 csr_ops->write_csr_int_flag_and_col = write_csr_int_flag_and_col;
194 csr_ops->write_csr_ring_srv_arb_en = write_csr_ring_srv_arb_en;
195}
196EXPORT_SYMBOL_GPL(adf_gen2_init_hw_csr_ops);
197
198u32 adf_gen2_get_accel_cap(struct adf_accel_dev *accel_dev)
199{
200 struct adf_hw_device_data *hw_data = accel_dev->hw_device;
201 struct pci_dev *pdev = accel_dev->accel_pci_dev.pci_dev;
202 u32 straps = hw_data->straps;
203 u32 fuses = hw_data->fuses;
204 u32 legfuses;
205 u32 capabilities = ICP_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC |
206 ICP_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC |
207 ICP_ACCEL_CAPABILITIES_AUTHENTICATION |
208 ICP_ACCEL_CAPABILITIES_CIPHER |
209 ICP_ACCEL_CAPABILITIES_COMPRESSION;
210
211
212 pci_read_config_dword(pdev, ADF_DEVICE_LEGFUSE_OFFSET, &legfuses);
213
214
215 if (legfuses & ICP_ACCEL_MASK_CIPHER_SLICE) {
216 capabilities &= ~ICP_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC;
217 capabilities &= ~ICP_ACCEL_CAPABILITIES_CIPHER;
218 }
219 if (legfuses & ICP_ACCEL_MASK_PKE_SLICE)
220 capabilities &= ~ICP_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC;
221 if (legfuses & ICP_ACCEL_MASK_AUTH_SLICE) {
222 capabilities &= ~ICP_ACCEL_CAPABILITIES_AUTHENTICATION;
223 capabilities &= ~ICP_ACCEL_CAPABILITIES_CIPHER;
224 }
225 if (legfuses & ICP_ACCEL_MASK_COMPRESS_SLICE)
226 capabilities &= ~ICP_ACCEL_CAPABILITIES_COMPRESSION;
227
228 if ((straps | fuses) & ADF_POWERGATE_PKE)
229 capabilities &= ~ICP_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC;
230
231 if ((straps | fuses) & ADF_POWERGATE_DC)
232 capabilities &= ~ICP_ACCEL_CAPABILITIES_COMPRESSION;
233
234 return capabilities;
235}
236EXPORT_SYMBOL_GPL(adf_gen2_get_accel_cap);
237
238void adf_gen2_set_ssm_wdtimer(struct adf_accel_dev *accel_dev)
239{
240 struct adf_hw_device_data *hw_data = accel_dev->hw_device;
241 void __iomem *pmisc_addr = adf_get_pmisc_base(accel_dev);
242 u32 timer_val_pke = ADF_SSM_WDT_PKE_DEFAULT_VALUE;
243 u32 timer_val = ADF_SSM_WDT_DEFAULT_VALUE;
244 unsigned long accel_mask = hw_data->accel_mask;
245 u32 i = 0;
246
247
248 for_each_set_bit(i, &accel_mask, hw_data->num_accel) {
249
250 ADF_CSR_WR(pmisc_addr, ADF_SSMWDT(i), timer_val);
251
252 ADF_CSR_WR(pmisc_addr, ADF_SSMWDTPKE(i), timer_val_pke);
253 }
254}
255EXPORT_SYMBOL_GPL(adf_gen2_set_ssm_wdtimer);
256