#include "hw.h"
#include <linux/export.h>

#define AR_BufLen 0x00000fff

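/* Start receive DMA by setting the RXE bit in the AR_CR control register. */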
static void ar9002_hw_rx_enable(struct ath_hw *ah)
{
        REG_WRITE(ah, AR_CR, AR_CR_RXE);
}

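/* Chain a descriptor to the next one by setting its link pointer. */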
static void ar9002_hw_set_desc_link(void *ds, u32 ds_link)
{
        ((struct ath_desc *)ds)->ds_link = ds_link;
}

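/*
 * Read the interrupt cause registers, translate the hardware bits into the
 * generic ATH9K_INT_* mask and acknowledge the causes that are not handled
 * through the RAC (read-and-clear) shadow registers.  Returns false when
 * the interrupt did not originate from this device.
 */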
static bool ar9002_hw_get_isr(struct ath_hw *ah, enum ath9k_int *masked,
                              u32 *sync_cause_p)
{
        u32 isr = 0;
        u32 mask2 = 0;
        struct ath9k_hw_capabilities *pCap = &ah->caps;
        u32 sync_cause = 0;
        bool fatal_int = false;
        struct ath_common *common = ath9k_hw_common(ah);

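        /*
         * On PCI(e) based chips (everything but AR9100), read AR_ISR only
         * when the MAC actually raised the interrupt and the RTC is awake,
         * and latch the sync cause for host interface error handling below.
         */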
        if (!AR_SREV_9100(ah)) {
                if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) {
                        if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
                            == AR_RTC_STATUS_ON) {
                                isr = REG_READ(ah, AR_ISR);
                        }
                }

                sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) &
                             AR_INTR_SYNC_DEFAULT;

                *masked = 0;

                if (!isr && !sync_cause)
                        return false;
        } else {
                *masked = 0;
                isr = REG_READ(ah, AR_ISR);
        }

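        /*
         * Decode the primary ISR and the secondary AR_ISR_S* registers into
         * the generic ATH9K_INT_* bits.
         */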
        if (isr) {
                if (isr & AR_ISR_BCNMISC) {
                        u32 isr2;
                        isr2 = REG_READ(ah, AR_ISR_S2);
                        if (isr2 & AR_ISR_S2_TIM)
                                mask2 |= ATH9K_INT_TIM;
                        if (isr2 & AR_ISR_S2_DTIM)
                                mask2 |= ATH9K_INT_DTIM;
                        if (isr2 & AR_ISR_S2_DTIMSYNC)
                                mask2 |= ATH9K_INT_DTIMSYNC;
                        if (isr2 & AR_ISR_S2_CABEND)
                                mask2 |= ATH9K_INT_CABEND;
                        if (isr2 & AR_ISR_S2_GTT)
                                mask2 |= ATH9K_INT_GTT;
                        if (isr2 & AR_ISR_S2_CST)
                                mask2 |= ATH9K_INT_CST;
                        if (isr2 & AR_ISR_S2_TSFOOR)
                                mask2 |= ATH9K_INT_TSFOOR;

                        if (!(pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED)) {
                                REG_WRITE(ah, AR_ISR_S2, isr2);
                                isr &= ~AR_ISR_BCNMISC;
                        }
                }

                if (pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED)
                        isr = REG_READ(ah, AR_ISR_RAC);

                if (isr == 0xffffffff) {
                        *masked = 0;
                        return false;
                }

                *masked = isr & ATH9K_INT_COMMON;

                if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM |
                           AR_ISR_RXOK | AR_ISR_RXERR))
                        *masked |= ATH9K_INT_RX;

                if (isr &
                    (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
                     AR_ISR_TXEOL)) {
                        u32 s0_s, s1_s;

                        *masked |= ATH9K_INT_TX;

                        if (pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED) {
                                s0_s = REG_READ(ah, AR_ISR_S0_S);
                                s1_s = REG_READ(ah, AR_ISR_S1_S);
                        } else {
                                s0_s = REG_READ(ah, AR_ISR_S0);
                                REG_WRITE(ah, AR_ISR_S0, s0_s);
                                s1_s = REG_READ(ah, AR_ISR_S1);
                                REG_WRITE(ah, AR_ISR_S1, s1_s);

                                isr &= ~(AR_ISR_TXOK |
                                         AR_ISR_TXDESC |
                                         AR_ISR_TXERR |
                                         AR_ISR_TXEOL);
                        }

                        ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK);
                        ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC);
                        ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR);
                        ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL);
                }

                if (isr & AR_ISR_RXORN) {
                        ath_dbg(common, INTERRUPT,
                                "receive FIFO overrun interrupt\n");
                }

                *masked |= mask2;
        }

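        /* Generic hardware timer interrupts are reported through AR_ISR_S5. */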
        if (!AR_SREV_9100(ah) && (isr & AR_ISR_GENTMR)) {
                u32 s5_s;

                if (pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED) {
                        s5_s = REG_READ(ah, AR_ISR_S5_S);
                } else {
                        s5_s = REG_READ(ah, AR_ISR_S5);
                }

                ah->intr_gen_timer_trigger =
                        MS(s5_s, AR_ISR_S5_GENTIMER_TRIG);

                ah->intr_gen_timer_thresh =
                        MS(s5_s, AR_ISR_S5_GENTIMER_THRESH);

                if (ah->intr_gen_timer_trigger)
                        *masked |= ATH9K_INT_GENTIMER;

                if ((s5_s & AR_ISR_S5_TIM_TIMER) &&
                    !(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
                        *masked |= ATH9K_INT_TIM_TIMER;

                if (!(pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED)) {
                        REG_WRITE(ah, AR_ISR_S5, s5_s);
                        isr &= ~AR_ISR_GENTMR;
                }
        }

        if (!(pCap->hw_caps & ATH9K_HW_CAP_RAC_SUPPORTED)) {
                REG_WRITE(ah, AR_ISR, isr);
                REG_READ(ah, AR_ISR);
        }

        if (AR_SREV_9100(ah))
                return true;

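        /*
         * Handle the sync cause bits: fatal host interface errors are
         * promoted to ATH9K_INT_FATAL before all sync causes are cleared.
         */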
        if (sync_cause) {
                if (sync_cause_p)
                        *sync_cause_p = sync_cause;
                fatal_int =
                        (sync_cause &
                         (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
                        ? true : false;

                if (fatal_int) {
                        if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
                                ath_dbg(common, ANY,
                                        "received PCI FATAL interrupt\n");
                        }
                        if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
                                ath_dbg(common, ANY,
                                        "received PCI PERR interrupt\n");
                        }
                        *masked |= ATH9K_INT_FATAL;
                }
                if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
                        ath_dbg(common, INTERRUPT,
                                "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n");
                        REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
                        REG_WRITE(ah, AR_RC, 0);
                        *masked |= ATH9K_INT_FATAL;
                }
                if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) {
                        ath_dbg(common, INTERRUPT,
                                "AR_INTR_SYNC_LOCAL_TIMEOUT\n");
                }

                REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
                (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
        }

        return true;
}

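/*
 * Program a TX descriptor: link/buffer pointers, frame and key parameters,
 * the four-entry rate series and the A-MPDU aggregation fields.
 */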
static void
ar9002_set_txdesc(struct ath_hw *ah, void *ds, struct ath_tx_info *i)
{
        struct ar5416_desc *ads = AR5416DESC(ds);
        u32 ctl1, ctl6;

        ads->ds_txstatus0 = ads->ds_txstatus1 = 0;
        ads->ds_txstatus2 = ads->ds_txstatus3 = 0;
        ads->ds_txstatus4 = ads->ds_txstatus5 = 0;
        ads->ds_txstatus6 = ads->ds_txstatus7 = 0;
        ads->ds_txstatus8 = ads->ds_txstatus9 = 0;

        WRITE_ONCE(ads->ds_link, i->link);
        WRITE_ONCE(ads->ds_data, i->buf_addr[0]);

        ctl1 = i->buf_len[0] | (i->is_last ? 0 : AR_TxMore);
        ctl6 = SM(i->keytype, AR_EncrType);

        if (AR_SREV_9285(ah)) {
                ads->ds_ctl8 = 0;
                ads->ds_ctl9 = 0;
                ads->ds_ctl10 = 0;
                ads->ds_ctl11 = 0;
        }

        if ((i->is_first || i->is_last) &&
            i->aggr != AGGR_BUF_MIDDLE && i->aggr != AGGR_BUF_LAST) {
                WRITE_ONCE(ads->ds_ctl2, set11nTries(i->rates, 0)
                        | set11nTries(i->rates, 1)
                        | set11nTries(i->rates, 2)
                        | set11nTries(i->rates, 3)
                        | (i->dur_update ? AR_DurUpdateEna : 0)
                        | SM(0, AR_BurstDur));

                WRITE_ONCE(ads->ds_ctl3, set11nRate(i->rates, 0)
                        | set11nRate(i->rates, 1)
                        | set11nRate(i->rates, 2)
                        | set11nRate(i->rates, 3));
        } else {
                WRITE_ONCE(ads->ds_ctl2, 0);
                WRITE_ONCE(ads->ds_ctl3, 0);
        }

        if (!i->is_first) {
                WRITE_ONCE(ads->ds_ctl0, 0);
                WRITE_ONCE(ads->ds_ctl1, ctl1);
                WRITE_ONCE(ads->ds_ctl6, ctl6);
                return;
        }

        ctl1 |= (i->keyix != ATH9K_TXKEYIX_INVALID ? SM(i->keyix, AR_DestIdx) : 0)
                | SM(i->type, AR_FrameType)
                | (i->flags & ATH9K_TXDESC_NOACK ? AR_NoAck : 0)
                | (i->flags & ATH9K_TXDESC_EXT_ONLY ? AR_ExtOnly : 0)
                | (i->flags & ATH9K_TXDESC_EXT_AND_CTL ? AR_ExtAndCtl : 0);

        switch (i->aggr) {
        case AGGR_BUF_FIRST:
                ctl6 |= SM(i->aggr_len, AR_AggrLen);
                fallthrough;
        case AGGR_BUF_MIDDLE:
                ctl1 |= AR_IsAggr | AR_MoreAggr;
                ctl6 |= SM(i->ndelim, AR_PadDelim);
                break;
        case AGGR_BUF_LAST:
                ctl1 |= AR_IsAggr;
                break;
        case AGGR_BUF_NONE:
                break;
        }

        WRITE_ONCE(ads->ds_ctl0, (i->pkt_len & AR_FrameLen)
                | (i->flags & ATH9K_TXDESC_VMF ? AR_VirtMoreFrag : 0)
                | SM(i->txpower[0], AR_XmitPower0)
                | (i->flags & ATH9K_TXDESC_VEOL ? AR_VEOL : 0)
                | (i->flags & ATH9K_TXDESC_INTREQ ? AR_TxIntrReq : 0)
                | (i->keyix != ATH9K_TXKEYIX_INVALID ? AR_DestIdxValid : 0)
                | (i->flags & ATH9K_TXDESC_CLRDMASK ? AR_ClrDestMask : 0)
                | (i->flags & ATH9K_TXDESC_RTSENA ? AR_RTSEnable :
                   (i->flags & ATH9K_TXDESC_CTSENA ? AR_CTSEnable : 0)));

        WRITE_ONCE(ads->ds_ctl1, ctl1);
        WRITE_ONCE(ads->ds_ctl6, ctl6);

        if (i->aggr == AGGR_BUF_MIDDLE || i->aggr == AGGR_BUF_LAST)
                return;

        WRITE_ONCE(ads->ds_ctl4, set11nPktDurRTSCTS(i->rates, 0)
                | set11nPktDurRTSCTS(i->rates, 1));

        WRITE_ONCE(ads->ds_ctl5, set11nPktDurRTSCTS(i->rates, 2)
                | set11nPktDurRTSCTS(i->rates, 3));

        WRITE_ONCE(ads->ds_ctl7, set11nRateFlags(i->rates, 0)
                | set11nRateFlags(i->rates, 1)
                | set11nRateFlags(i->rates, 2)
                | set11nRateFlags(i->rates, 3)
                | SM(i->rtscts_rate, AR_RTSCTSRate));

        WRITE_ONCE(ads->ds_ctl9, SM(i->txpower[1], AR_XmitPower1));
        WRITE_ONCE(ads->ds_ctl10, SM(i->txpower[2], AR_XmitPower2));
        WRITE_ONCE(ads->ds_ctl11, SM(i->txpower[3], AR_XmitPower3));
}

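/*
 * Collect the completion status of a transmitted descriptor into ath_tx_status.
 * Returns -EINPROGRESS if the hardware has not finished with it yet.
 */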
static int ar9002_hw_proc_txdesc(struct ath_hw *ah, void *ds,
                                 struct ath_tx_status *ts)
{
        struct ar5416_desc *ads = AR5416DESC(ds);
        u32 status;

        status = READ_ONCE(ads->ds_txstatus9);
        if ((status & AR_TxDone) == 0)
                return -EINPROGRESS;

        ts->ts_tstamp = ads->AR_SendTimestamp;
        ts->ts_status = 0;
        ts->ts_flags = 0;

        if (status & AR_TxOpExceeded)
                ts->ts_status |= ATH9K_TXERR_XTXOP;
        ts->tid = MS(status, AR_TxTid);
        ts->ts_rateindex = MS(status, AR_FinalTxIdx);
        ts->ts_seqnum = MS(status, AR_SeqNum);

        status = READ_ONCE(ads->ds_txstatus0);
        ts->ts_rssi_ctl0 = MS(status, AR_TxRSSIAnt00);
        ts->ts_rssi_ctl1 = MS(status, AR_TxRSSIAnt01);
        ts->ts_rssi_ctl2 = MS(status, AR_TxRSSIAnt02);
        if (status & AR_TxBaStatus) {
                ts->ts_flags |= ATH9K_TX_BA;
                ts->ba_low = ads->AR_BaBitmapLow;
                ts->ba_high = ads->AR_BaBitmapHigh;
        }

        status = READ_ONCE(ads->ds_txstatus1);
        if (status & AR_FrmXmitOK)
                ts->ts_status |= ATH9K_TX_ACKED;
        else {
                if (status & AR_ExcessiveRetries)
                        ts->ts_status |= ATH9K_TXERR_XRETRY;
                if (status & AR_Filtered)
                        ts->ts_status |= ATH9K_TXERR_FILT;
                if (status & AR_FIFOUnderrun) {
                        ts->ts_status |= ATH9K_TXERR_FIFO;
                        ath9k_hw_updatetxtriglevel(ah, true);
                }
        }
        if (status & AR_TxTimerExpired)
                ts->ts_status |= ATH9K_TXERR_TIMER_EXPIRED;
        if (status & AR_DescCfgErr)
                ts->ts_flags |= ATH9K_TX_DESC_CFG_ERR;
        if (status & AR_TxDataUnderrun) {
                ts->ts_flags |= ATH9K_TX_DATA_UNDERRUN;
                ath9k_hw_updatetxtriglevel(ah, true);
        }
        if (status & AR_TxDelimUnderrun) {
                ts->ts_flags |= ATH9K_TX_DELIM_UNDERRUN;
                ath9k_hw_updatetxtriglevel(ah, true);
        }
        ts->ts_shortretry = MS(status, AR_RTSFailCnt);
        ts->ts_longretry = MS(status, AR_DataFailCnt);
        ts->ts_virtcol = MS(status, AR_VirtRetryCnt);

        status = READ_ONCE(ads->ds_txstatus5);
        ts->ts_rssi = MS(status, AR_TxRSSICombined);
        ts->ts_rssi_ext0 = MS(status, AR_TxRSSIAnt10);
        ts->ts_rssi_ext1 = MS(status, AR_TxRSSIAnt11);
        ts->ts_rssi_ext2 = MS(status, AR_TxRSSIAnt12);

        ts->evm0 = ads->AR_TxEVM0;
        ts->evm1 = ads->AR_TxEVM1;
        ts->evm2 = ads->AR_TxEVM2;

        return 0;
}

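/* Return the packet duration programmed for the given entry of the rate series. */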
static int ar9002_hw_get_duration(struct ath_hw *ah, const void *ds, int index)
{
        struct ar5416_desc *ads = AR5416DESC(ds);

        switch (index) {
        case 0:
                return MS(READ_ONCE(ads->ds_ctl4), AR_PacketDur0);
        case 1:
                return MS(READ_ONCE(ads->ds_ctl4), AR_PacketDur1);
        case 2:
                return MS(READ_ONCE(ads->ds_ctl5), AR_PacketDur2);
        case 3:
                return MS(READ_ONCE(ads->ds_ctl5), AR_PacketDur3);
        default:
                return -1;
        }
}

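/*
 * Initialize an RX descriptor: program the buffer length, optionally request
 * an interrupt on completion and clear the RX status area.
 */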
void ath9k_hw_setuprxdesc(struct ath_hw *ah, struct ath_desc *ds,
                          u32 size, u32 flags)
{
        struct ar5416_desc *ads = AR5416DESC(ds);

        ads->ds_ctl1 = size & AR_BufLen;
        if (flags & ATH9K_RXDESC_INTREQ)
                ads->ds_ctl1 |= AR_RxIntrReq;

        memset(&ads->u.rx, 0, sizeof(ads->u.rx));
}
EXPORT_SYMBOL(ath9k_hw_setuprxdesc);

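/* Install the AR9002-family MAC callbacks into the ath_hw ops table. */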
void ar9002_hw_attach_mac_ops(struct ath_hw *ah)
{
        struct ath_hw_ops *ops = ath9k_hw_ops(ah);

        ops->rx_enable = ar9002_hw_rx_enable;
        ops->set_desc_link = ar9002_hw_set_desc_link;
        ops->get_isr = ar9002_hw_get_isr;
        ops->set_txdesc = ar9002_set_txdesc;
        ops->proc_txdesc = ar9002_hw_proc_txdesc;
        ops->get_duration = ar9002_hw_get_duration;
}