#include "../wifi.h"
#include "../pci.h"
#include "../base.h"
#include "../stats.h"
#include "reg.h"
#include "def.h"
#include "phy.h"
#include "trx.h"
#include "led.h"
#include "dm.h"
#include "fw.h"

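/* Map a mac80211 hardware queue to the firmware queue selection value
 * carried in the TX descriptor: beacons and management/control frames get
 * dedicated firmware queues, everything else keeps its skb priority.
 */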
static u8 _rtl92ee_map_hwqueue_to_fwqueue(struct sk_buff *skb, u8 hw_queue)
{
	__le16 fc = rtl_get_fc(skb);

	if (unlikely(ieee80211_is_beacon(fc)))
		return QSLT_BEACON;
	if (ieee80211_is_mgmt(fc) || ieee80211_is_ctl(fc))
		return QSLT_MGNT;

	return skb->priority;
}

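/* Parse the PHY status report appended to a received frame and fill in the
 * signal strength/quality fields of rtl_stats.  CCK frames only report a
 * coarse AGC/VGA index that is converted to dBm here; OFDM/HT frames report
 * per-path gain and per-stream EVM directly.
 */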
static void _rtl92ee_query_rxphystatus(struct ieee80211_hw *hw,
				       struct rtl_stats *pstatus, u8 *pdesc,
				       struct rx_fwinfo *p_drvinfo,
				       bool bpacket_match_bssid,
				       bool bpacket_toself,
				       bool packet_beacon)
{
	struct rtl_priv *rtlpriv = rtl_priv(hw);
	struct phy_status_rpt *p_phystrpt = (struct phy_status_rpt *)p_drvinfo;
	s8 rx_pwr_all = 0, rx_pwr[4];
	u8 rf_rx_num = 0, evm, pwdb_all;
	u8 i, max_spatial_stream;
	u32 rssi, total_rssi = 0;
	bool is_cck = pstatus->is_cck;
	u8 lan_idx, vga_idx;

	pstatus->packet_matchbssid = bpacket_match_bssid;
	pstatus->packet_toself = bpacket_toself;
	pstatus->packet_beacon = packet_beacon;
	pstatus->rx_mimo_signalquality[0] = -1;
	pstatus->rx_mimo_signalquality[1] = -1;

	if (is_cck) {
		u8 cck_highpwr;
		u8 cck_agc_rpt;

		cck_agc_rpt = p_phystrpt->cck_agc_rpt_ofdm_cfosho_a;

		/* CCK frames do not carry a per-path RSSI; derive the
		 * receive power from the AGC report (LNA + VGA index).
		 */
		cck_highpwr = (u8)rtl_get_bbreg(hw, RFPGA0_XA_HSSIPARAMETER2,
						BIT(9));

		lan_idx = ((cck_agc_rpt & 0xE0) >> 5);
		vga_idx = (cck_agc_rpt & 0x1f);
		switch (lan_idx) {
		case 7:
			if (vga_idx <= 27)
				rx_pwr_all = -100 + 2 * (27 - vga_idx);
			else
				rx_pwr_all = -100;
			break;
		case 6:
			rx_pwr_all = -48 + 2 * (2 - vga_idx);
			break;
		case 5:
			rx_pwr_all = -42 + 2 * (7 - vga_idx);
			break;
		case 4:
			rx_pwr_all = -36 + 2 * (7 - vga_idx);
			break;
		case 3:
			rx_pwr_all = -24 + 2 * (7 - vga_idx);
			break;
		case 2:
			if (cck_highpwr)
				rx_pwr_all = -12 + 2 * (5 - vga_idx);
			else
				rx_pwr_all = -6 + 2 * (5 - vga_idx);
			break;
		case 1:
			rx_pwr_all = 8 - 2 * vga_idx;
			break;
		case 0:
			rx_pwr_all = 14 - 2 * vga_idx;
			break;
		default:
			break;
		}
		rx_pwr_all += 16;
		pwdb_all = rtl_query_rxpwrpercentage(rx_pwr_all);

		if (!cck_highpwr) {
			if (pwdb_all >= 80)
				pwdb_all = ((pwdb_all - 80) << 1) +
					   ((pwdb_all - 80) >> 1) + 80;
			else if ((pwdb_all <= 78) && (pwdb_all >= 20))
				pwdb_all += 3;
			if (pwdb_all > 100)
				pwdb_all = 100;
		}

		pstatus->rx_pwdb_all = pwdb_all;
		pstatus->bt_rx_rssi_percentage = pwdb_all;
		pstatus->recvsignalpower = rx_pwr_all;

		/* CCK has no EVM report; estimate the signal quality from
		 * the PWDB and the CCK quality report instead.
		 */
		if (bpacket_match_bssid) {
			u8 sq, sq_rpt;

			if (pstatus->rx_pwdb_all > 40) {
				sq = 100;
			} else {
				sq_rpt = p_phystrpt->cck_sig_qual_ofdm_pwdb_all;
				if (sq_rpt > 64)
					sq = 0;
				else if (sq_rpt < 20)
					sq = 100;
				else
					sq = ((64 - sq_rpt) * 100) / 44;
			}

			pstatus->signalquality = sq;
			pstatus->rx_mimo_signalquality[0] = sq;
			pstatus->rx_mimo_signalquality[1] = -1;
		}
	} else {
		/* (1) Get the RSSI for every enabled RF path. */
		for (i = RF90_PATH_A; i < RF6052_MAX_PATH; i++) {
			if (rtlpriv->dm.rfpath_rxenable[i])
				rf_rx_num++;

			rx_pwr[i] = ((p_phystrpt->path_agc[i].gain & 0x3f) * 2)
				    - 110;

			pstatus->rx_pwr[i] = rx_pwr[i];

			rssi = rtl_query_rxpwrpercentage(rx_pwr[i]);
			total_rssi += rssi;

			pstatus->rx_mimo_signalstrength[i] = (u8)rssi;
		}

		/* (2) PWDB: average power over all paths as reported by the
		 * hardware, converted to a percentage.
		 */
		rx_pwr_all = ((p_phystrpt->cck_sig_qual_ofdm_pwdb_all >> 1)
			      & 0x7f) - 110;

		pwdb_all = rtl_query_rxpwrpercentage(rx_pwr_all);
		pstatus->rx_pwdb_all = pwdb_all;
		pstatus->bt_rx_rssi_percentage = pwdb_all;
		pstatus->rxpower = rx_pwr_all;
		pstatus->recvsignalpower = rx_pwr_all;

		/* (3) Get the signal quality from the per-stream EVM. */
		if (pstatus->rate >= DESC_RATEMCS8 &&
		    pstatus->rate <= DESC_RATEMCS15)
			max_spatial_stream = 2;
		else
			max_spatial_stream = 1;

		for (i = 0; i < max_spatial_stream; i++) {
			evm = rtl_evm_db_to_percentage(
						p_phystrpt->stream_rxevm[i]);

			if (bpacket_match_bssid) {
				/* Report the EVM of the first stream as the
				 * overall signal quality.
				 */
				if (i == 0)
					pstatus->signalquality = (u8)(evm &
								      0xff);
				pstatus->rx_mimo_signalquality[i] = (u8)(evm &
									 0xff);
			}
		}

		if (bpacket_match_bssid) {
			for (i = RF90_PATH_A; i <= RF90_PATH_B; i++)
				rtl_priv(hw)->dm.cfo_tail[i] =
					(int)p_phystrpt->path_cfotail[i];

			if (rtl_priv(hw)->dm.packet_count == 0xffffffff)
				rtl_priv(hw)->dm.packet_count = 0;
			else
				rtl_priv(hw)->dm.packet_count++;
		}
	}

	/* UI RSSI: translate the percentage into the scale used by the
	 * rest of the driver.
	 */
	if (is_cck)
		pstatus->signalstrength = (u8)(rtl_signal_scale_mapping(hw,
								pwdb_all));
	else if (rf_rx_num != 0)
		pstatus->signalstrength = (u8)(rtl_signal_scale_mapping(hw,
						total_rssi /= rf_rx_num));
}

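/* Locate the 802.11 header behind the RX descriptor and driver info area,
 * classify the frame (matches our BSSID / addressed to us / beacon), then
 * hand the PHY status report to _rtl92ee_query_rxphystatus() and the common
 * rtl_process_phyinfo() statistics code.
 */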
static void _rtl92ee_translate_rx_signal_stuff(struct ieee80211_hw *hw,
					       struct sk_buff *skb,
					       struct rtl_stats *pstatus,
					       u8 *pdesc,
					       struct rx_fwinfo *p_drvinfo)
{
	struct rtl_mac *mac = rtl_mac(rtl_priv(hw));
	struct rtl_efuse *rtlefuse = rtl_efuse(rtl_priv(hw));
	struct ieee80211_hdr *hdr;
	u8 *tmp_buf;
	u8 *praddr;
	u8 *psaddr;
	__le16 fc;
	bool packet_matchbssid, packet_toself, packet_beacon;

	tmp_buf = skb->data + pstatus->rx_drvinfo_size +
		  pstatus->rx_bufshift + 24;

	hdr = (struct ieee80211_hdr *)tmp_buf;
	fc = hdr->frame_control;
	praddr = hdr->addr1;
	psaddr = ieee80211_get_SA(hdr);
	ether_addr_copy(pstatus->psaddr, psaddr);

	packet_matchbssid = (!ieee80211_is_ctl(fc) &&
			     (ether_addr_equal(mac->bssid,
					       ieee80211_has_tods(fc) ?
					       hdr->addr1 :
					       ieee80211_has_fromds(fc) ?
					       hdr->addr2 : hdr->addr3)) &&
			     (!pstatus->hwerror) && (!pstatus->crc) &&
			     (!pstatus->icv));

	packet_toself = packet_matchbssid &&
			(ether_addr_equal(praddr, rtlefuse->dev_addr));

	if (ieee80211_is_beacon(fc))
		packet_beacon = true;
	else
		packet_beacon = false;

	if (packet_beacon && packet_matchbssid)
		rtl_priv(hw)->dm.dbginfo.num_qry_beacon_pkt++;

	if (packet_matchbssid && ieee80211_is_data_qos(hdr->frame_control) &&
	    !is_multicast_ether_addr(ieee80211_get_DA(hdr))) {
		struct ieee80211_qos_hdr *hdr_qos =
			(struct ieee80211_qos_hdr *)tmp_buf;
		u16 tid = le16_to_cpu(hdr_qos->qos_ctrl) & 0xf;

		if (tid != 0 && tid != 3)
			rtl_priv(hw)->dm.dbginfo.num_non_be_pkt++;
	}

	_rtl92ee_query_rxphystatus(hw, pstatus, pdesc, p_drvinfo,
				   packet_matchbssid, packet_toself,
				   packet_beacon);
	rtl_process_phyinfo(hw, tmp_buf, pstatus);
}

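/* Build the 8-byte early-mode header that is prepended to a TX frame when
 * early mode is enabled: it carries the number of queued packets and their
 * lengths (padded to 4-byte multiples) so the hardware can see the
 * transmissions that follow this one.
 */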
static void _rtl92ee_insert_emcontent(struct rtl_tcb_desc *ptcb_desc,
				      u8 *virtualaddress)
{
	u32 dwtmp = 0;

	memset(virtualaddress, 0, 8);

	SET_EARLYMODE_PKTNUM(virtualaddress, ptcb_desc->empkt_num);
	if (ptcb_desc->empkt_num == 1) {
		dwtmp = ptcb_desc->empkt_len[0];
	} else {
		dwtmp = ptcb_desc->empkt_len[0];
		dwtmp += ((dwtmp % 4) ? (4 - dwtmp % 4) : 0) + 4;
		dwtmp += ptcb_desc->empkt_len[1];
	}
	SET_EARLYMODE_LEN0(virtualaddress, dwtmp);

	if (ptcb_desc->empkt_num <= 3) {
		dwtmp = ptcb_desc->empkt_len[2];
	} else {
		dwtmp = ptcb_desc->empkt_len[2];
		dwtmp += ((dwtmp % 4) ? (4 - dwtmp % 4) : 0) + 4;
		dwtmp += ptcb_desc->empkt_len[3];
	}
	SET_EARLYMODE_LEN1(virtualaddress, dwtmp);
	if (ptcb_desc->empkt_num <= 5) {
		dwtmp = ptcb_desc->empkt_len[4];
	} else {
		dwtmp = ptcb_desc->empkt_len[4];
		dwtmp += ((dwtmp % 4) ? (4 - dwtmp % 4) : 0) + 4;
		dwtmp += ptcb_desc->empkt_len[5];
	}
	SET_EARLYMODE_LEN2_1(virtualaddress, dwtmp & 0xF);
	SET_EARLYMODE_LEN2_2(virtualaddress, dwtmp >> 4);
	if (ptcb_desc->empkt_num <= 7) {
		dwtmp = ptcb_desc->empkt_len[6];
	} else {
		dwtmp = ptcb_desc->empkt_len[6];
		dwtmp += ((dwtmp % 4) ? (4 - dwtmp % 4) : 0) + 4;
		dwtmp += ptcb_desc->empkt_len[7];
	}
	SET_EARLYMODE_LEN3(virtualaddress, dwtmp);
	if (ptcb_desc->empkt_num <= 9) {
		dwtmp = ptcb_desc->empkt_len[8];
	} else {
		dwtmp = ptcb_desc->empkt_len[8];
		dwtmp += ((dwtmp % 4) ? (4 - dwtmp % 4) : 0) + 4;
		dwtmp += ptcb_desc->empkt_len[9];
	}
	SET_EARLYMODE_LEN4(virtualaddress, dwtmp);
}

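/* Translate one RX descriptor into driver/mac80211 status: frame length,
 * CRC/ICV errors, rate, AMPDU flag, wake-on-WLAN match bits and, when a PHY
 * status report is attached, the signal information as well.
 */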
bool rtl92ee_rx_query_desc(struct ieee80211_hw *hw,
			   struct rtl_stats *status,
			   struct ieee80211_rx_status *rx_status,
			   u8 *pdesc, struct sk_buff *skb)
{
	struct rtl_priv *rtlpriv = rtl_priv(hw);
	struct rx_fwinfo *p_drvinfo;
	struct ieee80211_hdr *hdr;
	u32 phystatus = GET_RX_DESC_PHYST(pdesc);

	if (GET_RX_STATUS_DESC_RPT_SEL(pdesc) == 0)
		status->packet_report_type = NORMAL_RX;
	else
		status->packet_report_type = C2H_PACKET;
	status->length = (u16)GET_RX_DESC_PKT_LEN(pdesc);
	status->rx_drvinfo_size = (u8)GET_RX_DESC_DRV_INFO_SIZE(pdesc) *
				  RX_DRV_INFO_SIZE_UNIT;
	status->rx_bufshift = (u8)(GET_RX_DESC_SHIFT(pdesc) & 0x03);
	status->icv = (u16)GET_RX_DESC_ICV(pdesc);
	status->crc = (u16)GET_RX_DESC_CRC32(pdesc);
	status->hwerror = (status->crc | status->icv);
	status->decrypted = !GET_RX_DESC_SWDEC(pdesc);
	status->rate = (u8)GET_RX_DESC_RXMCS(pdesc);
	status->isampdu = (bool)(GET_RX_DESC_PAGGR(pdesc) == 1);
	status->timestamp_low = GET_RX_DESC_TSFL(pdesc);
	status->is_cck = RTL92EE_RX_HAL_IS_CCK_RATE(status->rate);

	status->macid = GET_RX_DESC_MACID(pdesc);
	if (GET_RX_STATUS_DESC_PATTERN_MATCH(pdesc))
		status->wake_match = BIT(2);
	else if (GET_RX_STATUS_DESC_MAGIC_MATCH(pdesc))
		status->wake_match = BIT(1);
	else if (GET_RX_STATUS_DESC_UNICAST_MATCH(pdesc))
		status->wake_match = BIT(0);
	else
		status->wake_match = 0;
	if (status->wake_match)
		RT_TRACE(rtlpriv, COMP_RXDESC, DBG_LOUD,
			 "Get Wakeup Packet!! WakeMatch=%d\n",
			 status->wake_match);
	rx_status->freq = hw->conf.chandef.chan->center_freq;
	rx_status->band = hw->conf.chandef.chan->band;

	hdr = (struct ieee80211_hdr *)(skb->data + status->rx_drvinfo_size +
				       status->rx_bufshift + 24);

	if (status->crc)
		rx_status->flag |= RX_FLAG_FAILED_FCS_CRC;

	if (status->rx_is40Mhzpacket)
		rx_status->bw = RATE_INFO_BW_40;

	if (status->is_ht)
		rx_status->encoding = RX_ENC_HT;

	rx_status->flag |= RX_FLAG_MACTIME_START;

	/* The hardware sets status->decrypted for open data and management
	 * frames as well.  Robust management frames (802.11w) are not
	 * decrypted by the hardware, so only report RX_FLAG_DECRYPTED for
	 * protected, non-robust frames and let mac80211 decrypt the rest in
	 * software.
	 */
	if (status->decrypted) {
		if ((!_ieee80211_is_robust_mgmt_frame(hdr)) &&
		    (ieee80211_has_protected(hdr->frame_control)))
			rx_status->flag |= RX_FLAG_DECRYPTED;
		else
			rx_status->flag &= ~RX_FLAG_DECRYPTED;
	}

	/* rate_idx: index of the data rate within the band's supported
	 * rates, or the MCS index when HT rates are in use.
	 */
	rx_status->rate_idx = rtlwifi_rate_mapping(hw, status->is_ht,
						   false, status->rate);

	rx_status->mactime = status->timestamp_low;
	if (phystatus) {
		p_drvinfo = (struct rx_fwinfo *)(skb->data +
						 status->rx_bufshift + 24);

		_rtl92ee_translate_rx_signal_stuff(hw, skb, status, pdesc,
						   p_drvinfo);
	}
	rx_status->signal = status->recvsignalpower + 10;
	if (status->packet_report_type == TX_REPORT2) {
		status->macid_valid_entry[0] =
			GET_RX_RPT2_DESC_MACID_VALID_1(pdesc);
		status->macid_valid_entry[1] =
			GET_RX_RPT2_DESC_MACID_VALID_2(pdesc);
	}
	return true;
}

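/* Poll the RX buffer descriptor written back by DMA until its total length
 * and first/last-segment bits become non-zero, giving up after a bounded
 * number of re-reads so a stalled DMA write-back cannot hang the caller.
 */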
void rtl92ee_rx_check_dma_ok(struct ieee80211_hw *hw, u8 *header_desc,
			     u8 queue_index)
{
	u8 first_seg = 0;
	u8 last_seg = 0;
	u16 total_len = 0;
	u16 read_cnt = 0;

	if (header_desc == NULL)
		return;

	total_len = (u16)GET_RX_BUFFER_DESC_TOTAL_LENGTH(header_desc);

	first_seg = (u8)GET_RX_BUFFER_DESC_FS(header_desc);

	last_seg = (u8)GET_RX_BUFFER_DESC_LS(header_desc);

	while (total_len == 0 && first_seg == 0 && last_seg == 0) {
		read_cnt++;
		total_len = (u16)GET_RX_BUFFER_DESC_TOTAL_LENGTH(header_desc);
		first_seg = (u8)GET_RX_BUFFER_DESC_FS(header_desc);
		last_seg = (u8)GET_RX_BUFFER_DESC_LS(header_desc);

		if (read_cnt > 20)
			break;
	}
}

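/* Read the RX queue read/write pointers from REG_RXQ_TXBD_IDX and return
 * how many received buffers are waiting to be processed.  RX handling only
 * starts once the hardware read pointer has moved for the first time.
 */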
u16 rtl92ee_rx_desc_buff_remained_cnt(struct ieee80211_hw *hw, u8 queue_index)
{
	struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
	struct rtl_priv *rtlpriv = rtl_priv(hw);
	u16 read_point = 0, write_point = 0, remind_cnt = 0;
	u32 tmp_4byte = 0;
	static bool start_rx;

	tmp_4byte = rtl_read_dword(rtlpriv, REG_RXQ_TXBD_IDX);
	read_point = (u16)((tmp_4byte >> 16) & 0x7ff);
	write_point = (u16)(tmp_4byte & 0x7ff);

	if (write_point != rtlpci->rx_ring[queue_index].next_rx_rp) {
		RT_TRACE(rtlpriv, COMP_RXDESC, DBG_DMESG,
			 "!!!write point is 0x%x, reg 0x3B4 value is 0x%x\n",
			 write_point, tmp_4byte);
		tmp_4byte = rtl_read_dword(rtlpriv, REG_RXQ_TXBD_IDX);
		read_point = (u16)((tmp_4byte >> 16) & 0x7ff);
		write_point = (u16)(tmp_4byte & 0x7ff);
	}

	if (read_point > 0)
		start_rx = true;
	if (!start_rx)
		return 0;

	remind_cnt = calc_fifo_space(read_point, write_point);

	if (remind_cnt == 0)
		return 0;

	rtlpci->rx_ring[queue_index].next_rx_rp = write_point;

	return remind_cnt;
}

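/* Return the TX buffer-descriptor index register that belongs to a given
 * software queue.  Queues without a dedicated register fall back to the BE
 * queue register.
 */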
static u16 get_desc_addr_fr_q_idx(u16 queue_index)
{
	u16 desc_address = REG_BEQ_TXBD_IDX;

	switch (queue_index) {
	case BK_QUEUE:
		desc_address = REG_BKQ_TXBD_IDX;
		break;
	case BE_QUEUE:
		desc_address = REG_BEQ_TXBD_IDX;
		break;
	case VI_QUEUE:
		desc_address = REG_VIQ_TXBD_IDX;
		break;
	case VO_QUEUE:
		desc_address = REG_VOQ_TXBD_IDX;
		break;
	case BEACON_QUEUE:
		desc_address = REG_BEQ_TXBD_IDX;
		break;
	case TXCMD_QUEUE:
		desc_address = REG_BEQ_TXBD_IDX;
		break;
	case MGNT_QUEUE:
		desc_address = REG_MGQ_TXBD_IDX;
		break;
	case HIGH_QUEUE:
		desc_address = REG_HI0Q_TXBD_IDX;
		break;
	case HCCA_QUEUE:
		desc_address = REG_BEQ_TXBD_IDX;
		break;
	default:
		break;
	}
	return desc_address;
}

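/* Refresh the cached number of free TX buffer descriptors for a queue by
 * reading the hardware read/write pointers and treating the ring as a
 * circular FIFO (via calc_fifo_space()).
 */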
u16 rtl92ee_get_available_desc(struct ieee80211_hw *hw, u8 q_idx)
{
	struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
	struct rtl_priv *rtlpriv = rtl_priv(hw);
	u16 point_diff = 0;
	u16 current_tx_read_point = 0, current_tx_write_point = 0;
	u32 tmp_4byte;

	tmp_4byte = rtl_read_dword(rtlpriv,
				   get_desc_addr_fr_q_idx(q_idx));
	current_tx_read_point = (u16)((tmp_4byte >> 16) & 0x0fff);
	current_tx_write_point = (u16)((tmp_4byte) & 0x0fff);

	point_diff = calc_fifo_space(current_tx_read_point,
				     current_tx_write_point);

	rtlpci->tx_ring[q_idx].avl_desc = point_diff;
	return point_diff;
}

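/* Fill the TX buffer descriptor (the hardware scatter list entry) for one
 * frame: segment 0 points at the 40-byte TX descriptor, segment 1 at the
 * frame payload.  PSB is the buffer size in 256-byte pages, rounded up;
 * e.g. a 1500-byte frame plus the 40-byte descriptor is 1540 bytes, which
 * needs 7 pages since 6 * 256 = 1536 < 1540.
 */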
void rtl92ee_pre_fill_tx_bd_desc(struct ieee80211_hw *hw,
				 u8 *tx_bd_desc, u8 *desc, u8 queue_index,
				 struct sk_buff *skb, dma_addr_t addr)
{
	struct rtl_priv *rtlpriv = rtl_priv(hw);
	struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
	u32 pkt_len = skb->len;
	u16 desc_size = 40;
	u32 psblen = 0;
	u16 tx_page_size = 0;
	u32 total_packet_size = 0;
	u16 current_bd_desc;
	u8 i = 0;
	u16 real_desc_size = 0x28;
	u16 append_early_mode_size = 0;
	u8 segmentnum = 1 << (RTL8192EE_SEG_NUM + 1);
	dma_addr_t desc_dma_addr;
	bool dma64 = rtlpriv->cfg->mod_params->dma64;

	tx_page_size = 2;
	current_bd_desc = rtlpci->tx_ring[queue_index].cur_tx_wp;

	total_packet_size = desc_size + pkt_len;

	if (rtlpriv->rtlhal.earlymode_enable) {
		if (queue_index < BEACON_QUEUE) {
			append_early_mode_size = 8;
			total_packet_size += append_early_mode_size;
		}
	}

	if (tx_page_size > 0) {
		psblen = (pkt_len + real_desc_size + append_early_mode_size) /
			 (tx_page_size * 128);

		if (psblen * (tx_page_size * 128) < total_packet_size)
			psblen += 1;
	}

	/* Address of the TX descriptor that segment 0 will point at. */
	desc_dma_addr = rtlpci->tx_ring[queue_index].dma +
			(current_bd_desc * TX_DESC_SIZE);

	/* Reset segment 0 and clear all remaining segments. */
	SET_TX_BUFF_DESC_LEN_0(tx_bd_desc, 0);
	SET_TX_BUFF_DESC_PSB(tx_bd_desc, 0);
	SET_TX_BUFF_DESC_OWN(tx_bd_desc, 0);

	for (i = 1; i < segmentnum; i++) {
		SET_TXBUFFER_DESC_LEN_WITH_OFFSET(tx_bd_desc, i, 0);
		SET_TXBUFFER_DESC_AMSDU_WITH_OFFSET(tx_bd_desc, i, 0);
		SET_TXBUFFER_DESC_ADD_LOW_WITH_OFFSET(tx_bd_desc, i, 0);
		SET_TXBUFFER_DESC_ADD_HIGH_WITH_OFFSET(tx_bd_desc, i, 0, dma64);
	}

	/* Clear the TX descriptor itself before it is filled in. */
	CLEAR_PCI_TX_DESC_CONTENT(desc, TX_DESC_SIZE);

	if (rtlpriv->rtlhal.earlymode_enable) {
		if (queue_index < BEACON_QUEUE) {
			/* Segment 0 also covers the 8-byte early-mode header. */
			SET_TX_BUFF_DESC_LEN_0(tx_bd_desc, desc_size + 8);
		} else {
			SET_TX_BUFF_DESC_LEN_0(tx_bd_desc, desc_size);
		}
	} else {
		SET_TX_BUFF_DESC_LEN_0(tx_bd_desc, desc_size);
	}
	SET_TX_BUFF_DESC_PSB(tx_bd_desc, psblen);
	SET_TX_BUFF_DESC_ADDR_LOW_0(tx_bd_desc, desc_dma_addr);
	SET_TX_BUFF_DESC_ADDR_HIGH_0(tx_bd_desc, ((u64)desc_dma_addr >> 32),
				     dma64);

	/* Segment 1 points at the frame payload. */
	SET_TXBUFFER_DESC_LEN_WITH_OFFSET(tx_bd_desc, 1, pkt_len);

	SET_TXBUFFER_DESC_AMSDU_WITH_OFFSET(tx_bd_desc, 1, 0);
	SET_TXBUFFER_DESC_ADD_LOW_WITH_OFFSET(tx_bd_desc, 1, addr);
	SET_TXBUFFER_DESC_ADD_HIGH_WITH_OFFSET(tx_bd_desc, 1,
					       ((u64)addr >> 32), dma64);

	SET_TX_DESC_PKT_SIZE(desc, (u16)(pkt_len));
	SET_TX_DESC_TX_BUFFER_SIZE(desc, (u16)(pkt_len));
}

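/* Fill the TX descriptor (and, when present, the TX buffer descriptor) for
 * a frame: DMA-map the skb, then program rate, RTS/CTS, bandwidth,
 * aggregation, security and queue fields from the tcb descriptor prepared
 * by rtl_get_tcb_desc().
 */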
void rtl92ee_tx_fill_desc(struct ieee80211_hw *hw,
			  struct ieee80211_hdr *hdr, u8 *pdesc_tx,
			  u8 *pbd_desc_tx,
			  struct ieee80211_tx_info *info,
			  struct ieee80211_sta *sta,
			  struct sk_buff *skb,
			  u8 hw_queue, struct rtl_tcb_desc *ptcb_desc)
{
	struct rtl_priv *rtlpriv = rtl_priv(hw);
	struct rtl_mac *mac = rtl_mac(rtl_priv(hw));
	struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
	struct rtl_hal *rtlhal = rtl_hal(rtlpriv);
	u8 *pdesc = (u8 *)pdesc_tx;
	u16 seq_number;
	__le16 fc = hdr->frame_control;
	unsigned int buf_len = 0;
	u8 fw_qsel = _rtl92ee_map_hwqueue_to_fwqueue(skb, hw_queue);
	bool firstseg = ((hdr->seq_ctrl &
			  cpu_to_le16(IEEE80211_SCTL_FRAG)) == 0);
	bool lastseg = ((hdr->frame_control &
			 cpu_to_le16(IEEE80211_FCTL_MOREFRAGS)) == 0);
	dma_addr_t mapping;
	u8 bw_40 = 0;
	u8 short_gi = 0;

	if (mac->opmode == NL80211_IFTYPE_STATION) {
		bw_40 = mac->bw_40;
	} else if (mac->opmode == NL80211_IFTYPE_AP ||
		   mac->opmode == NL80211_IFTYPE_ADHOC) {
		if (sta)
			bw_40 = sta->ht_cap.cap &
				IEEE80211_HT_CAP_SUP_WIDTH_20_40;
	}
	seq_number = (le16_to_cpu(hdr->seq_ctrl) & IEEE80211_SCTL_SEQ) >> 4;
	rtl_get_tcb_desc(hw, info, sta, skb, ptcb_desc);

	if (rtlhal->earlymode_enable) {
		skb_push(skb, EM_HDR_LEN);
		memset(skb->data, 0, EM_HDR_LEN);
	}
	buf_len = skb->len;
	mapping = pci_map_single(rtlpci->pdev, skb->data, skb->len,
				 PCI_DMA_TODEVICE);
	if (pci_dma_mapping_error(rtlpci->pdev, mapping)) {
		RT_TRACE(rtlpriv, COMP_SEND, DBG_TRACE,
			 "DMA mapping error\n");
		return;
	}

	if (pbd_desc_tx != NULL)
		rtl92ee_pre_fill_tx_bd_desc(hw, pbd_desc_tx, pdesc, hw_queue,
					    skb, mapping);

	if (ieee80211_is_nullfunc(fc) || ieee80211_is_ctl(fc)) {
		firstseg = true;
		lastseg = true;
	}
	if (firstseg) {
		if (rtlhal->earlymode_enable) {
			SET_TX_DESC_PKT_OFFSET(pdesc, 1);
			SET_TX_DESC_OFFSET(pdesc,
					   USB_HWDESC_HEADER_LEN + EM_HDR_LEN);
			if (ptcb_desc->empkt_num) {
				RT_TRACE(rtlpriv, COMP_SEND, DBG_TRACE,
					 "Insert 8 byte.pTcb->EMPktNum:%d\n",
					 ptcb_desc->empkt_num);
				_rtl92ee_insert_emcontent(ptcb_desc,
							  (u8 *)(skb->data));
			}
		} else {
			SET_TX_DESC_OFFSET(pdesc, USB_HWDESC_HEADER_LEN);
		}

		rtl_get_tx_report(ptcb_desc, pdesc, hw);

		SET_TX_DESC_TX_RATE(pdesc, ptcb_desc->hw_rate);

		if (ieee80211_is_mgmt(fc)) {
			ptcb_desc->use_driver_rate = true;
		} else {
			if (rtlpriv->ra.is_special_data) {
				ptcb_desc->use_driver_rate = true;
				SET_TX_DESC_TX_RATE(pdesc, DESC_RATE11M);
			} else {
				ptcb_desc->use_driver_rate = false;
			}
		}

		if (ptcb_desc->hw_rate > DESC_RATEMCS0)
			short_gi = (ptcb_desc->use_shortgi) ? 1 : 0;
		else
			short_gi = (ptcb_desc->use_shortpreamble) ? 1 : 0;

		if (info->flags & IEEE80211_TX_CTL_AMPDU) {
			SET_TX_DESC_AGG_ENABLE(pdesc, 1);
			SET_TX_DESC_MAX_AGG_NUM(pdesc, 0x14);
		}
		SET_TX_DESC_SEQ(pdesc, seq_number);
		SET_TX_DESC_RTS_ENABLE(pdesc,
				       ((ptcb_desc->rts_enable &&
					 !ptcb_desc->cts_enable) ? 1 : 0));
		SET_TX_DESC_HW_RTS_ENABLE(pdesc, 0);
		SET_TX_DESC_CTS2SELF(pdesc,
				     ((ptcb_desc->cts_enable) ? 1 : 0));

		SET_TX_DESC_RTS_RATE(pdesc, ptcb_desc->rts_rate);
		SET_TX_DESC_RTS_SC(pdesc, ptcb_desc->rts_sc);
		SET_TX_DESC_RTS_SHORT(pdesc,
				      ((ptcb_desc->rts_rate <= DESC_RATE54M) ?
				       (ptcb_desc->rts_use_shortpreamble ? 1 : 0) :
				       (ptcb_desc->rts_use_shortgi ? 1 : 0)));

		if (ptcb_desc->tx_enable_sw_calc_duration)
			SET_TX_DESC_NAV_USE_HDR(pdesc, 1);

		if (bw_40) {
			if (ptcb_desc->packet_bw == HT_CHANNEL_WIDTH_20_40) {
				SET_TX_DESC_DATA_BW(pdesc, 1);
				SET_TX_DESC_TX_SUB_CARRIER(pdesc, 3);
			} else {
				SET_TX_DESC_DATA_BW(pdesc, 0);
				SET_TX_DESC_TX_SUB_CARRIER(pdesc,
							   mac->cur_40_prime_sc);
			}
		} else {
			SET_TX_DESC_DATA_BW(pdesc, 0);
			SET_TX_DESC_TX_SUB_CARRIER(pdesc, 0);
		}

		SET_TX_DESC_LINIP(pdesc, 0);
		if (sta) {
			u8 ampdu_density = sta->ht_cap.ampdu_density;

			SET_TX_DESC_AMPDU_DENSITY(pdesc, ampdu_density);
		}
		if (info->control.hw_key) {
			struct ieee80211_key_conf *key = info->control.hw_key;

			switch (key->cipher) {
			case WLAN_CIPHER_SUITE_WEP40:
			case WLAN_CIPHER_SUITE_WEP104:
			case WLAN_CIPHER_SUITE_TKIP:
				SET_TX_DESC_SEC_TYPE(pdesc, 0x1);
				break;
			case WLAN_CIPHER_SUITE_CCMP:
				SET_TX_DESC_SEC_TYPE(pdesc, 0x3);
				break;
			default:
				SET_TX_DESC_SEC_TYPE(pdesc, 0x0);
				break;
			}
		}

		SET_TX_DESC_QUEUE_SEL(pdesc, fw_qsel);
		SET_TX_DESC_DATA_RATE_FB_LIMIT(pdesc, 0x1F);
		SET_TX_DESC_RTS_RATE_FB_LIMIT(pdesc, 0xF);
		SET_TX_DESC_DISABLE_FB(pdesc,
				       ptcb_desc->disable_ratefallback ? 1 : 0);
		SET_TX_DESC_USE_RATE(pdesc, ptcb_desc->use_driver_rate ? 1 : 0);

		/* When the rate is left to the firmware's rate adaptation,
		 * no fixed TX/RTS rate is programmed here; this keeps the
		 * initial rate of new packets from being overwritten by the
		 * retry rate of earlier packets.
		 */
		if (!ptcb_desc->use_driver_rate) {
			/* Intentionally left empty. */
		}
		if (ieee80211_is_data_qos(fc)) {
			if (mac->rdg_en) {
				RT_TRACE(rtlpriv, COMP_SEND, DBG_TRACE,
					 "Enable RDG function.\n");
				SET_TX_DESC_RDG_ENABLE(pdesc, 1);
				SET_TX_DESC_HTC(pdesc, 1);
			}
		}
	}

	SET_TX_DESC_FIRST_SEG(pdesc, (firstseg ? 1 : 0));
	SET_TX_DESC_LAST_SEG(pdesc, (lastseg ? 1 : 0));
	SET_TX_DESC_TX_BUFFER_ADDRESS(pdesc, mapping);
	if (rtlpriv->dm.useramask) {
		SET_TX_DESC_RATE_ID(pdesc, ptcb_desc->ratr_index);
		SET_TX_DESC_MACID(pdesc, ptcb_desc->mac_id);
	} else {
		SET_TX_DESC_RATE_ID(pdesc, 0xC + ptcb_desc->ratr_index);
		SET_TX_DESC_MACID(pdesc, ptcb_desc->ratr_index);
	}

	SET_TX_DESC_MORE_FRAG(pdesc, (lastseg ? 0 : 1));
	if (is_multicast_ether_addr(ieee80211_get_DA(hdr)) ||
	    is_broadcast_ether_addr(ieee80211_get_DA(hdr))) {
		SET_TX_DESC_BMC(pdesc, 1);
	}
	RT_TRACE(rtlpriv, COMP_SEND, DBG_TRACE, "\n");
}

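/* Fill a self-contained TX descriptor for command frames (e.g. beacon and
 * H2C firmware command packets): fixed 1M rate, beacon queue, single
 * segment, driver-selected rate.  The final descriptor is dumped for
 * debugging.
 */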
void rtl92ee_tx_fill_cmddesc(struct ieee80211_hw *hw,
			     u8 *pdesc, bool firstseg,
			     bool lastseg, struct sk_buff *skb)
{
	struct rtl_priv *rtlpriv = rtl_priv(hw);
	struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
	u8 fw_queue = QSLT_BEACON;
	dma_addr_t mapping = pci_map_single(rtlpci->pdev,
					    skb->data, skb->len,
					    PCI_DMA_TODEVICE);
	u8 txdesc_len = 40;

	if (pci_dma_mapping_error(rtlpci->pdev, mapping)) {
		RT_TRACE(rtlpriv, COMP_SEND, DBG_TRACE,
			 "DMA mapping error\n");
		return;
	}
	CLEAR_PCI_TX_DESC_CONTENT(pdesc, txdesc_len);

	if (firstseg)
		SET_TX_DESC_OFFSET(pdesc, txdesc_len);

	SET_TX_DESC_TX_RATE(pdesc, DESC_RATE1M);

	SET_TX_DESC_SEQ(pdesc, 0);

	SET_TX_DESC_LINIP(pdesc, 0);

	SET_TX_DESC_QUEUE_SEL(pdesc, fw_queue);

	SET_TX_DESC_FIRST_SEG(pdesc, 1);
	SET_TX_DESC_LAST_SEG(pdesc, 1);

	SET_TX_DESC_TX_BUFFER_SIZE(pdesc, (u16)(skb->len));

	SET_TX_DESC_TX_BUFFER_ADDRESS(pdesc, mapping);

	SET_TX_DESC_RATE_ID(pdesc, 7);
	SET_TX_DESC_MACID(pdesc, 0);

	SET_TX_DESC_OWN(pdesc, 1);

	SET_TX_DESC_PKT_SIZE((u8 *)pdesc, (u16)(skb->len));

	SET_TX_DESC_FIRST_SEG(pdesc, 1);
	SET_TX_DESC_LAST_SEG(pdesc, 1);

	SET_TX_DESC_OFFSET(pdesc, 40);

	SET_TX_DESC_USE_RATE(pdesc, 1);

	RT_PRINT_DATA(rtlpriv, COMP_CMD, DBG_LOUD,
		      "H2C Tx Cmd Content\n", pdesc, txdesc_len);
}

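/* Write one named field of a TX or RX (buffer) descriptor.  For the TX OWN
 * field this also advances the software write pointer, pushes it to the
 * queue's TXBD index register and, when the ring is getting full, refreshes
 * the cached free-descriptor count from the hardware pointers.
 */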
void rtl92ee_set_desc(struct ieee80211_hw *hw, u8 *pdesc, bool istx,
		      u8 desc_name, u8 *val)
{
	struct rtl_priv *rtlpriv = rtl_priv(hw);
	u16 cur_tx_rp = 0;
	u16 cur_tx_wp = 0;
	static bool over_run;
	u32 tmp = 0;
	u8 q_idx = *val;
	bool dma64 = rtlpriv->cfg->mod_params->dma64;

	if (istx) {
		switch (desc_name) {
		case HW_DESC_TX_NEXTDESC_ADDR:
			SET_TX_DESC_NEXT_DESC_ADDRESS(pdesc, *(u32 *)val);
			break;
		case HW_DESC_OWN: {
			struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
			struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[q_idx];
			u16 max_tx_desc = ring->entries;

			if (q_idx == BEACON_QUEUE) {
				ring->cur_tx_wp = 0;
				ring->cur_tx_rp = 0;
				SET_TX_BUFF_DESC_OWN(pdesc, 1);
				return;
			}

			ring->cur_tx_wp = ((ring->cur_tx_wp + 1) % max_tx_desc);

			if (over_run) {
				ring->cur_tx_wp = 0;
				over_run = false;
			}
			if (ring->avl_desc > 1) {
				ring->avl_desc--;

				rtl_write_word(rtlpriv,
					       get_desc_addr_fr_q_idx(q_idx),
					       ring->cur_tx_wp);
			}

			if (ring->avl_desc < (max_tx_desc - 15)) {
				u16 point_diff = 0;

				tmp =
				  rtl_read_dword(rtlpriv,
						 get_desc_addr_fr_q_idx(q_idx));
				cur_tx_rp = (u16)((tmp >> 16) & 0x0fff);
				cur_tx_wp = (u16)(tmp & 0x0fff);

				ring->cur_tx_wp = cur_tx_wp;
				ring->cur_tx_rp = cur_tx_rp;
				point_diff = ((cur_tx_rp > cur_tx_wp) ?
					      (cur_tx_rp - cur_tx_wp) :
					      (TX_DESC_NUM_92E - 1 -
					       cur_tx_wp + cur_tx_rp));

				ring->avl_desc = point_diff;
			}
		}
		break;
		}
	} else {
		switch (desc_name) {
		case HW_DESC_RX_PREPARE:
			SET_RX_BUFFER_DESC_LS(pdesc, 0);
			SET_RX_BUFFER_DESC_FS(pdesc, 0);
			SET_RX_BUFFER_DESC_TOTAL_LENGTH(pdesc, 0);

			SET_RX_BUFFER_DESC_DATA_LENGTH(pdesc,
						       MAX_RECEIVE_BUFFER_SIZE +
						       RX_DESC_SIZE);

			SET_RX_BUFFER_PHYSICAL_LOW(pdesc, (*(dma_addr_t *)val) &
						   DMA_BIT_MASK(32));
			SET_RX_BUFFER_PHYSICAL_HIGH(pdesc,
						    ((u64)(*(dma_addr_t *)val)
						     >> 32),
						    dma64);
			break;
		case HW_DESC_RXERO:
			SET_RX_DESC_EOR(pdesc, 1);
			break;
		default:
			WARN_ONCE(true,
				  "rtl8192ee: ERR rxdesc :%d not processed\n",
				  desc_name);
			break;
		}
	}
}

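/* Read one named field back from a TX or RX descriptor (OWN bit, packet
 * length or DMA buffer address), returned as a 64-bit value so that 64-bit
 * buffer addresses fit.
 */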
u64 rtl92ee_get_desc(struct ieee80211_hw *hw,
		     u8 *pdesc, bool istx, u8 desc_name)
{
	struct rtl_priv *rtlpriv = rtl_priv(hw);
	u64 ret = 0;
	bool dma64 = rtlpriv->cfg->mod_params->dma64;

	if (istx) {
		switch (desc_name) {
		case HW_DESC_OWN:
			ret = GET_TX_DESC_OWN(pdesc);
			break;
		case HW_DESC_TXBUFF_ADDR:
			ret = GET_TXBUFFER_DESC_ADDR_LOW(pdesc, 1);
			ret |= (u64)GET_TXBUFFER_DESC_ADDR_HIGH(pdesc, 1,
								dma64) << 32;
			break;
		default:
			WARN_ONCE(true,
				  "rtl8192ee: ERR txdesc :%d not processed\n",
				  desc_name);
			break;
		}
	} else {
		switch (desc_name) {
		case HW_DESC_OWN:
			ret = GET_RX_DESC_OWN(pdesc);
			break;
		case HW_DESC_RXPKT_LEN:
			ret = GET_RX_DESC_PKT_LEN(pdesc);
			break;
		case HW_DESC_RXBUFF_ADDR:
			ret = GET_RX_DESC_BUFF_ADDR(pdesc);
			break;
		default:
			WARN_ONCE(true,
				  "rtl8192ee: ERR rxdesc :%d not processed\n",
				  desc_name);
			break;
		}
	}
	return ret;
}

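/* Return true when the descriptor at @index has already been handed back by
 * the hardware, i.e. it no longer lies inside the window between the
 * hardware read pointer and the software write pointer.  Beacon queue
 * entries, driver unload and RF-off states are always treated as closed.
 */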
bool rtl92ee_is_tx_desc_closed(struct ieee80211_hw *hw, u8 hw_queue, u16 index)
{
	struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
	struct rtl_priv *rtlpriv = rtl_priv(hw);
	u16 read_point, write_point, available_desc_num;
	bool ret = false;
	static u8 stop_report_cnt;
	struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];

	{
		u16 point_diff = 0;
		u16 cur_tx_rp, cur_tx_wp;
		u32 tmpu32 = 0;

		tmpu32 =
		  rtl_read_dword(rtlpriv,
				 get_desc_addr_fr_q_idx(hw_queue));
		cur_tx_rp = (u16)((tmpu32 >> 16) & 0x0fff);
		cur_tx_wp = (u16)(tmpu32 & 0x0fff);

		ring->cur_tx_wp = cur_tx_wp;
		ring->cur_tx_rp = cur_tx_rp;
		point_diff = ((cur_tx_rp > cur_tx_wp) ?
			      (cur_tx_rp - cur_tx_wp) :
			      (TX_DESC_NUM_92E - cur_tx_wp + cur_tx_rp));

		ring->avl_desc = point_diff;
	}

	read_point = ring->cur_tx_rp;
	write_point = ring->cur_tx_wp;
	available_desc_num = ring->avl_desc;

	if (write_point > read_point) {
		if (index < write_point && index >= read_point)
			ret = false;
		else
			ret = true;
	} else if (write_point < read_point) {
		if (index > write_point && index < read_point)
			ret = true;
		else
			ret = false;
	} else {
		if (index != read_point)
			ret = true;
	}

	if (hw_queue == BEACON_QUEUE)
		ret = true;

	if (rtlpriv->rtlhal.driver_is_goingto_unload ||
	    rtlpriv->psc.rfoff_reason > RF_CHANGE_BY_PS)
		ret = true;

	if (hw_queue < BEACON_QUEUE) {
		if (!ret)
			stop_report_cnt++;
		else
			stop_report_cnt = 0;
	}

	return ret;
}

void rtl92ee_tx_polling(struct ieee80211_hw *hw, u8 hw_queue)
{
}

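/* Dispatch a received packet according to its report type: normal frames
 * are returned to the caller (result 0), C2H firmware messages are consumed
 * by rtl92ee_c2h_packet_handler() (result 1).
 */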
u32 rtl92ee_rx_command_packet(struct ieee80211_hw *hw,
			      const struct rtl_stats *status,
			      struct sk_buff *skb)
{
	u32 result = 0;
	struct rtl_priv *rtlpriv = rtl_priv(hw);

	switch (status->packet_report_type) {
	case NORMAL_RX:
		result = 0;
		break;
	case C2H_PACKET:
		rtl92ee_c2h_packet_handler(hw, skb->data, (u8)skb->len);
		result = 1;
		break;
	default:
		RT_TRACE(rtlpriv, COMP_RECV, DBG_TRACE,
			 "Unknown packet type %d\n",
			 status->packet_report_type);
		break;
	}

	return result;
}