26#include "../wifi.h"
27#include "../pci.h"
28#include "../base.h"
29#include "../stats.h"
30#include "reg.h"
31#include "def.h"
32#include "phy.h"
33#include "trx.h"
34#include "led.h"
35#include "dm.h"
36#include "fw.h"
37
38static u8 _rtl92ee_map_hwqueue_to_fwqueue(struct sk_buff *skb, u8 hw_queue)
39{
40 __le16 fc = rtl_get_fc(skb);
41
42 if (unlikely(ieee80211_is_beacon(fc)))
43 return QSLT_BEACON;
44 if (ieee80211_is_mgmt(fc) || ieee80211_is_ctl(fc))
45 return QSLT_MGNT;
46
47 return skb->priority;
48}
49
50static void _rtl92ee_query_rxphystatus(struct ieee80211_hw *hw,
51 struct rtl_stats *pstatus, u8 *pdesc,
52 struct rx_fwinfo *p_drvinfo,
53 bool bpacket_match_bssid,
54 bool bpacket_toself,
55 bool packet_beacon)
56{
57 struct rtl_priv *rtlpriv = rtl_priv(hw);
58 struct phy_status_rpt *p_phystrpt = (struct phy_status_rpt *)p_drvinfo;
	s8 rx_pwr_all = 0, rx_pwr[4];	/* dBm values are negative; use s8 */
60 u8 rf_rx_num = 0, evm, pwdb_all;
61 u8 i, max_spatial_stream;
62 u32 rssi, total_rssi = 0;
63 bool is_cck = pstatus->is_cck;
64 u8 lan_idx, vga_idx;

	/* record the packet match info for later per-packet processing */
67 pstatus->packet_matchbssid = bpacket_match_bssid;
68 pstatus->packet_toself = bpacket_toself;
69 pstatus->packet_beacon = packet_beacon;
70 pstatus->rx_mimo_signalquality[0] = -1;
71 pstatus->rx_mimo_signalquality[1] = -1;
72
73 if (is_cck) {
74 u8 cck_highpwr;
75 u8 cck_agc_rpt;
76
77 cck_agc_rpt = p_phystrpt->cck_agc_rpt_ofdm_cfosho_a;
		/* Hardware does not report RSSI for CCK rates; derive the
		 * receive power from the CCK AGC report instead.
		 */
83 cck_highpwr = (u8)rtl_get_bbreg(hw, RFPGA0_XA_HSSIPARAMETER2,
84 BIT(9));
85
86 lan_idx = ((cck_agc_rpt & 0xE0) >> 5);
87 vga_idx = (cck_agc_rpt & 0x1f);
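		/* lan_idx selects the coarse AGC gain range below; each
		 * vga_idx step within a range corresponds to 2 dB.
		 */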
88 switch (lan_idx) {
89 case 7:
90 if (vga_idx <= 27)
91 rx_pwr_all = -100 + 2 * (27 - vga_idx);
92 else
93 rx_pwr_all = -100;
94 break;
95 case 6:
96 rx_pwr_all = -48 + 2 * (2 - vga_idx);
97 break;
98 case 5:
99 rx_pwr_all = -42 + 2 * (7 - vga_idx);
100 break;
101 case 4:
102 rx_pwr_all = -36 + 2 * (7 - vga_idx);
103 break;
104 case 3:
105 rx_pwr_all = -24 + 2 * (7 - vga_idx);
106 break;
107 case 2:
108 if (cck_highpwr)
109 rx_pwr_all = -12 + 2 * (5 - vga_idx);
110 else
111 rx_pwr_all = -6 + 2 * (5 - vga_idx);
112 break;
113 case 1:
114 rx_pwr_all = 8 - 2 * vga_idx;
115 break;
116 case 0:
117 rx_pwr_all = 14 - 2 * vga_idx;
118 break;
119 default:
120 break;
121 }
122 rx_pwr_all += 16;
123 pwdb_all = rtl_query_rxpwrpercentage(rx_pwr_all);
124
125 if (!cck_highpwr) {
126 if (pwdb_all >= 80)
127 pwdb_all = ((pwdb_all - 80) << 1) +
128 ((pwdb_all - 80) >> 1) + 80;
129 else if ((pwdb_all <= 78) && (pwdb_all >= 20))
130 pwdb_all += 3;
131 if (pwdb_all > 100)
132 pwdb_all = 100;
133 }
134
135 pstatus->rx_pwdb_all = pwdb_all;
136 pstatus->bt_rx_rssi_percentage = pwdb_all;
137 pstatus->recvsignalpower = rx_pwr_all;

		/* Signal quality is only evaluated for frames from our BSSID */
140 if (bpacket_match_bssid) {
141 u8 sq, sq_rpt;
142
143 if (pstatus->rx_pwdb_all > 40) {
144 sq = 100;
145 } else {
146 sq_rpt = p_phystrpt->cck_sig_qual_ofdm_pwdb_all;
147 if (sq_rpt > 64)
148 sq = 0;
149 else if (sq_rpt < 20)
150 sq = 100;
151 else
152 sq = ((64 - sq_rpt) * 100) / 44;
153 }
154
155 pstatus->signalquality = sq;
156 pstatus->rx_mimo_signalquality[0] = sq;
157 pstatus->rx_mimo_signalquality[1] = -1;
158 }
159 } else {
		/* OFDM/HT rates: hardware reports RSSI per RF path */
161 for (i = RF90_PATH_A; i < RF6052_MAX_PATH; i++) {
162
163 if (rtlpriv->dm.rfpath_rxenable[i])
164 rf_rx_num++;
165
166 rx_pwr[i] = ((p_phystrpt->path_agc[i].gain & 0x3f) * 2)
167 - 110;
168
169 pstatus->rx_pwr[i] = rx_pwr[i];
170
171 rssi = rtl_query_rxpwrpercentage(rx_pwr[i]);
172 total_rssi += rssi;
173
174 pstatus->rx_mimo_signalstrength[i] = (u8)rssi;
175 }
176
		/* Overall PWDB reported by hardware, used for rate adaptation */
180 rx_pwr_all = ((p_phystrpt->cck_sig_qual_ofdm_pwdb_all >> 1)
181 & 0x7f) - 110;
182
183 pwdb_all = rtl_query_rxpwrpercentage(rx_pwr_all);
184 pstatus->rx_pwdb_all = pwdb_all;
185 pstatus->bt_rx_rssi_percentage = pwdb_all;
186 pstatus->rxpower = rx_pwr_all;
187 pstatus->recvsignalpower = rx_pwr_all;

		/* EVM is reported per spatial stream for HT rates */
190 if (pstatus->rate >= DESC_RATEMCS8 &&
191 pstatus->rate <= DESC_RATEMCS15)
192 max_spatial_stream = 2;
193 else
194 max_spatial_stream = 1;
195
196 for (i = 0; i < max_spatial_stream; i++) {
197 evm = rtl_evm_db_to_percentage(
198 p_phystrpt->stream_rxevm[i]);
199
200 if (bpacket_match_bssid) {
				/* report the first stream's EVM as the
				 * overall signal quality
				 */
204 if (i == 0)
205 pstatus->signalquality = (u8)(evm &
206 0xff);
207 pstatus->rx_mimo_signalquality[i] = (u8)(evm &
208 0xff);
209 }
210 }
211
212 if (bpacket_match_bssid) {
213 for (i = RF90_PATH_A; i <= RF90_PATH_B; i++)
214 rtl_priv(hw)->dm.cfo_tail[i] =
215 (int)p_phystrpt->path_cfotail[i];
216
217 if (rtl_priv(hw)->dm.packet_count == 0xffffffff)
218 rtl_priv(hw)->dm.packet_count = 0;
219 else
220 rtl_priv(hw)->dm.packet_count++;
221 }
222 }
223
	/* Map the result to a 0-100 signal strength for the UI */
227 if (is_cck)
228 pstatus->signalstrength = (u8)(rtl_signal_scale_mapping(hw,
229 pwdb_all));
230 else if (rf_rx_num != 0)
231 pstatus->signalstrength = (u8)(rtl_signal_scale_mapping(hw,
232 total_rssi /= rf_rx_num));
233}
234
235static void _rtl92ee_translate_rx_signal_stuff(struct ieee80211_hw *hw,
236 struct sk_buff *skb,
237 struct rtl_stats *pstatus,
238 u8 *pdesc,
239 struct rx_fwinfo *p_drvinfo)
240{
241 struct rtl_mac *mac = rtl_mac(rtl_priv(hw));
242 struct rtl_efuse *rtlefuse = rtl_efuse(rtl_priv(hw));
243 struct ieee80211_hdr *hdr;
244 u8 *tmp_buf;
245 u8 *praddr;
246 u8 *psaddr;
247 __le16 fc;
248 bool packet_matchbssid, packet_toself, packet_beacon;
249
250 tmp_buf = skb->data + pstatus->rx_drvinfo_size +
251 pstatus->rx_bufshift + 24;
252
253 hdr = (struct ieee80211_hdr *)tmp_buf;
254 fc = hdr->frame_control;
255 praddr = hdr->addr1;
256 psaddr = ieee80211_get_SA(hdr);
257 ether_addr_copy(pstatus->psaddr, psaddr);
258
259 packet_matchbssid = (!ieee80211_is_ctl(fc) &&
260 (ether_addr_equal(mac->bssid,
261 ieee80211_has_tods(fc) ?
262 hdr->addr1 :
263 ieee80211_has_fromds(fc) ?
264 hdr->addr2 : hdr->addr3)) &&
265 (!pstatus->hwerror) && (!pstatus->crc) &&
266 (!pstatus->icv));
267
268 packet_toself = packet_matchbssid &&
269 (ether_addr_equal(praddr, rtlefuse->dev_addr));
270
271 if (ieee80211_is_beacon(fc))
272 packet_beacon = true;
273 else
274 packet_beacon = false;
275
276 if (packet_beacon && packet_matchbssid)
277 rtl_priv(hw)->dm.dbginfo.num_qry_beacon_pkt++;
278
279 if (packet_matchbssid && ieee80211_is_data_qos(hdr->frame_control) &&
280 !is_multicast_ether_addr(ieee80211_get_DA(hdr))) {
281 struct ieee80211_qos_hdr *hdr_qos =
282 (struct ieee80211_qos_hdr *)tmp_buf;
283 u16 tid = le16_to_cpu(hdr_qos->qos_ctrl) & 0xf;
284
285 if (tid != 0 && tid != 3)
286 rtl_priv(hw)->dm.dbginfo.num_non_be_pkt++;
287 }
288
289 _rtl92ee_query_rxphystatus(hw, pstatus, pdesc, p_drvinfo,
290 packet_matchbssid, packet_toself,
291 packet_beacon);
292 rtl_process_phyinfo(hw, tmp_buf, pstatus);
293}
294
295static void _rtl92ee_insert_emcontent(struct rtl_tcb_desc *ptcb_desc,
296 u8 *virtualaddress)
297{
298 u32 dwtmp = 0;
299
300 memset(virtualaddress, 0, 8);
301
302 SET_EARLYMODE_PKTNUM(virtualaddress, ptcb_desc->empkt_num);
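
	/* Each LEN field aggregates a pair of early-mode packets: the first
	 * length is rounded up to a 4-byte boundary, 4 bytes of header are
	 * added, and the following packet's length is accumulated.
	 */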
303 if (ptcb_desc->empkt_num == 1) {
304 dwtmp = ptcb_desc->empkt_len[0];
305 } else {
306 dwtmp = ptcb_desc->empkt_len[0];
307 dwtmp += ((dwtmp % 4) ? (4 - dwtmp % 4) : 0) + 4;
308 dwtmp += ptcb_desc->empkt_len[1];
309 }
310 SET_EARLYMODE_LEN0(virtualaddress, dwtmp);
311
312 if (ptcb_desc->empkt_num <= 3) {
313 dwtmp = ptcb_desc->empkt_len[2];
314 } else {
315 dwtmp = ptcb_desc->empkt_len[2];
316 dwtmp += ((dwtmp % 4) ? (4 - dwtmp % 4) : 0) + 4;
317 dwtmp += ptcb_desc->empkt_len[3];
318 }
319 SET_EARLYMODE_LEN1(virtualaddress, dwtmp);
320 if (ptcb_desc->empkt_num <= 5) {
321 dwtmp = ptcb_desc->empkt_len[4];
322 } else {
323 dwtmp = ptcb_desc->empkt_len[4];
324 dwtmp += ((dwtmp % 4) ? (4 - dwtmp % 4) : 0) + 4;
325 dwtmp += ptcb_desc->empkt_len[5];
326 }
327 SET_EARLYMODE_LEN2_1(virtualaddress, dwtmp & 0xF);
328 SET_EARLYMODE_LEN2_2(virtualaddress, dwtmp >> 4);
329 if (ptcb_desc->empkt_num <= 7) {
330 dwtmp = ptcb_desc->empkt_len[6];
331 } else {
332 dwtmp = ptcb_desc->empkt_len[6];
333 dwtmp += ((dwtmp % 4) ? (4 - dwtmp % 4) : 0) + 4;
334 dwtmp += ptcb_desc->empkt_len[7];
335 }
336 SET_EARLYMODE_LEN3(virtualaddress, dwtmp);
337 if (ptcb_desc->empkt_num <= 9) {
338 dwtmp = ptcb_desc->empkt_len[8];
339 } else {
340 dwtmp = ptcb_desc->empkt_len[8];
341 dwtmp += ((dwtmp % 4) ? (4 - dwtmp % 4) : 0) + 4;
342 dwtmp += ptcb_desc->empkt_len[9];
343 }
344 SET_EARLYMODE_LEN4(virtualaddress, dwtmp);
345}
346
347bool rtl92ee_rx_query_desc(struct ieee80211_hw *hw,
348 struct rtl_stats *status,
349 struct ieee80211_rx_status *rx_status,
350 u8 *pdesc, struct sk_buff *skb)
351{
352 struct rtl_priv *rtlpriv = rtl_priv(hw);
353 struct rx_fwinfo *p_drvinfo;
354 struct ieee80211_hdr *hdr;
355 u32 phystatus = GET_RX_DESC_PHYST(pdesc);
356
357 if (GET_RX_STATUS_DESC_RPT_SEL(pdesc) == 0)
358 status->packet_report_type = NORMAL_RX;
359 else
360 status->packet_report_type = C2H_PACKET;
361 status->length = (u16)GET_RX_DESC_PKT_LEN(pdesc);
362 status->rx_drvinfo_size = (u8)GET_RX_DESC_DRV_INFO_SIZE(pdesc) *
363 RX_DRV_INFO_SIZE_UNIT;
364 status->rx_bufshift = (u8)(GET_RX_DESC_SHIFT(pdesc) & 0x03);
365 status->icv = (u16)GET_RX_DESC_ICV(pdesc);
366 status->crc = (u16)GET_RX_DESC_CRC32(pdesc);
367 status->hwerror = (status->crc | status->icv);
368 status->decrypted = !GET_RX_DESC_SWDEC(pdesc);
369 status->rate = (u8)GET_RX_DESC_RXMCS(pdesc);
370 status->isampdu = (bool)(GET_RX_DESC_PAGGR(pdesc) == 1);
371 status->timestamp_low = GET_RX_DESC_TSFL(pdesc);
372 status->is_cck = RTL92EE_RX_HAL_IS_CCK_RATE(status->rate);
373
374 status->macid = GET_RX_DESC_MACID(pdesc);
	if (GET_RX_STATUS_DESC_PATTERN_MATCH(pdesc))
		status->wake_match = BIT(2);
	else if (GET_RX_STATUS_DESC_MAGIC_MATCH(pdesc))
		status->wake_match = BIT(1);
379 else if (GET_RX_STATUS_DESC_UNICAST_MATCH(pdesc))
380 status->wake_match = BIT(0);
381 else
382 status->wake_match = 0;
383 if (status->wake_match)
384 RT_TRACE(rtlpriv, COMP_RXDESC, DBG_LOUD,
			 "Get Wakeup Packet!! WakeMatch=%d\n",
386 status->wake_match);
387 rx_status->freq = hw->conf.chandef.chan->center_freq;
388 rx_status->band = hw->conf.chandef.chan->band;
389
390 hdr = (struct ieee80211_hdr *)(skb->data + status->rx_drvinfo_size +
391 status->rx_bufshift + 24);
392
393 if (status->crc)
394 rx_status->flag |= RX_FLAG_FAILED_FCS_CRC;
395
396 if (status->rx_is40Mhzpacket)
397 rx_status->flag |= RX_FLAG_40MHZ;
398
399 if (status->is_ht)
400 rx_status->flag |= RX_FLAG_HT;
401
402 rx_status->flag |= RX_FLAG_MACTIME_START;
403
	/* Hardware sets status->decrypted when it has handled decryption for
	 * the frame.  Robust management frames (802.11w) are not decrypted
	 * by hardware, so the RX_FLAG_DECRYPTED flag is kept clear for
	 * protected robust management frames and mac80211 decrypts them.
	 */
412 if (status->decrypted) {
413 if ((!_ieee80211_is_robust_mgmt_frame(hdr)) &&
414 (ieee80211_has_protected(hdr->frame_control)))
415 rx_status->flag |= RX_FLAG_DECRYPTED;
416 else
417 rx_status->flag &= ~RX_FLAG_DECRYPTED;
418 }
419
	/* rate_idx is the mac80211 rate-table index; rtlwifi_rate_mapping()
	 * converts the raw descriptor rate, taking HT (MCS) rates into
	 * account.
	 */
425 rx_status->rate_idx = rtlwifi_rate_mapping(hw, status->is_ht,
426 false, status->rate);
427
428 rx_status->mactime = status->timestamp_low;
429 if (phystatus) {
430 p_drvinfo = (struct rx_fwinfo *)(skb->data +
431 status->rx_bufshift + 24);
432
433 _rtl92ee_translate_rx_signal_stuff(hw, skb, status, pdesc,
434 p_drvinfo);
435 }
436 rx_status->signal = status->recvsignalpower + 10;
437 if (status->packet_report_type == TX_REPORT2) {
438 status->macid_valid_entry[0] =
439 GET_RX_RPT2_DESC_MACID_VALID_1(pdesc);
440 status->macid_valid_entry[1] =
441 GET_RX_RPT2_DESC_MACID_VALID_2(pdesc);
442 }
443 return true;
444}
445
447void rtl92ee_rx_check_dma_ok(struct ieee80211_hw *hw, u8 *header_desc,
448 u8 queue_index)
449{
450 u8 first_seg = 0;
451 u8 last_seg = 0;
452 u16 total_len = 0;
453 u16 read_cnt = 0;
454
455 if (header_desc == NULL)
456 return;
457
458 total_len = (u16)GET_RX_BUFFER_DESC_TOTAL_LENGTH(header_desc);
459
460 first_seg = (u8)GET_RX_BUFFER_DESC_FS(header_desc);
461
462 last_seg = (u8)GET_RX_BUFFER_DESC_LS(header_desc);
463
464 while (total_len == 0 && first_seg == 0 && last_seg == 0) {
465 read_cnt++;
466 total_len = (u16)GET_RX_BUFFER_DESC_TOTAL_LENGTH(header_desc);
467 first_seg = (u8)GET_RX_BUFFER_DESC_FS(header_desc);
468 last_seg = (u8)GET_RX_BUFFER_DESC_LS(header_desc);
469
470 if (read_cnt > 20)
471 break;
472 }
473}
474
475u16 rtl92ee_rx_desc_buff_remained_cnt(struct ieee80211_hw *hw, u8 queue_index)
476{
477 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
478 struct rtl_priv *rtlpriv = rtl_priv(hw);
479 u16 read_point = 0, write_point = 0, remind_cnt = 0;
480 u32 tmp_4byte = 0;
481 static u16 last_read_point;
482 static bool start_rx;
483
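	/* REG_RXQ_TXBD_IDX: bits [26:16] hold the read pointer and
	 * bits [10:0] the write pointer of the RX buffer-descriptor ring.
	 */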
484 tmp_4byte = rtl_read_dword(rtlpriv, REG_RXQ_TXBD_IDX);
485 read_point = (u16)((tmp_4byte>>16) & 0x7ff);
486 write_point = (u16)(tmp_4byte & 0x7ff);
487
488 if (write_point != rtlpci->rx_ring[queue_index].next_rx_rp) {
489 RT_TRACE(rtlpriv, COMP_RXDESC, DBG_DMESG,
490 "!!!write point is 0x%x, reg 0x3B4 value is 0x%x\n",
491 write_point, tmp_4byte);
492 tmp_4byte = rtl_read_dword(rtlpriv, REG_RXQ_TXBD_IDX);
493 read_point = (u16)((tmp_4byte>>16) & 0x7ff);
494 write_point = (u16)(tmp_4byte & 0x7ff);
495 }
496
497 if (read_point > 0)
498 start_rx = true;
499 if (!start_rx)
500 return 0;
501
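	/* Number of buffer descriptors the hardware has filled but the
	 * driver has not consumed yet.
	 */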
502 remind_cnt = calc_fifo_space(read_point, write_point);
503
504 if (remind_cnt == 0)
505 return 0;
506
507 rtlpci->rx_ring[queue_index].next_rx_rp = write_point;
508
509 last_read_point = read_point;
510 return remind_cnt;
511}
512
513static u16 get_desc_addr_fr_q_idx(u16 queue_index)
514{
515 u16 desc_address = REG_BEQ_TXBD_IDX;
516
517 switch (queue_index) {
518 case BK_QUEUE:
519 desc_address = REG_BKQ_TXBD_IDX;
520 break;
521 case BE_QUEUE:
522 desc_address = REG_BEQ_TXBD_IDX;
523 break;
524 case VI_QUEUE:
525 desc_address = REG_VIQ_TXBD_IDX;
526 break;
527 case VO_QUEUE:
528 desc_address = REG_VOQ_TXBD_IDX;
529 break;
530 case BEACON_QUEUE:
531 desc_address = REG_BEQ_TXBD_IDX;
532 break;
533 case TXCMD_QUEUE:
534 desc_address = REG_BEQ_TXBD_IDX;
535 break;
536 case MGNT_QUEUE:
537 desc_address = REG_MGQ_TXBD_IDX;
538 break;
539 case HIGH_QUEUE:
540 desc_address = REG_HI0Q_TXBD_IDX;
541 break;
542 case HCCA_QUEUE:
543 desc_address = REG_BEQ_TXBD_IDX;
544 break;
545 default:
546 break;
547 }
548 return desc_address;
549}
550
551u16 rtl92ee_get_available_desc(struct ieee80211_hw *hw, u8 q_idx)
552{
553 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
554 struct rtl_priv *rtlpriv = rtl_priv(hw);
555 u16 point_diff = 0;
556 u16 current_tx_read_point = 0, current_tx_write_point = 0;
557 u32 tmp_4byte;
558
559 tmp_4byte = rtl_read_dword(rtlpriv,
560 get_desc_addr_fr_q_idx(q_idx));
561 current_tx_read_point = (u16)((tmp_4byte >> 16) & 0x0fff);
562 current_tx_write_point = (u16)((tmp_4byte) & 0x0fff);
563
564 point_diff = calc_fifo_space(current_tx_read_point,
565 current_tx_write_point);
566
567 rtlpci->tx_ring[q_idx].avl_desc = point_diff;
568 return point_diff;
569}
570
571void rtl92ee_pre_fill_tx_bd_desc(struct ieee80211_hw *hw,
572 u8 *tx_bd_desc, u8 *desc, u8 queue_index,
573 struct sk_buff *skb, dma_addr_t addr)
574{
575 struct rtl_priv *rtlpriv = rtl_priv(hw);
576 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
577 u32 pkt_len = skb->len;
578 u16 desc_size = 40;
579 u32 psblen = 0;
580 u16 tx_page_size = 0;
581 u32 total_packet_size = 0;
582 u16 current_bd_desc;
583 u8 i = 0;
584 u16 real_desc_size = 0x28;
585 u16 append_early_mode_size = 0;
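	/* the number of address segments per TX buffer descriptor depends on
	 * RTL8192EE_SEG_NUM: 2, 4 or 8
	 */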
586#if (RTL8192EE_SEG_NUM == 0)
587 u8 segmentnum = 2;
588#elif (RTL8192EE_SEG_NUM == 1)
589 u8 segmentnum = 4;
590#elif (RTL8192EE_SEG_NUM == 2)
591 u8 segmentnum = 8;
592#endif
593
594 tx_page_size = 2;
595 current_bd_desc = rtlpci->tx_ring[queue_index].cur_tx_wp;
596
597 total_packet_size = desc_size+pkt_len;
598
599 if (rtlpriv->rtlhal.earlymode_enable) {
600 if (queue_index < BEACON_QUEUE) {
601 append_early_mode_size = 8;
602 total_packet_size += append_early_mode_size;
603 }
604 }
605
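	/* psblen: total packet size expressed in (tx_page_size * 128)-byte
	 * pages, rounded up.
	 */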
606 if (tx_page_size > 0) {
607 psblen = (pkt_len + real_desc_size + append_early_mode_size) /
608 (tx_page_size * 128);
609
610 if (psblen * (tx_page_size * 128) < total_packet_size)
611 psblen += 1;
612 }
613
	/* Reset the TX buffer descriptor fields before filling them */
615 SET_TX_BUFF_DESC_LEN_0(tx_bd_desc, 0);
616 SET_TX_BUFF_DESC_PSB(tx_bd_desc, 0);
617 SET_TX_BUFF_DESC_OWN(tx_bd_desc, 0);
618
619 for (i = 1; i < segmentnum; i++) {
620 SET_TXBUFFER_DESC_LEN_WITH_OFFSET(tx_bd_desc, i, 0);
621 SET_TXBUFFER_DESC_AMSDU_WITH_OFFSET(tx_bd_desc, i, 0);
622 SET_TXBUFFER_DESC_ADD_LOW_WITH_OFFSET(tx_bd_desc, i, 0);
623#if (DMA_IS_64BIT == 1)
624 SET_TXBUFFER_DESC_ADD_HIGT_WITH_OFFSET(tx_bd_desc, i, 0);
625#endif
626 }
627 SET_TX_BUFF_DESC_LEN_1(tx_bd_desc, 0);
628 SET_TX_BUFF_DESC_AMSDU_1(tx_bd_desc, 0);
629
630 SET_TX_BUFF_DESC_LEN_2(tx_bd_desc, 0);
631 SET_TX_BUFF_DESC_AMSDU_2(tx_bd_desc, 0);
632 SET_TX_BUFF_DESC_LEN_3(tx_bd_desc, 0);
633 SET_TX_BUFF_DESC_AMSDU_3(tx_bd_desc, 0);
634
635 CLEAR_PCI_TX_DESC_CONTENT(desc, TX_DESC_SIZE);
636
637 if (rtlpriv->rtlhal.earlymode_enable) {
638 if (queue_index < BEACON_QUEUE) {
			/* reserve 8 bytes for the early-mode header */
640 SET_TX_BUFF_DESC_LEN_0(tx_bd_desc, desc_size + 8);
641 } else {
642 SET_TX_BUFF_DESC_LEN_0(tx_bd_desc, desc_size);
643 }
644 } else {
645 SET_TX_BUFF_DESC_LEN_0(tx_bd_desc, desc_size);
646 }
647 SET_TX_BUFF_DESC_PSB(tx_bd_desc, psblen);
648 SET_TX_BUFF_DESC_ADDR_LOW_0(tx_bd_desc,
649 rtlpci->tx_ring[queue_index].dma +
650 (current_bd_desc * TX_DESC_SIZE));
651
652 SET_TXBUFFER_DESC_LEN_WITH_OFFSET(tx_bd_desc, 1, pkt_len);
653
654 SET_TXBUFFER_DESC_AMSDU_WITH_OFFSET(tx_bd_desc, 1, 0);
655 SET_TXBUFFER_DESC_ADD_LOW_WITH_OFFSET(tx_bd_desc, 1, addr);
656
657 SET_TX_DESC_PKT_SIZE(desc, (u16)(pkt_len));
658 SET_TX_DESC_TX_BUFFER_SIZE(desc, (u16)(pkt_len));
659}
660
661void rtl92ee_tx_fill_desc(struct ieee80211_hw *hw,
662 struct ieee80211_hdr *hdr, u8 *pdesc_tx,
663 u8 *pbd_desc_tx,
664 struct ieee80211_tx_info *info,
665 struct ieee80211_sta *sta,
666 struct sk_buff *skb,
667 u8 hw_queue, struct rtl_tcb_desc *ptcb_desc)
668{
669 struct rtl_priv *rtlpriv = rtl_priv(hw);
670 struct rtl_mac *mac = rtl_mac(rtl_priv(hw));
671 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
672 struct rtl_hal *rtlhal = rtl_hal(rtlpriv);
673 u8 *pdesc = (u8 *)pdesc_tx;
674 u16 seq_number;
675 __le16 fc = hdr->frame_control;
676 unsigned int buf_len = 0;
677 u8 fw_qsel = _rtl92ee_map_hwqueue_to_fwqueue(skb, hw_queue);
678 bool firstseg = ((hdr->seq_ctrl &
679 cpu_to_le16(IEEE80211_SCTL_FRAG)) == 0);
680 bool lastseg = ((hdr->frame_control &
681 cpu_to_le16(IEEE80211_FCTL_MOREFRAGS)) == 0);
682 dma_addr_t mapping;
683 u8 bw_40 = 0;
684 u8 short_gi = 0;
685
686 if (mac->opmode == NL80211_IFTYPE_STATION) {
687 bw_40 = mac->bw_40;
688 } else if (mac->opmode == NL80211_IFTYPE_AP ||
689 mac->opmode == NL80211_IFTYPE_ADHOC) {
690 if (sta)
691 bw_40 = sta->ht_cap.cap &
692 IEEE80211_HT_CAP_SUP_WIDTH_20_40;
693 }
694 seq_number = (le16_to_cpu(hdr->seq_ctrl) & IEEE80211_SCTL_SEQ) >> 4;
695 rtl_get_tcb_desc(hw, info, sta, skb, ptcb_desc);
696
697 if (rtlhal->earlymode_enable) {
698 skb_push(skb, EM_HDR_LEN);
699 memset(skb->data, 0, EM_HDR_LEN);
700 }
701 buf_len = skb->len;
702 mapping = pci_map_single(rtlpci->pdev, skb->data, skb->len,
703 PCI_DMA_TODEVICE);
704 if (pci_dma_mapping_error(rtlpci->pdev, mapping)) {
705 RT_TRACE(rtlpriv, COMP_SEND, DBG_TRACE,
			 "DMA mapping error\n");
707 return;
708 }
709
710 if (pbd_desc_tx != NULL)
711 rtl92ee_pre_fill_tx_bd_desc(hw, pbd_desc_tx, pdesc, hw_queue,
712 skb, mapping);
713
714 if (ieee80211_is_nullfunc(fc) || ieee80211_is_ctl(fc)) {
715 firstseg = true;
716 lastseg = true;
717 }
718 if (firstseg) {
719 if (rtlhal->earlymode_enable) {
720 SET_TX_DESC_PKT_OFFSET(pdesc, 1);
721 SET_TX_DESC_OFFSET(pdesc,
722 USB_HWDESC_HEADER_LEN + EM_HDR_LEN);
723 if (ptcb_desc->empkt_num) {
724 RT_TRACE(rtlpriv, COMP_SEND, DBG_TRACE,
725 "Insert 8 byte.pTcb->EMPktNum:%d\n",
726 ptcb_desc->empkt_num);
727 _rtl92ee_insert_emcontent(ptcb_desc,
728 (u8 *)(skb->data));
729 }
730 } else {
731 SET_TX_DESC_OFFSET(pdesc, USB_HWDESC_HEADER_LEN);
732 }
733
734 SET_TX_DESC_TX_RATE(pdesc, ptcb_desc->hw_rate);
735
736 if (ieee80211_is_mgmt(fc)) {
737 ptcb_desc->use_driver_rate = true;
738 } else {
739 if (rtlpriv->ra.is_special_data) {
740 ptcb_desc->use_driver_rate = true;
741 SET_TX_DESC_TX_RATE(pdesc, DESC_RATE11M);
742 } else {
743 ptcb_desc->use_driver_rate = false;
744 }
745 }
746
747 if (ptcb_desc->hw_rate > DESC_RATEMCS0)
748 short_gi = (ptcb_desc->use_shortgi) ? 1 : 0;
749 else
750 short_gi = (ptcb_desc->use_shortpreamble) ? 1 : 0;
751
752 if (info->flags & IEEE80211_TX_CTL_AMPDU) {
753 SET_TX_DESC_AGG_ENABLE(pdesc, 1);
754 SET_TX_DESC_MAX_AGG_NUM(pdesc, 0x14);
755 }
756 SET_TX_DESC_SEQ(pdesc, seq_number);
757 SET_TX_DESC_RTS_ENABLE(pdesc,
758 ((ptcb_desc->rts_enable &&
759 !ptcb_desc->cts_enable) ? 1 : 0));
760 SET_TX_DESC_HW_RTS_ENABLE(pdesc, 0);
761 SET_TX_DESC_CTS2SELF(pdesc,
762 ((ptcb_desc->cts_enable) ? 1 : 0));
763
764 SET_TX_DESC_RTS_RATE(pdesc, ptcb_desc->rts_rate);
765 SET_TX_DESC_RTS_SC(pdesc, ptcb_desc->rts_sc);
766 SET_TX_DESC_RTS_SHORT(pdesc,
767 ((ptcb_desc->rts_rate <= DESC_RATE54M) ?
768 (ptcb_desc->rts_use_shortpreamble ? 1 : 0) :
769 (ptcb_desc->rts_use_shortgi ? 1 : 0)));
770
771 if (ptcb_desc->tx_enable_sw_calc_duration)
772 SET_TX_DESC_NAV_USE_HDR(pdesc, 1);
773
774 if (bw_40) {
775 if (ptcb_desc->packet_bw == HT_CHANNEL_WIDTH_20_40) {
776 SET_TX_DESC_DATA_BW(pdesc, 1);
777 SET_TX_DESC_TX_SUB_CARRIER(pdesc, 3);
778 } else {
779 SET_TX_DESC_DATA_BW(pdesc, 0);
780 SET_TX_DESC_TX_SUB_CARRIER(pdesc,
781 mac->cur_40_prime_sc);
782 }
783 } else {
784 SET_TX_DESC_DATA_BW(pdesc, 0);
785 SET_TX_DESC_TX_SUB_CARRIER(pdesc, 0);
786 }
787
788 SET_TX_DESC_LINIP(pdesc, 0);
789 if (sta) {
790 u8 ampdu_density = sta->ht_cap.ampdu_density;
791
792 SET_TX_DESC_AMPDU_DENSITY(pdesc, ampdu_density);
793 }
794 if (info->control.hw_key) {
795 struct ieee80211_key_conf *key = info->control.hw_key;
796
797 switch (key->cipher) {
798 case WLAN_CIPHER_SUITE_WEP40:
799 case WLAN_CIPHER_SUITE_WEP104:
800 case WLAN_CIPHER_SUITE_TKIP:
801 SET_TX_DESC_SEC_TYPE(pdesc, 0x1);
802 break;
803 case WLAN_CIPHER_SUITE_CCMP:
804 SET_TX_DESC_SEC_TYPE(pdesc, 0x3);
805 break;
806 default:
807 SET_TX_DESC_SEC_TYPE(pdesc, 0x0);
808 break;
809 }
810 }
811
812 SET_TX_DESC_QUEUE_SEL(pdesc, fw_qsel);
813 SET_TX_DESC_DATA_RATE_FB_LIMIT(pdesc, 0x1F);
814 SET_TX_DESC_RTS_RATE_FB_LIMIT(pdesc, 0xF);
815 SET_TX_DESC_DISABLE_FB(pdesc,
816 ptcb_desc->disable_ratefallback ? 1 : 0);
817 SET_TX_DESC_USE_RATE(pdesc, ptcb_desc->use_driver_rate ? 1 : 0);

		if (!ptcb_desc->use_driver_rate) {
			/* leave rate selection to the firmware rate-adaptive
			 * mechanism; the rate fields written above only act
			 * as initial values
			 */
		}
827 if (ieee80211_is_data_qos(fc)) {
828 if (mac->rdg_en) {
829 RT_TRACE(rtlpriv, COMP_SEND, DBG_TRACE,
830 "Enable RDG function.\n");
831 SET_TX_DESC_RDG_ENABLE(pdesc, 1);
832 SET_TX_DESC_HTC(pdesc, 1);
833 }
834 }
835 }
836
837 SET_TX_DESC_FIRST_SEG(pdesc, (firstseg ? 1 : 0));
838 SET_TX_DESC_LAST_SEG(pdesc, (lastseg ? 1 : 0));
839 SET_TX_DESC_TX_BUFFER_ADDRESS(pdesc, mapping);
840 if (rtlpriv->dm.useramask) {
841 SET_TX_DESC_RATE_ID(pdesc, ptcb_desc->ratr_index);
842 SET_TX_DESC_MACID(pdesc, ptcb_desc->mac_id);
843 } else {
844 SET_TX_DESC_RATE_ID(pdesc, 0xC + ptcb_desc->ratr_index);
845 SET_TX_DESC_MACID(pdesc, ptcb_desc->ratr_index);
846 }
847
848 SET_TX_DESC_MORE_FRAG(pdesc, (lastseg ? 0 : 1));
849 if (is_multicast_ether_addr(ieee80211_get_DA(hdr)) ||
850 is_broadcast_ether_addr(ieee80211_get_DA(hdr))) {
851 SET_TX_DESC_BMC(pdesc, 1);
852 }
853 RT_TRACE(rtlpriv, COMP_SEND, DBG_TRACE, "\n");
854}
855
856void rtl92ee_tx_fill_cmddesc(struct ieee80211_hw *hw,
857 u8 *pdesc, bool firstseg,
858 bool lastseg, struct sk_buff *skb)
859{
860 struct rtl_priv *rtlpriv = rtl_priv(hw);
861 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
862 u8 fw_queue = QSLT_BEACON;
863 dma_addr_t mapping = pci_map_single(rtlpci->pdev,
864 skb->data, skb->len,
865 PCI_DMA_TODEVICE);
866 u8 txdesc_len = 40;
867
868 if (pci_dma_mapping_error(rtlpci->pdev, mapping)) {
869 RT_TRACE(rtlpriv, COMP_SEND, DBG_TRACE,
			 "DMA mapping error\n");
871 return;
872 }
873 CLEAR_PCI_TX_DESC_CONTENT(pdesc, txdesc_len);
874
875 if (firstseg)
876 SET_TX_DESC_OFFSET(pdesc, txdesc_len);
877
878 SET_TX_DESC_TX_RATE(pdesc, DESC_RATE1M);
879
880 SET_TX_DESC_SEQ(pdesc, 0);
881
882 SET_TX_DESC_LINIP(pdesc, 0);
883
884 SET_TX_DESC_QUEUE_SEL(pdesc, fw_queue);
885
886 SET_TX_DESC_FIRST_SEG(pdesc, 1);
887 SET_TX_DESC_LAST_SEG(pdesc, 1);
888
889 SET_TX_DESC_TX_BUFFER_SIZE(pdesc, (u16)(skb->len));
890
891 SET_TX_DESC_TX_BUFFER_ADDRESS(pdesc, mapping);
892
893 SET_TX_DESC_RATE_ID(pdesc, 7);
894 SET_TX_DESC_MACID(pdesc, 0);
895
896 SET_TX_DESC_OWN(pdesc, 1);
897
898 SET_TX_DESC_PKT_SIZE((u8 *)pdesc, (u16)(skb->len));
899
900 SET_TX_DESC_FIRST_SEG(pdesc, 1);
901 SET_TX_DESC_LAST_SEG(pdesc, 1);
902
903 SET_TX_DESC_OFFSET(pdesc, 40);
904
905 SET_TX_DESC_USE_RATE(pdesc, 1);
906
907 RT_PRINT_DATA(rtlpriv, COMP_CMD, DBG_LOUD,
908 "H2C Tx Cmd Content\n", pdesc, txdesc_len);
909}
910
911void rtl92ee_set_desc(struct ieee80211_hw *hw, u8 *pdesc, bool istx,
912 u8 desc_name, u8 *val)
913{
914 struct rtl_priv *rtlpriv = rtl_priv(hw);
915 u16 cur_tx_rp = 0;
916 u16 cur_tx_wp = 0;
917 static u16 last_txw_point;
918 static bool over_run;
919 u32 tmp = 0;
920 u8 q_idx = *val;
921
922 if (istx) {
923 switch (desc_name) {
924 case HW_DESC_TX_NEXTDESC_ADDR:
925 SET_TX_DESC_NEXT_DESC_ADDRESS(pdesc, *(u32 *)val);
926 break;
927 case HW_DESC_OWN:{
928 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
929 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[q_idx];
930 u16 max_tx_desc = ring->entries;
931
932 if (q_idx == BEACON_QUEUE) {
933 ring->cur_tx_wp = 0;
934 ring->cur_tx_rp = 0;
935 SET_TX_BUFF_DESC_OWN(pdesc, 1);
936 return;
937 }
938
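			/* normal TX queues: advance the software write pointer
			 * and, if descriptors remain, publish it to hardware
			 */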
939 ring->cur_tx_wp = ((ring->cur_tx_wp + 1) % max_tx_desc);
940
941 if (over_run) {
942 ring->cur_tx_wp = 0;
943 over_run = false;
944 }
945 if (ring->avl_desc > 1) {
946 ring->avl_desc--;
947
948 rtl_write_word(rtlpriv,
949 get_desc_addr_fr_q_idx(q_idx),
950 ring->cur_tx_wp);
951
952 if (q_idx == 1)
953 last_txw_point = cur_tx_wp;
954 }
955
956 if (ring->avl_desc < (max_tx_desc - 15)) {
957 u16 point_diff = 0;
958
959 tmp =
960 rtl_read_dword(rtlpriv,
961 get_desc_addr_fr_q_idx(q_idx));
962 cur_tx_rp = (u16)((tmp >> 16) & 0x0fff);
963 cur_tx_wp = (u16)(tmp & 0x0fff);
964
965 ring->cur_tx_wp = cur_tx_wp;
966 ring->cur_tx_rp = cur_tx_rp;
967 point_diff = ((cur_tx_rp > cur_tx_wp) ?
968 (cur_tx_rp - cur_tx_wp) :
969 (TX_DESC_NUM_92E - 1 -
970 cur_tx_wp + cur_tx_rp));
971
972 ring->avl_desc = point_diff;
973 }
974 }
975 break;
976 }
977 } else {
978 switch (desc_name) {
979 case HW_DESC_RX_PREPARE:
980 SET_RX_BUFFER_DESC_LS(pdesc, 0);
981 SET_RX_BUFFER_DESC_FS(pdesc, 0);
982 SET_RX_BUFFER_DESC_TOTAL_LENGTH(pdesc, 0);
983
984 SET_RX_BUFFER_DESC_DATA_LENGTH(pdesc,
985 MAX_RECEIVE_BUFFER_SIZE +
986 RX_DESC_SIZE);
987
988 SET_RX_BUFFER_PHYSICAL_LOW(pdesc, *(u32 *)val);
989 break;
990 case HW_DESC_RXERO:
991 SET_RX_DESC_EOR(pdesc, 1);
992 break;
993 default:
994 RT_ASSERT(false,
995 "ERR rxdesc :%d not process\n", desc_name);
996 break;
997 }
998 }
999}
1000
1001u32 rtl92ee_get_desc(u8 *pdesc, bool istx, u8 desc_name)
1002{
1003 u32 ret = 0;
1004
1005 if (istx) {
1006 switch (desc_name) {
1007 case HW_DESC_OWN:
1008 ret = GET_TX_DESC_OWN(pdesc);
1009 break;
1010 case HW_DESC_TXBUFF_ADDR:
1011 ret = GET_TXBUFFER_DESC_ADDR_LOW(pdesc, 1);
1012 break;
1013 default:
1014 RT_ASSERT(false,
1015 "ERR txdesc :%d not process\n", desc_name);
1016 break;
1017 }
1018 } else {
1019 switch (desc_name) {
1020 case HW_DESC_OWN:
1021 ret = GET_RX_DESC_OWN(pdesc);
1022 break;
1023 case HW_DESC_RXPKT_LEN:
1024 ret = GET_RX_DESC_PKT_LEN(pdesc);
1025 break;
1026 case HW_DESC_RXBUFF_ADDR:
1027 ret = GET_RX_DESC_BUFF_ADDR(pdesc);
1028 break;
1029 default:
1030 RT_ASSERT(false,
1031 "ERR rxdesc :%d not process\n", desc_name);
1032 break;
1033 }
1034 }
1035 return ret;
1036}
1037
1038bool rtl92ee_is_tx_desc_closed(struct ieee80211_hw *hw, u8 hw_queue, u16 index)
1039{
1040 struct rtl_pci *rtlpci = rtl_pcidev(rtl_pcipriv(hw));
1041 struct rtl_priv *rtlpriv = rtl_priv(hw);
1042 u16 read_point, write_point, available_desc_num;
1043 bool ret = false;
1044 static u8 stop_report_cnt;
1045 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];
1046
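	/* refresh the ring's read/write pointers and the count of available
	 * descriptors from the hardware TXBD index register
	 */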
1047 {
1048 u16 point_diff = 0;
1049 u16 cur_tx_rp, cur_tx_wp;
1050 u32 tmpu32 = 0;
1051
1052 tmpu32 =
1053 rtl_read_dword(rtlpriv,
1054 get_desc_addr_fr_q_idx(hw_queue));
1055 cur_tx_rp = (u16)((tmpu32 >> 16) & 0x0fff);
1056 cur_tx_wp = (u16)(tmpu32 & 0x0fff);
1057
1058 ring->cur_tx_wp = cur_tx_wp;
1059 ring->cur_tx_rp = cur_tx_rp;
1060 point_diff = ((cur_tx_rp > cur_tx_wp) ?
1061 (cur_tx_rp - cur_tx_wp) :
1062 (TX_DESC_NUM_92E - cur_tx_wp + cur_tx_rp));
1063
1064 ring->avl_desc = point_diff;
1065 }
1066
1067 read_point = ring->cur_tx_rp;
1068 write_point = ring->cur_tx_wp;
1069 available_desc_num = ring->avl_desc;
1070
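	/* a descriptor is closed (finished by hardware) when its index lies
	 * outside the window between the read and write pointers
	 */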
1071 if (write_point > read_point) {
1072 if (index < write_point && index >= read_point)
1073 ret = false;
1074 else
1075 ret = true;
1076 } else if (write_point < read_point) {
1077 if (index > write_point && index < read_point)
1078 ret = true;
1079 else
1080 ret = false;
1081 } else {
1082 if (index != read_point)
1083 ret = true;
1084 }
1085
1086 if (hw_queue == BEACON_QUEUE)
1087 ret = true;
1088
1089 if (rtlpriv->rtlhal.driver_is_goingto_unload ||
1090 rtlpriv->psc.rfoff_reason > RF_CHANGE_BY_PS)
1091 ret = true;
1092
1093 if (hw_queue < BEACON_QUEUE) {
1094 if (!ret)
1095 stop_report_cnt++;
1096 else
1097 stop_report_cnt = 0;
1098 }
1099
1100 return ret;
1101}
1102
1103void rtl92ee_tx_polling(struct ieee80211_hw *hw, u8 hw_queue)
1104{
1105}
1106
1107u32 rtl92ee_rx_command_packet(struct ieee80211_hw *hw,
1108 const struct rtl_stats *status,
1109 struct sk_buff *skb)
1110{
1111 u32 result = 0;
1112 struct rtl_priv *rtlpriv = rtl_priv(hw);
1113
1114 switch (status->packet_report_type) {
1115 case NORMAL_RX:
1116 result = 0;
1117 break;
1118 case C2H_PACKET:
1119 rtl92ee_c2h_packet_handler(hw, skb->data, (u8)skb->len);
1120 result = 1;
1121 break;
1122 default:
1123 RT_TRACE(rtlpriv, COMP_RECV, DBG_TRACE,
1124 "Unknown packet type %d\n", status->packet_report_type);
1125 break;
1126 }
1127
1128 return result;
1129}
1130