/*
 * hcd.c - DesignWare HS OTG Controller host-mode routines
 *
 * This file contains the core HCD code and implements the host side of the
 * Linux hc_driver API. (Original file header/license block not recoverable
 * from this copy.)
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/spinlock.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/delay.h>
#include <linux/io.h>
#include <linux/slab.h>
#include <linux/usb.h>

#include <linux/usb/hcd.h>
#include <linux/usb/ch11.h>

#include "core.h"
#include "hcd.h"

/*
 * dwc2_enable_common_interrupts() - Enables the interrupts that are common to
 * host and device mode, after clearing any pending OTG and core interrupts.
 * Everything else in GINTMSK is masked.
 *
 * @hsotg: Programming view of the DWC_otg controller
 */
static void dwc2_enable_common_interrupts(struct dwc2_hsotg *hsotg)
{
	u32 intmsk;

	/* Clear any pending OTG interrupts */
	dwc2_writel(hsotg, 0xffffffff, GOTGINT);

	/* Clear any pending core interrupts */
	dwc2_writel(hsotg, 0xffffffff, GINTSTS);

	/* Enable the interrupts in the GINTMSK */
	intmsk = GINTSTS_MODEMIS | GINTSTS_OTGINT;

	if (!hsotg->params.host_dma)
		intmsk |= GINTSTS_RXFLVL;
	if (!hsotg->params.external_id_pin_ctl)
		intmsk |= GINTSTS_CONIDSTSCHNG;

	intmsk |= GINTSTS_WKUPINT | GINTSTS_USBSUSP |
		  GINTSTS_SESSREQINT;

	if (dwc2_is_device_mode(hsotg) && hsotg->params.lpm)
		intmsk |= GINTSTS_LPMTRANRCVD;

	dwc2_writel(hsotg, intmsk, GINTMSK);
}

static int dwc2_gahbcfg_init(struct dwc2_hsotg *hsotg)
{
	u32 ahbcfg = dwc2_readl(hsotg, GAHBCFG);

	switch (hsotg->hw_params.arch) {
	case GHWCFG2_EXT_DMA_ARCH:
		dev_err(hsotg->dev, "External DMA Mode not supported\n");
		return -EINVAL;

	case GHWCFG2_INT_DMA_ARCH:
		dev_dbg(hsotg->dev, "Internal DMA Mode\n");
		if (hsotg->params.ahbcfg != -1) {
			/* Keep the control bits, take the rest from params */
			ahbcfg &= GAHBCFG_CTRL_MASK;
			ahbcfg |= hsotg->params.ahbcfg &
				  ~GAHBCFG_CTRL_MASK;
		}
		break;

	case GHWCFG2_SLAVE_ONLY_ARCH:
	default:
		dev_dbg(hsotg->dev, "Slave Only Mode\n");
		break;
	}

	if (hsotg->params.host_dma)
		ahbcfg |= GAHBCFG_DMA_EN;
	else
		hsotg->params.dma_desc_enable = false;

	dwc2_writel(hsotg, ahbcfg, GAHBCFG);

	return 0;
}

static void dwc2_gusbcfg_init(struct dwc2_hsotg *hsotg)
{
	u32 usbcfg;

	usbcfg = dwc2_readl(hsotg, GUSBCFG);
	usbcfg &= ~(GUSBCFG_HNPCAP | GUSBCFG_SRPCAP);

	/* Program HNP/SRP capability based on hardware and driver params */
	switch (hsotg->hw_params.op_mode) {
	case GHWCFG2_OP_MODE_HNP_SRP_CAPABLE:
		if (hsotg->params.otg_cap ==
		    DWC2_CAP_PARAM_HNP_SRP_CAPABLE)
			usbcfg |= GUSBCFG_HNPCAP;
		if (hsotg->params.otg_cap !=
		    DWC2_CAP_PARAM_NO_HNP_SRP_CAPABLE)
			usbcfg |= GUSBCFG_SRPCAP;
		break;

	case GHWCFG2_OP_MODE_SRP_ONLY_CAPABLE:
	case GHWCFG2_OP_MODE_SRP_CAPABLE_DEVICE:
	case GHWCFG2_OP_MODE_SRP_CAPABLE_HOST:
		if (hsotg->params.otg_cap !=
		    DWC2_CAP_PARAM_NO_HNP_SRP_CAPABLE)
			usbcfg |= GUSBCFG_SRPCAP;
		break;

	case GHWCFG2_OP_MODE_NO_HNP_SRP_CAPABLE:
	case GHWCFG2_OP_MODE_NO_SRP_CAPABLE_DEVICE:
	case GHWCFG2_OP_MODE_NO_SRP_CAPABLE_HOST:
	default:
		break;
	}

	dwc2_writel(hsotg, usbcfg, GUSBCFG);
}

static int dwc2_vbus_supply_init(struct dwc2_hsotg *hsotg)
{
	if (hsotg->vbus_supply)
		return regulator_enable(hsotg->vbus_supply);

	return 0;
}

static int dwc2_vbus_supply_exit(struct dwc2_hsotg *hsotg)
{
	if (hsotg->vbus_supply)
		return regulator_disable(hsotg->vbus_supply);

	return 0;
}

/**
 * dwc2_enable_host_interrupts() - Enables the Host mode interrupts
 *
 * @hsotg: Programming view of DWC_otg controller
 */
static void dwc2_enable_host_interrupts(struct dwc2_hsotg *hsotg)
{
	u32 intmsk;

	dev_dbg(hsotg->dev, "%s()\n", __func__);

	/* Disable all interrupts */
	dwc2_writel(hsotg, 0, GINTMSK);
	dwc2_writel(hsotg, 0, HAINTMSK);

	/* Enable the common interrupts */
	dwc2_enable_common_interrupts(hsotg);

	/* Enable host mode interrupts without disturbing common interrupts */
	intmsk = dwc2_readl(hsotg, GINTMSK);
	intmsk |= GINTSTS_DISCONNINT | GINTSTS_PRTINT | GINTSTS_HCHINT;
	dwc2_writel(hsotg, intmsk, GINTMSK);
}

/**
 * dwc2_disable_host_interrupts() - Disables the Host mode interrupts
 *
 * @hsotg: Programming view of DWC_otg controller
 */
static void dwc2_disable_host_interrupts(struct dwc2_hsotg *hsotg)
{
	u32 intmsk = dwc2_readl(hsotg, GINTMSK);

	/* Disable host mode interrupts without disturbing common interrupts */
	intmsk &= ~(GINTSTS_SOF | GINTSTS_PRTINT | GINTSTS_HCHINT |
		    GINTSTS_PTXFEMP | GINTSTS_NPTXFEMP | GINTSTS_DISCONNINT);
	dwc2_writel(hsotg, intmsk, GINTMSK);
}

/*
 * dwc2_calculate_dynamic_fifo() - Calculates fallback FIFO sizes for systems
 * whose total FIFO depth is smaller than the configured Rx + Tx FIFO sizes
 *
 * @hsotg: Programming view of DWC_otg controller
 */
static void dwc2_calculate_dynamic_fifo(struct dwc2_hsotg *hsotg)
{
	struct dwc2_core_params *params = &hsotg->params;
	struct dwc2_hw_params *hw = &hsotg->hw_params;
	u32 rxfsiz, nptxfsiz, ptxfsiz, total_fifo_size;

	total_fifo_size = hw->total_fifo_size;
	rxfsiz = params->host_rx_fifo_size;
	nptxfsiz = params->host_nperio_tx_fifo_size;
	ptxfsiz = params->host_perio_tx_fifo_size;

	/*
	 * If the requested sizes do not fit in the available FIFO space,
	 * fall back to conservative minimum sizes.
	 */
	if (total_fifo_size < (rxfsiz + nptxfsiz + ptxfsiz)) {
		/*
		 * Rx FIFO:
		 * 2 * ((largest packet size / 4) + 1 + 1) + n,
		 * with n = number of host channels; 2 * ((1024 / 4) + 2) = 516
		 */
		rxfsiz = 516 + hw->host_channels;

		/*
		 * Minimum non-periodic Tx FIFO depth:
		 * 2 * (largest non-periodic packet / 4) = 2 * (512 / 4) = 256
		 */
		nptxfsiz = 256;

		/*
		 * Minimum periodic Tx FIFO depth:
		 * (largest packet size * MC) / 4 = (1024 * 3) / 4 = 768
		 */
		ptxfsiz = 768;

		params->host_rx_fifo_size = rxfsiz;
		params->host_nperio_tx_fifo_size = nptxfsiz;
		params->host_perio_tx_fifo_size = ptxfsiz;
	}

	/*
	 * If even the fallback sizes exceed the total FIFO depth, there is
	 * nothing more that can be done here; just report the problem.
	 */
	if (unlikely(total_fifo_size < (rxfsiz + nptxfsiz + ptxfsiz)))
		dev_err(hsotg->dev, "invalid fifo sizes\n");
}

static void dwc2_config_fifos(struct dwc2_hsotg *hsotg)
{
	struct dwc2_core_params *params = &hsotg->params;
	u32 nptxfsiz, hptxfsiz, dfifocfg, grxfsiz;

	if (!params->enable_dynamic_fifo)
		return;

	dwc2_calculate_dynamic_fifo(hsotg);

	/* Rx FIFO */
	grxfsiz = dwc2_readl(hsotg, GRXFSIZ);
	dev_dbg(hsotg->dev, "initial grxfsiz=%08x\n", grxfsiz);
	grxfsiz &= ~GRXFSIZ_DEPTH_MASK;
	grxfsiz |= params->host_rx_fifo_size <<
		   GRXFSIZ_DEPTH_SHIFT & GRXFSIZ_DEPTH_MASK;
	dwc2_writel(hsotg, grxfsiz, GRXFSIZ);
	dev_dbg(hsotg->dev, "new grxfsiz=%08x\n",
		dwc2_readl(hsotg, GRXFSIZ));

	/* Non-periodic Tx FIFO */
	dev_dbg(hsotg->dev, "initial gnptxfsiz=%08x\n",
		dwc2_readl(hsotg, GNPTXFSIZ));
	nptxfsiz = params->host_nperio_tx_fifo_size <<
		   FIFOSIZE_DEPTH_SHIFT & FIFOSIZE_DEPTH_MASK;
	nptxfsiz |= params->host_rx_fifo_size <<
		    FIFOSIZE_STARTADDR_SHIFT & FIFOSIZE_STARTADDR_MASK;
	dwc2_writel(hsotg, nptxfsiz, GNPTXFSIZ);
	dev_dbg(hsotg->dev, "new gnptxfsiz=%08x\n",
		dwc2_readl(hsotg, GNPTXFSIZ));

	/* Periodic Tx FIFO */
	dev_dbg(hsotg->dev, "initial hptxfsiz=%08x\n",
		dwc2_readl(hsotg, HPTXFSIZ));
	hptxfsiz = params->host_perio_tx_fifo_size <<
		   FIFOSIZE_DEPTH_SHIFT & FIFOSIZE_DEPTH_MASK;
	hptxfsiz |= (params->host_rx_fifo_size +
		     params->host_nperio_tx_fifo_size) <<
		    FIFOSIZE_STARTADDR_SHIFT & FIFOSIZE_STARTADDR_MASK;
	dwc2_writel(hsotg, hptxfsiz, HPTXFSIZ);
	dev_dbg(hsotg->dev, "new hptxfsiz=%08x\n",
		dwc2_readl(hsotg, HPTXFSIZ));

	if (hsotg->params.en_multiple_tx_fifo &&
	    hsotg->hw_params.snpsid >= DWC2_CORE_REV_2_91a) {
		/*
		 * Global DFIFOCFG calculation for Host mode - include
		 * RxFIFO, NPTXFIFO and HPTXFIFO
		 */
		dfifocfg = dwc2_readl(hsotg, GDFIFOCFG);
		dfifocfg &= ~GDFIFOCFG_EPINFOBASE_MASK;
		dfifocfg |= (params->host_rx_fifo_size +
			     params->host_nperio_tx_fifo_size +
			     params->host_perio_tx_fifo_size) <<
			    GDFIFOCFG_EPINFOBASE_SHIFT &
			    GDFIFOCFG_EPINFOBASE_MASK;
		dwc2_writel(hsotg, dfifocfg, GDFIFOCFG);
	}
}

/*
 * dwc2_calc_frame_interval() - Calculates the frame interval value for the
 * HFIR register according to the PHY type and the current port speed
 *
 * @hsotg: Programming view of DWC_otg controller
 *
 * NOTE: The caller can modify the value of the HFIR register only after the
 * Port Enable bit of the Host Port Control and Status register (HPRT.PrtEnaPort)
 * has been set.
 */
u32 dwc2_calc_frame_interval(struct dwc2_hsotg *hsotg)
{
	u32 usbcfg;
	u32 hprt0;
	int clock = 60;	/* default value */

	usbcfg = dwc2_readl(hsotg, GUSBCFG);
	hprt0 = dwc2_readl(hsotg, HPRT0);

	if (!(usbcfg & GUSBCFG_PHYSEL) && (usbcfg & GUSBCFG_ULPI_UTMI_SEL) &&
	    !(usbcfg & GUSBCFG_PHYIF16))
		clock = 60;
	if ((usbcfg & GUSBCFG_PHYSEL) && hsotg->hw_params.fs_phy_type ==
	    GHWCFG2_FS_PHY_TYPE_SHARED_ULPI)
		clock = 48;
	if (!(usbcfg & GUSBCFG_PHY_LP_CLK_SEL) && !(usbcfg & GUSBCFG_PHYSEL) &&
	    !(usbcfg & GUSBCFG_ULPI_UTMI_SEL) && (usbcfg & GUSBCFG_PHYIF16))
		clock = 30;
	if (!(usbcfg & GUSBCFG_PHY_LP_CLK_SEL) && !(usbcfg & GUSBCFG_PHYSEL) &&
	    !(usbcfg & GUSBCFG_ULPI_UTMI_SEL) && !(usbcfg & GUSBCFG_PHYIF16))
		clock = 60;
	if ((usbcfg & GUSBCFG_PHY_LP_CLK_SEL) && !(usbcfg & GUSBCFG_PHYSEL) &&
	    !(usbcfg & GUSBCFG_ULPI_UTMI_SEL) && (usbcfg & GUSBCFG_PHYIF16))
		clock = 48;
	if ((usbcfg & GUSBCFG_PHYSEL) && !(usbcfg & GUSBCFG_PHYIF16) &&
	    hsotg->hw_params.fs_phy_type == GHWCFG2_FS_PHY_TYPE_SHARED_UTMI)
		clock = 48;
	if ((usbcfg & GUSBCFG_PHYSEL) &&
	    hsotg->hw_params.fs_phy_type == GHWCFG2_FS_PHY_TYPE_DEDICATED)
		clock = 48;

	if ((hprt0 & HPRT0_SPD_MASK) >> HPRT0_SPD_SHIFT == HPRT0_SPD_HIGH_SPEED)
		/* High speed case: 125 us per (micro)frame */
		return 125 * clock - 1;

	/* FS/LS case: 1 ms per frame */
	return 1000 * clock - 1;
}

/**
 * dwc2_read_packet() - Reads a packet from the Rx FIFO into the destination
 * buffer
 *
 * @hsotg: Programming view of DWC_otg controller
 * @dest:  Destination buffer for the packet
 * @bytes: Number of bytes to copy to the destination
 */
void dwc2_read_packet(struct dwc2_hsotg *hsotg, u8 *dest, u16 bytes)
{
	u32 *data_buf = (u32 *)dest;
	int word_count = (bytes + 3) / 4;
	int i;

	/*
	 * This does not handle the case when @dest is not dword aligned;
	 * that would require reading into a u32 temporary buffer first and
	 * then copying into the destination.
	 */
	dev_vdbg(hsotg->dev, "%s(%p,%p,%d)\n", __func__, hsotg, dest, bytes);

	for (i = 0; i < word_count; i++, data_buf++)
		*data_buf = dwc2_readl(hsotg, HCFIFO(0));
}

/*
 * dwc2_dump_channel_info() - Prints the state of a host channel and the
 * non-periodic schedule lists (compiled in only when VERBOSE_DEBUG is set)
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Pointer to the host channel to dump
 *
 * Must be called with interrupt disabled and spinlock held
 */
435static void dwc2_dump_channel_info(struct dwc2_hsotg *hsotg,
436 struct dwc2_host_chan *chan)
437{
438#ifdef VERBOSE_DEBUG
439 int num_channels = hsotg->params.host_channels;
440 struct dwc2_qh *qh;
441 u32 hcchar;
442 u32 hcsplt;
443 u32 hctsiz;
444 u32 hc_dma;
445 int i;
446
447 if (!chan)
448 return;
449
450 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
451 hcsplt = dwc2_readl(hsotg, HCSPLT(chan->hc_num));
452 hctsiz = dwc2_readl(hsotg, HCTSIZ(chan->hc_num));
453 hc_dma = dwc2_readl(hsotg, HCDMA(chan->hc_num));
454
455 dev_dbg(hsotg->dev, " Assigned to channel %p:\n", chan);
456 dev_dbg(hsotg->dev, " hcchar 0x%08x, hcsplt 0x%08x\n",
457 hcchar, hcsplt);
458 dev_dbg(hsotg->dev, " hctsiz 0x%08x, hc_dma 0x%08x\n",
459 hctsiz, hc_dma);
460 dev_dbg(hsotg->dev, " dev_addr: %d, ep_num: %d, ep_is_in: %d\n",
461 chan->dev_addr, chan->ep_num, chan->ep_is_in);
462 dev_dbg(hsotg->dev, " ep_type: %d\n", chan->ep_type);
463 dev_dbg(hsotg->dev, " max_packet: %d\n", chan->max_packet);
464 dev_dbg(hsotg->dev, " data_pid_start: %d\n", chan->data_pid_start);
465 dev_dbg(hsotg->dev, " xfer_started: %d\n", chan->xfer_started);
466 dev_dbg(hsotg->dev, " halt_status: %d\n", chan->halt_status);
467 dev_dbg(hsotg->dev, " xfer_buf: %p\n", chan->xfer_buf);
468 dev_dbg(hsotg->dev, " xfer_dma: %08lx\n",
469 (unsigned long)chan->xfer_dma);
470 dev_dbg(hsotg->dev, " xfer_len: %d\n", chan->xfer_len);
471 dev_dbg(hsotg->dev, " qh: %p\n", chan->qh);
472 dev_dbg(hsotg->dev, " NP inactive sched:\n");
473 list_for_each_entry(qh, &hsotg->non_periodic_sched_inactive,
474 qh_list_entry)
475 dev_dbg(hsotg->dev, " %p\n", qh);
476 dev_dbg(hsotg->dev, " NP waiting sched:\n");
477 list_for_each_entry(qh, &hsotg->non_periodic_sched_waiting,
478 qh_list_entry)
479 dev_dbg(hsotg->dev, " %p\n", qh);
480 dev_dbg(hsotg->dev, " NP active sched:\n");
481 list_for_each_entry(qh, &hsotg->non_periodic_sched_active,
482 qh_list_entry)
483 dev_dbg(hsotg->dev, " %p\n", qh);
484 dev_dbg(hsotg->dev, " Channels:\n");
485 for (i = 0; i < num_channels; i++) {
486 struct dwc2_host_chan *chan = hsotg->hc_ptr_array[i];
487
488 dev_dbg(hsotg->dev, " %2d: %p\n", i, chan);
489 }
490#endif
491}
492
493static int _dwc2_hcd_start(struct usb_hcd *hcd);
494
495static void dwc2_host_start(struct dwc2_hsotg *hsotg)
496{
497 struct usb_hcd *hcd = dwc2_hsotg_to_hcd(hsotg);
498
499 hcd->self.is_b_host = dwc2_hcd_is_b_host(hsotg);
500 _dwc2_hcd_start(hcd);
501}
502
503static void dwc2_host_disconnect(struct dwc2_hsotg *hsotg)
504{
505 struct usb_hcd *hcd = dwc2_hsotg_to_hcd(hsotg);
506
507 hcd->self.is_b_host = 0;
508}
509
510static void dwc2_host_hub_info(struct dwc2_hsotg *hsotg, void *context,
511 int *hub_addr, int *hub_port)
512{
513 struct urb *urb = context;
514
515 if (urb->dev->tt)
516 *hub_addr = urb->dev->tt->hub->devnum;
517 else
518 *hub_addr = 0;
519 *hub_port = urb->dev->ttport;
520}

/*
 * dwc2_hc_enable_slave_ints() - Enables the host channel interrupts used in
 * Slave (non-DMA) mode, based on the endpoint type and transfer direction
 */
528static void dwc2_hc_enable_slave_ints(struct dwc2_hsotg *hsotg,
529 struct dwc2_host_chan *chan)
530{
531 u32 hcintmsk = HCINTMSK_CHHLTD;
532
533 switch (chan->ep_type) {
534 case USB_ENDPOINT_XFER_CONTROL:
535 case USB_ENDPOINT_XFER_BULK:
536 dev_vdbg(hsotg->dev, "control/bulk\n");
537 hcintmsk |= HCINTMSK_XFERCOMPL;
538 hcintmsk |= HCINTMSK_STALL;
539 hcintmsk |= HCINTMSK_XACTERR;
540 hcintmsk |= HCINTMSK_DATATGLERR;
541 if (chan->ep_is_in) {
542 hcintmsk |= HCINTMSK_BBLERR;
543 } else {
544 hcintmsk |= HCINTMSK_NAK;
545 hcintmsk |= HCINTMSK_NYET;
546 if (chan->do_ping)
547 hcintmsk |= HCINTMSK_ACK;
548 }
549
550 if (chan->do_split) {
551 hcintmsk |= HCINTMSK_NAK;
552 if (chan->complete_split)
553 hcintmsk |= HCINTMSK_NYET;
554 else
555 hcintmsk |= HCINTMSK_ACK;
556 }
557
558 if (chan->error_state)
559 hcintmsk |= HCINTMSK_ACK;
560 break;
561
562 case USB_ENDPOINT_XFER_INT:
563 if (dbg_perio())
564 dev_vdbg(hsotg->dev, "intr\n");
565 hcintmsk |= HCINTMSK_XFERCOMPL;
566 hcintmsk |= HCINTMSK_NAK;
567 hcintmsk |= HCINTMSK_STALL;
568 hcintmsk |= HCINTMSK_XACTERR;
569 hcintmsk |= HCINTMSK_DATATGLERR;
570 hcintmsk |= HCINTMSK_FRMOVRUN;
571
572 if (chan->ep_is_in)
573 hcintmsk |= HCINTMSK_BBLERR;
574 if (chan->error_state)
575 hcintmsk |= HCINTMSK_ACK;
576 if (chan->do_split) {
577 if (chan->complete_split)
578 hcintmsk |= HCINTMSK_NYET;
579 else
580 hcintmsk |= HCINTMSK_ACK;
581 }
582 break;
583
584 case USB_ENDPOINT_XFER_ISOC:
585 if (dbg_perio())
586 dev_vdbg(hsotg->dev, "isoc\n");
587 hcintmsk |= HCINTMSK_XFERCOMPL;
588 hcintmsk |= HCINTMSK_FRMOVRUN;
589 hcintmsk |= HCINTMSK_ACK;
590
591 if (chan->ep_is_in) {
592 hcintmsk |= HCINTMSK_XACTERR;
593 hcintmsk |= HCINTMSK_BBLERR;
594 }
595 break;
596 default:
597 dev_err(hsotg->dev, "## Unknown EP type ##\n");
598 break;
599 }
600
601 dwc2_writel(hsotg, hcintmsk, HCINTMSK(chan->hc_num));
602 if (dbg_hc(chan))
603 dev_vdbg(hsotg->dev, "set HCINTMSK to %08x\n", hcintmsk);
604}
605
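/*
 * Enables the host channel interrupts used when the controller operates in
 * DMA mode; only a minimal set is needed because the core handles most error
 * and completion conditions on its own.
 */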
606static void dwc2_hc_enable_dma_ints(struct dwc2_hsotg *hsotg,
607 struct dwc2_host_chan *chan)
608{
609 u32 hcintmsk = HCINTMSK_CHHLTD;
610
611
612
613
614
615 if (!hsotg->params.dma_desc_enable) {
616 if (dbg_hc(chan))
617 dev_vdbg(hsotg->dev, "desc DMA disabled\n");
618 hcintmsk |= HCINTMSK_AHBERR;
619 } else {
620 if (dbg_hc(chan))
621 dev_vdbg(hsotg->dev, "desc DMA enabled\n");
622 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC)
623 hcintmsk |= HCINTMSK_XFERCOMPL;
624 }
625
626 if (chan->error_state && !chan->do_split &&
627 chan->ep_type != USB_ENDPOINT_XFER_ISOC) {
628 if (dbg_hc(chan))
629 dev_vdbg(hsotg->dev, "setting ACK\n");
630 hcintmsk |= HCINTMSK_ACK;
631 if (chan->ep_is_in) {
632 hcintmsk |= HCINTMSK_DATATGLERR;
633 if (chan->ep_type != USB_ENDPOINT_XFER_INT)
634 hcintmsk |= HCINTMSK_NAK;
635 }
636 }
637
638 dwc2_writel(hsotg, hcintmsk, HCINTMSK(chan->hc_num));
639 if (dbg_hc(chan))
640 dev_vdbg(hsotg->dev, "set HCINTMSK to %08x\n", hcintmsk);
641}
642
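/*
 * Enables the per-channel interrupts (Slave or DMA variant as appropriate),
 * then unmasks this channel in HAINTMSK and the host channel interrupt in
 * GINTMSK.
 */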
643static void dwc2_hc_enable_ints(struct dwc2_hsotg *hsotg,
644 struct dwc2_host_chan *chan)
645{
646 u32 intmsk;
647
648 if (hsotg->params.host_dma) {
649 if (dbg_hc(chan))
650 dev_vdbg(hsotg->dev, "DMA enabled\n");
651 dwc2_hc_enable_dma_ints(hsotg, chan);
652 } else {
653 if (dbg_hc(chan))
654 dev_vdbg(hsotg->dev, "DMA disabled\n");
655 dwc2_hc_enable_slave_ints(hsotg, chan);
656 }
657
658
659 intmsk = dwc2_readl(hsotg, HAINTMSK);
660 intmsk |= 1 << chan->hc_num;
661 dwc2_writel(hsotg, intmsk, HAINTMSK);
662 if (dbg_hc(chan))
663 dev_vdbg(hsotg->dev, "set HAINTMSK to %08x\n", intmsk);
664
665
666 intmsk = dwc2_readl(hsotg, GINTMSK);
667 intmsk |= GINTSTS_HCHINT;
668 dwc2_writel(hsotg, intmsk, GINTMSK);
669 if (dbg_hc(chan))
670 dev_vdbg(hsotg->dev, "set GINTMSK to %08x\n", intmsk);
671}

/*
 * dwc2_hc_init() - Prepares a host channel for transferring packets to/from
 * a specific endpoint
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information needed to initialize the host channel
 *
 * The HCCHARn register is set up with the characteristics specified in @chan,
 * and the host channel interrupts that may need to be serviced while this
 * transfer is in progress are enabled.
 */
684static void dwc2_hc_init(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan)
685{
686 u8 hc_num = chan->hc_num;
687 u32 hcintmsk;
688 u32 hcchar;
689 u32 hcsplt = 0;
690
691 if (dbg_hc(chan))
692 dev_vdbg(hsotg->dev, "%s()\n", __func__);
693
694
695 hcintmsk = 0xffffffff;
696 hcintmsk &= ~HCINTMSK_RESERVED14_31;
697 dwc2_writel(hsotg, hcintmsk, HCINT(hc_num));
698
699
700 dwc2_hc_enable_ints(hsotg, chan);
701
702
703
704
705
706 hcchar = chan->dev_addr << HCCHAR_DEVADDR_SHIFT & HCCHAR_DEVADDR_MASK;
707 hcchar |= chan->ep_num << HCCHAR_EPNUM_SHIFT & HCCHAR_EPNUM_MASK;
708 if (chan->ep_is_in)
709 hcchar |= HCCHAR_EPDIR;
710 if (chan->speed == USB_SPEED_LOW)
711 hcchar |= HCCHAR_LSPDDEV;
712 hcchar |= chan->ep_type << HCCHAR_EPTYPE_SHIFT & HCCHAR_EPTYPE_MASK;
713 hcchar |= chan->max_packet << HCCHAR_MPS_SHIFT & HCCHAR_MPS_MASK;
714 dwc2_writel(hsotg, hcchar, HCCHAR(hc_num));
715 if (dbg_hc(chan)) {
716 dev_vdbg(hsotg->dev, "set HCCHAR(%d) to %08x\n",
717 hc_num, hcchar);
718
719 dev_vdbg(hsotg->dev, "%s: Channel %d\n",
720 __func__, hc_num);
721 dev_vdbg(hsotg->dev, " Dev Addr: %d\n",
722 chan->dev_addr);
723 dev_vdbg(hsotg->dev, " Ep Num: %d\n",
724 chan->ep_num);
725 dev_vdbg(hsotg->dev, " Is In: %d\n",
726 chan->ep_is_in);
727 dev_vdbg(hsotg->dev, " Is Low Speed: %d\n",
728 chan->speed == USB_SPEED_LOW);
729 dev_vdbg(hsotg->dev, " Ep Type: %d\n",
730 chan->ep_type);
731 dev_vdbg(hsotg->dev, " Max Pkt: %d\n",
732 chan->max_packet);
733 }
734
735
736 if (chan->do_split) {
737 if (dbg_hc(chan))
738 dev_vdbg(hsotg->dev,
739 "Programming HC %d with split --> %s\n",
740 hc_num,
741 chan->complete_split ? "CSPLIT" : "SSPLIT");
742 if (chan->complete_split)
743 hcsplt |= HCSPLT_COMPSPLT;
744 hcsplt |= chan->xact_pos << HCSPLT_XACTPOS_SHIFT &
745 HCSPLT_XACTPOS_MASK;
746 hcsplt |= chan->hub_addr << HCSPLT_HUBADDR_SHIFT &
747 HCSPLT_HUBADDR_MASK;
748 hcsplt |= chan->hub_port << HCSPLT_PRTADDR_SHIFT &
749 HCSPLT_PRTADDR_MASK;
750 if (dbg_hc(chan)) {
751 dev_vdbg(hsotg->dev, " comp split %d\n",
752 chan->complete_split);
753 dev_vdbg(hsotg->dev, " xact pos %d\n",
754 chan->xact_pos);
755 dev_vdbg(hsotg->dev, " hub addr %d\n",
756 chan->hub_addr);
757 dev_vdbg(hsotg->dev, " hub port %d\n",
758 chan->hub_port);
759 dev_vdbg(hsotg->dev, " is_in %d\n",
760 chan->ep_is_in);
761 dev_vdbg(hsotg->dev, " Max Pkt %d\n",
762 chan->max_packet);
763 dev_vdbg(hsotg->dev, " xferlen %d\n",
764 chan->xfer_len);
765 }
766 }
767
768 dwc2_writel(hsotg, hcsplt, HCSPLT(hc_num));
769}

/*
 * dwc2_hc_halt() - Attempts to halt a host channel
 *
 * @hsotg:       Controller register interface
 * @chan:        Host channel to halt
 * @halt_status: Reason for halting the channel
 *
 * This function should only be called in Slave mode or to abort a transfer
 * in either Slave mode or DMA mode. Under normal circumstances in DMA mode,
 * the controller halts the channel when the transfer is complete or when a
 * condition occurs that requires application intervention.
 *
 * In Slave mode, it checks for a free request queue entry, then sets the
 * Channel Enable and Channel Disable bits of HCCHARn to initiate the halt.
 * If there is no free request queue entry, only the Channel Disable bit is
 * set to flush requests for this channel, and a flag is set so the channel
 * is halted later when a request queue slot opens.
 *
 * In DMA mode, both the Channel Enable and Channel Disable bits are always
 * set; the controller ensures there is space in the request queue before
 * submitting the halt request.
 *
 * Some time may elapse before the core flushes any posted requests for this
 * host channel and halts. The Channel Halted interrupt handler completes the
 * deactivation of the host channel.
 */
799void dwc2_hc_halt(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan,
800 enum dwc2_halt_status halt_status)
801{
802 u32 nptxsts, hptxsts, hcchar;
803
804 if (dbg_hc(chan))
805 dev_vdbg(hsotg->dev, "%s()\n", __func__);
806
807
808
809
810
811
812
813 if ((hsotg->params.g_dma && !hsotg->params.g_dma_desc) ||
814 hsotg->hw_params.arch == GHWCFG2_EXT_DMA_ARCH) {
815 if (!chan->do_split &&
816 (chan->ep_type == USB_ENDPOINT_XFER_ISOC ||
817 chan->ep_type == USB_ENDPOINT_XFER_INT)) {
818 dev_err(hsotg->dev, "%s() Channel can't be halted\n",
819 __func__);
820 return;
821 }
822 }
823
824 if (halt_status == DWC2_HC_XFER_NO_HALT_STATUS)
825 dev_err(hsotg->dev, "!!! halt_status = %d !!!\n", halt_status);
826
827 if (halt_status == DWC2_HC_XFER_URB_DEQUEUE ||
828 halt_status == DWC2_HC_XFER_AHB_ERR) {
829
830
831
832
833
834
835 u32 hcintmsk = HCINTMSK_CHHLTD;
836
837 dev_vdbg(hsotg->dev, "dequeue/error\n");
838 dwc2_writel(hsotg, hcintmsk, HCINTMSK(chan->hc_num));
839
840
841
842
843
844
845 dwc2_writel(hsotg, ~hcintmsk, HCINT(chan->hc_num));
846
847
848
849
850
851
852 chan->halt_status = halt_status;
853
854 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
855 if (!(hcchar & HCCHAR_CHENA)) {
856
857
858
859
860
861
862
863
864
865
866
867 return;
868 }
869 }
870 if (chan->halt_pending) {
871
872
873
874
875
876 dev_vdbg(hsotg->dev,
877 "*** %s: Channel %d, chan->halt_pending already set ***\n",
878 __func__, chan->hc_num);
879 return;
880 }
881
882 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
883
884
885
886 if (!hsotg->params.dma_desc_enable) {
887 if (dbg_hc(chan))
888 dev_vdbg(hsotg->dev, "desc DMA disabled\n");
889 hcchar |= HCCHAR_CHENA;
890 } else {
891 if (dbg_hc(chan))
892 dev_dbg(hsotg->dev, "desc DMA enabled\n");
893 }
894 hcchar |= HCCHAR_CHDIS;
895
896 if (!hsotg->params.host_dma) {
897 if (dbg_hc(chan))
898 dev_vdbg(hsotg->dev, "DMA not enabled\n");
899 hcchar |= HCCHAR_CHENA;
900
901
902 if (chan->ep_type == USB_ENDPOINT_XFER_CONTROL ||
903 chan->ep_type == USB_ENDPOINT_XFER_BULK) {
904 dev_vdbg(hsotg->dev, "control/bulk\n");
905 nptxsts = dwc2_readl(hsotg, GNPTXSTS);
906 if ((nptxsts & TXSTS_QSPCAVAIL_MASK) == 0) {
907 dev_vdbg(hsotg->dev, "Disabling channel\n");
908 hcchar &= ~HCCHAR_CHENA;
909 }
910 } else {
911 if (dbg_perio())
912 dev_vdbg(hsotg->dev, "isoc/intr\n");
913 hptxsts = dwc2_readl(hsotg, HPTXSTS);
914 if ((hptxsts & TXSTS_QSPCAVAIL_MASK) == 0 ||
915 hsotg->queuing_high_bandwidth) {
916 if (dbg_perio())
917 dev_vdbg(hsotg->dev, "Disabling channel\n");
918 hcchar &= ~HCCHAR_CHENA;
919 }
920 }
921 } else {
922 if (dbg_hc(chan))
923 dev_vdbg(hsotg->dev, "DMA enabled\n");
924 }
925
926 dwc2_writel(hsotg, hcchar, HCCHAR(chan->hc_num));
927 chan->halt_status = halt_status;
928
929 if (hcchar & HCCHAR_CHENA) {
930 if (dbg_hc(chan))
931 dev_vdbg(hsotg->dev, "Channel enabled\n");
932 chan->halt_pending = 1;
933 chan->halt_on_queue = 0;
934 } else {
935 if (dbg_hc(chan))
936 dev_vdbg(hsotg->dev, "Channel disabled\n");
937 chan->halt_on_queue = 1;
938 }
939
940 if (dbg_hc(chan)) {
941 dev_vdbg(hsotg->dev, "%s: Channel %d\n", __func__,
942 chan->hc_num);
943 dev_vdbg(hsotg->dev, " hcchar: 0x%08x\n",
944 hcchar);
945 dev_vdbg(hsotg->dev, " halt_pending: %d\n",
946 chan->halt_pending);
947 dev_vdbg(hsotg->dev, " halt_on_queue: %d\n",
948 chan->halt_on_queue);
949 dev_vdbg(hsotg->dev, " halt_status: %d\n",
950 chan->halt_status);
951 }
952}

/**
 * dwc2_hc_cleanup() - Clears the transfer state for a host channel
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Identifies the host channel to clean up
 *
 * This function is normally called after a transfer is done and the host
 * channel is being released.
 */
963void dwc2_hc_cleanup(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan)
964{
965 u32 hcintmsk;
966
967 chan->xfer_started = 0;
968
969 list_del_init(&chan->split_order_list_entry);
970
971
972
973
974
975 dwc2_writel(hsotg, 0, HCINTMSK(chan->hc_num));
976 hcintmsk = 0xffffffff;
977 hcintmsk &= ~HCINTMSK_RESERVED14_31;
978 dwc2_writel(hsotg, hcintmsk, HCINT(chan->hc_num));
979}

/*
 * Sets the odd/even (micro)frame bit in HCCHAR for periodic (interrupt or
 * isochronous) transfers so that the transfer goes out in the intended
 * frame, compensating when the scheduled frame has already been missed.
 * Has no effect on non-periodic transfers.
 */
991static void dwc2_hc_set_even_odd_frame(struct dwc2_hsotg *hsotg,
992 struct dwc2_host_chan *chan, u32 *hcchar)
993{
994 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
995 chan->ep_type == USB_ENDPOINT_XFER_ISOC) {
996 int host_speed;
997 int xfer_ns;
998 int xfer_us;
999 int bytes_in_fifo;
1000 u16 fifo_space;
1001 u16 frame_number;
1002 u16 wire_frame;
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028 host_speed = (chan->speed != USB_SPEED_HIGH &&
1029 !chan->do_split) ? chan->speed : USB_SPEED_HIGH;
1030
1031
1032 fifo_space = (dwc2_readl(hsotg, HPTXSTS) &
1033 TXSTS_FSPCAVAIL_MASK) >> TXSTS_FSPCAVAIL_SHIFT;
1034 bytes_in_fifo = sizeof(u32) *
1035 (hsotg->params.host_perio_tx_fifo_size -
1036 fifo_space);
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046 xfer_ns = usb_calc_bus_time(host_speed, false, false,
1047 chan->xfer_len + bytes_in_fifo);
1048 xfer_us = NS_TO_US(xfer_ns);
1049
1050
1051 frame_number = dwc2_hcd_get_future_frame_number(hsotg, xfer_us);
1052
1053
1054 wire_frame = dwc2_frame_num_inc(chan->qh->next_active_frame, 1);
1055
1056
1057
1058
1059
1060
1061
1062
1063 if (dwc2_frame_num_gt(frame_number, wire_frame)) {
1064 dwc2_sch_vdbg(hsotg,
1065 "QH=%p EO MISS fr=%04x=>%04x (%+d)\n",
1066 chan->qh, wire_frame, frame_number,
1067 dwc2_frame_num_dec(frame_number,
1068 wire_frame));
1069 wire_frame = frame_number;
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079 chan->qh->next_active_frame =
1080 dwc2_frame_num_dec(frame_number, 1);
1081 }
1082
1083 if (wire_frame & 1)
1084 *hcchar |= HCCHAR_ODDFRM;
1085 else
1086 *hcchar &= ~HCCHAR_ODDFRM;
1087 }
1088}
1089
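/*
 * Selects the initial data PID for isochronous transfers: high-speed,
 * high-bandwidth endpoints use DATA0/DATA1/DATA2 or MDATA depending on the
 * transaction count and direction; everything else starts with DATA0.
 */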
1090static void dwc2_set_pid_isoc(struct dwc2_host_chan *chan)
1091{
1092
1093 if (chan->speed == USB_SPEED_HIGH) {
1094 if (chan->ep_is_in) {
1095 if (chan->multi_count == 1)
1096 chan->data_pid_start = DWC2_HC_PID_DATA0;
1097 else if (chan->multi_count == 2)
1098 chan->data_pid_start = DWC2_HC_PID_DATA1;
1099 else
1100 chan->data_pid_start = DWC2_HC_PID_DATA2;
1101 } else {
1102 if (chan->multi_count == 1)
1103 chan->data_pid_start = DWC2_HC_PID_DATA0;
1104 else
1105 chan->data_pid_start = DWC2_HC_PID_MDATA;
1106 }
1107 } else {
1108 chan->data_pid_start = DWC2_HC_PID_DATA0;
1109 }
1110}

/*
 * dwc2_hc_write_packet() - Writes a packet into the Tx FIFO associated with
 * the host channel
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information about the channel and transfer in progress
 *
 * This function should only be called in Slave mode. For a channel associated
 * with a non-periodic EP, the non-periodic Tx FIFO is written. For a channel
 * associated with a periodic EP, the periodic Tx FIFO is written.
 *
 * Upon return, the xfer_buf and xfer_count fields in @chan are incremented by
 * the number of bytes written to the Tx FIFO.
 */
1126static void dwc2_hc_write_packet(struct dwc2_hsotg *hsotg,
1127 struct dwc2_host_chan *chan)
1128{
1129 u32 i;
1130 u32 remaining_count;
1131 u32 byte_count;
1132 u32 dword_count;
1133 u32 *data_buf = (u32 *)chan->xfer_buf;
1134
1135 if (dbg_hc(chan))
1136 dev_vdbg(hsotg->dev, "%s()\n", __func__);
1137
1138 remaining_count = chan->xfer_len - chan->xfer_count;
1139 if (remaining_count > chan->max_packet)
1140 byte_count = chan->max_packet;
1141 else
1142 byte_count = remaining_count;
1143
1144 dword_count = (byte_count + 3) / 4;
1145
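	/* Write the packet into the channel's Tx FIFO one word at a time */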
1146 if (((unsigned long)data_buf & 0x3) == 0) {
1147
1148 for (i = 0; i < dword_count; i++, data_buf++)
1149 dwc2_writel(hsotg, *data_buf, HCFIFO(chan->hc_num));
1150 } else {
1151
1152 for (i = 0; i < dword_count; i++, data_buf++) {
1153 u32 data = data_buf[0] | data_buf[1] << 8 |
1154 data_buf[2] << 16 | data_buf[3] << 24;
1155 dwc2_writel(hsotg, data, HCFIFO(chan->hc_num));
1156 }
1157 }
1158
1159 chan->xfer_count += byte_count;
1160 chan->xfer_buf += byte_count;
1161}

/**
 * dwc2_hc_do_ping() - Starts a PING transfer
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information needed to initialize the host channel
 *
 * This function should only be called in Slave mode. The Do Ping bit is set
 * in the HCTSIZ register, then the channel is enabled.
 */
1172static void dwc2_hc_do_ping(struct dwc2_hsotg *hsotg,
1173 struct dwc2_host_chan *chan)
1174{
1175 u32 hcchar;
1176 u32 hctsiz;
1177
1178 if (dbg_hc(chan))
1179 dev_vdbg(hsotg->dev, "%s: Channel %d\n", __func__,
1180 chan->hc_num);
1181
1182 hctsiz = TSIZ_DOPNG;
1183 hctsiz |= 1 << TSIZ_PKTCNT_SHIFT;
1184 dwc2_writel(hsotg, hctsiz, HCTSIZ(chan->hc_num));
1185
1186 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
1187 hcchar |= HCCHAR_CHENA;
1188 hcchar &= ~HCCHAR_CHDIS;
1189 dwc2_writel(hsotg, hcchar, HCCHAR(chan->hc_num));
1190}

/*
 * dwc2_hc_start_transfer() - Does the setup for a data transfer for a host
 * channel and starts the transfer
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information needed to initialize the host channel. The xfer_len
 *         value may be reduced to accommodate the maximum widths of the
 *         XferSize and PktCnt fields in HCTSIZn, and multi_count may be
 *         updated to reflect the final xfer_len value.
 *
 * This function may be called in either Slave mode or DMA mode. In Slave
 * mode, the caller must ensure that there is sufficient space in the request
 * queue and Tx Data FIFO.
 *
 * For an OUT transfer in Slave mode, it loads a data packet into the
 * appropriate FIFO; additional packets are loaded from the host ISR as
 * needed. For an IN transfer in Slave mode, a data packet is requested and
 * the received packets are unloaded from the Rx FIFO in the host ISR.
 *
 * For a PING transfer in Slave mode, the Do Ping bit is set in HCTSIZ along
 * with a packet count of 1 and the channel is enabled, causing a single PING
 * transaction. For a PING transfer in DMA mode, HCTSIZ is programmed with
 * the full transfer information plus the Do Ping bit, and the controller
 * performs the PING protocol before starting the data transfer.
 */
1225static void dwc2_hc_start_transfer(struct dwc2_hsotg *hsotg,
1226 struct dwc2_host_chan *chan)
1227{
1228 u32 max_hc_xfer_size = hsotg->params.max_transfer_size;
1229 u16 max_hc_pkt_count = hsotg->params.max_packet_count;
1230 u32 hcchar;
1231 u32 hctsiz = 0;
1232 u16 num_packets;
1233 u32 ec_mc;
1234
1235 if (dbg_hc(chan))
1236 dev_vdbg(hsotg->dev, "%s()\n", __func__);
1237
1238 if (chan->do_ping) {
1239 if (!hsotg->params.host_dma) {
1240 if (dbg_hc(chan))
1241 dev_vdbg(hsotg->dev, "ping, no DMA\n");
1242 dwc2_hc_do_ping(hsotg, chan);
1243 chan->xfer_started = 1;
1244 return;
1245 }
1246
1247 if (dbg_hc(chan))
1248 dev_vdbg(hsotg->dev, "ping, DMA\n");
1249
1250 hctsiz |= TSIZ_DOPNG;
1251 }
1252
1253 if (chan->do_split) {
1254 if (dbg_hc(chan))
1255 dev_vdbg(hsotg->dev, "split\n");
1256 num_packets = 1;
1257
1258 if (chan->complete_split && !chan->ep_is_in)
1259
1260
1261
1262
1263 chan->xfer_len = 0;
1264 else if (chan->ep_is_in || chan->xfer_len > chan->max_packet)
1265 chan->xfer_len = chan->max_packet;
1266 else if (!chan->ep_is_in && chan->xfer_len > 188)
1267 chan->xfer_len = 188;
1268
1269 hctsiz |= chan->xfer_len << TSIZ_XFERSIZE_SHIFT &
1270 TSIZ_XFERSIZE_MASK;
1271
1272
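		/* Multi-count field: 3 for periodic splits, 1 otherwise */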
1273 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
1274 chan->ep_type == USB_ENDPOINT_XFER_ISOC)
1275 ec_mc = 3;
1276 else
1277 ec_mc = 1;
1278 } else {
1279 if (dbg_hc(chan))
1280 dev_vdbg(hsotg->dev, "no split\n");
1281
1282
1283
1284
1285 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
1286 chan->ep_type == USB_ENDPOINT_XFER_ISOC) {
1287
1288
1289
1290
1291
1292
1293
1294 u32 max_periodic_len =
1295 chan->multi_count * chan->max_packet;
1296
1297 if (chan->xfer_len > max_periodic_len)
1298 chan->xfer_len = max_periodic_len;
1299 } else if (chan->xfer_len > max_hc_xfer_size) {
1300
1301
1302
1303
1304 chan->xfer_len =
1305 max_hc_xfer_size - chan->max_packet + 1;
1306 }
1307
1308 if (chan->xfer_len > 0) {
1309 num_packets = (chan->xfer_len + chan->max_packet - 1) /
1310 chan->max_packet;
1311 if (num_packets > max_hc_pkt_count) {
1312 num_packets = max_hc_pkt_count;
1313 chan->xfer_len = num_packets * chan->max_packet;
1314 } else if (chan->ep_is_in) {
1315
1316
1317
1318
1319
1320
1321 chan->xfer_len = num_packets * chan->max_packet;
1322 }
1323 } else {
1324
1325 num_packets = 1;
1326 }
1327
1328 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
1329 chan->ep_type == USB_ENDPOINT_XFER_ISOC)
1330
1331
1332
1333
1334 chan->multi_count = num_packets;
1335
1336 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC)
1337 dwc2_set_pid_isoc(chan);
1338
1339 hctsiz |= chan->xfer_len << TSIZ_XFERSIZE_SHIFT &
1340 TSIZ_XFERSIZE_MASK;
1341
1342
1343 ec_mc = chan->multi_count;
1344 }
1345
1346 chan->start_pkt_count = num_packets;
1347 hctsiz |= num_packets << TSIZ_PKTCNT_SHIFT & TSIZ_PKTCNT_MASK;
1348 hctsiz |= chan->data_pid_start << TSIZ_SC_MC_PID_SHIFT &
1349 TSIZ_SC_MC_PID_MASK;
1350 dwc2_writel(hsotg, hctsiz, HCTSIZ(chan->hc_num));
1351 if (dbg_hc(chan)) {
1352 dev_vdbg(hsotg->dev, "Wrote %08x to HCTSIZ(%d)\n",
1353 hctsiz, chan->hc_num);
1354
1355 dev_vdbg(hsotg->dev, "%s: Channel %d\n", __func__,
1356 chan->hc_num);
1357 dev_vdbg(hsotg->dev, " Xfer Size: %d\n",
1358 (hctsiz & TSIZ_XFERSIZE_MASK) >>
1359 TSIZ_XFERSIZE_SHIFT);
1360 dev_vdbg(hsotg->dev, " Num Pkts: %d\n",
1361 (hctsiz & TSIZ_PKTCNT_MASK) >>
1362 TSIZ_PKTCNT_SHIFT);
1363 dev_vdbg(hsotg->dev, " Start PID: %d\n",
1364 (hctsiz & TSIZ_SC_MC_PID_MASK) >>
1365 TSIZ_SC_MC_PID_SHIFT);
1366 }
1367
1368 if (hsotg->params.host_dma) {
1369 dma_addr_t dma_addr;
1370
1371 if (chan->align_buf) {
1372 if (dbg_hc(chan))
1373 dev_vdbg(hsotg->dev, "align_buf\n");
1374 dma_addr = chan->align_buf;
1375 } else {
1376 dma_addr = chan->xfer_dma;
1377 }
1378 dwc2_writel(hsotg, (u32)dma_addr, HCDMA(chan->hc_num));
1379
1380 if (dbg_hc(chan))
1381 dev_vdbg(hsotg->dev, "Wrote %08lx to HCDMA(%d)\n",
1382 (unsigned long)dma_addr, chan->hc_num);
1383 }
1384
1385
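	/* For split transactions, enable the split in HCSPLT */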
1386 if (chan->do_split) {
1387 u32 hcsplt = dwc2_readl(hsotg, HCSPLT(chan->hc_num));
1388
1389 hcsplt |= HCSPLT_SPLTENA;
1390 dwc2_writel(hsotg, hcsplt, HCSPLT(chan->hc_num));
1391 }
1392
1393 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
1394 hcchar &= ~HCCHAR_MULTICNT_MASK;
1395 hcchar |= (ec_mc << HCCHAR_MULTICNT_SHIFT) & HCCHAR_MULTICNT_MASK;
1396 dwc2_hc_set_even_odd_frame(hsotg, chan, &hcchar);
1397
1398 if (hcchar & HCCHAR_CHDIS)
1399 dev_warn(hsotg->dev,
1400 "%s: chdis set, channel %d, hcchar 0x%08x\n",
1401 __func__, chan->hc_num, hcchar);
1402
1403
1404 hcchar |= HCCHAR_CHENA;
1405 hcchar &= ~HCCHAR_CHDIS;
1406
1407 if (dbg_hc(chan))
1408 dev_vdbg(hsotg->dev, " Multi Cnt: %d\n",
1409 (hcchar & HCCHAR_MULTICNT_MASK) >>
1410 HCCHAR_MULTICNT_SHIFT);
1411
1412 dwc2_writel(hsotg, hcchar, HCCHAR(chan->hc_num));
1413 if (dbg_hc(chan))
1414 dev_vdbg(hsotg->dev, "Wrote %08x to HCCHAR(%d)\n", hcchar,
1415 chan->hc_num);
1416
1417 chan->xfer_started = 1;
1418 chan->requests++;
1419
1420 if (!hsotg->params.host_dma &&
1421 !chan->ep_is_in && chan->xfer_len > 0)
1422
1423 dwc2_hc_write_packet(hsotg, chan);
1424}

/**
 * dwc2_hc_start_transfer_ddma() - Does the setup for a data transfer for a
 * host channel and starts the transfer in Descriptor DMA mode
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information needed to initialize the host channel
 *
 * Initializes the HCTSIZ register: sets the Do Ping bit for PING transfers,
 * the starting PID, the NTD (number of transfer descriptors) value and, for
 * periodic transfers, the SCHED_INFO microframe bitmap. Then programs HCDMA
 * with the descriptor list address and enables the channel.
 */
1440void dwc2_hc_start_transfer_ddma(struct dwc2_hsotg *hsotg,
1441 struct dwc2_host_chan *chan)
1442{
1443 u32 hcchar;
1444 u32 hctsiz = 0;
1445
1446 if (chan->do_ping)
1447 hctsiz |= TSIZ_DOPNG;
1448
1449 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC)
1450 dwc2_set_pid_isoc(chan);
1451
1452
1453 hctsiz |= chan->data_pid_start << TSIZ_SC_MC_PID_SHIFT &
1454 TSIZ_SC_MC_PID_MASK;
1455
1456
1457 hctsiz |= (chan->ntd - 1) << TSIZ_NTD_SHIFT & TSIZ_NTD_MASK;
1458
1459
1460 hctsiz |= chan->schinfo << TSIZ_SCHINFO_SHIFT & TSIZ_SCHINFO_MASK;
1461
1462 if (dbg_hc(chan)) {
1463 dev_vdbg(hsotg->dev, "%s: Channel %d\n", __func__,
1464 chan->hc_num);
1465 dev_vdbg(hsotg->dev, " Start PID: %d\n",
1466 chan->data_pid_start);
1467 dev_vdbg(hsotg->dev, " NTD: %d\n", chan->ntd - 1);
1468 }
1469
1470 dwc2_writel(hsotg, hctsiz, HCTSIZ(chan->hc_num));
1471
1472 dma_sync_single_for_device(hsotg->dev, chan->desc_list_addr,
1473 chan->desc_list_sz, DMA_TO_DEVICE);
1474
1475 dwc2_writel(hsotg, chan->desc_list_addr, HCDMA(chan->hc_num));
1476
1477 if (dbg_hc(chan))
1478 dev_vdbg(hsotg->dev, "Wrote %pad to HCDMA(%d)\n",
1479 &chan->desc_list_addr, chan->hc_num);
1480
1481 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
1482 hcchar &= ~HCCHAR_MULTICNT_MASK;
1483 hcchar |= chan->multi_count << HCCHAR_MULTICNT_SHIFT &
1484 HCCHAR_MULTICNT_MASK;
1485
1486 if (hcchar & HCCHAR_CHDIS)
1487 dev_warn(hsotg->dev,
1488 "%s: chdis set, channel %d, hcchar 0x%08x\n",
1489 __func__, chan->hc_num, hcchar);
1490
1491
1492 hcchar |= HCCHAR_CHENA;
1493 hcchar &= ~HCCHAR_CHDIS;
1494
1495 if (dbg_hc(chan))
1496 dev_vdbg(hsotg->dev, " Multi Cnt: %d\n",
1497 (hcchar & HCCHAR_MULTICNT_MASK) >>
1498 HCCHAR_MULTICNT_SHIFT);
1499
1500 dwc2_writel(hsotg, hcchar, HCCHAR(chan->hc_num));
1501 if (dbg_hc(chan))
1502 dev_vdbg(hsotg->dev, "Wrote %08x to HCCHAR(%d)\n", hcchar,
1503 chan->hc_num);
1504
1505 chan->xfer_started = 1;
1506 chan->requests++;
1507}

/*
 * dwc2_hc_continue_transfer() - Continues a data transfer that was started
 * by a previous call to dwc2_hc_start_transfer()
 *
 * @hsotg: Programming view of DWC_otg controller
 * @chan:  Information about the channel and transfer in progress
 *
 * This function should only be called in Slave mode; in DMA mode the
 * controller completes the transfer autonomously. The caller must ensure
 * there is sufficient space in the request queue and Tx Data FIFO.
 *
 * For an OUT transfer, a new data packet is loaded into the appropriate FIFO
 * if there is data remaining to be queued. For an IN transfer, another data
 * packet is always requested. For the SETUP phase of a control transfer and
 * for split transactions, this function does nothing.
 *
 * Return: 1 if a new request is queued, 0 if no more requests are required
 * for this transfer
 */
1529static int dwc2_hc_continue_transfer(struct dwc2_hsotg *hsotg,
1530 struct dwc2_host_chan *chan)
1531{
1532 if (dbg_hc(chan))
1533 dev_vdbg(hsotg->dev, "%s: Channel %d\n", __func__,
1534 chan->hc_num);
1535
1536 if (chan->do_split)
1537
1538 return 0;
1539
1540 if (chan->data_pid_start == DWC2_HC_PID_SETUP)
1541
1542 return 0;
1543
1544 if (chan->ep_is_in) {
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554
1555
1556
1557 u32 hcchar = dwc2_readl(hsotg, HCCHAR(chan->hc_num));
1558
1559 dwc2_hc_set_even_odd_frame(hsotg, chan, &hcchar);
1560 hcchar |= HCCHAR_CHENA;
1561 hcchar &= ~HCCHAR_CHDIS;
1562 if (dbg_hc(chan))
1563 dev_vdbg(hsotg->dev, " IN xfer: hcchar = 0x%08x\n",
1564 hcchar);
1565 dwc2_writel(hsotg, hcchar, HCCHAR(chan->hc_num));
1566 chan->requests++;
1567 return 1;
1568 }
1569
1570
1571
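	/*
	 * OUT transfers in Slave mode: load another data packet into the Tx
	 * FIFO if any data remains to be sent.
	 */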
1572 if (chan->xfer_count < chan->xfer_len) {
1573 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
1574 chan->ep_type == USB_ENDPOINT_XFER_ISOC) {
1575 u32 hcchar = dwc2_readl(hsotg,
1576 HCCHAR(chan->hc_num));
1577
1578 dwc2_hc_set_even_odd_frame(hsotg, chan,
1579 &hcchar);
1580 }
1581
1582
1583 dwc2_hc_write_packet(hsotg, chan);
1584 chan->requests++;
1585 return 1;
1586 }
1587
1588 return 0;
1589}

/*
 * dwc2_kill_urbs_in_qh_list() - Completes every URB on the given QH list
 * with a -ECONNRESET (dequeued) status and frees the associated QTDs
 *
 * @hsotg:   Programming view of DWC_otg controller
 * @qh_list: List of QHs whose QTDs are to be completed and freed
 */
1603static void dwc2_kill_urbs_in_qh_list(struct dwc2_hsotg *hsotg,
1604 struct list_head *qh_list)
1605{
1606 struct dwc2_qh *qh, *qh_tmp;
1607 struct dwc2_qtd *qtd, *qtd_tmp;
1608
1609 list_for_each_entry_safe(qh, qh_tmp, qh_list, qh_list_entry) {
1610 list_for_each_entry_safe(qtd, qtd_tmp, &qh->qtd_list,
1611 qtd_list_entry) {
1612 dwc2_host_complete(hsotg, qtd, -ECONNRESET);
1613 dwc2_hcd_qtd_unlink_and_free(hsotg, qtd, qh);
1614 }
1615 }
1616}
1617
1618static void dwc2_qh_list_free(struct dwc2_hsotg *hsotg,
1619 struct list_head *qh_list)
1620{
1621 struct dwc2_qtd *qtd, *qtd_tmp;
1622 struct dwc2_qh *qh, *qh_tmp;
1623 unsigned long flags;
1624
1625 if (!qh_list->next)
1626
1627 return;
1628
1629 spin_lock_irqsave(&hsotg->lock, flags);
1630
1631
1632 dwc2_kill_urbs_in_qh_list(hsotg, qh_list);
1633
1634 list_for_each_entry_safe(qh, qh_tmp, qh_list, qh_list_entry) {
1635 dwc2_hcd_qh_unlink(hsotg, qh);
1636
1637
1638 list_for_each_entry_safe(qtd, qtd_tmp, &qh->qtd_list,
1639 qtd_list_entry)
1640 dwc2_hcd_qtd_unlink_and_free(hsotg, qtd, qh);
1641
1642 if (qh->channel && qh->channel->qh == qh)
1643 qh->channel->qh = NULL;
1644
1645 spin_unlock_irqrestore(&hsotg->lock, flags);
1646 dwc2_hcd_qh_free(hsotg, qh);
1647 spin_lock_irqsave(&hsotg->lock, flags);
1648 }
1649
1650 spin_unlock_irqrestore(&hsotg->lock, flags);
1651}

/*
 * Responds with a -ECONNRESET status to all URBs in the non-periodic and
 * periodic schedules. The QTD associated with each URB is removed from the
 * schedule and freed. This may be called when a disconnect is detected or
 * when the HCD is being stopped.
 */
1661static void dwc2_kill_all_urbs(struct dwc2_hsotg *hsotg)
1662{
1663 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->non_periodic_sched_inactive);
1664 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->non_periodic_sched_waiting);
1665 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->non_periodic_sched_active);
1666 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->periodic_sched_inactive);
1667 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->periodic_sched_ready);
1668 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->periodic_sched_assigned);
1669 dwc2_kill_urbs_in_qh_list(hsotg, &hsotg->periodic_sched_queued);
1670}

/**
 * dwc2_hcd_start() - Starts the HCD when switching to Host mode
 *
 * @hsotg: Pointer to struct dwc2_hsotg
 */
1677void dwc2_hcd_start(struct dwc2_hsotg *hsotg)
1678{
1679 u32 hprt0;
1680
1681 if (hsotg->op_state == OTG_STATE_B_HOST) {
1682
1683
1684
1685
1686
1687 hprt0 = dwc2_read_hprt0(hsotg);
1688 hprt0 |= HPRT0_RST;
1689 dwc2_writel(hsotg, hprt0, HPRT0);
1690 }
1691
1692 queue_delayed_work(hsotg->wq_otg, &hsotg->start_work,
1693 msecs_to_jiffies(50));
1694}

/* Must be called with interrupt disabled and spinlock held */
1697static void dwc2_hcd_cleanup_channels(struct dwc2_hsotg *hsotg)
1698{
1699 int num_channels = hsotg->params.host_channels;
1700 struct dwc2_host_chan *channel;
1701 u32 hcchar;
1702 int i;
1703
1704 if (!hsotg->params.host_dma) {
1705
1706 for (i = 0; i < num_channels; i++) {
1707 channel = hsotg->hc_ptr_array[i];
1708 if (!list_empty(&channel->hc_list_entry))
1709 continue;
1710 hcchar = dwc2_readl(hsotg, HCCHAR(i));
1711 if (hcchar & HCCHAR_CHENA) {
1712 hcchar &= ~(HCCHAR_CHENA | HCCHAR_EPDIR);
1713 hcchar |= HCCHAR_CHDIS;
1714 dwc2_writel(hsotg, hcchar, HCCHAR(i));
1715 }
1716 }
1717 }
1718
1719 for (i = 0; i < num_channels; i++) {
1720 channel = hsotg->hc_ptr_array[i];
1721 if (!list_empty(&channel->hc_list_entry))
1722 continue;
1723 hcchar = dwc2_readl(hsotg, HCCHAR(i));
1724 if (hcchar & HCCHAR_CHENA) {
1725
1726 hcchar |= HCCHAR_CHDIS;
1727 dwc2_writel(hsotg, hcchar, HCCHAR(i));
1728 }
1729
1730 dwc2_hc_cleanup(hsotg, channel);
1731 list_add_tail(&channel->hc_list_entry, &hsotg->free_hc_list);
1732
1733
1734
1735
1736
1737 channel->qh = NULL;
1738 }
1739
1740 if (hsotg->params.uframe_sched) {
1741 hsotg->available_host_channels =
1742 hsotg->params.host_channels;
1743 } else {
1744 hsotg->non_periodic_channels = 0;
1745 hsotg->periodic_channels = 0;
1746 }
1747}

/**
 * dwc2_hcd_connect() - Handles connect of the HCD
 *
 * @hsotg: Pointer to struct dwc2_hsotg
 *
 * Must be called with interrupt disabled and spinlock held
 */
1756void dwc2_hcd_connect(struct dwc2_hsotg *hsotg)
1757{
1758 if (hsotg->lx_state != DWC2_L0)
1759 usb_hcd_resume_root_hub(hsotg->priv);
1760
1761 hsotg->flags.b.port_connect_status_change = 1;
1762 hsotg->flags.b.port_connect_status = 1;
1763}

/**
 * dwc2_hcd_disconnect() - Handles disconnect of the HCD
 *
 * @hsotg: Pointer to struct dwc2_hsotg
 * @force: If true, do not try to reconnect even if a device still appears
 *         connected
 *
 * Must be called with interrupt disabled and spinlock held
 */
1773void dwc2_hcd_disconnect(struct dwc2_hsotg *hsotg, bool force)
1774{
1775 u32 intr;
1776 u32 hprt0;
1777
1778
1779 hsotg->flags.b.port_connect_status_change = 1;
1780 hsotg->flags.b.port_connect_status = 0;
1781
1782
1783
1784
1785
1786
1787 intr = dwc2_readl(hsotg, GINTMSK);
1788 intr &= ~(GINTSTS_NPTXFEMP | GINTSTS_PTXFEMP | GINTSTS_HCHINT);
1789 dwc2_writel(hsotg, intr, GINTMSK);
1790 intr = GINTSTS_NPTXFEMP | GINTSTS_PTXFEMP | GINTSTS_HCHINT;
1791 dwc2_writel(hsotg, intr, GINTSTS);
1792
1793
1794
1795
1796
1797
1798 if (dwc2_is_device_mode(hsotg)) {
1799 if (hsotg->op_state != OTG_STATE_A_SUSPEND) {
1800 dev_dbg(hsotg->dev, "Disconnect: PortPower off\n");
1801 dwc2_writel(hsotg, 0, HPRT0);
1802 }
1803
1804 dwc2_disable_host_interrupts(hsotg);
1805 }
1806
1807
1808 dwc2_kill_all_urbs(hsotg);
1809
1810 if (dwc2_is_host_mode(hsotg))
1811
1812 dwc2_hcd_cleanup_channels(hsotg);
1813
1814 dwc2_host_disconnect(hsotg);
1815
1816
1817
1818
1819
1820
1821
1822
1823
1824
1825
1826
1827
1828 if (!force) {
1829 hprt0 = dwc2_readl(hsotg, HPRT0);
1830 if (!(hprt0 & HPRT0_CONNDET) && (hprt0 & HPRT0_CONNSTS))
1831 dwc2_hcd_connect(hsotg);
1832 }
1833}

/*
 * dwc2_hcd_rem_wakeup() - Handles Remote Wakeup
 *
 * @hsotg: Pointer to struct dwc2_hsotg
 */
1840static void dwc2_hcd_rem_wakeup(struct dwc2_hsotg *hsotg)
1841{
1842 if (hsotg->bus_suspended) {
1843 hsotg->flags.b.port_suspend_change = 1;
1844 usb_hcd_resume_root_hub(hsotg->priv);
1845 }
1846
1847 if (hsotg->lx_state == DWC2_L1)
1848 hsotg->flags.b.port_l1_change = 1;
1849}

/*
 * dwc2_hcd_stop() - Halts the DWC_otg host mode operation in a clean manner
 *
 * @hsotg: Pointer to struct dwc2_hsotg
 *
 * Must be called with interrupt disabled and spinlock held
 */
1858void dwc2_hcd_stop(struct dwc2_hsotg *hsotg)
1859{
1860 dev_dbg(hsotg->dev, "DWC OTG HCD STOP\n");
1861
1862
1863
1864
1865
1866
1867
1868
1869 dwc2_disable_host_interrupts(hsotg);
1870
1871
1872 dev_dbg(hsotg->dev, "PortPower off\n");
1873 dwc2_writel(hsotg, 0, HPRT0);
1874}

/* Caller must hold driver lock */
1877static int dwc2_hcd_urb_enqueue(struct dwc2_hsotg *hsotg,
1878 struct dwc2_hcd_urb *urb, struct dwc2_qh *qh,
1879 struct dwc2_qtd *qtd)
1880{
1881 u32 intr_mask;
1882 int retval;
1883 int dev_speed;
1884
1885 if (!hsotg->flags.b.port_connect_status) {
1886
1887 dev_err(hsotg->dev, "Not connected\n");
1888 return -ENODEV;
1889 }
1890
1891 dev_speed = dwc2_host_get_speed(hsotg, urb->priv);
1892
1893
1894 if ((dev_speed == USB_SPEED_LOW) &&
1895 (hsotg->hw_params.fs_phy_type == GHWCFG2_FS_PHY_TYPE_DEDICATED) &&
1896 (hsotg->hw_params.hs_phy_type == GHWCFG2_HS_PHY_TYPE_UTMI)) {
1897 u32 hprt0 = dwc2_readl(hsotg, HPRT0);
1898 u32 prtspd = (hprt0 & HPRT0_SPD_MASK) >> HPRT0_SPD_SHIFT;
1899
1900 if (prtspd == HPRT0_SPD_FULL_SPEED)
1901 return -ENODEV;
1902 }
1903
1904 if (!qtd)
1905 return -EINVAL;
1906
1907 dwc2_hcd_qtd_init(qtd, urb);
1908 retval = dwc2_hcd_qtd_add(hsotg, qtd, qh);
1909 if (retval) {
1910 dev_err(hsotg->dev,
1911 "DWC OTG HCD URB Enqueue failed adding QTD. Error status %d\n",
1912 retval);
1913 return retval;
1914 }
1915
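	/*
	 * If the SOF interrupt is not enabled, kick off transaction
	 * processing here instead of waiting for the next SOF.
	 */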
1916 intr_mask = dwc2_readl(hsotg, GINTMSK);
1917 if (!(intr_mask & GINTSTS_SOF)) {
1918 enum dwc2_transaction_type tr_type;
1919
1920 if (qtd->qh->ep_type == USB_ENDPOINT_XFER_BULK &&
1921 !(qtd->urb->flags & URB_GIVEBACK_ASAP))
1922
1923
1924
1925
1926 return 0;
1927
1928 tr_type = dwc2_hcd_select_transactions(hsotg);
1929 if (tr_type != DWC2_TRANSACTION_NONE)
1930 dwc2_hcd_queue_transactions(hsotg, tr_type);
1931 }
1932
1933 return 0;
1934}

/* Must be called with interrupt disabled and spinlock held */
1937static int dwc2_hcd_urb_dequeue(struct dwc2_hsotg *hsotg,
1938 struct dwc2_hcd_urb *urb)
1939{
1940 struct dwc2_qh *qh;
1941 struct dwc2_qtd *urb_qtd;
1942
1943 urb_qtd = urb->qtd;
1944 if (!urb_qtd) {
1945 dev_dbg(hsotg->dev, "## Urb QTD is NULL ##\n");
1946 return -EINVAL;
1947 }
1948
1949 qh = urb_qtd->qh;
1950 if (!qh) {
1951 dev_dbg(hsotg->dev, "## Urb QTD QH is NULL ##\n");
1952 return -EINVAL;
1953 }
1954
1955 urb->priv = NULL;
1956
1957 if (urb_qtd->in_process && qh->channel) {
1958 dwc2_dump_channel_info(hsotg, qh->channel);
1959
1960
1961 if (hsotg->flags.b.port_connect_status)
1962
1963
1964
1965
1966
1967
1968
1969 dwc2_hc_halt(hsotg, qh->channel,
1970 DWC2_HC_XFER_URB_DEQUEUE);
1971 }
1972
1973
1974
1975
1976
1977 if (!hsotg->params.dma_desc_enable) {
1978 u8 in_process = urb_qtd->in_process;
1979
1980 dwc2_hcd_qtd_unlink_and_free(hsotg, urb_qtd, qh);
1981 if (in_process) {
1982 dwc2_hcd_qh_deactivate(hsotg, qh, 0);
1983 qh->channel = NULL;
1984 } else if (list_empty(&qh->qtd_list)) {
1985 dwc2_hcd_qh_unlink(hsotg, qh);
1986 }
1987 } else {
1988 dwc2_hcd_qtd_unlink_and_free(hsotg, urb_qtd, qh);
1989 }
1990
1991 return 0;
1992}

/* Must NOT be called with interrupt disabled or spinlock held (sleeps) */
1995static int dwc2_hcd_endpoint_disable(struct dwc2_hsotg *hsotg,
1996 struct usb_host_endpoint *ep, int retry)
1997{
1998 struct dwc2_qtd *qtd, *qtd_tmp;
1999 struct dwc2_qh *qh;
2000 unsigned long flags;
2001 int rc;
2002
2003 spin_lock_irqsave(&hsotg->lock, flags);
2004
2005 qh = ep->hcpriv;
2006 if (!qh) {
2007 rc = -EINVAL;
2008 goto err;
2009 }
2010
2011 while (!list_empty(&qh->qtd_list) && retry--) {
2012 if (retry == 0) {
2013 dev_err(hsotg->dev,
2014 "## timeout in dwc2_hcd_endpoint_disable() ##\n");
2015 rc = -EBUSY;
2016 goto err;
2017 }
2018
2019 spin_unlock_irqrestore(&hsotg->lock, flags);
2020 msleep(20);
2021 spin_lock_irqsave(&hsotg->lock, flags);
2022 qh = ep->hcpriv;
2023 if (!qh) {
2024 rc = -EINVAL;
2025 goto err;
2026 }
2027 }
2028
2029 dwc2_hcd_qh_unlink(hsotg, qh);
2030
2031
2032 list_for_each_entry_safe(qtd, qtd_tmp, &qh->qtd_list, qtd_list_entry)
2033 dwc2_hcd_qtd_unlink_and_free(hsotg, qtd, qh);
2034
2035 ep->hcpriv = NULL;
2036
2037 if (qh->channel && qh->channel->qh == qh)
2038 qh->channel->qh = NULL;
2039
2040 spin_unlock_irqrestore(&hsotg->lock, flags);
2041
2042 dwc2_hcd_qh_free(hsotg, qh);
2043
2044 return 0;
2045
2046err:
2047 ep->hcpriv = NULL;
2048 spin_unlock_irqrestore(&hsotg->lock, flags);
2049
2050 return rc;
2051}

/* Must be called with interrupt disabled and spinlock held */
2054static int dwc2_hcd_endpoint_reset(struct dwc2_hsotg *hsotg,
2055 struct usb_host_endpoint *ep)
2056{
2057 struct dwc2_qh *qh = ep->hcpriv;
2058
2059 if (!qh)
2060 return -EINVAL;
2061
2062 qh->data_toggle = DWC2_HC_PID_DATA0;
2063
2064 return 0;
2065}

/*
 * dwc2_core_init() - Initializes the DWC_otg controller registers and
 * prepares the core for device mode or host mode operation
 *
 * @hsotg:         Programming view of the DWC_otg controller
 * @initial_setup: If true, this is the first initialization for this instance
 */
2074int dwc2_core_init(struct dwc2_hsotg *hsotg, bool initial_setup)
2075{
2076 u32 usbcfg, otgctl;
2077 int retval;
2078
2079 dev_dbg(hsotg->dev, "%s(%p)\n", __func__, hsotg);
2080
2081 usbcfg = dwc2_readl(hsotg, GUSBCFG);
2082
2083
2084 usbcfg &= ~GUSBCFG_ULPI_EXT_VBUS_DRV;
2085 if (hsotg->params.phy_ulpi_ext_vbus)
2086 usbcfg |= GUSBCFG_ULPI_EXT_VBUS_DRV;
2087
2088
2089 usbcfg &= ~GUSBCFG_TERMSELDLPULSE;
2090 if (hsotg->params.ts_dline)
2091 usbcfg |= GUSBCFG_TERMSELDLPULSE;
2092
2093 dwc2_writel(hsotg, usbcfg, GUSBCFG);
2094
2095
2096
2097
2098
2099
2100
2101
2102 if (!initial_setup) {
2103 retval = dwc2_core_reset(hsotg, false);
2104 if (retval) {
2105 dev_err(hsotg->dev, "%s(): Reset failed, aborting\n",
2106 __func__);
2107 return retval;
2108 }
2109 }
2110
2111
2112
2113
2114 retval = dwc2_phy_init(hsotg, initial_setup);
2115 if (retval)
2116 return retval;
2117
2118
2119 retval = dwc2_gahbcfg_init(hsotg);
2120 if (retval)
2121 return retval;
2122
2123
2124 dwc2_gusbcfg_init(hsotg);
2125
2126
2127 otgctl = dwc2_readl(hsotg, GOTGCTL);
2128 otgctl &= ~GOTGCTL_OTGVER;
2129 dwc2_writel(hsotg, otgctl, GOTGCTL);
2130
2131
2132 hsotg->srp_success = 0;
2133
2134
2135 dwc2_enable_common_interrupts(hsotg);
2136
2137
2138
2139
2140
2141 if (dwc2_is_host_mode(hsotg)) {
2142 dev_dbg(hsotg->dev, "Host Mode\n");
2143 hsotg->op_state = OTG_STATE_A_HOST;
2144 } else {
2145 dev_dbg(hsotg->dev, "Device Mode\n");
2146 hsotg->op_state = OTG_STATE_B_PERIPHERAL;
2147 }
2148
2149 return 0;
2150}

/**
 * dwc2_core_host_init() - Initializes the DWC_otg controller registers for
 * Host mode
 *
 * @hsotg: Programming view of DWC_otg controller
 *
 * This function flushes the Tx and Rx FIFOs and flushes any entries in the
 * request queues. Host channels are reset to ensure that they are ready for
 * performing transfers.
 */
2162static void dwc2_core_host_init(struct dwc2_hsotg *hsotg)
2163{
2164 u32 hcfg, hfir, otgctl, usbcfg;
2165
2166 dev_dbg(hsotg->dev, "%s(%p)\n", __func__, hsotg);
2167
2168
2169
2170
2171
2172
2173
2174
2175
2176 usbcfg = dwc2_readl(hsotg, GUSBCFG);
2177 usbcfg |= GUSBCFG_TOUTCAL(7);
2178 dwc2_writel(hsotg, usbcfg, GUSBCFG);
2179
2180
2181 dwc2_writel(hsotg, 0, PCGCTL);
2182
2183
2184 dwc2_init_fs_ls_pclk_sel(hsotg);
2185 if (hsotg->params.speed == DWC2_SPEED_PARAM_FULL ||
2186 hsotg->params.speed == DWC2_SPEED_PARAM_LOW) {
2187 hcfg = dwc2_readl(hsotg, HCFG);
2188 hcfg |= HCFG_FSLSSUPP;
2189 dwc2_writel(hsotg, hcfg, HCFG);
2190 }
2191
2192
2193
2194
2195
2196
2197 if (hsotg->params.reload_ctl) {
2198 hfir = dwc2_readl(hsotg, HFIR);
2199 hfir |= HFIR_RLDCTRL;
2200 dwc2_writel(hsotg, hfir, HFIR);
2201 }
2202
2203 if (hsotg->params.dma_desc_enable) {
2204 u32 op_mode = hsotg->hw_params.op_mode;
2205
2206 if (hsotg->hw_params.snpsid < DWC2_CORE_REV_2_90a ||
2207 !hsotg->hw_params.dma_desc_enable ||
2208 op_mode == GHWCFG2_OP_MODE_SRP_CAPABLE_DEVICE ||
2209 op_mode == GHWCFG2_OP_MODE_NO_SRP_CAPABLE_DEVICE ||
2210 op_mode == GHWCFG2_OP_MODE_UNDEFINED) {
2211 dev_err(hsotg->dev,
2212 "Hardware does not support descriptor DMA mode -\n");
2213 dev_err(hsotg->dev,
2214 "falling back to buffer DMA mode.\n");
2215 hsotg->params.dma_desc_enable = false;
2216 } else {
2217 hcfg = dwc2_readl(hsotg, HCFG);
2218 hcfg |= HCFG_DESCDMA;
2219 dwc2_writel(hsotg, hcfg, HCFG);
2220 }
2221 }
2222
2223
2224 dwc2_config_fifos(hsotg);
2225
2226
2227
2228 otgctl = dwc2_readl(hsotg, GOTGCTL);
2229 otgctl &= ~GOTGCTL_HSTSETHNPEN;
2230 dwc2_writel(hsotg, otgctl, GOTGCTL);
2231
2232
	dwc2_flush_tx_fifo(hsotg, 0x10); /* all TX FIFOs */
2234 dwc2_flush_rx_fifo(hsotg);
2235
2236
2237 otgctl = dwc2_readl(hsotg, GOTGCTL);
2238 otgctl &= ~GOTGCTL_HSTSETHNPEN;
2239 dwc2_writel(hsotg, otgctl, GOTGCTL);
2240
2241 if (!hsotg->params.dma_desc_enable) {
2242 int num_channels, i;
2243 u32 hcchar;
2244
2245
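		/* Flush out any leftover queued requests by halting enabled channels */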
2246 num_channels = hsotg->params.host_channels;
2247 for (i = 0; i < num_channels; i++) {
2248 hcchar = dwc2_readl(hsotg, HCCHAR(i));
2249 if (hcchar & HCCHAR_CHENA) {
2250 hcchar &= ~HCCHAR_CHENA;
2251 hcchar |= HCCHAR_CHDIS;
2252 hcchar &= ~HCCHAR_EPDIR;
2253 dwc2_writel(hsotg, hcchar, HCCHAR(i));
2254 }
2255 }
2256
2257
2258 for (i = 0; i < num_channels; i++) {
2259 hcchar = dwc2_readl(hsotg, HCCHAR(i));
2260 if (hcchar & HCCHAR_CHENA) {
2261 hcchar |= HCCHAR_CHENA | HCCHAR_CHDIS;
2262 hcchar &= ~HCCHAR_EPDIR;
2263 dwc2_writel(hsotg, hcchar, HCCHAR(i));
2264 dev_dbg(hsotg->dev, "%s: Halt channel %d\n",
2265 __func__, i);
2266
2267 if (dwc2_hsotg_wait_bit_clear(hsotg, HCCHAR(i),
2268 HCCHAR_CHENA,
2269 1000)) {
2270 dev_warn(hsotg->dev,
2271 "Unable to clear enable on channel %d\n",
2272 i);
2273 }
2274 }
2275 }
2276 }
2277
2278
2279 dwc2_enable_acg(hsotg);
2280
2281
2282 dev_dbg(hsotg->dev, "Init: Port Power? op_state=%d\n", hsotg->op_state);
2283 if (hsotg->op_state == OTG_STATE_A_HOST) {
2284 u32 hprt0 = dwc2_read_hprt0(hsotg);
2285
2286 dev_dbg(hsotg->dev, "Init: Power Port (%d)\n",
2287 !!(hprt0 & HPRT0_PWR));
2288 if (!(hprt0 & HPRT0_PWR)) {
2289 hprt0 |= HPRT0_PWR;
2290 dwc2_writel(hsotg, hprt0, HPRT0);
2291 }
2292 }
2293
2294 dwc2_enable_host_interrupts(hsotg);
2295}

/*
 * Initializes the dynamic portions of the DWC_otg HCD state
 *
 * Must be called with interrupt disabled and spinlock held
 */
2302static void dwc2_hcd_reinit(struct dwc2_hsotg *hsotg)
2303{
2304 struct dwc2_host_chan *chan, *chan_tmp;
2305 int num_channels;
2306 int i;
2307
2308 hsotg->flags.d32 = 0;
2309 hsotg->non_periodic_qh_ptr = &hsotg->non_periodic_sched_active;
2310
2311 if (hsotg->params.uframe_sched) {
2312 hsotg->available_host_channels =
2313 hsotg->params.host_channels;
2314 } else {
2315 hsotg->non_periodic_channels = 0;
2316 hsotg->periodic_channels = 0;
2317 }
2318
2319
2320
2321
2322
2323 list_for_each_entry_safe(chan, chan_tmp, &hsotg->free_hc_list,
2324 hc_list_entry)
2325 list_del_init(&chan->hc_list_entry);
2326
2327 num_channels = hsotg->params.host_channels;
2328 for (i = 0; i < num_channels; i++) {
2329 chan = hsotg->hc_ptr_array[i];
2330 list_add_tail(&chan->hc_list_entry, &hsotg->free_hc_list);
2331 dwc2_hc_cleanup(hsotg, chan);
2332 }
2333
2334
2335 dwc2_core_host_init(hsotg);
2336}
2337
2338static void dwc2_hc_init_split(struct dwc2_hsotg *hsotg,
2339 struct dwc2_host_chan *chan,
2340 struct dwc2_qtd *qtd, struct dwc2_hcd_urb *urb)
2341{
2342 int hub_addr, hub_port;
2343
2344 chan->do_split = 1;
2345 chan->xact_pos = qtd->isoc_split_pos;
2346 chan->complete_split = qtd->complete_split;
2347 dwc2_host_hub_info(hsotg, urb->priv, &hub_addr, &hub_port);
2348 chan->hub_addr = (u8)hub_addr;
2349 chan->hub_port = (u8)hub_port;
2350}
2351
2352static void dwc2_hc_init_xfer(struct dwc2_hsotg *hsotg,
2353 struct dwc2_host_chan *chan,
2354 struct dwc2_qtd *qtd)
2355{
2356 struct dwc2_hcd_urb *urb = qtd->urb;
2357 struct dwc2_hcd_iso_packet_desc *frame_desc;
2358
2359 switch (dwc2_hcd_get_pipe_type(&urb->pipe_info)) {
2360 case USB_ENDPOINT_XFER_CONTROL:
2361 chan->ep_type = USB_ENDPOINT_XFER_CONTROL;
2362
2363 switch (qtd->control_phase) {
2364 case DWC2_CONTROL_SETUP:
2365 dev_vdbg(hsotg->dev, " Control setup transaction\n");
2366 chan->do_ping = 0;
2367 chan->ep_is_in = 0;
2368 chan->data_pid_start = DWC2_HC_PID_SETUP;
2369 if (hsotg->params.host_dma)
2370 chan->xfer_dma = urb->setup_dma;
2371 else
2372 chan->xfer_buf = urb->setup_packet;
2373 chan->xfer_len = 8;
2374 break;
2375
2376 case DWC2_CONTROL_DATA:
2377 dev_vdbg(hsotg->dev, " Control data transaction\n");
2378 chan->data_pid_start = qtd->data_toggle;
2379 break;
2380
2381 case DWC2_CONTROL_STATUS:
2382
2383
2384
2385
2386 dev_vdbg(hsotg->dev, " Control status transaction\n");
2387 if (urb->length == 0)
2388 chan->ep_is_in = 1;
2389 else
2390 chan->ep_is_in =
2391 dwc2_hcd_is_pipe_out(&urb->pipe_info);
2392 if (chan->ep_is_in)
2393 chan->do_ping = 0;
2394 chan->data_pid_start = DWC2_HC_PID_DATA1;
2395 chan->xfer_len = 0;
2396 if (hsotg->params.host_dma)
2397 chan->xfer_dma = hsotg->status_buf_dma;
2398 else
2399 chan->xfer_buf = hsotg->status_buf;
2400 break;
2401 }
2402 break;
2403
2404 case USB_ENDPOINT_XFER_BULK:
2405 chan->ep_type = USB_ENDPOINT_XFER_BULK;
2406 break;
2407
2408 case USB_ENDPOINT_XFER_INT:
2409 chan->ep_type = USB_ENDPOINT_XFER_INT;
2410 break;
2411
2412 case USB_ENDPOINT_XFER_ISOC:
2413 chan->ep_type = USB_ENDPOINT_XFER_ISOC;
2414 if (hsotg->params.dma_desc_enable)
2415 break;
2416
2417 frame_desc = &urb->iso_descs[qtd->isoc_frame_index];
2418 frame_desc->status = 0;
2419
2420 if (hsotg->params.host_dma) {
2421 chan->xfer_dma = urb->dma;
2422 chan->xfer_dma += frame_desc->offset +
2423 qtd->isoc_split_offset;
2424 } else {
2425 chan->xfer_buf = urb->buf;
2426 chan->xfer_buf += frame_desc->offset +
2427 qtd->isoc_split_offset;
2428 }
2429
2430 chan->xfer_len = frame_desc->length - qtd->isoc_split_offset;
2431
2432 if (chan->xact_pos == DWC2_HCSPLT_XACTPOS_ALL) {
2433 if (chan->xfer_len <= 188)
2434 chan->xact_pos = DWC2_HCSPLT_XACTPOS_ALL;
2435 else
2436 chan->xact_pos = DWC2_HCSPLT_XACTPOS_BEGIN;
2437 }
2438 break;
2439 }
2440}
2441
2442static int dwc2_alloc_split_dma_aligned_buf(struct dwc2_hsotg *hsotg,
2443 struct dwc2_qh *qh,
2444 struct dwc2_host_chan *chan)
2445{
2446 if (!hsotg->unaligned_cache ||
2447 chan->max_packet > DWC2_KMEM_UNALIGNED_BUF_SIZE)
2448 return -ENOMEM;
2449
2450 if (!qh->dw_align_buf) {
2451 qh->dw_align_buf = kmem_cache_alloc(hsotg->unaligned_cache,
2452 GFP_ATOMIC | GFP_DMA);
2453 if (!qh->dw_align_buf)
2454 return -ENOMEM;
2455 }
2456
2457 qh->dw_align_buf_dma = dma_map_single(hsotg->dev, qh->dw_align_buf,
2458 DWC2_KMEM_UNALIGNED_BUF_SIZE,
2459 DMA_FROM_DEVICE);
2460
2461 if (dma_mapping_error(hsotg->dev, qh->dw_align_buf_dma)) {
2462 dev_err(hsotg->dev, "can't map align_buf\n");
2463 chan->align_buf = 0;
2464 return -EINVAL;
2465 }
2466
2467 chan->align_buf = qh->dw_align_buf_dma;
2468 return 0;
2469}
2470
2471#define DWC2_USB_DMA_ALIGN 4
2472
2473static void dwc2_free_dma_aligned_buffer(struct urb *urb)
2474{
2475 void *stored_xfer_buffer;
2476 size_t length;
2477
2478 if (!(urb->transfer_flags & URB_ALIGNED_TEMP_BUFFER))
2479 return;
2480
2481
2482 memcpy(&stored_xfer_buffer,
2483 PTR_ALIGN(urb->transfer_buffer + urb->transfer_buffer_length,
2484 dma_get_cache_alignment()),
2485 sizeof(urb->transfer_buffer));
2486
2487 if (usb_urb_dir_in(urb)) {
2488 if (usb_pipeisoc(urb->pipe))
2489 length = urb->transfer_buffer_length;
2490 else
2491 length = urb->actual_length;
2492
2493 memcpy(stored_xfer_buffer, urb->transfer_buffer, length);
2494 }
2495 kfree(urb->transfer_buffer);
2496 urb->transfer_buffer = stored_xfer_buffer;
2497
2498 urb->transfer_flags &= ~URB_ALIGNED_TEMP_BUFFER;
2499}
2500
2501static int dwc2_alloc_dma_aligned_buffer(struct urb *urb, gfp_t mem_flags)
2502{
2503 void *kmalloc_ptr;
2504 size_t kmalloc_size;
2505
2506 if (urb->num_sgs || urb->sg ||
2507 urb->transfer_buffer_length == 0 ||
2508 !((uintptr_t)urb->transfer_buffer & (DWC2_USB_DMA_ALIGN - 1)))
2509 return 0;
2510
2511
2512
2513
2514
2515
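	/*
	 * Allocate a buffer large enough for the data plus room to stash the
	 * original transfer_buffer pointer at a cache-aligned offset past it.
	 */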
2516 kmalloc_size = urb->transfer_buffer_length +
2517 (dma_get_cache_alignment() - 1) +
2518 sizeof(urb->transfer_buffer);
2519
2520 kmalloc_ptr = kmalloc(kmalloc_size, mem_flags);
2521 if (!kmalloc_ptr)
2522 return -ENOMEM;
2523
2524
2525
2526
2527
2528 memcpy(PTR_ALIGN(kmalloc_ptr + urb->transfer_buffer_length,
2529 dma_get_cache_alignment()),
2530 &urb->transfer_buffer, sizeof(urb->transfer_buffer));
2531
2532 if (usb_urb_dir_out(urb))
2533 memcpy(kmalloc_ptr, urb->transfer_buffer,
2534 urb->transfer_buffer_length);
2535 urb->transfer_buffer = kmalloc_ptr;
2536
2537 urb->transfer_flags |= URB_ALIGNED_TEMP_BUFFER;
2538
2539 return 0;
2540}
2541
2542static int dwc2_map_urb_for_dma(struct usb_hcd *hcd, struct urb *urb,
2543 gfp_t mem_flags)
2544{
2545 int ret;
2546
2547
2548 WARN_ON_ONCE(urb->setup_dma &&
2549 (urb->setup_dma & (DWC2_USB_DMA_ALIGN - 1)));
2550
2551 ret = dwc2_alloc_dma_aligned_buffer(urb, mem_flags);
2552 if (ret)
2553 return ret;
2554
2555 ret = usb_hcd_map_urb_for_dma(hcd, urb, mem_flags);
2556 if (ret)
2557 dwc2_free_dma_aligned_buffer(urb);
2558
2559 return ret;
2560}
2561
2562static void dwc2_unmap_urb_for_dma(struct usb_hcd *hcd, struct urb *urb)
2563{
2564 usb_hcd_unmap_urb_for_dma(hcd, urb);
2565 dwc2_free_dma_aligned_buffer(urb);
2566}

/**
 * dwc2_assign_and_init_hc() - Assigns transactions from a QTD to a free host
 * channel and initializes the host channel to perform the transactions. The
 * host channel is removed from the free list.
 *
 * @hsotg: The HCD state structure
 * @qh:    Transactions from the first QTD for this QH are selected and
 *         assigned to a free host channel
 */
2577static int dwc2_assign_and_init_hc(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh)
2578{
2579 struct dwc2_host_chan *chan;
2580 struct dwc2_hcd_urb *urb;
2581 struct dwc2_qtd *qtd;
2582
2583 if (dbg_qh(qh))
2584 dev_vdbg(hsotg->dev, "%s(%p,%p)\n", __func__, hsotg, qh);
2585
2586 if (list_empty(&qh->qtd_list)) {
2587 dev_dbg(hsotg->dev, "No QTDs in QH list\n");
2588 return -ENOMEM;
2589 }
2590
2591 if (list_empty(&hsotg->free_hc_list)) {
2592 dev_dbg(hsotg->dev, "No free channel to assign\n");
2593 return -ENOMEM;
2594 }
2595
2596 chan = list_first_entry(&hsotg->free_hc_list, struct dwc2_host_chan,
2597 hc_list_entry);
2598
 /* Remove the host channel from the free list */
2600 list_del_init(&chan->hc_list_entry);
2601
2602 qtd = list_first_entry(&qh->qtd_list, struct dwc2_qtd, qtd_list_entry);
2603 urb = qtd->urb;
2604 qh->channel = chan;
2605 qtd->in_process = 1;
2606
2607
2608
2609
2610
2611 chan->dev_addr = dwc2_hcd_get_dev_addr(&urb->pipe_info);
2612 chan->ep_num = dwc2_hcd_get_ep_num(&urb->pipe_info);
2613 chan->speed = qh->dev_speed;
2614 chan->max_packet = qh->maxp;
2615
2616 chan->xfer_started = 0;
2617 chan->halt_status = DWC2_HC_XFER_NO_HALT_STATUS;
2618 chan->error_state = (qtd->error_count > 0);
2619 chan->halt_on_queue = 0;
2620 chan->halt_pending = 0;
2621 chan->requests = 0;
2622
 /*
 * The following values may be modified in the transfer type section below.
 * The xfer_len value may be reduced when the transfer is started to fit the
 * maximum widths of the XferSize and PktCnt fields in the HCTSIZn register.
 */
2630 chan->ep_is_in = (dwc2_hcd_is_pipe_in(&urb->pipe_info) != 0);
2631 if (chan->ep_is_in)
2632 chan->do_ping = 0;
2633 else
2634 chan->do_ping = qh->ping_state;
2635
2636 chan->data_pid_start = qh->data_toggle;
2637 chan->multi_count = 1;
2638
2639 if (urb->actual_length > urb->length &&
2640 !dwc2_hcd_is_pipe_in(&urb->pipe_info))
2641 urb->actual_length = urb->length;
2642
2643 if (hsotg->params.host_dma)
2644 chan->xfer_dma = urb->dma + urb->actual_length;
2645 else
2646 chan->xfer_buf = (u8 *)urb->buf + urb->actual_length;
2647
2648 chan->xfer_len = urb->length - urb->actual_length;
2649 chan->xfer_count = 0;
2650
2651
2652 if (qh->do_split)
2653 dwc2_hc_init_split(hsotg, chan, qtd, urb);
2654 else
2655 chan->do_split = 0;
2656
2657
2658 dwc2_hc_init_xfer(hsotg, chan, qtd);
2659
2660
2661 if (hsotg->params.host_dma && qh->do_split &&
2662 chan->ep_is_in && (chan->xfer_dma & 0x3)) {
2663 dev_vdbg(hsotg->dev, "Non-aligned buffer\n");
2664 if (dwc2_alloc_split_dma_aligned_buf(hsotg, qh, chan)) {
2665 dev_err(hsotg->dev,
2666 "Failed to allocate memory to handle non-aligned buffer\n");
2667
2668 chan->align_buf = 0;
2669 chan->multi_count = 0;
2670 list_add_tail(&chan->hc_list_entry,
2671 &hsotg->free_hc_list);
2672 qtd->in_process = 0;
2673 qh->channel = NULL;
2674 return -ENOMEM;
2675 }
2676 } else {
 /*
 * We assume DMA is always aligned in the non-split case and the split OUT
 * case; warn if that is not true.
 */
2681 WARN_ON_ONCE(hsotg->params.host_dma &&
2682 (chan->xfer_dma & 0x3));
2683 chan->align_buf = 0;
2684 }
2685
2686 if (chan->ep_type == USB_ENDPOINT_XFER_INT ||
2687 chan->ep_type == USB_ENDPOINT_XFER_ISOC)
 /*
 * This value may be modified when the transfer is started to reflect
 * the actual transfer length.
 */
2692 chan->multi_count = qh->maxp_mult;
2693
2694 if (hsotg->params.dma_desc_enable) {
2695 chan->desc_list_addr = qh->desc_list_dma;
2696 chan->desc_list_sz = qh->desc_list_sz;
2697 }
2698
2699 dwc2_hc_init(hsotg, chan);
2700 chan->qh = qh;
2701
2702 return 0;
2703}
2704
/**
 * dwc2_hcd_select_transactions() - Selects transactions from the HCD transfer
 * schedule and assigns them to available host channels. Called from the HCD
 * interrupt handler functions.
 *
 * @hsotg: The HCD state structure
 *
 * Return: The types of new transactions that were assigned to host channels
 */
2714enum dwc2_transaction_type dwc2_hcd_select_transactions(
2715 struct dwc2_hsotg *hsotg)
2716{
2717 enum dwc2_transaction_type ret_val = DWC2_TRANSACTION_NONE;
2718 struct list_head *qh_ptr;
2719 struct dwc2_qh *qh;
2720 int num_channels;
2721
2722#ifdef DWC2_DEBUG_SOF
2723 dev_vdbg(hsotg->dev, " Select Transactions\n");
2724#endif
2725
2726
2727 qh_ptr = hsotg->periodic_sched_ready.next;
2728 while (qh_ptr != &hsotg->periodic_sched_ready) {
2729 if (list_empty(&hsotg->free_hc_list))
2730 break;
2731 if (hsotg->params.uframe_sched) {
2732 if (hsotg->available_host_channels <= 1)
2733 break;
2734 hsotg->available_host_channels--;
2735 }
2736 qh = list_entry(qh_ptr, struct dwc2_qh, qh_list_entry);
2737 if (dwc2_assign_and_init_hc(hsotg, qh))
2738 break;
2739
2740
2741
2742
2743
2744 qh_ptr = qh_ptr->next;
2745 list_move_tail(&qh->qh_list_entry,
2746 &hsotg->periodic_sched_assigned);
2747 ret_val = DWC2_TRANSACTION_PERIODIC;
2748 }
2749
 /*
 * Process entries in the inactive portion of the non-periodic schedule.
 * Some free host channels may not be used if they are reserved for
 * periodic transfers.
 */
2755 num_channels = hsotg->params.host_channels;
2756 qh_ptr = hsotg->non_periodic_sched_inactive.next;
2757 while (qh_ptr != &hsotg->non_periodic_sched_inactive) {
2758 if (!hsotg->params.uframe_sched &&
2759 hsotg->non_periodic_channels >= num_channels -
2760 hsotg->periodic_channels)
2761 break;
2762 if (list_empty(&hsotg->free_hc_list))
2763 break;
2764 qh = list_entry(qh_ptr, struct dwc2_qh, qh_list_entry);
2765 if (hsotg->params.uframe_sched) {
2766 if (hsotg->available_host_channels < 1)
2767 break;
2768 hsotg->available_host_channels--;
2769 }
2770
2771 if (dwc2_assign_and_init_hc(hsotg, qh))
2772 break;
2773
2774
2775
2776
2777
2778 qh_ptr = qh_ptr->next;
2779 list_move_tail(&qh->qh_list_entry,
2780 &hsotg->non_periodic_sched_active);
2781
2782 if (ret_val == DWC2_TRANSACTION_NONE)
2783 ret_val = DWC2_TRANSACTION_NON_PERIODIC;
2784 else
2785 ret_val = DWC2_TRANSACTION_ALL;
2786
2787 if (!hsotg->params.uframe_sched)
2788 hsotg->non_periodic_channels++;
2789 }
2790
2791 return ret_val;
2792}
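
/*
 * Typical use from the interrupt path (sketch): select new transactions and
 * then queue whatever was assigned to host channels.
 *
 *        enum dwc2_transaction_type tr_type;
 *
 *        tr_type = dwc2_hcd_select_transactions(hsotg);
 *        if (tr_type != DWC2_TRANSACTION_NONE)
 *                dwc2_hcd_queue_transactions(hsotg, tr_type);
 */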
2793
/**
 * dwc2_queue_transaction() - Attempts to queue a single transaction request
 * for a host channel associated with either a periodic or non-periodic
 * transfer
 *
 * @hsotg: The HCD state structure
 * @chan:  Host channel descriptor associated with either a periodic or
 *         non-periodic transfer
 * @fifo_dwords_avail: Number of DWORDs available in the periodic Tx FIFO
 *                     for periodic transfers or in the non-periodic Tx FIFO
 *                     for non-periodic transfers
 *
 * Return: 1 if a request is queued and more requests may be needed to
 * complete the transfer, 0 if no more requests are required for this
 * transfer, -1 if there is insufficient space in the Tx FIFO
 *
 * This function assumes that there is space available in the appropriate
 * request queue. For an OUT transfer or SETUP transaction in Slave mode,
 * it checks whether space is available in the appropriate Tx FIFO.
 *
 * Must be called with interrupt disabled and spinlock held
 */
2815static int dwc2_queue_transaction(struct dwc2_hsotg *hsotg,
2816 struct dwc2_host_chan *chan,
2817 u16 fifo_dwords_avail)
2818{
2819 int retval = 0;
2820
2821 if (chan->do_split)
 /* Move the channel to the tail of the split order list to keep ordering */
2823 list_move_tail(&chan->split_order_list_entry,
2824 &hsotg->split_order);
2825
2826 if (hsotg->params.host_dma && chan->qh) {
2827 if (hsotg->params.dma_desc_enable) {
2828 if (!chan->xfer_started ||
2829 chan->ep_type == USB_ENDPOINT_XFER_ISOC) {
2830 dwc2_hcd_start_xfer_ddma(hsotg, chan->qh);
2831 chan->qh->ping_state = 0;
2832 }
2833 } else if (!chan->xfer_started) {
2834 dwc2_hc_start_transfer(hsotg, chan);
2835 chan->qh->ping_state = 0;
2836 }
2837 } else if (chan->halt_pending) {
 /* Don't queue a request if the channel has been halted */
2839 } else if (chan->halt_on_queue) {
2840 dwc2_hc_halt(hsotg, chan, chan->halt_status);
2841 } else if (chan->do_ping) {
2842 if (!chan->xfer_started)
2843 dwc2_hc_start_transfer(hsotg, chan);
2844 } else if (!chan->ep_is_in ||
2845 chan->data_pid_start == DWC2_HC_PID_SETUP) {
2846 if ((fifo_dwords_avail * 4) >= chan->max_packet) {
2847 if (!chan->xfer_started) {
2848 dwc2_hc_start_transfer(hsotg, chan);
2849 retval = 1;
2850 } else {
2851 retval = dwc2_hc_continue_transfer(hsotg, chan);
2852 }
2853 } else {
2854 retval = -1;
2855 }
2856 } else {
2857 if (!chan->xfer_started) {
2858 dwc2_hc_start_transfer(hsotg, chan);
2859 retval = 1;
2860 } else {
2861 retval = dwc2_hc_continue_transfer(hsotg, chan);
2862 }
2863 }
2864
2865 return retval;
2866}
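
/*
 * How callers interpret the return value (sketch mirroring the loops below;
 * 'more_to_do' is local to the non-periodic caller):
 *
 *        status = dwc2_queue_transaction(hsotg, qh->channel, fspcavail);
 *        if (status < 0)         Tx FIFO is full, stop queuing for now
 *        else if (status > 0)    request queued, channel still needs more
 *        else                    nothing further to queue for this channel
 */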
2867
/**
 * dwc2_process_periodic_channels() - Processes periodic channels for the
 * next frame and queues transactions for these channels to the DWC_otg
 * controller. Called from the HCD interrupt handler functions.
 *
 * @hsotg: The HCD state structure
 *
 * Must be called with interrupt disabled and spinlock held
 */
2877static void dwc2_process_periodic_channels(struct dwc2_hsotg *hsotg)
2878{
2879 struct list_head *qh_ptr;
2880 struct dwc2_qh *qh;
2881 u32 tx_status;
2882 u32 fspcavail;
2883 u32 gintmsk;
2884 int status;
2885 bool no_queue_space = false;
2886 bool no_fifo_space = false;
2887 u32 qspcavail;
2888
 /* If the assigned schedule is empty, just adjust the interrupt enables */
2890 if (list_empty(&hsotg->periodic_sched_assigned))
2891 goto exit;
2892
2893 if (dbg_perio())
2894 dev_vdbg(hsotg->dev, "Queue periodic transactions\n");
2895
2896 tx_status = dwc2_readl(hsotg, HPTXSTS);
2897 qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
2898 TXSTS_QSPCAVAIL_SHIFT;
2899 fspcavail = (tx_status & TXSTS_FSPCAVAIL_MASK) >>
2900 TXSTS_FSPCAVAIL_SHIFT;
2901
2902 if (dbg_perio()) {
2903 dev_vdbg(hsotg->dev, " P Tx Req Queue Space Avail (before queue): %d\n",
2904 qspcavail);
2905 dev_vdbg(hsotg->dev, " P Tx FIFO Space Avail (before queue): %d\n",
2906 fspcavail);
2907 }
2908
2909 qh_ptr = hsotg->periodic_sched_assigned.next;
2910 while (qh_ptr != &hsotg->periodic_sched_assigned) {
2911 tx_status = dwc2_readl(hsotg, HPTXSTS);
2912 qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
2913 TXSTS_QSPCAVAIL_SHIFT;
2914 if (qspcavail == 0) {
2915 no_queue_space = true;
2916 break;
2917 }
2918
2919 qh = list_entry(qh_ptr, struct dwc2_qh, qh_list_entry);
2920 if (!qh->channel) {
2921 qh_ptr = qh_ptr->next;
2922 continue;
2923 }
2924
2925
2926 if (qh->tt_buffer_dirty) {
2927 qh_ptr = qh_ptr->next;
2928 continue;
2929 }
2930
2931
2932
2933
2934
2935
2936 if (!hsotg->params.host_dma &&
2937 qh->channel->multi_count > 1)
2938 hsotg->queuing_high_bandwidth = 1;
2939
2940 fspcavail = (tx_status & TXSTS_FSPCAVAIL_MASK) >>
2941 TXSTS_FSPCAVAIL_SHIFT;
2942 status = dwc2_queue_transaction(hsotg, qh->channel, fspcavail);
2943 if (status < 0) {
2944 no_fifo_space = true;
2945 break;
2946 }
2947
 /*
 * In Slave mode, stay on the current transfer until there is nothing more
 * to do or the high-bandwidth request count is reached. In DMA mode, only
 * one request needs to be queued; the controller handles multiple packets
 * for high-bandwidth transfers automatically.
 */
2955 if (hsotg->params.host_dma || status == 0 ||
2956 qh->channel->requests == qh->channel->multi_count) {
2957 qh_ptr = qh_ptr->next;
2958
2959
2960
2961
2962 list_move_tail(&qh->qh_list_entry,
2963 &hsotg->periodic_sched_queued);
2964
2965
2966 hsotg->queuing_high_bandwidth = 0;
2967 }
2968 }
2969
2970exit:
2971 if (no_queue_space || no_fifo_space ||
2972 (!hsotg->params.host_dma &&
2973 !list_empty(&hsotg->periodic_sched_assigned))) {
 /*
 * More transactions may need to be queued as the request queue or Tx FIFO
 * empties, so enable the periodic Tx FIFO empty interrupt and let the
 * interrupt handler continue queuing.
 */
2981 gintmsk = dwc2_readl(hsotg, GINTMSK);
2982 if (!(gintmsk & GINTSTS_PTXFEMP)) {
2983 gintmsk |= GINTSTS_PTXFEMP;
2984 dwc2_writel(hsotg, gintmsk, GINTMSK);
2985 }
2986 } else {
2987
2988
2989
2990
2991
2992
2993
2994 gintmsk = dwc2_readl(hsotg, GINTMSK);
2995 if (gintmsk & GINTSTS_PTXFEMP) {
2996 gintmsk &= ~GINTSTS_PTXFEMP;
2997 dwc2_writel(hsotg, gintmsk, GINTMSK);
2998 }
2999 }
3000}
3001
/**
 * dwc2_process_non_periodic_channels() - Processes non-periodic channels for
 * the next frame and queues transactions for these channels to the DWC_otg
 * controller. Called from the HCD interrupt handler functions.
 *
 * @hsotg: The HCD state structure
 *
 * Must be called with interrupt disabled and spinlock held
 */
3011static void dwc2_process_non_periodic_channels(struct dwc2_hsotg *hsotg)
3012{
3013 struct list_head *orig_qh_ptr;
3014 struct dwc2_qh *qh;
3015 u32 tx_status;
3016 u32 qspcavail;
3017 u32 fspcavail;
3018 u32 gintmsk;
3019 int status;
3020 int no_queue_space = 0;
3021 int no_fifo_space = 0;
3022 int more_to_do = 0;
3023
3024 dev_vdbg(hsotg->dev, "Queue non-periodic transactions\n");
3025
3026 tx_status = dwc2_readl(hsotg, GNPTXSTS);
3027 qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
3028 TXSTS_QSPCAVAIL_SHIFT;
3029 fspcavail = (tx_status & TXSTS_FSPCAVAIL_MASK) >>
3030 TXSTS_FSPCAVAIL_SHIFT;
3031 dev_vdbg(hsotg->dev, " NP Tx Req Queue Space Avail (before queue): %d\n",
3032 qspcavail);
3033 dev_vdbg(hsotg->dev, " NP Tx FIFO Space Avail (before queue): %d\n",
3034 fspcavail);
3035
 /*
 * Keep track of the starting point. Skip over the start-of-list entry.
 */
3040 if (hsotg->non_periodic_qh_ptr == &hsotg->non_periodic_sched_active)
3041 hsotg->non_periodic_qh_ptr = hsotg->non_periodic_qh_ptr->next;
3042 orig_qh_ptr = hsotg->non_periodic_qh_ptr;
3043
3044
3045
3046
3047
3048 do {
3049 tx_status = dwc2_readl(hsotg, GNPTXSTS);
3050 qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
3051 TXSTS_QSPCAVAIL_SHIFT;
3052 if (!hsotg->params.host_dma && qspcavail == 0) {
3053 no_queue_space = 1;
3054 break;
3055 }
3056
3057 qh = list_entry(hsotg->non_periodic_qh_ptr, struct dwc2_qh,
3058 qh_list_entry);
3059 if (!qh->channel)
3060 goto next;
3061
3062
3063 if (qh->tt_buffer_dirty)
3064 goto next;
3065
3066 fspcavail = (tx_status & TXSTS_FSPCAVAIL_MASK) >>
3067 TXSTS_FSPCAVAIL_SHIFT;
3068 status = dwc2_queue_transaction(hsotg, qh->channel, fspcavail);
3069
3070 if (status > 0) {
3071 more_to_do = 1;
3072 } else if (status < 0) {
3073 no_fifo_space = 1;
3074 break;
3075 }
3076next:
3077
3078 hsotg->non_periodic_qh_ptr = hsotg->non_periodic_qh_ptr->next;
3079 if (hsotg->non_periodic_qh_ptr ==
3080 &hsotg->non_periodic_sched_active)
3081 hsotg->non_periodic_qh_ptr =
3082 hsotg->non_periodic_qh_ptr->next;
3083 } while (hsotg->non_periodic_qh_ptr != orig_qh_ptr);
3084
3085 if (!hsotg->params.host_dma) {
3086 tx_status = dwc2_readl(hsotg, GNPTXSTS);
3087 qspcavail = (tx_status & TXSTS_QSPCAVAIL_MASK) >>
3088 TXSTS_QSPCAVAIL_SHIFT;
3089 fspcavail = (tx_status & TXSTS_FSPCAVAIL_MASK) >>
3090 TXSTS_FSPCAVAIL_SHIFT;
3091 dev_vdbg(hsotg->dev,
3092 " NP Tx Req Queue Space Avail (after queue): %d\n",
3093 qspcavail);
3094 dev_vdbg(hsotg->dev,
3095 " NP Tx FIFO Space Avail (after queue): %d\n",
3096 fspcavail);
3097
3098 if (more_to_do || no_queue_space || no_fifo_space) {
3099
3100
3101
3102
3103
3104
3105
3106 gintmsk = dwc2_readl(hsotg, GINTMSK);
3107 gintmsk |= GINTSTS_NPTXFEMP;
3108 dwc2_writel(hsotg, gintmsk, GINTMSK);
3109 } else {
3110
3111
3112
3113
3114
3115
3116
3117 gintmsk = dwc2_readl(hsotg, GINTMSK);
3118 gintmsk &= ~GINTSTS_NPTXFEMP;
3119 dwc2_writel(hsotg, gintmsk, GINTMSK);
3120 }
3121 }
3122}
3123
/**
 * dwc2_hcd_queue_transactions() - Processes the currently active host
 * channels and queues transactions for these channels to the DWC_otg
 * controller. Called from the HCD interrupt handler functions.
 *
 * @hsotg:   The HCD state structure
 * @tr_type: The type(s) of transactions to queue (non-periodic, periodic,
 *           or both)
 *
 * Must be called with interrupt disabled and spinlock held
 */
3135void dwc2_hcd_queue_transactions(struct dwc2_hsotg *hsotg,
3136 enum dwc2_transaction_type tr_type)
3137{
3138#ifdef DWC2_DEBUG_SOF
3139 dev_vdbg(hsotg->dev, "Queue Transactions\n");
3140#endif
3141
3142 if (tr_type == DWC2_TRANSACTION_PERIODIC ||
3143 tr_type == DWC2_TRANSACTION_ALL)
3144 dwc2_process_periodic_channels(hsotg);
3145
3146
3147 if (tr_type == DWC2_TRANSACTION_NON_PERIODIC ||
3148 tr_type == DWC2_TRANSACTION_ALL) {
3149 if (!list_empty(&hsotg->non_periodic_sched_active)) {
3150 dwc2_process_non_periodic_channels(hsotg);
3151 } else {
 /*
 * The non-periodic active schedule is empty, so make sure the
 * non-periodic Tx FIFO empty interrupt is disabled.
 */
3156 u32 gintmsk = dwc2_readl(hsotg, GINTMSK);
3157
3158 gintmsk &= ~GINTSTS_NPTXFEMP;
3159 dwc2_writel(hsotg, gintmsk, GINTMSK);
3160 }
3161 }
3162}
3163
3164static void dwc2_conn_id_status_change(struct work_struct *work)
3165{
3166 struct dwc2_hsotg *hsotg = container_of(work, struct dwc2_hsotg,
3167 wf_otg);
3168 u32 count = 0;
3169 u32 gotgctl;
3170 unsigned long flags;
3171
3172 dev_dbg(hsotg->dev, "%s()\n", __func__);
3173
3174 gotgctl = dwc2_readl(hsotg, GOTGCTL);
3175 dev_dbg(hsotg->dev, "gotgctl=%0x\n", gotgctl);
3176 dev_dbg(hsotg->dev, "gotgctl.b.conidsts=%d\n",
3177 !!(gotgctl & GOTGCTL_CONID_B));
3178
3179
3180 if (gotgctl & GOTGCTL_CONID_B) {
3181 dwc2_vbus_supply_exit(hsotg);
3182
3183 dev_dbg(hsotg->dev, "connId B\n");
3184 if (hsotg->bus_suspended) {
3185 dev_info(hsotg->dev,
3186 "Do port resume before switching to device mode\n");
3187 dwc2_port_resume(hsotg);
3188 }
3189 while (!dwc2_is_device_mode(hsotg)) {
3190 dev_info(hsotg->dev,
3191 "Waiting for Peripheral Mode, Mode=%s\n",
3192 dwc2_is_host_mode(hsotg) ? "Host" :
3193 "Peripheral");
3194 msleep(20);
 /*
 * Sometimes the initial GOTGCTL read is wrong, so check it again and
 * jump to host mode if that was the case.
 */
3200 gotgctl = dwc2_readl(hsotg, GOTGCTL);
3201 if (!(gotgctl & GOTGCTL_CONID_B))
3202 goto host;
3203 if (++count > 250)
3204 break;
3205 }
3206 if (count > 250)
3207 dev_err(hsotg->dev,
3208 "Connection id status change timed out\n");
3209
3210
3211
3212
3213
3214
3215 if (hsotg->in_ppd && hsotg->lx_state == DWC2_L2)
3216 dwc2_exit_partial_power_down(hsotg, 0, false);
3217
3218 hsotg->op_state = OTG_STATE_B_PERIPHERAL;
3219 dwc2_core_init(hsotg, false);
3220 dwc2_enable_global_interrupts(hsotg);
3221 spin_lock_irqsave(&hsotg->lock, flags);
3222 dwc2_hsotg_core_init_disconnected(hsotg, false);
3223 spin_unlock_irqrestore(&hsotg->lock, flags);
3224
3225 dwc2_enable_acg(hsotg);
3226 dwc2_hsotg_core_connect(hsotg);
3227 } else {
3228host:
3229
3230 dev_dbg(hsotg->dev, "connId A\n");
3231 while (!dwc2_is_host_mode(hsotg)) {
3232 dev_info(hsotg->dev, "Waiting for Host Mode, Mode=%s\n",
3233 dwc2_is_host_mode(hsotg) ?
3234 "Host" : "Peripheral");
3235 msleep(20);
3236 if (++count > 250)
3237 break;
3238 }
3239 if (count > 250)
3240 dev_err(hsotg->dev,
3241 "Connection id status change timed out\n");
3242
3243 spin_lock_irqsave(&hsotg->lock, flags);
3244 dwc2_hsotg_disconnect(hsotg);
3245 spin_unlock_irqrestore(&hsotg->lock, flags);
3246
3247 hsotg->op_state = OTG_STATE_A_HOST;
3248
3249 dwc2_core_init(hsotg, false);
3250 dwc2_enable_global_interrupts(hsotg);
3251 dwc2_hcd_start(hsotg);
3252 }
3253}
3254
3255static void dwc2_wakeup_detected(struct timer_list *t)
3256{
3257 struct dwc2_hsotg *hsotg = from_timer(hsotg, t, wkp_timer);
3258 u32 hprt0;
3259
3260 dev_dbg(hsotg->dev, "%s()\n", __func__);
3261
 /*
 * The wakeup timer has expired, so stop driving resume signalling on the
 * port and report the remote wakeup to the HCD.
 */
3266 hprt0 = dwc2_read_hprt0(hsotg);
3267 dev_dbg(hsotg->dev, "Resume: HPRT0=%0x\n", hprt0);
3268 hprt0 &= ~HPRT0_RES;
3269 dwc2_writel(hsotg, hprt0, HPRT0);
3270 dev_dbg(hsotg->dev, "Clear Resume: HPRT0=%0x\n",
3271 dwc2_readl(hsotg, HPRT0));
3272
3273 dwc2_hcd_rem_wakeup(hsotg);
3274 hsotg->bus_suspended = false;
3275
3276
3277 hsotg->lx_state = DWC2_L0;
3278}
3279
3280static int dwc2_host_is_b_hnp_enabled(struct dwc2_hsotg *hsotg)
3281{
3282 struct usb_hcd *hcd = dwc2_hsotg_to_hcd(hsotg);
3283
3284 return hcd->self.b_hnp_enable;
3285}
3286
/**
 * dwc2_port_suspend() - Put the controller into suspend mode for host.
 *
 * @hsotg:  Programming view of the DWC_otg controller
 * @windex: The control request wIndex field
 *
 * Return: non-zero if the controller failed to enter suspend mode.
 *
 * Must NOT be called with interrupt disabled or spinlock held.
 */
3298int dwc2_port_suspend(struct dwc2_hsotg *hsotg, u16 windex)
3299{
3300 unsigned long flags;
3301 u32 pcgctl;
3302 u32 gotgctl;
3303 int ret = 0;
3304
3305 dev_dbg(hsotg->dev, "%s()\n", __func__);
3306
3307 spin_lock_irqsave(&hsotg->lock, flags);
3308
3309 if (windex == hsotg->otg_port && dwc2_host_is_b_hnp_enabled(hsotg)) {
3310 gotgctl = dwc2_readl(hsotg, GOTGCTL);
3311 gotgctl |= GOTGCTL_HSTSETHNPEN;
3312 dwc2_writel(hsotg, gotgctl, GOTGCTL);
3313 hsotg->op_state = OTG_STATE_A_SUSPEND;
3314 }
3315
3316 switch (hsotg->params.power_down) {
3317 case DWC2_POWER_DOWN_PARAM_PARTIAL:
3318 ret = dwc2_enter_partial_power_down(hsotg);
3319 if (ret)
3320 dev_err(hsotg->dev,
3321 "enter partial_power_down failed.\n");
3322 break;
3323 case DWC2_POWER_DOWN_PARAM_HIBERNATION:
3324
3325
3326
3327
3328
3329
3330 spin_unlock_irqrestore(&hsotg->lock, flags);
3331 ret = dwc2_enter_hibernation(hsotg, 1);
3332 if (ret)
3333 dev_err(hsotg->dev, "enter hibernation failed.\n");
3334 spin_lock_irqsave(&hsotg->lock, flags);
3335 break;
3336 case DWC2_POWER_DOWN_PARAM_NONE:
3337
3338
3339
3340
3341 dwc2_host_enter_clock_gating(hsotg);
3342 break;
3343 }
3344
 /* For HNP the bus must be suspended for at least 200 ms */
3346 if (dwc2_host_is_b_hnp_enabled(hsotg)) {
3347 pcgctl = dwc2_readl(hsotg, PCGCTL);
3348 pcgctl &= ~PCGCTL_STOPPCLK;
3349 dwc2_writel(hsotg, pcgctl, PCGCTL);
3350
3351 spin_unlock_irqrestore(&hsotg->lock, flags);
3352
3353 msleep(200);
3354 } else {
3355 spin_unlock_irqrestore(&hsotg->lock, flags);
3356 }
3357
3358 return ret;
3359}
3360
/**
 * dwc2_port_resume() - Exit the controller from suspend mode for host.
 *
 * @hsotg: Programming view of the DWC_otg controller
 *
 * Return: non-zero if the controller failed to exit suspend mode.
 *
 * Must NOT be called with interrupt disabled or spinlock held.
 */
3371int dwc2_port_resume(struct dwc2_hsotg *hsotg)
3372{
3373 unsigned long flags;
3374 int ret = 0;
3375
3376 spin_lock_irqsave(&hsotg->lock, flags);
3377
3378 switch (hsotg->params.power_down) {
3379 case DWC2_POWER_DOWN_PARAM_PARTIAL:
3380 ret = dwc2_exit_partial_power_down(hsotg, 0, true);
3381 if (ret)
3382 dev_err(hsotg->dev,
3383 "exit partial_power_down failed.\n");
3384 break;
3385 case DWC2_POWER_DOWN_PARAM_HIBERNATION:
3386
3387 ret = dwc2_exit_hibernation(hsotg, 0, 0, 1);
3388 if (ret)
3389 dev_err(hsotg->dev, "exit hibernation failed.\n");
3390 break;
3391 case DWC2_POWER_DOWN_PARAM_NONE:
3392
3393
3394
3395
3396 spin_unlock_irqrestore(&hsotg->lock, flags);
3397 dwc2_host_exit_clock_gating(hsotg, 0);
3398 spin_lock_irqsave(&hsotg->lock, flags);
3399 break;
3400 }
3401
3402 spin_unlock_irqrestore(&hsotg->lock, flags);
3403
3404 return ret;
3405}
3406
/* Handles hub class-specific requests for the root hub */
3408static int dwc2_hcd_hub_control(struct dwc2_hsotg *hsotg, u16 typereq,
3409 u16 wvalue, u16 windex, char *buf, u16 wlength)
3410{
3411 struct usb_hub_descriptor *hub_desc;
3412 int retval = 0;
3413 u32 hprt0;
3414 u32 port_status;
3415 u32 speed;
3416 u32 pcgctl;
3417 u32 pwr;
3418
3419 switch (typereq) {
3420 case ClearHubFeature:
3421 dev_dbg(hsotg->dev, "ClearHubFeature %1xh\n", wvalue);
3422
3423 switch (wvalue) {
3424 case C_HUB_LOCAL_POWER:
3425 case C_HUB_OVER_CURRENT:
3426
3427 break;
3428
3429 default:
3430 retval = -EINVAL;
3431 dev_err(hsotg->dev,
3432 "ClearHubFeature request %1xh unknown\n",
3433 wvalue);
3434 }
3435 break;
3436
3437 case ClearPortFeature:
3438 if (wvalue != USB_PORT_FEAT_L1)
3439 if (!windex || windex > 1)
3440 goto error;
3441 switch (wvalue) {
3442 case USB_PORT_FEAT_ENABLE:
3443 dev_dbg(hsotg->dev,
3444 "ClearPortFeature USB_PORT_FEAT_ENABLE\n");
3445 hprt0 = dwc2_read_hprt0(hsotg);
3446 hprt0 |= HPRT0_ENA;
3447 dwc2_writel(hsotg, hprt0, HPRT0);
3448 break;
3449
3450 case USB_PORT_FEAT_SUSPEND:
3451 dev_dbg(hsotg->dev,
3452 "ClearPortFeature USB_PORT_FEAT_SUSPEND\n");
3453
3454 if (hsotg->bus_suspended)
3455 retval = dwc2_port_resume(hsotg);
3456 break;
3457
3458 case USB_PORT_FEAT_POWER:
3459 dev_dbg(hsotg->dev,
3460 "ClearPortFeature USB_PORT_FEAT_POWER\n");
3461 hprt0 = dwc2_read_hprt0(hsotg);
3462 pwr = hprt0 & HPRT0_PWR;
3463 hprt0 &= ~HPRT0_PWR;
3464 dwc2_writel(hsotg, hprt0, HPRT0);
3465 if (pwr)
3466 dwc2_vbus_supply_exit(hsotg);
3467 break;
3468
3469 case USB_PORT_FEAT_INDICATOR:
3470 dev_dbg(hsotg->dev,
3471 "ClearPortFeature USB_PORT_FEAT_INDICATOR\n");
3472
3473 break;
3474
3475 case USB_PORT_FEAT_C_CONNECTION:
3476
3477
3478
3479 dev_dbg(hsotg->dev,
3480 "ClearPortFeature USB_PORT_FEAT_C_CONNECTION\n");
3481 hsotg->flags.b.port_connect_status_change = 0;
3482 break;
3483
3484 case USB_PORT_FEAT_C_RESET:
3485
3486 dev_dbg(hsotg->dev,
3487 "ClearPortFeature USB_PORT_FEAT_C_RESET\n");
3488 hsotg->flags.b.port_reset_change = 0;
3489 break;
3490
3491 case USB_PORT_FEAT_C_ENABLE:
3492
3493
3494
3495
3496 dev_dbg(hsotg->dev,
3497 "ClearPortFeature USB_PORT_FEAT_C_ENABLE\n");
3498 hsotg->flags.b.port_enable_change = 0;
3499 break;
3500
3501 case USB_PORT_FEAT_C_SUSPEND:
3502
3503
3504
3505
3506
3507 dev_dbg(hsotg->dev,
3508 "ClearPortFeature USB_PORT_FEAT_C_SUSPEND\n");
3509 hsotg->flags.b.port_suspend_change = 0;
3510 break;
3511
3512 case USB_PORT_FEAT_C_PORT_L1:
3513 dev_dbg(hsotg->dev,
3514 "ClearPortFeature USB_PORT_FEAT_C_PORT_L1\n");
3515 hsotg->flags.b.port_l1_change = 0;
3516 break;
3517
3518 case USB_PORT_FEAT_C_OVER_CURRENT:
3519 dev_dbg(hsotg->dev,
3520 "ClearPortFeature USB_PORT_FEAT_C_OVER_CURRENT\n");
3521 hsotg->flags.b.port_over_current_change = 0;
3522 break;
3523
3524 default:
3525 retval = -EINVAL;
3526 dev_err(hsotg->dev,
3527 "ClearPortFeature request %1xh unknown or unsupported\n",
3528 wvalue);
3529 }
3530 break;
3531
3532 case GetHubDescriptor:
3533 dev_dbg(hsotg->dev, "GetHubDescriptor\n");
3534 hub_desc = (struct usb_hub_descriptor *)buf;
3535 hub_desc->bDescLength = 9;
3536 hub_desc->bDescriptorType = USB_DT_HUB;
3537 hub_desc->bNbrPorts = 1;
3538 hub_desc->wHubCharacteristics =
3539 cpu_to_le16(HUB_CHAR_COMMON_LPSM |
3540 HUB_CHAR_INDV_PORT_OCPM);
3541 hub_desc->bPwrOn2PwrGood = 1;
3542 hub_desc->bHubContrCurrent = 0;
3543 hub_desc->u.hs.DeviceRemovable[0] = 0;
3544 hub_desc->u.hs.DeviceRemovable[1] = 0xff;
3545 break;
3546
3547 case GetHubStatus:
3548 dev_dbg(hsotg->dev, "GetHubStatus\n");
3549 memset(buf, 0, 4);
3550 break;
3551
3552 case GetPortStatus:
3553 dev_vdbg(hsotg->dev,
3554 "GetPortStatus wIndex=0x%04x flags=0x%08x\n", windex,
3555 hsotg->flags.d32);
3556 if (!windex || windex > 1)
3557 goto error;
3558
3559 port_status = 0;
3560 if (hsotg->flags.b.port_connect_status_change)
3561 port_status |= USB_PORT_STAT_C_CONNECTION << 16;
3562 if (hsotg->flags.b.port_enable_change)
3563 port_status |= USB_PORT_STAT_C_ENABLE << 16;
3564 if (hsotg->flags.b.port_suspend_change)
3565 port_status |= USB_PORT_STAT_C_SUSPEND << 16;
3566 if (hsotg->flags.b.port_l1_change)
3567 port_status |= USB_PORT_STAT_C_L1 << 16;
3568 if (hsotg->flags.b.port_reset_change)
3569 port_status |= USB_PORT_STAT_C_RESET << 16;
3570 if (hsotg->flags.b.port_over_current_change) {
3571 dev_warn(hsotg->dev, "Overcurrent change detected\n");
3572 port_status |= USB_PORT_STAT_C_OVERCURRENT << 16;
3573 }
3574
3575 if (!hsotg->flags.b.port_connect_status) {
 /*
 * The port is disconnected, which means the core is either in device
 * mode or soon will be. Just return zeros for the remainder of the
 * port status, since the port register can't be read while the core
 * is in device mode.
 */
3583 *(__le32 *)buf = cpu_to_le32(port_status);
3584 break;
3585 }
3586
3587 hprt0 = dwc2_readl(hsotg, HPRT0);
3588 dev_vdbg(hsotg->dev, " HPRT0: 0x%08x\n", hprt0);
3589
3590 if (hprt0 & HPRT0_CONNSTS)
3591 port_status |= USB_PORT_STAT_CONNECTION;
3592 if (hprt0 & HPRT0_ENA)
3593 port_status |= USB_PORT_STAT_ENABLE;
3594 if (hprt0 & HPRT0_SUSP)
3595 port_status |= USB_PORT_STAT_SUSPEND;
3596 if (hprt0 & HPRT0_OVRCURRACT)
3597 port_status |= USB_PORT_STAT_OVERCURRENT;
3598 if (hprt0 & HPRT0_RST)
3599 port_status |= USB_PORT_STAT_RESET;
3600 if (hprt0 & HPRT0_PWR)
3601 port_status |= USB_PORT_STAT_POWER;
3602
3603 speed = (hprt0 & HPRT0_SPD_MASK) >> HPRT0_SPD_SHIFT;
3604 if (speed == HPRT0_SPD_HIGH_SPEED)
3605 port_status |= USB_PORT_STAT_HIGH_SPEED;
3606 else if (speed == HPRT0_SPD_LOW_SPEED)
3607 port_status |= USB_PORT_STAT_LOW_SPEED;
3608
3609 if (hprt0 & HPRT0_TSTCTL_MASK)
3610 port_status |= USB_PORT_STAT_TEST;
3611
3612
3613 if (hsotg->params.dma_desc_fs_enable) {
3614
3615
3616
3617
3618 if (hsotg->new_connection &&
3619 ((port_status &
3620 (USB_PORT_STAT_CONNECTION |
3621 USB_PORT_STAT_HIGH_SPEED |
3622 USB_PORT_STAT_LOW_SPEED)) ==
3623 USB_PORT_STAT_CONNECTION)) {
3624 u32 hcfg;
3625
3626 dev_info(hsotg->dev, "Enabling descriptor DMA mode\n");
3627 hsotg->params.dma_desc_enable = true;
3628 hcfg = dwc2_readl(hsotg, HCFG);
3629 hcfg |= HCFG_DESCDMA;
3630 dwc2_writel(hsotg, hcfg, HCFG);
3631 hsotg->new_connection = false;
3632 }
3633 }
3634
3635 dev_vdbg(hsotg->dev, "port_status=%08x\n", port_status);
3636 *(__le32 *)buf = cpu_to_le32(port_status);
3637 break;
3638
3639 case SetHubFeature:
3640 dev_dbg(hsotg->dev, "SetHubFeature\n");
3641
3642 break;
3643
3644 case SetPortFeature:
3645 dev_dbg(hsotg->dev, "SetPortFeature\n");
3646 if (wvalue != USB_PORT_FEAT_TEST && (!windex || windex > 1))
3647 goto error;
3648
3649 if (!hsotg->flags.b.port_connect_status) {
3650
3651
3652
3653
3654
3655
3656
3657 break;
3658 }
3659
3660 switch (wvalue) {
3661 case USB_PORT_FEAT_SUSPEND:
3662 dev_dbg(hsotg->dev,
3663 "SetPortFeature - USB_PORT_FEAT_SUSPEND\n");
3664 if (windex != hsotg->otg_port)
3665 goto error;
3666 if (!hsotg->bus_suspended)
3667 retval = dwc2_port_suspend(hsotg, windex);
3668 break;
3669
3670 case USB_PORT_FEAT_POWER:
3671 dev_dbg(hsotg->dev,
3672 "SetPortFeature - USB_PORT_FEAT_POWER\n");
3673 hprt0 = dwc2_read_hprt0(hsotg);
3674 pwr = hprt0 & HPRT0_PWR;
3675 hprt0 |= HPRT0_PWR;
3676 dwc2_writel(hsotg, hprt0, HPRT0);
3677 if (!pwr)
3678 dwc2_vbus_supply_init(hsotg);
3679 break;
3680
3681 case USB_PORT_FEAT_RESET:
3682 dev_dbg(hsotg->dev,
3683 "SetPortFeature - USB_PORT_FEAT_RESET\n");
3684
3685 hprt0 = dwc2_read_hprt0(hsotg);
3686
3687 if (hsotg->hibernated) {
3688 retval = dwc2_exit_hibernation(hsotg, 0, 1, 1);
3689 if (retval)
3690 dev_err(hsotg->dev,
3691 "exit hibernation failed\n");
3692 }
3693
3694 if (hsotg->in_ppd) {
3695 retval = dwc2_exit_partial_power_down(hsotg, 1,
3696 true);
3697 if (retval)
3698 dev_err(hsotg->dev,
3699 "exit partial_power_down failed\n");
3700 }
3701
3702 if (hsotg->params.power_down ==
3703 DWC2_POWER_DOWN_PARAM_NONE && hsotg->bus_suspended)
3704 dwc2_host_exit_clock_gating(hsotg, 0);
3705
3706 pcgctl = dwc2_readl(hsotg, PCGCTL);
3707 pcgctl &= ~(PCGCTL_ENBL_SLEEP_GATING | PCGCTL_STOPPCLK);
3708 dwc2_writel(hsotg, pcgctl, PCGCTL);
3709
3710 dwc2_writel(hsotg, 0, PCGCTL);
3711
3712 hprt0 = dwc2_read_hprt0(hsotg);
3713 pwr = hprt0 & HPRT0_PWR;
3714
3715 hprt0 &= ~HPRT0_SUSP;
 /*
 * When acting as B-Host, the port reset bit is set in the start-HCD
 * callback so that the reset is started within 1 ms of the HNP success
 * interrupt; only drive power and reset here in the A-Host case.
 */
3722 if (!dwc2_hcd_is_b_host(hsotg)) {
3723 hprt0 |= HPRT0_PWR | HPRT0_RST;
3724 dev_dbg(hsotg->dev,
3725 "In host mode, hprt0=%08x\n", hprt0);
3726 dwc2_writel(hsotg, hprt0, HPRT0);
3727 if (!pwr)
3728 dwc2_vbus_supply_init(hsotg);
3729 }
3730
 /* Hold the port reset for 50 ms before clearing it */
3732 msleep(50);
3733 hprt0 &= ~HPRT0_RST;
3734 dwc2_writel(hsotg, hprt0, HPRT0);
3735 hsotg->lx_state = DWC2_L0;
3736 break;
3737
3738 case USB_PORT_FEAT_INDICATOR:
3739 dev_dbg(hsotg->dev,
3740 "SetPortFeature - USB_PORT_FEAT_INDICATOR\n");
3741
3742 break;
3743
3744 case USB_PORT_FEAT_TEST:
3745 hprt0 = dwc2_read_hprt0(hsotg);
3746 dev_dbg(hsotg->dev,
3747 "SetPortFeature - USB_PORT_FEAT_TEST\n");
3748 hprt0 &= ~HPRT0_TSTCTL_MASK;
3749 hprt0 |= (windex >> 8) << HPRT0_TSTCTL_SHIFT;
3750 dwc2_writel(hsotg, hprt0, HPRT0);
3751 break;
3752
3753 default:
3754 retval = -EINVAL;
3755 dev_err(hsotg->dev,
3756 "SetPortFeature %1xh unknown or unsupported\n",
3757 wvalue);
3758 break;
3759 }
3760 break;
3761
3762 default:
3763error:
3764 retval = -EINVAL;
3765 dev_dbg(hsotg->dev,
3766 "Unknown hub control request: %1xh wIndex: %1xh wValue: %1xh\n",
3767 typereq, windex, wvalue);
3768 break;
3769 }
3770
3771 return retval;
3772}
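
/*
 * Example of a GetPortStatus round trip for root port 1, as issued by the
 * USB core through the hub_control hook (sketch): the returned little-endian
 * word packs wPortStatus in the low 16 bits and wPortChange in the high 16.
 *
 *        __le32 status;
 *
 *        dwc2_hcd_hub_control(hsotg, GetPortStatus, 0, 1, (char *)&status, 4);
 *        port_status = le32_to_cpu(status) & 0xffff;
 *        port_change = le32_to_cpu(status) >> 16;
 */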
3773
3774static int dwc2_hcd_is_status_changed(struct dwc2_hsotg *hsotg, int port)
3775{
3776 int retval;
3777
3778 if (port != 1)
3779 return -EINVAL;
3780
3781 retval = (hsotg->flags.b.port_connect_status_change ||
3782 hsotg->flags.b.port_reset_change ||
3783 hsotg->flags.b.port_enable_change ||
3784 hsotg->flags.b.port_suspend_change ||
3785 hsotg->flags.b.port_over_current_change);
3786
3787 if (retval) {
3788 dev_dbg(hsotg->dev,
3789 "DWC OTG HCD HUB STATUS DATA: Root port status changed\n");
3790 dev_dbg(hsotg->dev, " port_connect_status_change: %d\n",
3791 hsotg->flags.b.port_connect_status_change);
3792 dev_dbg(hsotg->dev, " port_reset_change: %d\n",
3793 hsotg->flags.b.port_reset_change);
3794 dev_dbg(hsotg->dev, " port_enable_change: %d\n",
3795 hsotg->flags.b.port_enable_change);
3796 dev_dbg(hsotg->dev, " port_suspend_change: %d\n",
3797 hsotg->flags.b.port_suspend_change);
3798 dev_dbg(hsotg->dev, " port_over_current_change: %d\n",
3799 hsotg->flags.b.port_over_current_change);
3800 }
3801
3802 return retval;
3803}
3804
3805int dwc2_hcd_get_frame_number(struct dwc2_hsotg *hsotg)
3806{
3807 u32 hfnum = dwc2_readl(hsotg, HFNUM);
3808
3809#ifdef DWC2_DEBUG_SOF
3810 dev_vdbg(hsotg->dev, "DWC OTG HCD GET FRAME NUMBER %d\n",
3811 (hfnum & HFNUM_FRNUM_MASK) >> HFNUM_FRNUM_SHIFT);
3812#endif
3813 return (hfnum & HFNUM_FRNUM_MASK) >> HFNUM_FRNUM_SHIFT;
3814}
3815
3816int dwc2_hcd_get_future_frame_number(struct dwc2_hsotg *hsotg, int us)
3817{
3818 u32 hprt = dwc2_readl(hsotg, HPRT0);
3819 u32 hfir = dwc2_readl(hsotg, HFIR);
3820 u32 hfnum = dwc2_readl(hsotg, HFNUM);
3821 unsigned int us_per_frame;
3822 unsigned int frame_number;
3823 unsigned int remaining;
3824 unsigned int interval;
3825 unsigned int phy_clks;
3826
3827
3828 us_per_frame = (hprt & HPRT0_SPD_MASK) ? 1000 : 125;
3829
3830
3831 frame_number = (hfnum & HFNUM_FRNUM_MASK) >> HFNUM_FRNUM_SHIFT;
3832 remaining = (hfnum & HFNUM_FRREM_MASK) >> HFNUM_FRREM_SHIFT;
3833 interval = (hfir & HFIR_FRINT_MASK) >> HFIR_FRINT_SHIFT;
3834
 /*
 * Convert the requested time in the future into PHY clocks past the start
 * of the current (micro)frame, then advance the frame number by the number
 * of whole frame intervals that span covers.
 */
3839 phy_clks = (interval - remaining) +
3840 DIV_ROUND_UP(interval * us, us_per_frame);
3841
3842 return dwc2_frame_num_inc(frame_number, phy_clks / interval);
3843}
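
/*
 * Worked example, assuming (for illustration only) a high-speed port with a
 * 60 MHz PHY clock, i.e. interval = 7500 clocks per 125 us microframe:
 *
 *        remaining = 1500, us = 100
 *        phy_clks  = (7500 - 1500) + DIV_ROUND_UP(7500 * 100, 125)
 *                  = 6000 + 6000 = 12000
 *        12000 / 7500 = 1, so the current frame number advanced by one
 *        (modulo the frame counter width) is returned.
 */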
3844
3845int dwc2_hcd_is_b_host(struct dwc2_hsotg *hsotg)
3846{
3847 return hsotg->op_state == OTG_STATE_B_HOST;
3848}
3849
3850static struct dwc2_hcd_urb *dwc2_hcd_urb_alloc(struct dwc2_hsotg *hsotg,
3851 int iso_desc_count,
3852 gfp_t mem_flags)
3853{
3854 struct dwc2_hcd_urb *urb;
3855
3856 urb = kzalloc(struct_size(urb, iso_descs, iso_desc_count), mem_flags);
3857 if (urb)
3858 urb->packet_count = iso_desc_count;
3859 return urb;
3860}
3861
3862static void dwc2_hcd_urb_set_pipeinfo(struct dwc2_hsotg *hsotg,
3863 struct dwc2_hcd_urb *urb, u8 dev_addr,
3864 u8 ep_num, u8 ep_type, u8 ep_dir,
3865 u16 maxp, u16 maxp_mult)
3866{
3867 if (dbg_perio() ||
3868 ep_type == USB_ENDPOINT_XFER_BULK ||
3869 ep_type == USB_ENDPOINT_XFER_CONTROL)
3870 dev_vdbg(hsotg->dev,
3871 "addr=%d, ep_num=%d, ep_dir=%1x, ep_type=%1x, maxp=%d (%d mult)\n",
3872 dev_addr, ep_num, ep_dir, ep_type, maxp, maxp_mult);
3873 urb->pipe_info.dev_addr = dev_addr;
3874 urb->pipe_info.ep_num = ep_num;
3875 urb->pipe_info.pipe_type = ep_type;
3876 urb->pipe_info.pipe_dir = ep_dir;
3877 urb->pipe_info.maxp = maxp;
3878 urb->pipe_info.maxp_mult = maxp_mult;
3879}
3880
/*
 * Dumps the state of the host channels and the Tx request queues/FIFOs for
 * debugging. Only does anything when DEBUG is defined.
 */
3885void dwc2_hcd_dump_state(struct dwc2_hsotg *hsotg)
3886{
3887#ifdef DEBUG
3888 struct dwc2_host_chan *chan;
3889 struct dwc2_hcd_urb *urb;
3890 struct dwc2_qtd *qtd;
3891 int num_channels;
3892 u32 np_tx_status;
3893 u32 p_tx_status;
3894 int i;
3895
3896 num_channels = hsotg->params.host_channels;
3897 dev_dbg(hsotg->dev, "\n");
3898 dev_dbg(hsotg->dev,
3899 "************************************************************\n");
3900 dev_dbg(hsotg->dev, "HCD State:\n");
3901 dev_dbg(hsotg->dev, " Num channels: %d\n", num_channels);
3902
3903 for (i = 0; i < num_channels; i++) {
3904 chan = hsotg->hc_ptr_array[i];
3905 dev_dbg(hsotg->dev, " Channel %d:\n", i);
3906 dev_dbg(hsotg->dev,
3907 " dev_addr: %d, ep_num: %d, ep_is_in: %d\n",
3908 chan->dev_addr, chan->ep_num, chan->ep_is_in);
3909 dev_dbg(hsotg->dev, " speed: %d\n", chan->speed);
3910 dev_dbg(hsotg->dev, " ep_type: %d\n", chan->ep_type);
3911 dev_dbg(hsotg->dev, " max_packet: %d\n", chan->max_packet);
3912 dev_dbg(hsotg->dev, " data_pid_start: %d\n",
3913 chan->data_pid_start);
3914 dev_dbg(hsotg->dev, " multi_count: %d\n", chan->multi_count);
3915 dev_dbg(hsotg->dev, " xfer_started: %d\n",
3916 chan->xfer_started);
3917 dev_dbg(hsotg->dev, " xfer_buf: %p\n", chan->xfer_buf);
3918 dev_dbg(hsotg->dev, " xfer_dma: %08lx\n",
3919 (unsigned long)chan->xfer_dma);
3920 dev_dbg(hsotg->dev, " xfer_len: %d\n", chan->xfer_len);
3921 dev_dbg(hsotg->dev, " xfer_count: %d\n", chan->xfer_count);
3922 dev_dbg(hsotg->dev, " halt_on_queue: %d\n",
3923 chan->halt_on_queue);
3924 dev_dbg(hsotg->dev, " halt_pending: %d\n",
3925 chan->halt_pending);
3926 dev_dbg(hsotg->dev, " halt_status: %d\n", chan->halt_status);
3927 dev_dbg(hsotg->dev, " do_split: %d\n", chan->do_split);
3928 dev_dbg(hsotg->dev, " complete_split: %d\n",
3929 chan->complete_split);
3930 dev_dbg(hsotg->dev, " hub_addr: %d\n", chan->hub_addr);
3931 dev_dbg(hsotg->dev, " hub_port: %d\n", chan->hub_port);
3932 dev_dbg(hsotg->dev, " xact_pos: %d\n", chan->xact_pos);
3933 dev_dbg(hsotg->dev, " requests: %d\n", chan->requests);
3934 dev_dbg(hsotg->dev, " qh: %p\n", chan->qh);
3935
3936 if (chan->xfer_started) {
3937 u32 hfnum, hcchar, hctsiz, hcint, hcintmsk;
3938
3939 hfnum = dwc2_readl(hsotg, HFNUM);
3940 hcchar = dwc2_readl(hsotg, HCCHAR(i));
3941 hctsiz = dwc2_readl(hsotg, HCTSIZ(i));
3942 hcint = dwc2_readl(hsotg, HCINT(i));
3943 hcintmsk = dwc2_readl(hsotg, HCINTMSK(i));
3944 dev_dbg(hsotg->dev, " hfnum: 0x%08x\n", hfnum);
3945 dev_dbg(hsotg->dev, " hcchar: 0x%08x\n", hcchar);
3946 dev_dbg(hsotg->dev, " hctsiz: 0x%08x\n", hctsiz);
3947 dev_dbg(hsotg->dev, " hcint: 0x%08x\n", hcint);
3948 dev_dbg(hsotg->dev, " hcintmsk: 0x%08x\n", hcintmsk);
3949 }
3950
3951 if (!(chan->xfer_started && chan->qh))
3952 continue;
3953
3954 list_for_each_entry(qtd, &chan->qh->qtd_list, qtd_list_entry) {
3955 if (!qtd->in_process)
3956 break;
3957 urb = qtd->urb;
3958 dev_dbg(hsotg->dev, " URB Info:\n");
3959 dev_dbg(hsotg->dev, " qtd: %p, urb: %p\n",
3960 qtd, urb);
3961 if (urb) {
3962 dev_dbg(hsotg->dev,
3963 " Dev: %d, EP: %d %s\n",
3964 dwc2_hcd_get_dev_addr(&urb->pipe_info),
3965 dwc2_hcd_get_ep_num(&urb->pipe_info),
3966 dwc2_hcd_is_pipe_in(&urb->pipe_info) ?
3967 "IN" : "OUT");
3968 dev_dbg(hsotg->dev,
3969 " Max packet size: %d (%d mult)\n",
3970 dwc2_hcd_get_maxp(&urb->pipe_info),
3971 dwc2_hcd_get_maxp_mult(&urb->pipe_info));
3972 dev_dbg(hsotg->dev,
3973 " transfer_buffer: %p\n",
3974 urb->buf);
3975 dev_dbg(hsotg->dev,
3976 " transfer_dma: %08lx\n",
3977 (unsigned long)urb->dma);
3978 dev_dbg(hsotg->dev,
3979 " transfer_buffer_length: %d\n",
3980 urb->length);
3981 dev_dbg(hsotg->dev, " actual_length: %d\n",
3982 urb->actual_length);
3983 }
3984 }
3985 }
3986
3987 dev_dbg(hsotg->dev, " non_periodic_channels: %d\n",
3988 hsotg->non_periodic_channels);
3989 dev_dbg(hsotg->dev, " periodic_channels: %d\n",
3990 hsotg->periodic_channels);
3991 dev_dbg(hsotg->dev, " periodic_usecs: %d\n", hsotg->periodic_usecs);
3992 np_tx_status = dwc2_readl(hsotg, GNPTXSTS);
3993 dev_dbg(hsotg->dev, " NP Tx Req Queue Space Avail: %d\n",
3994 (np_tx_status & TXSTS_QSPCAVAIL_MASK) >> TXSTS_QSPCAVAIL_SHIFT);
3995 dev_dbg(hsotg->dev, " NP Tx FIFO Space Avail: %d\n",
3996 (np_tx_status & TXSTS_FSPCAVAIL_MASK) >> TXSTS_FSPCAVAIL_SHIFT);
3997 p_tx_status = dwc2_readl(hsotg, HPTXSTS);
3998 dev_dbg(hsotg->dev, " P Tx Req Queue Space Avail: %d\n",
3999 (p_tx_status & TXSTS_QSPCAVAIL_MASK) >> TXSTS_QSPCAVAIL_SHIFT);
4000 dev_dbg(hsotg->dev, " P Tx FIFO Space Avail: %d\n",
4001 (p_tx_status & TXSTS_FSPCAVAIL_MASK) >> TXSTS_FSPCAVAIL_SHIFT);
4002 dwc2_dump_global_registers(hsotg);
4003 dwc2_dump_host_registers(hsotg);
4004 dev_dbg(hsotg->dev,
4005 "************************************************************\n");
4006 dev_dbg(hsotg->dev, "\n");
4007#endif
4008}
4009
4010struct wrapper_priv_data {
4011 struct dwc2_hsotg *hsotg;
4012};
4013
/* Gets the dwc2_hsotg from a usb_hcd */
4015static struct dwc2_hsotg *dwc2_hcd_to_hsotg(struct usb_hcd *hcd)
4016{
4017 struct wrapper_priv_data *p;
4018
4019 p = (struct wrapper_priv_data *)&hcd->hcd_priv;
4020 return p->hsotg;
4021}
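
/*
 * The private area is sized via .hcd_priv_size in the hc_driver and filled
 * in when the HCD is created (outside this excerpt); roughly:
 *
 *        hcd = usb_create_hcd(&dwc2_hc_driver, hsotg->dev,
 *                             dev_name(hsotg->dev));
 *        ((struct wrapper_priv_data *)&hcd->hcd_priv)->hsotg = hsotg;
 */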
4022
/**
 * dwc2_host_get_tt_info() - Get the dwc2_tt associated with context
 *
 * This will get the dwc2_tt structure (and ttport) associated with the given
 * context (which is really just a usb_device pointer).
 *
 * @hsotg:     The HCD state structure for the DWC OTG controller.
 * @context:   The priv pointer from a struct dwc2_hcd_urb.
 * @mem_flags: Flags for allocating memory.
 * @ttport:    The device's port number is returned here; it is used to
 *             index into the bitmap on a multi_tt hub.
 *
 * Return: a pointer to a struct dwc2_tt. Call dwc2_host_put_tt_info() to
 *         drop the reference when done.
 */
4043struct dwc2_tt *dwc2_host_get_tt_info(struct dwc2_hsotg *hsotg, void *context,
4044 gfp_t mem_flags, int *ttport)
4045{
4046 struct urb *urb = context;
4047 struct dwc2_tt *dwc_tt = NULL;
4048
4049 if (urb->dev->tt) {
4050 *ttport = urb->dev->ttport;
4051
4052 dwc_tt = urb->dev->tt->hcpriv;
4053 if (!dwc_tt) {
4054 size_t bitmap_size;
4055
 /*
 * For single_tt we need one schedule. For multi_tt we need one
 * per port.
 */
4060 bitmap_size = DWC2_ELEMENTS_PER_LS_BITMAP *
4061 sizeof(dwc_tt->periodic_bitmaps[0]);
4062 if (urb->dev->tt->multi)
4063 bitmap_size *= urb->dev->tt->hub->maxchild;
4064
4065 dwc_tt = kzalloc(sizeof(*dwc_tt) + bitmap_size,
4066 mem_flags);
4067 if (!dwc_tt)
4068 return NULL;
4069
4070 dwc_tt->usb_tt = urb->dev->tt;
4071 dwc_tt->usb_tt->hcpriv = dwc_tt;
4072 }
4073
4074 dwc_tt->refcount++;
4075 }
4076
4077 return dwc_tt;
4078}
4079
/**
 * dwc2_host_put_tt_info() - Put the dwc2_tt from dwc2_host_get_tt_info()
 *
 * Frees resources allocated by dwc2_host_get_tt_info() once all current
 * holders of the structure are done with it.
 *
 * It's OK to call this with NULL.
 *
 * @hsotg:  The HCD state structure for the DWC OTG controller.
 * @dwc_tt: The pointer returned by dwc2_host_get_tt_info().
 */
4091void dwc2_host_put_tt_info(struct dwc2_hsotg *hsotg, struct dwc2_tt *dwc_tt)
4092{
4093
4094 if (!dwc_tt)
4095 return;
4096
4097 WARN_ON(dwc_tt->refcount < 1);
4098
4099 dwc_tt->refcount--;
4100 if (!dwc_tt->refcount) {
4101 dwc_tt->usb_tt->hcpriv = NULL;
4102 kfree(dwc_tt);
4103 }
4104}
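
/*
 * The get/put calls must balance; typical usage around a QH's lifetime
 * (sketch; 'context' is the struct urb pointer held in dwc2_hcd_urb->priv):
 *
 *        dwc_tt = dwc2_host_get_tt_info(hsotg, context, mem_flags, &ttport);
 *        ...
 *        dwc2_host_put_tt_info(hsotg, dwc_tt);
 */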
4105
4106int dwc2_host_get_speed(struct dwc2_hsotg *hsotg, void *context)
4107{
4108 struct urb *urb = context;
4109
4110 return urb->dev->speed;
4111}
4112
4113static void dwc2_allocate_bus_bandwidth(struct usb_hcd *hcd, u16 bw,
4114 struct urb *urb)
4115{
4116 struct usb_bus *bus = hcd_to_bus(hcd);
4117
4118 if (urb->interval)
4119 bus->bandwidth_allocated += bw / urb->interval;
4120 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS)
4121 bus->bandwidth_isoc_reqs++;
4122 else
4123 bus->bandwidth_int_reqs++;
4124}
4125
4126static void dwc2_free_bus_bandwidth(struct usb_hcd *hcd, u16 bw,
4127 struct urb *urb)
4128{
4129 struct usb_bus *bus = hcd_to_bus(hcd);
4130
4131 if (urb->interval)
4132 bus->bandwidth_allocated -= bw / urb->interval;
4133 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS)
4134 bus->bandwidth_isoc_reqs--;
4135 else
4136 bus->bandwidth_int_reqs--;
4137}
4138
/*
 * Sets the final status of an URB and returns it to the upper layer. Any
 * required cleanup of the URB is performed.
 *
 * Must be called with interrupt disabled and spinlock held
 */
4145void dwc2_host_complete(struct dwc2_hsotg *hsotg, struct dwc2_qtd *qtd,
4146 int status)
4147{
4148 struct urb *urb;
4149 int i;
4150
4151 if (!qtd) {
4152 dev_dbg(hsotg->dev, "## %s: qtd is NULL ##\n", __func__);
4153 return;
4154 }
4155
4156 if (!qtd->urb) {
4157 dev_dbg(hsotg->dev, "## %s: qtd->urb is NULL ##\n", __func__);
4158 return;
4159 }
4160
4161 urb = qtd->urb->priv;
4162 if (!urb) {
4163 dev_dbg(hsotg->dev, "## %s: urb->priv is NULL ##\n", __func__);
4164 return;
4165 }
4166
4167 urb->actual_length = dwc2_hcd_urb_get_actual_length(qtd->urb);
4168
4169 if (dbg_urb(urb))
4170 dev_vdbg(hsotg->dev,
4171 "%s: urb %p device %d ep %d-%s status %d actual %d\n",
4172 __func__, urb, usb_pipedevice(urb->pipe),
4173 usb_pipeendpoint(urb->pipe),
4174 usb_pipein(urb->pipe) ? "IN" : "OUT", status,
4175 urb->actual_length);
4176
4177 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS) {
4178 urb->error_count = dwc2_hcd_urb_get_error_count(qtd->urb);
4179 for (i = 0; i < urb->number_of_packets; ++i) {
4180 urb->iso_frame_desc[i].actual_length =
4181 dwc2_hcd_urb_get_iso_desc_actual_length(
4182 qtd->urb, i);
4183 urb->iso_frame_desc[i].status =
4184 dwc2_hcd_urb_get_iso_desc_status(qtd->urb, i);
4185 }
4186 }
4187
4188 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS && dbg_perio()) {
4189 for (i = 0; i < urb->number_of_packets; i++)
4190 dev_vdbg(hsotg->dev, " ISO Desc %d status %d\n",
4191 i, urb->iso_frame_desc[i].status);
4192 }
4193
4194 urb->status = status;
4195 if (!status) {
4196 if ((urb->transfer_flags & URB_SHORT_NOT_OK) &&
4197 urb->actual_length < urb->transfer_buffer_length)
4198 urb->status = -EREMOTEIO;
4199 }
4200
4201 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS ||
4202 usb_pipetype(urb->pipe) == PIPE_INTERRUPT) {
4203 struct usb_host_endpoint *ep = urb->ep;
4204
4205 if (ep)
4206 dwc2_free_bus_bandwidth(dwc2_hsotg_to_hcd(hsotg),
4207 dwc2_hcd_get_ep_bandwidth(hsotg, ep),
4208 urb);
4209 }
4210
4211 usb_hcd_unlink_urb_from_ep(dwc2_hsotg_to_hcd(hsotg), urb);
4212 urb->hcpriv = NULL;
4213 kfree(qtd->urb);
4214 qtd->urb = NULL;
4215
4216 usb_hcd_giveback_urb(dwc2_hsotg_to_hcd(hsotg), urb, status);
4217}
4218
4219
4220
4221
4222static void dwc2_hcd_start_func(struct work_struct *work)
4223{
4224 struct dwc2_hsotg *hsotg = container_of(work, struct dwc2_hsotg,
4225 start_work.work);
4226
4227 dev_dbg(hsotg->dev, "%s() %p\n", __func__, hsotg);
4228 dwc2_host_start(hsotg);
4229}
4230
4231
4232
4233
4234static void dwc2_hcd_reset_func(struct work_struct *work)
4235{
4236 struct dwc2_hsotg *hsotg = container_of(work, struct dwc2_hsotg,
4237 reset_work.work);
4238 unsigned long flags;
4239 u32 hprt0;
4240
4241 dev_dbg(hsotg->dev, "USB RESET function called\n");
4242
4243 spin_lock_irqsave(&hsotg->lock, flags);
4244
4245 hprt0 = dwc2_read_hprt0(hsotg);
4246 hprt0 &= ~HPRT0_RST;
4247 dwc2_writel(hsotg, hprt0, HPRT0);
4248 hsotg->flags.b.port_reset_change = 1;
4249
4250 spin_unlock_irqrestore(&hsotg->lock, flags);
4251}
4252
4253static void dwc2_hcd_phy_reset_func(struct work_struct *work)
4254{
4255 struct dwc2_hsotg *hsotg = container_of(work, struct dwc2_hsotg,
4256 phy_reset_work);
4257 int ret;
4258
4259 ret = phy_reset(hsotg->phy);
4260 if (ret)
4261 dev_warn(hsotg->dev, "PHY reset failed\n");
4262}
4263
/*
 * Initializes the DWC_otg controller and its root hub and prepares it for
 * host mode operation. Activates the root port. Returns 0 on success and a
 * negative error code on failure.
 */
4275static int _dwc2_hcd_start(struct usb_hcd *hcd)
4276{
4277 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4278 struct usb_bus *bus = hcd_to_bus(hcd);
4279 unsigned long flags;
4280 u32 hprt0;
4281 int ret;
4282
4283 dev_dbg(hsotg->dev, "DWC OTG HCD START\n");
4284
4285 spin_lock_irqsave(&hsotg->lock, flags);
4286 hsotg->lx_state = DWC2_L0;
4287 hcd->state = HC_STATE_RUNNING;
4288 set_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4289
4290 if (dwc2_is_device_mode(hsotg)) {
4291 spin_unlock_irqrestore(&hsotg->lock, flags);
4292 return 0;
4293 }
4294
4295 dwc2_hcd_reinit(hsotg);
4296
4297 hprt0 = dwc2_read_hprt0(hsotg);
4298
4299 if (hprt0 & HPRT0_PWR) {
4300
4301 spin_unlock_irqrestore(&hsotg->lock, flags);
4302 ret = dwc2_vbus_supply_init(hsotg);
4303 if (ret)
4304 return ret;
4305 spin_lock_irqsave(&hsotg->lock, flags);
4306 }
4307
4308
4309 if (bus->root_hub) {
4310 dev_dbg(hsotg->dev, "DWC OTG HCD Has Root Hub\n");
4311
4312 usb_hcd_resume_root_hub(hcd);
4313 }
4314
4315 spin_unlock_irqrestore(&hsotg->lock, flags);
4316
4317 return 0;
4318}
4319
/*
 * Halts the DWC_otg host mode operation in a clean manner. USB transfers
 * are stopped.
 */
4324static void _dwc2_hcd_stop(struct usb_hcd *hcd)
4325{
4326 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4327 unsigned long flags;
4328 u32 hprt0;
4329
4330
4331 dwc2_disable_host_interrupts(hsotg);
4332
4333
4334 synchronize_irq(hcd->irq);
4335
4336 spin_lock_irqsave(&hsotg->lock, flags);
4337 hprt0 = dwc2_read_hprt0(hsotg);
4338
4339 dwc2_hcd_disconnect(hsotg, true);
4340 dwc2_hcd_stop(hsotg);
4341 hsotg->lx_state = DWC2_L3;
4342 hcd->state = HC_STATE_HALT;
4343 clear_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4344 spin_unlock_irqrestore(&hsotg->lock, flags);
4345
4346
4347 if (hprt0 & HPRT0_PWR)
4348 dwc2_vbus_supply_exit(hsotg);
4349
4350 usleep_range(1000, 3000);
4351}
4352
4353static int _dwc2_hcd_suspend(struct usb_hcd *hcd)
4354{
4355 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4356 unsigned long flags;
4357 int ret = 0;
4358
4359 spin_lock_irqsave(&hsotg->lock, flags);
4360
4361 if (dwc2_is_device_mode(hsotg))
4362 goto unlock;
4363
4364 if (hsotg->lx_state != DWC2_L0)
4365 goto unlock;
4366
4367 if (!HCD_HW_ACCESSIBLE(hcd))
4368 goto unlock;
4369
4370 if (hsotg->op_state == OTG_STATE_B_PERIPHERAL)
4371 goto unlock;
4372
4373 if (hsotg->bus_suspended)
4374 goto skip_power_saving;
4375
4376 if (hsotg->flags.b.port_connect_status == 0)
4377 goto skip_power_saving;
4378
4379 switch (hsotg->params.power_down) {
4380 case DWC2_POWER_DOWN_PARAM_PARTIAL:
4381
4382 ret = dwc2_enter_partial_power_down(hsotg);
4383 if (ret)
4384 dev_err(hsotg->dev,
4385 "enter partial_power_down failed\n");
4386
4387 clear_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4388 break;
4389 case DWC2_POWER_DOWN_PARAM_HIBERNATION:
4390
4391 spin_unlock_irqrestore(&hsotg->lock, flags);
4392 ret = dwc2_enter_hibernation(hsotg, 1);
4393 if (ret)
4394 dev_err(hsotg->dev, "enter hibernation failed\n");
4395 spin_lock_irqsave(&hsotg->lock, flags);
4396
4397
4398 clear_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4399 break;
4400 case DWC2_POWER_DOWN_PARAM_NONE:
4401
4402
4403
4404
4405 dwc2_host_enter_clock_gating(hsotg);
4406
4407
4408 clear_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4409 break;
4410 default:
4411 goto skip_power_saving;
4412 }
4413
4414 spin_unlock_irqrestore(&hsotg->lock, flags);
4415 dwc2_vbus_supply_exit(hsotg);
4416 spin_lock_irqsave(&hsotg->lock, flags);
4417
4418
4419 if (!IS_ERR_OR_NULL(hsotg->uphy)) {
4420 spin_unlock_irqrestore(&hsotg->lock, flags);
4421 usb_phy_set_suspend(hsotg->uphy, true);
4422 spin_lock_irqsave(&hsotg->lock, flags);
4423 }
4424
4425skip_power_saving:
4426 hsotg->lx_state = DWC2_L2;
4427unlock:
4428 spin_unlock_irqrestore(&hsotg->lock, flags);
4429
4430 return ret;
4431}
4432
4433static int _dwc2_hcd_resume(struct usb_hcd *hcd)
4434{
4435 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4436 unsigned long flags;
4437 u32 hprt0;
4438 int ret = 0;
4439
4440 spin_lock_irqsave(&hsotg->lock, flags);
4441
4442 if (dwc2_is_device_mode(hsotg))
4443 goto unlock;
4444
4445 if (hsotg->lx_state != DWC2_L2)
4446 goto unlock;
4447
4448 hprt0 = dwc2_read_hprt0(hsotg);
4449
4450
4451
4452
4453
4454
4455 if (hprt0 & HPRT0_CONNSTS) {
4456 hsotg->lx_state = DWC2_L0;
4457 goto unlock;
4458 }
4459
4460 switch (hsotg->params.power_down) {
4461 case DWC2_POWER_DOWN_PARAM_PARTIAL:
4462 ret = dwc2_exit_partial_power_down(hsotg, 0, true);
4463 if (ret)
4464 dev_err(hsotg->dev,
4465 "exit partial_power_down failed\n");
4466
4467
4468
4469
4470 set_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4471 break;
4472 case DWC2_POWER_DOWN_PARAM_HIBERNATION:
4473 ret = dwc2_exit_hibernation(hsotg, 0, 0, 1);
4474 if (ret)
4475 dev_err(hsotg->dev, "exit hibernation failed.\n");
4476
4477
4478
4479
4480
4481 set_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4482 break;
4483 case DWC2_POWER_DOWN_PARAM_NONE:
4484
4485
4486
4487
4488 spin_unlock_irqrestore(&hsotg->lock, flags);
4489 dwc2_host_exit_clock_gating(hsotg, 0);
4490
4491
4492
4493
4494
4495 dwc2_core_init(hsotg, false);
4496 dwc2_enable_global_interrupts(hsotg);
4497 dwc2_hcd_reinit(hsotg);
4498 spin_lock_irqsave(&hsotg->lock, flags);
4499
4500
4501
4502
4503
4504 set_bit(HCD_FLAG_HW_ACCESSIBLE, &hcd->flags);
4505 break;
4506 default:
4507 hsotg->lx_state = DWC2_L0;
4508 goto unlock;
4509 }
4510
4511
4512 hsotg->flags.b.port_suspend_change = 1;
4513
4514
4515
4516
4517
4518
4519 if (!IS_ERR_OR_NULL(hsotg->uphy)) {
4520 spin_unlock_irqrestore(&hsotg->lock, flags);
4521 usb_phy_set_suspend(hsotg->uphy, false);
4522 spin_lock_irqsave(&hsotg->lock, flags);
4523 }
4524
4525
4526 spin_unlock_irqrestore(&hsotg->lock, flags);
4527 dwc2_vbus_supply_init(hsotg);
4528
4529
4530 usleep_range(3000, 5000);
4531 spin_lock_irqsave(&hsotg->lock, flags);
4532
4533
4534
4535
4536
4537 dwc2_writel(hsotg, HPRT0_PWR | HPRT0_CONNDET |
4538 HPRT0_ENACHG, HPRT0);
4539
4540
4541 spin_unlock_irqrestore(&hsotg->lock, flags);
4542 usleep_range(5000, 7000);
4543 spin_lock_irqsave(&hsotg->lock, flags);
4544unlock:
4545 spin_unlock_irqrestore(&hsotg->lock, flags);
4546
4547 return ret;
4548}
4549
/* Returns the current frame number */
4551static int _dwc2_hcd_get_frame_number(struct usb_hcd *hcd)
4552{
4553 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4554
4555 return dwc2_hcd_get_frame_number(hsotg);
4556}
4557
4558static void dwc2_dump_urb_info(struct usb_hcd *hcd, struct urb *urb,
4559 char *fn_name)
4560{
4561#ifdef VERBOSE_DEBUG
4562 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4563 char *pipetype = NULL;
4564 char *speed = NULL;
4565
4566 dev_vdbg(hsotg->dev, "%s, urb %p\n", fn_name, urb);
4567 dev_vdbg(hsotg->dev, " Device address: %d\n",
4568 usb_pipedevice(urb->pipe));
4569 dev_vdbg(hsotg->dev, " Endpoint: %d, %s\n",
4570 usb_pipeendpoint(urb->pipe),
4571 usb_pipein(urb->pipe) ? "IN" : "OUT");
4572
4573 switch (usb_pipetype(urb->pipe)) {
4574 case PIPE_CONTROL:
4575 pipetype = "CONTROL";
4576 break;
4577 case PIPE_BULK:
4578 pipetype = "BULK";
4579 break;
4580 case PIPE_INTERRUPT:
4581 pipetype = "INTERRUPT";
4582 break;
4583 case PIPE_ISOCHRONOUS:
4584 pipetype = "ISOCHRONOUS";
4585 break;
4586 }
4587
4588 dev_vdbg(hsotg->dev, " Endpoint type: %s %s (%s)\n", pipetype,
4589 usb_urb_dir_in(urb) ? "IN" : "OUT", usb_pipein(urb->pipe) ?
4590 "IN" : "OUT");
4591
4592 switch (urb->dev->speed) {
4593 case USB_SPEED_HIGH:
4594 speed = "HIGH";
4595 break;
4596 case USB_SPEED_FULL:
4597 speed = "FULL";
4598 break;
4599 case USB_SPEED_LOW:
4600 speed = "LOW";
4601 break;
4602 default:
4603 speed = "UNKNOWN";
4604 break;
4605 }
4606
4607 dev_vdbg(hsotg->dev, " Speed: %s\n", speed);
4608 dev_vdbg(hsotg->dev, " Max packet size: %d (%d mult)\n",
4609 usb_endpoint_maxp(&urb->ep->desc),
4610 usb_endpoint_maxp_mult(&urb->ep->desc));
4611
4612 dev_vdbg(hsotg->dev, " Data buffer length: %d\n",
4613 urb->transfer_buffer_length);
4614 dev_vdbg(hsotg->dev, " Transfer buffer: %p, Transfer DMA: %08lx\n",
4615 urb->transfer_buffer, (unsigned long)urb->transfer_dma);
4616 dev_vdbg(hsotg->dev, " Setup buffer: %p, Setup DMA: %08lx\n",
4617 urb->setup_packet, (unsigned long)urb->setup_dma);
4618 dev_vdbg(hsotg->dev, " Interval: %d\n", urb->interval);
4619
4620 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS) {
4621 int i;
4622
4623 for (i = 0; i < urb->number_of_packets; i++) {
4624 dev_vdbg(hsotg->dev, " ISO Desc %d:\n", i);
4625 dev_vdbg(hsotg->dev, " offset: %d, length %d\n",
4626 urb->iso_frame_desc[i].offset,
4627 urb->iso_frame_desc[i].length);
4628 }
4629 }
4630#endif
4631}
4632
/*
 * Starts processing a USB transfer request specified by a USB Request Block
 * (URB). mem_flags indicates the type of memory allocation to use while
 * processing this URB.
 */
4638static int _dwc2_hcd_urb_enqueue(struct usb_hcd *hcd, struct urb *urb,
4639 gfp_t mem_flags)
4640{
4641 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4642 struct usb_host_endpoint *ep = urb->ep;
4643 struct dwc2_hcd_urb *dwc2_urb;
4644 int i;
4645 int retval;
4646 int alloc_bandwidth = 0;
4647 u8 ep_type = 0;
4648 u32 tflags = 0;
4649 void *buf;
4650 unsigned long flags;
4651 struct dwc2_qh *qh;
4652 bool qh_allocated = false;
4653 struct dwc2_qtd *qtd;
4654 struct dwc2_gregs_backup *gr;
4655
4656 gr = &hsotg->gr_backup;
4657
4658 if (dbg_urb(urb)) {
4659 dev_vdbg(hsotg->dev, "DWC OTG HCD URB Enqueue\n");
4660 dwc2_dump_urb_info(hcd, urb, "urb_enqueue");
4661 }
4662
4663 if (hsotg->hibernated) {
4664 if (gr->gotgctl & GOTGCTL_CURMODE_HOST)
4665 retval = dwc2_exit_hibernation(hsotg, 0, 0, 1);
4666 else
4667 retval = dwc2_exit_hibernation(hsotg, 0, 0, 0);
4668
4669 if (retval)
4670 dev_err(hsotg->dev,
4671 "exit hibernation failed.\n");
4672 }
4673
4674 if (hsotg->in_ppd) {
4675 retval = dwc2_exit_partial_power_down(hsotg, 0, true);
4676 if (retval)
4677 dev_err(hsotg->dev,
4678 "exit partial_power_down failed\n");
4679 }
4680
4681 if (hsotg->params.power_down == DWC2_POWER_DOWN_PARAM_NONE &&
4682 hsotg->bus_suspended) {
4683 if (dwc2_is_device_mode(hsotg))
4684 dwc2_gadget_exit_clock_gating(hsotg, 0);
4685 else
4686 dwc2_host_exit_clock_gating(hsotg, 0);
4687 }
4688
4689 if (!ep)
4690 return -EINVAL;
4691
4692 if (usb_pipetype(urb->pipe) == PIPE_ISOCHRONOUS ||
4693 usb_pipetype(urb->pipe) == PIPE_INTERRUPT) {
4694 spin_lock_irqsave(&hsotg->lock, flags);
4695 if (!dwc2_hcd_is_bandwidth_allocated(hsotg, ep))
4696 alloc_bandwidth = 1;
4697 spin_unlock_irqrestore(&hsotg->lock, flags);
4698 }
4699
4700 switch (usb_pipetype(urb->pipe)) {
4701 case PIPE_CONTROL:
4702 ep_type = USB_ENDPOINT_XFER_CONTROL;
4703 break;
4704 case PIPE_ISOCHRONOUS:
4705 ep_type = USB_ENDPOINT_XFER_ISOC;
4706 break;
4707 case PIPE_BULK:
4708 ep_type = USB_ENDPOINT_XFER_BULK;
4709 break;
4710 case PIPE_INTERRUPT:
4711 ep_type = USB_ENDPOINT_XFER_INT;
4712 break;
4713 }
4714
4715 dwc2_urb = dwc2_hcd_urb_alloc(hsotg, urb->number_of_packets,
4716 mem_flags);
4717 if (!dwc2_urb)
4718 return -ENOMEM;
4719
4720 dwc2_hcd_urb_set_pipeinfo(hsotg, dwc2_urb, usb_pipedevice(urb->pipe),
4721 usb_pipeendpoint(urb->pipe), ep_type,
4722 usb_pipein(urb->pipe),
4723 usb_endpoint_maxp(&ep->desc),
4724 usb_endpoint_maxp_mult(&ep->desc));
4725
4726 buf = urb->transfer_buffer;
4727
4728 if (hcd_uses_dma(hcd)) {
4729 if (!buf && (urb->transfer_dma & 3)) {
4730 dev_err(hsotg->dev,
4731 "%s: unaligned transfer with no transfer_buffer",
4732 __func__);
4733 retval = -EINVAL;
4734 goto fail0;
4735 }
4736 }
4737
4738 if (!(urb->transfer_flags & URB_NO_INTERRUPT))
4739 tflags |= URB_GIVEBACK_ASAP;
4740 if (urb->transfer_flags & URB_ZERO_PACKET)
4741 tflags |= URB_SEND_ZERO_PACKET;
4742
4743 dwc2_urb->priv = urb;
4744 dwc2_urb->buf = buf;
4745 dwc2_urb->dma = urb->transfer_dma;
4746 dwc2_urb->length = urb->transfer_buffer_length;
4747 dwc2_urb->setup_packet = urb->setup_packet;
4748 dwc2_urb->setup_dma = urb->setup_dma;
4749 dwc2_urb->flags = tflags;
4750 dwc2_urb->interval = urb->interval;
4751 dwc2_urb->status = -EINPROGRESS;
4752
4753 for (i = 0; i < urb->number_of_packets; ++i)
4754 dwc2_hcd_urb_set_iso_desc_params(dwc2_urb, i,
4755 urb->iso_frame_desc[i].offset,
4756 urb->iso_frame_desc[i].length);
4757
4758 urb->hcpriv = dwc2_urb;
4759 qh = (struct dwc2_qh *)ep->hcpriv;
4760
4761 if (!qh) {
4762 qh = dwc2_hcd_qh_create(hsotg, dwc2_urb, mem_flags);
4763 if (!qh) {
4764 retval = -ENOMEM;
4765 goto fail0;
4766 }
4767 ep->hcpriv = qh;
4768 qh_allocated = true;
4769 }
4770
4771 qtd = kzalloc(sizeof(*qtd), mem_flags);
4772 if (!qtd) {
4773 retval = -ENOMEM;
4774 goto fail1;
4775 }
4776
4777 spin_lock_irqsave(&hsotg->lock, flags);
4778 retval = usb_hcd_link_urb_to_ep(hcd, urb);
4779 if (retval)
4780 goto fail2;
4781
4782 retval = dwc2_hcd_urb_enqueue(hsotg, dwc2_urb, qh, qtd);
4783 if (retval)
4784 goto fail3;
4785
4786 if (alloc_bandwidth) {
4787 dwc2_allocate_bus_bandwidth(hcd,
4788 dwc2_hcd_get_ep_bandwidth(hsotg, ep),
4789 urb);
4790 }
4791
4792 spin_unlock_irqrestore(&hsotg->lock, flags);
4793
4794 return 0;
4795
4796fail3:
4797 dwc2_urb->priv = NULL;
4798 usb_hcd_unlink_urb_from_ep(hcd, urb);
4799 if (qh_allocated && qh->channel && qh->channel->qh == qh)
4800 qh->channel->qh = NULL;
4801fail2:
4802 spin_unlock_irqrestore(&hsotg->lock, flags);
4803 urb->hcpriv = NULL;
4804 kfree(qtd);
4805fail1:
4806 if (qh_allocated) {
4807 struct dwc2_qtd *qtd2, *qtd2_tmp;
4808
4809 ep->hcpriv = NULL;
4810 dwc2_hcd_qh_unlink(hsotg, qh);
4811
4812 list_for_each_entry_safe(qtd2, qtd2_tmp, &qh->qtd_list,
4813 qtd_list_entry)
4814 dwc2_hcd_qtd_unlink_and_free(hsotg, qtd2, qh);
4815 dwc2_hcd_qh_free(hsotg, qh);
4816 }
4817fail0:
4818 kfree(dwc2_urb);
4819
4820 return retval;
4821}
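
/*
 * How an URB typically reaches this entry point from a class driver, using
 * the standard USB core helpers (sketch; udev, ep_addr, buf, len,
 * my_complete and ctx are placeholders):
 *
 *        struct urb *urb = usb_alloc_urb(0, GFP_KERNEL);
 *
 *        usb_fill_bulk_urb(urb, udev, usb_sndbulkpipe(udev, ep_addr),
 *                          buf, len, my_complete, ctx);
 *        ret = usb_submit_urb(urb, GFP_KERNEL);
 */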
4822
/* Aborts/cancels a USB transfer request */
4826static int _dwc2_hcd_urb_dequeue(struct usb_hcd *hcd, struct urb *urb,
4827 int status)
4828{
4829 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4830 int rc;
4831 unsigned long flags;
4832
4833 dev_dbg(hsotg->dev, "DWC OTG HCD URB Dequeue\n");
4834 dwc2_dump_urb_info(hcd, urb, "urb_dequeue");
4835
4836 spin_lock_irqsave(&hsotg->lock, flags);
4837
4838 rc = usb_hcd_check_unlink_urb(hcd, urb, status);
4839 if (rc)
4840 goto out;
4841
4842 if (!urb->hcpriv) {
4843 dev_dbg(hsotg->dev, "## urb->hcpriv is NULL ##\n");
4844 goto out;
4845 }
4846
4847 rc = dwc2_hcd_urb_dequeue(hsotg, urb->hcpriv);
4848
4849 usb_hcd_unlink_urb_from_ep(hcd, urb);
4850
4851 kfree(urb->hcpriv);
4852 urb->hcpriv = NULL;
4853
4854
4855 spin_unlock(&hsotg->lock);
4856 usb_hcd_giveback_urb(hcd, urb, status);
4857 spin_lock(&hsotg->lock);
4858
4859 dev_dbg(hsotg->dev, "Called usb_hcd_giveback_urb()\n");
4860 dev_dbg(hsotg->dev, " urb->status = %d\n", urb->status);
4861out:
4862 spin_unlock_irqrestore(&hsotg->lock, flags);
4863
4864 return rc;
4865}
4866
/*
 * Frees resources in the DWC_otg controller related to a given endpoint.
 * Also clears state in the HCD related to the endpoint. Any URBs for the
 * endpoint must already be dequeued.
 */
4872static void _dwc2_hcd_endpoint_disable(struct usb_hcd *hcd,
4873 struct usb_host_endpoint *ep)
4874{
4875 struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
4876
4877 dev_dbg(hsotg->dev,
4878 "DWC OTG HCD EP DISABLE: bEndpointAddress=0x%02x, ep->hcpriv=%p\n",
4879 ep->desc.bEndpointAddress, ep->hcpriv);
4880 dwc2_hcd_endpoint_disable(hsotg, ep, 250);
4881}
4882
4883
4884
4885
4886
4887
static void _dwc2_hcd_endpoint_reset(struct usb_hcd *hcd,
				     struct usb_host_endpoint *ep)
{
	struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
	unsigned long flags;

	dev_dbg(hsotg->dev,
		"DWC OTG HCD EP RESET: bEndpointAddress=0x%02x\n",
		ep->desc.bEndpointAddress);

	spin_lock_irqsave(&hsotg->lock, flags);
	dwc2_hcd_endpoint_reset(hsotg, ep);
	spin_unlock_irqrestore(&hsotg->lock, flags);
}

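/*
 * Handles host mode interrupts for the DWC_otg controller. Returns IRQ_NONE
 * if there was no interrupt to handle, IRQ_HANDLED otherwise.
 */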
static irqreturn_t _dwc2_hcd_irq(struct usb_hcd *hcd)
{
	struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);

	return dwc2_handle_hcd_intr(hsotg);
}

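/*
 * Creates the Status Change bitmap for the root hub and root port. The bitmap
 * is returned in buf. Bit 0 is the status change indicator for the root hub;
 * bit 1 is the status change indicator for the single root port. Returns 1 if
 * either change indicator is set, otherwise returns 0.
 */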
static int _dwc2_hcd_hub_status_data(struct usb_hcd *hcd, char *buf)
{
	struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);

	buf[0] = dwc2_hcd_is_status_changed(hsotg, 1) << 1;
	return buf[0] != 0;
}

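/* Handles hub class-specific requests */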
static int _dwc2_hcd_hub_control(struct usb_hcd *hcd, u16 typereq, u16 wvalue,
				 u16 windex, char *buf, u16 wlength)
{
	int retval = dwc2_hcd_hub_control(dwc2_hcd_to_hsotg(hcd), typereq,
					  wvalue, windex, buf, wlength);
	return retval;
}

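/* Handles hub TT buffer clear completions */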
static void _dwc2_hcd_clear_tt_buffer_complete(struct usb_hcd *hcd,
					       struct usb_host_endpoint *ep)
{
	struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);
	struct dwc2_qh *qh;
	unsigned long flags;

	qh = ep->hcpriv;
	if (!qh)
		return;

	spin_lock_irqsave(&hsotg->lock, flags);
	qh->tt_buffer_dirty = 0;

	if (hsotg->flags.b.port_connect_status)
		dwc2_hcd_queue_transactions(hsotg, DWC2_TRANSACTION_ALL);

	spin_unlock_irqrestore(&hsotg->lock, flags);
}

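/*
 * Updates the bus speed parameter (e.g. HPRT0_SPD_HIGH_SPEED or
 * HPRT0_SPD_FULL_SPEED) and schedules the connection ID status work so the
 * core is reconfigured for the new speed.
 */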
static void dwc2_change_bus_speed(struct usb_hcd *hcd, int speed)
{
	struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);

	if (hsotg->params.speed == speed)
		return;

	hsotg->params.speed = speed;
	queue_work(hsotg->wq_otg, &hsotg->wf_otg);
}

static void dwc2_free_dev(struct usb_hcd *hcd, struct usb_device *udev)
{
	struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);

	if (!hsotg->params.change_speed_quirk)
		return;

	/*
	 * On removal, set speed back to the default high-speed.
	 */
	if (udev->parent && udev->parent->speed > USB_SPEED_UNKNOWN &&
	    udev->parent->speed < USB_SPEED_HIGH) {
		dev_info(hsotg->dev, "Set speed to default high-speed\n");
		dwc2_change_bus_speed(hcd, HPRT0_SPD_HIGH_SPEED);
	}
}

static int dwc2_reset_device(struct usb_hcd *hcd, struct usb_device *udev)
{
	struct dwc2_hsotg *hsotg = dwc2_hcd_to_hsotg(hcd);

	if (!hsotg->params.change_speed_quirk)
		return 0;

	if (udev->speed == USB_SPEED_HIGH) {
		dev_info(hsotg->dev, "Set speed to high-speed\n");
		dwc2_change_bus_speed(hcd, HPRT0_SPD_HIGH_SPEED);
	} else if ((udev->speed == USB_SPEED_FULL ||
		    udev->speed == USB_SPEED_LOW)) {
		/*
		 * Change speed setting to full-speed if there's
		 * a full-speed or low-speed device plugged in.
		 */
		dev_info(hsotg->dev, "Set speed to full-speed\n");
		dwc2_change_bus_speed(hcd, HPRT0_SPD_FULL_SPEED);
	}

	return 0;
}

static struct hc_driver dwc2_hc_driver = {
	.description = "dwc2_hsotg",
	.product_desc = "DWC OTG Controller",
	.hcd_priv_size = sizeof(struct wrapper_priv_data),

	.irq = _dwc2_hcd_irq,
	.flags = HCD_MEMORY | HCD_USB2 | HCD_BH,

	.start = _dwc2_hcd_start,
	.stop = _dwc2_hcd_stop,
	.urb_enqueue = _dwc2_hcd_urb_enqueue,
	.urb_dequeue = _dwc2_hcd_urb_dequeue,
	.endpoint_disable = _dwc2_hcd_endpoint_disable,
	.endpoint_reset = _dwc2_hcd_endpoint_reset,
	.get_frame_number = _dwc2_hcd_get_frame_number,

	.hub_status_data = _dwc2_hcd_hub_status_data,
	.hub_control = _dwc2_hcd_hub_control,
	.clear_tt_buffer_complete = _dwc2_hcd_clear_tt_buffer_complete,

	.bus_suspend = _dwc2_hcd_suspend,
	.bus_resume = _dwc2_hcd_resume,

	.map_urb_for_dma = dwc2_map_urb_for_dma,
	.unmap_urb_for_dma = dwc2_unmap_urb_for_dma,
};

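/*
 * Frees secondary storage associated with the dwc2_hsotg structure contained
 * in the struct usb_hcd field
 */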
static void dwc2_hcd_free(struct dwc2_hsotg *hsotg)
{
	u32 ahbcfg;
	u32 dctl;
	int i;

	dev_dbg(hsotg->dev, "DWC OTG HCD FREE\n");

	/* Free memory for QH/QTD lists */
	dwc2_qh_list_free(hsotg, &hsotg->non_periodic_sched_inactive);
	dwc2_qh_list_free(hsotg, &hsotg->non_periodic_sched_waiting);
	dwc2_qh_list_free(hsotg, &hsotg->non_periodic_sched_active);
	dwc2_qh_list_free(hsotg, &hsotg->periodic_sched_inactive);
	dwc2_qh_list_free(hsotg, &hsotg->periodic_sched_ready);
	dwc2_qh_list_free(hsotg, &hsotg->periodic_sched_assigned);
	dwc2_qh_list_free(hsotg, &hsotg->periodic_sched_queued);

	/* Free memory for the host channels */
	for (i = 0; i < MAX_EPS_CHANNELS; i++) {
		struct dwc2_host_chan *chan = hsotg->hc_ptr_array[i];

		if (chan) {
			dev_dbg(hsotg->dev, "HCD Free channel #%i, chan=%p\n",
				i, chan);
			hsotg->hc_ptr_array[i] = NULL;
			kfree(chan);
		}
	}

	if (hsotg->params.host_dma) {
		if (hsotg->status_buf) {
			dma_free_coherent(hsotg->dev, DWC2_HCD_STATUS_BUF_SIZE,
					  hsotg->status_buf,
					  hsotg->status_buf_dma);
			hsotg->status_buf = NULL;
		}
	} else {
		kfree(hsotg->status_buf);
		hsotg->status_buf = NULL;
	}

	ahbcfg = dwc2_readl(hsotg, GAHBCFG);

	/* Disable all interrupts */
	ahbcfg &= ~GAHBCFG_GLBL_INTR_EN;
	dwc2_writel(hsotg, ahbcfg, GAHBCFG);
	dwc2_writel(hsotg, 0, GINTMSK);

	if (hsotg->hw_params.snpsid >= DWC2_CORE_REV_3_00a) {
		dctl = dwc2_readl(hsotg, DCTL);
		dctl |= DCTL_SFTDISCON;
		dwc2_writel(hsotg, dctl, DCTL);
	}

	if (hsotg->wq_otg) {
		if (!cancel_work_sync(&hsotg->wf_otg))
			flush_workqueue(hsotg->wq_otg);
		destroy_workqueue(hsotg->wq_otg);
	}

	cancel_work_sync(&hsotg->phy_reset_work);

	del_timer(&hsotg->wkp_timer);
}

static void dwc2_hcd_release(struct dwc2_hsotg *hsotg)
{
	/* Turn off all host-specific interrupts */
	dwc2_disable_host_interrupts(hsotg);

	dwc2_hcd_free(hsotg);
}

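/*
 * Initializes the HCD. This function allocates memory for and initializes the
 * static parts of the usb_hcd and dwc2_hsotg structures. It also registers the
 * USB bus with the core and calls the hc_driver->start() function. It returns
 * a negative error on failure.
 */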
int dwc2_hcd_init(struct dwc2_hsotg *hsotg)
{
	struct platform_device *pdev = to_platform_device(hsotg->dev);
	struct resource *res;
	struct usb_hcd *hcd;
	struct dwc2_host_chan *channel;
	u32 hcfg;
	int i, num_channels;
	int retval;

	if (usb_disabled())
		return -ENODEV;

	dev_dbg(hsotg->dev, "DWC OTG HCD INIT\n");

	retval = -ENOMEM;

	hcfg = dwc2_readl(hsotg, HCFG);
	dev_dbg(hsotg->dev, "hcfg=%08x\n", hcfg);

#ifdef CONFIG_USB_DWC2_TRACK_MISSED_SOFS
	hsotg->frame_num_array = kcalloc(FRAME_NUM_ARRAY_SIZE,
					 sizeof(*hsotg->frame_num_array),
					 GFP_KERNEL);
	if (!hsotg->frame_num_array)
		goto error1;
	hsotg->last_frame_num_array =
		kcalloc(FRAME_NUM_ARRAY_SIZE,
			sizeof(*hsotg->last_frame_num_array), GFP_KERNEL);
	if (!hsotg->last_frame_num_array)
		goto error1;
#endif
	hsotg->last_frame_num = HFNUM_MAX_FRNUM;

	/* Check if the bus driver or platform code has setup a dma_mask */
	if (hsotg->params.host_dma &&
	    !hsotg->dev->dma_mask) {
		dev_warn(hsotg->dev,
			 "dma_mask not set, disabling DMA\n");
		hsotg->params.host_dma = false;
		hsotg->params.dma_desc_enable = false;
	}

	/* Set device flags indicating whether the HCD supports DMA */
	if (hsotg->params.host_dma) {
		if (dma_set_mask(hsotg->dev, DMA_BIT_MASK(32)) < 0)
			dev_warn(hsotg->dev, "can't set DMA mask\n");
		if (dma_set_coherent_mask(hsotg->dev, DMA_BIT_MASK(32)) < 0)
			dev_warn(hsotg->dev, "can't set coherent DMA mask\n");
	}

	if (hsotg->params.change_speed_quirk) {
		dwc2_hc_driver.free_dev = dwc2_free_dev;
		dwc2_hc_driver.reset_device = dwc2_reset_device;
	}

	if (hsotg->params.host_dma)
		dwc2_hc_driver.flags |= HCD_DMA;

	hcd = usb_create_hcd(&dwc2_hc_driver, hsotg->dev, dev_name(hsotg->dev));
	if (!hcd)
		goto error1;

	hcd->has_tt = 1;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		/* Defensive check: fail cleanly if no memory resource */
		retval = -EINVAL;
		goto error2;
	}
	hcd->rsrc_start = res->start;
	hcd->rsrc_len = resource_size(res);

	((struct wrapper_priv_data *)&hcd->hcd_priv)->hsotg = hsotg;
	hsotg->priv = hcd;

	/*
	 * Disable the global interrupt until all the interrupt handlers are
	 * installed
	 */
	dwc2_disable_global_interrupts(hsotg);

	/* Initialize the DWC_otg core, and select the Phy type */
	retval = dwc2_core_init(hsotg, true);
	if (retval)
		goto error2;

	/* Create new workqueue and init work */
	retval = -ENOMEM;
	hsotg->wq_otg = alloc_ordered_workqueue("dwc2", 0);
	if (!hsotg->wq_otg) {
		dev_err(hsotg->dev, "Failed to create workqueue\n");
		goto error2;
	}
	INIT_WORK(&hsotg->wf_otg, dwc2_conn_id_status_change);

	timer_setup(&hsotg->wkp_timer, dwc2_wakeup_detected, 0);

	/* Initialize the non-periodic schedule */
	INIT_LIST_HEAD(&hsotg->non_periodic_sched_inactive);
	INIT_LIST_HEAD(&hsotg->non_periodic_sched_waiting);
	INIT_LIST_HEAD(&hsotg->non_periodic_sched_active);

	/* Initialize the periodic schedule */
	INIT_LIST_HEAD(&hsotg->periodic_sched_inactive);
	INIT_LIST_HEAD(&hsotg->periodic_sched_ready);
	INIT_LIST_HEAD(&hsotg->periodic_sched_assigned);
	INIT_LIST_HEAD(&hsotg->periodic_sched_queued);

	INIT_LIST_HEAD(&hsotg->split_order);

	/*
	 * Create a host channel descriptor for each host channel implemented
	 * in the controller. Initialize the channel descriptor array.
	 */
	INIT_LIST_HEAD(&hsotg->free_hc_list);
	num_channels = hsotg->params.host_channels;
	memset(&hsotg->hc_ptr_array[0], 0, sizeof(hsotg->hc_ptr_array));

	for (i = 0; i < num_channels; i++) {
		channel = kzalloc(sizeof(*channel), GFP_KERNEL);
		if (!channel)
			goto error3;
		channel->hc_num = i;
		INIT_LIST_HEAD(&channel->split_order_list_entry);
		hsotg->hc_ptr_array[i] = channel;
	}

	/* Initialize the driver's deferred work items */
	INIT_DELAYED_WORK(&hsotg->start_work, dwc2_hcd_start_func);
	INIT_DELAYED_WORK(&hsotg->reset_work, dwc2_hcd_reset_func);
	INIT_WORK(&hsotg->phy_reset_work, dwc2_hcd_phy_reset_func);

	/*
	 * Allocate space for storing data on status transactions. Normally no
	 * data is sent, but this space acts as a bit bucket.
	 */
	if (hsotg->params.host_dma)
		hsotg->status_buf = dma_alloc_coherent(hsotg->dev,
					DWC2_HCD_STATUS_BUF_SIZE,
					&hsotg->status_buf_dma, GFP_KERNEL);
	else
		hsotg->status_buf = kzalloc(DWC2_HCD_STATUS_BUF_SIZE,
					    GFP_KERNEL);

	if (!hsotg->status_buf)
		goto error3;

	/*
	 * Create kmem caches to handle descriptor buffers in descriptor
	 * DMA mode.
	 * Alignment must be set to 512 bytes.
	 */
	if (hsotg->params.dma_desc_enable ||
	    hsotg->params.dma_desc_fs_enable) {
		hsotg->desc_gen_cache = kmem_cache_create("dwc2-gen-desc",
				sizeof(struct dwc2_dma_desc) *
				MAX_DMA_DESC_NUM_GENERIC, 512, SLAB_CACHE_DMA,
				NULL);
		if (!hsotg->desc_gen_cache) {
			dev_err(hsotg->dev,
				"unable to create dwc2 generic desc cache\n");

			/*
			 * Disable descriptor dma mode since it will not be
			 * usable.
			 */
			hsotg->params.dma_desc_enable = false;
			hsotg->params.dma_desc_fs_enable = false;
		}

		hsotg->desc_hsisoc_cache = kmem_cache_create("dwc2-hsisoc-desc",
				sizeof(struct dwc2_dma_desc) *
				MAX_DMA_DESC_NUM_HS_ISOC, 512, 0, NULL);
		if (!hsotg->desc_hsisoc_cache) {
			dev_err(hsotg->dev,
				"unable to create dwc2 hs isoc desc cache\n");

			kmem_cache_destroy(hsotg->desc_gen_cache);

			/*
			 * Disable descriptor dma mode since it will not be
			 * usable.
			 */
			hsotg->params.dma_desc_enable = false;
			hsotg->params.dma_desc_fs_enable = false;
		}
	}

	if (hsotg->params.host_dma) {
		/*
		 * Create kmem cache to handle non-aligned buffers in
		 * Buffer DMA mode.
		 */
		hsotg->unaligned_cache = kmem_cache_create("dwc2-unaligned-dma",
						DWC2_KMEM_UNALIGNED_BUF_SIZE, 4,
						SLAB_CACHE_DMA, NULL);
		if (!hsotg->unaligned_cache)
			dev_err(hsotg->dev,
				"unable to create dwc2 unaligned cache\n");
	}

	hsotg->otg_port = 1;
	hsotg->frame_list = NULL;
	hsotg->frame_list_dma = 0;
	hsotg->periodic_qh_count = 0;

	/* Initiate lx_state to L3 disconnected state */
	hsotg->lx_state = DWC2_L3;

	hcd->self.otg_port = hsotg->otg_port;

	/* Don't support SG list at this point */
	hcd->self.sg_tablesize = 0;

	if (!IS_ERR_OR_NULL(hsotg->uphy))
		otg_set_host(hsotg->uphy->otg, &hcd->self);

	/*
	 * Finish generic HCD initialization and start the HCD. This function
	 * allocates the DMA buffer pool, registers the USB bus, requests the
	 * IRQ line, and calls the hc_driver start() method.
	 */
	retval = usb_add_hcd(hcd, hsotg->irq, IRQF_SHARED);
	if (retval < 0)
		goto error4;

	device_wakeup_enable(hcd->self.controller);

	dwc2_hcd_dump_state(hsotg);

	dwc2_enable_global_interrupts(hsotg);

	return 0;

error4:
	kmem_cache_destroy(hsotg->unaligned_cache);
	kmem_cache_destroy(hsotg->desc_hsisoc_cache);
	kmem_cache_destroy(hsotg->desc_gen_cache);
error3:
	dwc2_hcd_release(hsotg);
error2:
	usb_put_hcd(hcd);
error1:

#ifdef CONFIG_USB_DWC2_TRACK_MISSED_SOFS
	kfree(hsotg->last_frame_num_array);
	kfree(hsotg->frame_num_array);
#endif

	dev_err(hsotg->dev, "%s() FAILED, returning %d\n", __func__, retval);
	return retval;
}

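/*
 * Removes the HCD.
 * Frees memory and resources associated with the HCD and deregisters the bus.
 */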
void dwc2_hcd_remove(struct dwc2_hsotg *hsotg)
{
	struct usb_hcd *hcd;

	dev_dbg(hsotg->dev, "DWC OTG HCD REMOVE\n");

	hcd = dwc2_hsotg_to_hcd(hsotg);
	dev_dbg(hsotg->dev, "hsotg->hcd = %p\n", hcd);

	if (!hcd) {
		dev_dbg(hsotg->dev, "%s: dwc2_hsotg_to_hcd(hsotg) NULL!\n",
			__func__);
		return;
	}

	if (!IS_ERR_OR_NULL(hsotg->uphy))
		otg_set_host(hsotg->uphy->otg, NULL);

	usb_remove_hcd(hcd);
	hsotg->priv = NULL;

	kmem_cache_destroy(hsotg->unaligned_cache);
	kmem_cache_destroy(hsotg->desc_hsisoc_cache);
	kmem_cache_destroy(hsotg->desc_gen_cache);

	dwc2_hcd_release(hsotg);
	usb_put_hcd(hcd);

#ifdef CONFIG_USB_DWC2_TRACK_MISSED_SOFS
	kfree(hsotg->last_frame_num_array);
	kfree(hsotg->frame_num_array);
#endif
}

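/**
 * dwc2_backup_host_registers() - Backup controller host registers.
 * When suspending usb bus, registers need to be backed up
 * if controller power is disabled once suspended.
 *
 * @hsotg: Programming view of the DWC_otg controller
 */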
int dwc2_backup_host_registers(struct dwc2_hsotg *hsotg)
{
	struct dwc2_hregs_backup *hr;
	int i;

	dev_dbg(hsotg->dev, "%s\n", __func__);

	/* Backup Host regs */
	hr = &hsotg->hr_backup;
	hr->hcfg = dwc2_readl(hsotg, HCFG);
	hr->haintmsk = dwc2_readl(hsotg, HAINTMSK);
	for (i = 0; i < hsotg->params.host_channels; ++i)
		hr->hcintmsk[i] = dwc2_readl(hsotg, HCINTMSK(i));

	hr->hprt0 = dwc2_read_hprt0(hsotg);
	hr->hfir = dwc2_readl(hsotg, HFIR);
	hr->hptxfsiz = dwc2_readl(hsotg, HPTXFSIZ);
	hr->valid = true;

	return 0;
}

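/**
 * dwc2_restore_host_registers() - Restore controller host registers.
 * When resuming usb bus, host registers need to be restored
 * if controller power was disabled once suspended.
 *
 * @hsotg: Programming view of the DWC_otg controller
 */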
int dwc2_restore_host_registers(struct dwc2_hsotg *hsotg)
{
	struct dwc2_hregs_backup *hr;
	int i;

	dev_dbg(hsotg->dev, "%s\n", __func__);

	/* Restore host regs */
	hr = &hsotg->hr_backup;
	if (!hr->valid) {
		dev_err(hsotg->dev, "%s: no host registers to restore\n",
			__func__);
		return -EINVAL;
	}
	hr->valid = false;

	dwc2_writel(hsotg, hr->hcfg, HCFG);
	dwc2_writel(hsotg, hr->haintmsk, HAINTMSK);

	for (i = 0; i < hsotg->params.host_channels; ++i)
		dwc2_writel(hsotg, hr->hcintmsk[i], HCINTMSK(i));

	dwc2_writel(hsotg, hr->hprt0, HPRT0);
	dwc2_writel(hsotg, hr->hfir, HFIR);
	dwc2_writel(hsotg, hr->hptxfsiz, HPTXFSIZ);
	hsotg->frame_number = 0;

	return 0;
}

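/**
 * dwc2_host_enter_hibernation() - Put controller in Hibernation.
 *
 * @hsotg: Programming view of the DWC_otg controller
 */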
int dwc2_host_enter_hibernation(struct dwc2_hsotg *hsotg)
{
	unsigned long flags;
	int ret = 0;
	u32 hprt0;
	u32 pcgcctl;
	u32 gusbcfg;
	u32 gpwrdn;

	dev_dbg(hsotg->dev, "Preparing host for hibernation\n");
	ret = dwc2_backup_global_registers(hsotg);
	if (ret) {
		dev_err(hsotg->dev, "%s: failed to backup global registers\n",
			__func__);
		return ret;
	}
	ret = dwc2_backup_host_registers(hsotg);
	if (ret) {
		dev_err(hsotg->dev, "%s: failed to backup host registers\n",
			__func__);
		return ret;
	}

	/* Enter USB Suspend Mode */
	hprt0 = dwc2_readl(hsotg, HPRT0);
	hprt0 |= HPRT0_SUSP;
	hprt0 &= ~HPRT0_ENA;
	dwc2_writel(hsotg, hprt0, HPRT0);

	/* Wait for the HPRT0.PrtSusp register field to be set */
	if (dwc2_hsotg_wait_bit_set(hsotg, HPRT0, HPRT0_SUSP, 5000))
		dev_warn(hsotg->dev, "Suspend wasn't generated\n");

	/*
	 * We need to disable interrupts to prevent servicing of any IRQ
	 * while going into hibernation
	 */
	spin_lock_irqsave(&hsotg->lock, flags);
	hsotg->lx_state = DWC2_L2;

	gusbcfg = dwc2_readl(hsotg, GUSBCFG);
	if (gusbcfg & GUSBCFG_ULPI_UTMI_SEL) {
		/* ULPI interface: suspend the PHY clock first */
		pcgcctl = dwc2_readl(hsotg, PCGCTL);
		pcgcctl |= PCGCTL_STOPPCLK;
		dwc2_writel(hsotg, pcgcctl, PCGCTL);
		udelay(10);

		gpwrdn = dwc2_readl(hsotg, GPWRDN);
		gpwrdn |= GPWRDN_PMUACTV;
		dwc2_writel(hsotg, gpwrdn, GPWRDN);
		udelay(10);
	} else {
		/* UTMI+ interface: activate the PMU first */
		gpwrdn = dwc2_readl(hsotg, GPWRDN);
		gpwrdn |= GPWRDN_PMUACTV;
		dwc2_writel(hsotg, gpwrdn, GPWRDN);
		udelay(10);

		pcgcctl = dwc2_readl(hsotg, PCGCTL);
		pcgcctl |= PCGCTL_STOPPCLK;
		dwc2_writel(hsotg, pcgcctl, PCGCTL);
		udelay(10);
	}

	/* Enable interrupts from wake up logic */
	gpwrdn = dwc2_readl(hsotg, GPWRDN);
	gpwrdn |= GPWRDN_PMUINTSEL;
	dwc2_writel(hsotg, gpwrdn, GPWRDN);
	udelay(10);

	/* Unmask host mode interrupts in GPWRDN */
	gpwrdn = dwc2_readl(hsotg, GPWRDN);
	gpwrdn |= GPWRDN_DISCONN_DET_MSK;
	gpwrdn |= GPWRDN_LNSTSCHG_MSK;
	gpwrdn |= GPWRDN_STS_CHGINT_MSK;
	dwc2_writel(hsotg, gpwrdn, GPWRDN);
	udelay(10);

	/* Enable Power Down Clamp */
	gpwrdn = dwc2_readl(hsotg, GPWRDN);
	gpwrdn |= GPWRDN_PWRDNCLMP;
	dwc2_writel(hsotg, gpwrdn, GPWRDN);
	udelay(10);

	/* Switch off VDD */
	gpwrdn = dwc2_readl(hsotg, GPWRDN);
	gpwrdn |= GPWRDN_PWRDNSWTCH;
	dwc2_writel(hsotg, gpwrdn, GPWRDN);

	hsotg->hibernated = 1;
	hsotg->bus_suspended = 1;
	dev_dbg(hsotg->dev, "Host hibernation completed\n");
	spin_unlock_irqrestore(&hsotg->lock, flags);
	return ret;
}

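/**
 * dwc2_host_exit_hibernation() - Exit Host mode hibernation.
 *
 * @hsotg: Programming view of the DWC_otg controller
 * @rem_wakeup: indicates whether resume is initiated by remote wakeup
 * @reset: indicates whether resume is initiated by reset
 *
 * Return: non-zero on failure, 0 otherwise.
 *
 * This function handles exit from hibernation on host-initiated resume/reset
 * or device-initiated remote wakeup.
 */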
int dwc2_host_exit_hibernation(struct dwc2_hsotg *hsotg, int rem_wakeup,
			       int reset)
{
	u32 gpwrdn;
	u32 hprt0;
	int ret = 0;
	struct dwc2_gregs_backup *gr;
	struct dwc2_hregs_backup *hr;

	gr = &hsotg->gr_backup;
	hr = &hsotg->hr_backup;

	dev_dbg(hsotg->dev,
		"%s: called with rem_wakeup = %d reset = %d\n",
		__func__, rem_wakeup, reset);

	dwc2_hib_restore_common(hsotg, rem_wakeup, 1);
	hsotg->hibernated = 0;

	/*
	 * Allow the core to settle in host mode after restore; without this
	 * delay mode mismatch interrupts can occur because the core still
	 * reports device mode right after restore.
	 */
	mdelay(100);

	/* Clear all pending interrupts */
	dwc2_writel(hsotg, 0xffffffff, GINTSTS);

	/* De-assert Restore */
	gpwrdn = dwc2_readl(hsotg, GPWRDN);
	gpwrdn &= ~GPWRDN_RESTORE;
	dwc2_writel(hsotg, gpwrdn, GPWRDN);
	udelay(10);

	/* Restore GUSBCFG, HCFG */
	dwc2_writel(hsotg, gr->gusbcfg, GUSBCFG);
	dwc2_writel(hsotg, hr->hcfg, HCFG);

	/* De-assert Wakeup Logic */
	gpwrdn = dwc2_readl(hsotg, GPWRDN);
	gpwrdn &= ~GPWRDN_PMUACTV;
	dwc2_writel(hsotg, gpwrdn, GPWRDN);
	udelay(10);

	hprt0 = hr->hprt0;
	hprt0 |= HPRT0_PWR;
	hprt0 &= ~HPRT0_ENA;
	hprt0 &= ~HPRT0_SUSP;
	dwc2_writel(hsotg, hprt0, HPRT0);

	hprt0 = hr->hprt0;
	hprt0 |= HPRT0_PWR;
	hprt0 &= ~HPRT0_ENA;
	hprt0 &= ~HPRT0_SUSP;

	if (reset) {
		hprt0 |= HPRT0_RST;
		dwc2_writel(hsotg, hprt0, HPRT0);

		/* Wait for Reset to Complete */
		mdelay(60);
		hprt0 &= ~HPRT0_RST;
		dwc2_writel(hsotg, hprt0, HPRT0);
	} else {
		hprt0 |= HPRT0_RES;
		dwc2_writel(hsotg, hprt0, HPRT0);

		/* Wait for Resume time and then program HPRT again */
		mdelay(100);
		hprt0 &= ~HPRT0_RES;
		dwc2_writel(hsotg, hprt0, HPRT0);
	}

	/* Clear all interrupt status */
	hprt0 = dwc2_readl(hsotg, HPRT0);
	hprt0 |= HPRT0_CONNDET;
	hprt0 |= HPRT0_ENACHG;
	hprt0 &= ~HPRT0_ENA;
	dwc2_writel(hsotg, hprt0, HPRT0);

	hprt0 = dwc2_readl(hsotg, HPRT0);

	/* Clear all pending interrupts */
	dwc2_writel(hsotg, 0xffffffff, GINTSTS);

	/* Restore global registers */
	ret = dwc2_restore_global_registers(hsotg);
	if (ret) {
		dev_err(hsotg->dev, "%s: failed to restore registers\n",
			__func__);
		return ret;
	}

	/* Restore host registers */
	ret = dwc2_restore_host_registers(hsotg);
	if (ret) {
		dev_err(hsotg->dev, "%s: failed to restore host registers\n",
			__func__);
		return ret;
	}

	if (rem_wakeup) {
		dwc2_hcd_rem_wakeup(hsotg);
		/*
		 * Set "port_connect_status_change" flag to re-enumerate,
		 * because after exit from hibernation port connection status
		 * is not detected.
		 */
		hsotg->flags.b.port_connect_status_change = 1;
	}

	hsotg->hibernated = 0;
	hsotg->bus_suspended = 0;
	hsotg->lx_state = DWC2_L0;
	dev_dbg(hsotg->dev, "Host hibernation restore complete\n");
	return ret;
}

bool dwc2_host_can_poweroff_phy(struct dwc2_hsotg *dwc2)
{
	struct usb_device *root_hub = dwc2_hsotg_to_hcd(dwc2)->self.root_hub;

	/* If the controller isn't allowed to wakeup then we can power off. */
	if (!device_may_wakeup(dwc2->dev))
		return true;

	/*
	 * We don't want to power off the PHY if something under the
	 * root hub has wakeup enabled.
	 */
	if (usb_wakeup_enabled_descendants(root_hub))
		return false;

	/* No reason to keep the PHY powered, so allow poweroff */
	return true;
}

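/**
 * dwc2_host_enter_partial_power_down() - Put controller in partial
 * power down.
 *
 * @hsotg: Programming view of the DWC_otg controller
 *
 * Return: non-zero if failed to enter host partial power down.
 *
 * This function is for entering Host mode partial power down.
 */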
int dwc2_host_enter_partial_power_down(struct dwc2_hsotg *hsotg)
{
	u32 pcgcctl;
	u32 hprt0;
	int ret = 0;

	dev_dbg(hsotg->dev, "Entering host partial power down started.\n");

	/* Put this port in suspend mode. */
	hprt0 = dwc2_read_hprt0(hsotg);
	hprt0 |= HPRT0_SUSP;
	dwc2_writel(hsotg, hprt0, HPRT0);
	udelay(5);

	/* Wait for the HPRT0.PrtSusp register field to be set */
	if (dwc2_hsotg_wait_bit_set(hsotg, HPRT0, HPRT0_SUSP, 3000))
		dev_warn(hsotg->dev, "Suspend wasn't generated\n");

	/* Backup all registers */
	ret = dwc2_backup_global_registers(hsotg);
	if (ret) {
		dev_err(hsotg->dev, "%s: failed to backup global registers\n",
			__func__);
		return ret;
	}

	ret = dwc2_backup_host_registers(hsotg);
	if (ret) {
		dev_err(hsotg->dev, "%s: failed to backup host registers\n",
			__func__);
		return ret;
	}

	/*
	 * Clear any pending interrupts since dwc2 will not be able to
	 * clear them after entering partial_power_down.
	 */
	dwc2_writel(hsotg, 0xffffffff, GINTSTS);

	/* Put the controller in low power state */
	pcgcctl = dwc2_readl(hsotg, PCGCTL);

	pcgcctl |= PCGCTL_PWRCLMP;
	dwc2_writel(hsotg, pcgcctl, PCGCTL);
	udelay(5);

	pcgcctl |= PCGCTL_RSTPDWNMODULE;
	dwc2_writel(hsotg, pcgcctl, PCGCTL);
	udelay(5);

	pcgcctl |= PCGCTL_STOPPCLK;
	dwc2_writel(hsotg, pcgcctl, PCGCTL);

	/* Set in_ppd flag to 1 as here core enters suspend. */
	hsotg->in_ppd = 1;
	hsotg->lx_state = DWC2_L2;
	hsotg->bus_suspended = true;

	dev_dbg(hsotg->dev, "Entering host partial power down completed.\n");

	return ret;
}

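/**
 * dwc2_host_exit_partial_power_down() - Exit controller from host partial
 * power down.
 *
 * @hsotg: Programming view of the DWC_otg controller
 * @rem_wakeup: indicates whether resume is initiated by remote wakeup
 * @restore: indicates whether registers need to be restored
 *
 * Return: non-zero if failed to exit host partial power down.
 *
 * This function is for exiting from Host mode partial power down.
 */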
int dwc2_host_exit_partial_power_down(struct dwc2_hsotg *hsotg,
				      int rem_wakeup, bool restore)
{
	u32 pcgcctl;
	int ret = 0;
	u32 hprt0;

	dev_dbg(hsotg->dev, "Exiting host partial power down started.\n");

	pcgcctl = dwc2_readl(hsotg, PCGCTL);
	pcgcctl &= ~PCGCTL_STOPPCLK;
	dwc2_writel(hsotg, pcgcctl, PCGCTL);
	udelay(5);

	pcgcctl = dwc2_readl(hsotg, PCGCTL);
	pcgcctl &= ~PCGCTL_PWRCLMP;
	dwc2_writel(hsotg, pcgcctl, PCGCTL);
	udelay(5);

	pcgcctl = dwc2_readl(hsotg, PCGCTL);
	pcgcctl &= ~PCGCTL_RSTPDWNMODULE;
	dwc2_writel(hsotg, pcgcctl, PCGCTL);

	udelay(100);
	if (restore) {
		ret = dwc2_restore_global_registers(hsotg);
		if (ret) {
			dev_err(hsotg->dev, "%s: failed to restore registers\n",
				__func__);
			return ret;
		}

		ret = dwc2_restore_host_registers(hsotg);
		if (ret) {
			dev_err(hsotg->dev, "%s: failed to restore host registers\n",
				__func__);
			return ret;
		}
	}

	/* Drive resume signaling and exit suspend mode on the port. */
	hprt0 = dwc2_read_hprt0(hsotg);
	hprt0 |= HPRT0_RES;
	hprt0 &= ~HPRT0_SUSP;
	dwc2_writel(hsotg, hprt0, HPRT0);
	udelay(5);

	if (!rem_wakeup) {
		/* Stop driving resume signaling on the port. */
		hprt0 = dwc2_read_hprt0(hsotg);
		hprt0 &= ~HPRT0_RES;
		dwc2_writel(hsotg, hprt0, HPRT0);

		hsotg->bus_suspended = false;
	} else {
		/* Turn on the port power bit. */
		hprt0 = dwc2_read_hprt0(hsotg);
		hprt0 |= HPRT0_PWR;
		dwc2_writel(hsotg, hprt0, HPRT0);

		/* Connect hcd. */
		dwc2_hcd_connect(hsotg);

		mod_timer(&hsotg->wkp_timer,
			  jiffies + msecs_to_jiffies(71));
	}

	/* Clear in_ppd and set lx_state as the core has left suspend. */
	hsotg->in_ppd = 0;
	hsotg->lx_state = DWC2_L0;

	dev_dbg(hsotg->dev, "Exiting host partial power down completed.\n");
	return ret;
}

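/**
 * dwc2_host_enter_clock_gating() - Put controller in clock gating.
 *
 * @hsotg: Programming view of the DWC_otg controller
 *
 * This function is for entering Host mode clock gating.
 */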
void dwc2_host_enter_clock_gating(struct dwc2_hsotg *hsotg)
{
	u32 hprt0;
	u32 pcgctl;

	dev_dbg(hsotg->dev, "Entering host clock gating.\n");

	/* Put this port in suspend mode. */
	hprt0 = dwc2_read_hprt0(hsotg);
	hprt0 |= HPRT0_SUSP;
	dwc2_writel(hsotg, hprt0, HPRT0);

	/* Stop the Phy Clock as suspend is received. */
	pcgctl = dwc2_readl(hsotg, PCGCTL);
	pcgctl |= PCGCTL_STOPPCLK;
	dwc2_writel(hsotg, pcgctl, PCGCTL);
	udelay(5);

	/* Gate hclk as suspend is received. */
	pcgctl = dwc2_readl(hsotg, PCGCTL);
	pcgctl |= PCGCTL_GATEHCLK;
	dwc2_writel(hsotg, pcgctl, PCGCTL);
	udelay(5);

	hsotg->bus_suspended = true;
	hsotg->lx_state = DWC2_L2;
}

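/**
 * dwc2_host_exit_clock_gating() - Exit controller from clock gating.
 *
 * @hsotg: Programming view of the DWC_otg controller
 * @rem_wakeup: indicates whether resume is initiated by remote wakeup
 *
 * This function is for exiting Host mode clock gating.
 */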
void dwc2_host_exit_clock_gating(struct dwc2_hsotg *hsotg, int rem_wakeup)
{
	u32 hprt0;
	u32 pcgctl;

	dev_dbg(hsotg->dev, "Exiting host clock gating.\n");

	/* Clear the Gate hclk. */
	pcgctl = dwc2_readl(hsotg, PCGCTL);
	pcgctl &= ~PCGCTL_GATEHCLK;
	dwc2_writel(hsotg, pcgctl, PCGCTL);
	udelay(5);

	/* Clear the Phy Clock bit. */
	pcgctl = dwc2_readl(hsotg, PCGCTL);
	pcgctl &= ~PCGCTL_STOPPCLK;
	dwc2_writel(hsotg, pcgctl, PCGCTL);
	udelay(5);

	/* Drive resume signaling and exit suspend mode on the port. */
	hprt0 = dwc2_read_hprt0(hsotg);
	hprt0 |= HPRT0_RES;
	hprt0 &= ~HPRT0_SUSP;
	dwc2_writel(hsotg, hprt0, HPRT0);
	udelay(5);

	if (!rem_wakeup) {
		/* In case of port resume need to wait for 40 ms */
		msleep(USB_RESUME_TIMEOUT);

		/* Stop driving resume signaling on the port. */
		hprt0 = dwc2_read_hprt0(hsotg);
		hprt0 &= ~HPRT0_RES;
		dwc2_writel(hsotg, hprt0, HPRT0);

		hsotg->bus_suspended = false;
		hsotg->lx_state = DWC2_L0;
	} else {
		mod_timer(&hsotg->wkp_timer,
			  jiffies + msecs_to_jiffies(71));
	}
}