1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25#ifndef __INTEL_MID_DMAC_REGS_H__
26#define __INTEL_MID_DMAC_REGS_H__
27
28#include <linux/dmaengine.h>
29#include <linux/dmapool.h>
30#include <linux/pci_ids.h>
31
32#define INTEL_MID_DMA_DRIVER_VERSION "1.1.0"
33
#define REG_BIT0 0x00000001
#define REG_BIT8 0x00000100
#define INT_MASK_WE 0x8
#define CLEAR_DONE 0xFFFFEFFF

/*
 * Per-channel interrupt-mask / channel-enable helpers.  Each macro builds
 * a value with the channel's status bit (bit chan_num, from REG_BIT0)
 * and/or its matching bit in the second byte (bit chan_num + 8, from
 * REG_BIT8).  The argument is parenthesized so that expressions such as
 * UNMASK_INTR_REG(a | b) expand correctly.
 */
#define UNMASK_INTR_REG(chan_num) \
	((REG_BIT0 << (chan_num)) | (REG_BIT8 << (chan_num)))
#define MASK_INTR_REG(chan_num) (REG_BIT8 << (chan_num))

#define ENABLE_CHANNEL(chan_num) \
	((REG_BIT0 << (chan_num)) | (REG_BIT8 << (chan_num)))

#define DISABLE_CHANNEL(chan_num) \
	(REG_BIT8 << (chan_num))
47
#define DESCS_PER_CHANNEL 16	/* software descriptors kept per channel */

/* Size of the whole controller register space and of one channel's block */
#define DMA_REG_SIZE 0x400
#define DMA_CH_SIZE 0x58

/* Per-channel register offsets (relative to the channel's register base) */
#define SAR 0x00	/* source address */
#define DAR 0x08	/* destination address */
#define LLP 0x10	/* linked-list pointer */
#define CTL_LOW 0x18	/* control, low word */
#define CTL_HIGH 0x1C	/* control, high word */
#define CFG_LOW 0x40	/* configuration, low word */
#define CFG_HIGH 0x44	/* configuration, high word */

/* Controller-wide masked interrupt status registers */
#define STATUS_TFR 0x2E8
#define STATUS_BLOCK 0x2F0
#define STATUS_ERR 0x308

/* Controller-wide raw (unmasked) interrupt status registers */
#define RAW_TFR 0x2C0
#define RAW_BLOCK 0x2C8
#define RAW_ERR 0x2E0

/* Interrupt mask registers (one per interrupt source) */
#define MASK_TFR 0x310
#define MASK_BLOCK 0x318
#define MASK_SRC_TRAN 0x320
#define MASK_DST_TRAN 0x328
#define MASK_ERR 0x330

/* Interrupt clear registers (one per interrupt source) */
#define CLEAR_TFR 0x338
#define CLEAR_BLOCK 0x340
#define CLEAR_SRC_TRAN 0x348
#define CLEAR_DST_TRAN 0x350
#define CLEAR_ERR 0x358

/* Combined interrupt status, controller config and channel-enable */
#define INTR_STATUS 0x360
#define DMA_CFG 0x398
#define DMA_CHAN_EN 0x3A0	/* read by test_ch_en() below */
87
/*
 * CTLx register, low word.  Bit positions follow declaration order:
 * int_en is bit 0, reser2 ends at bit 31 (fields sum to 32 bits).
 * The union gives both a bitfield view and the raw u32 written to CTL_LOW.
 */
union intel_mid_dma_ctl_lo {
	struct {
		u32 int_en:1;		/* bit 0: channel interrupt enable */

		u32 dst_tr_width:3;	/* bits 1..3: destination transfer width */

		u32 src_tr_width:3;	/* bits 4..6: source transfer width */

		u32 dinc:2;		/* bits 7..8: destination address increment mode */

		u32 sinc:2;		/* bits 9..10: source address increment mode */
		u32 dst_msize:3;	/* bits 11..13: destination burst size */

		u32 src_msize:3;	/* bits 14..16: source burst size */

		u32 reser1:3;		/* bits 17..19: reserved */
		u32 tt_fc:3;		/* bits 20..22: transfer type / flow control */

		u32 dms:2;		/* bits 23..24: destination master select */
		u32 sms:2;		/* bits 25..26: source master select */
		u32 llp_dst_en:1;	/* bit 27: linked-list update enable, dst */
		u32 llp_src_en:1;	/* bit 28: linked-list update enable, src */
		u32 reser2:3;		/* bits 29..31: reserved */
	} ctlx;
	u32 ctl_lo;	/* raw 32-bit view */
};
116
/*
 * CTLx register, high word.  block_ts occupies bits 0..11 and done is
 * bit 12 -- consistent with CLEAR_DONE (0xFFFFEFFF), which clears bit 12.
 */
union intel_mid_dma_ctl_hi {
	struct {
		u32 block_ts:12;	/* bits 0..11: block transfer size */
		u32 done:1;		/* bit 12: block-done flag */
		u32 reser:19;		/* bits 13..31: reserved */
	} ctlx;
	u32 ctl_hi;	/* raw 32-bit view */

};
126
127
/*
 * CFGx register, low word.  Bit positions follow declaration order
 * (fields sum to 32 bits); raw view is written to CFG_LOW.
 */
union intel_mid_dma_cfg_lo {
	struct {
		u32 reser1:5;		/* bits 0..4: reserved */
		u32 ch_prior:3;		/* bits 5..7: channel priority */
		u32 ch_susp:1;		/* bit 8: channel suspend */
		u32 fifo_empty:1;	/* bit 9: channel FIFO empty */
		u32 hs_sel_dst:1;	/* bit 10: destination handshaking select */

		u32 hs_sel_src:1;	/* bit 11: source handshaking select */
		u32 reser2:6;		/* bits 12..17: reserved */
		u32 dst_hs_pol:1;	/* bit 18: destination handshake polarity */
		u32 src_hs_pol:1;	/* bit 19: source handshake polarity */
		u32 max_abrst:10;	/* bits 20..29: maximum AMBA burst length */
		u32 reload_src:1;	/* bit 30: automatic source reload */
		u32 reload_dst:1;	/* bit 31: automatic destination reload */
	} cfgx;
	u32 cfg_lo;	/* raw 32-bit view */
};
146
/*
 * CFGx register, high word.  Raw view is written to CFG_HIGH.
 * NOTE(review): src_per/dst_per presumably select the hardware
 * handshaking interface number -- confirm against the DMAC datasheet.
 */
union intel_mid_dma_cfg_hi {
	struct {
		u32 fcmode:1;		/* bit 0: flow-control mode */
		u32 fifo_mode:1;	/* bit 1: FIFO mode */
		u32 protctl:3;		/* bits 2..4: protection control */
		u32 rsvd:2;		/* bits 5..6: reserved */
		u32 src_per:4;		/* bits 7..10: source handshake interface */
		u32 dst_per:4;		/* bits 11..14: destination handshake interface */
		u32 reser2:17;		/* bits 15..31: reserved */
	} cfgx;
	u32 cfg_hi;	/* raw 32-bit view */
};
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
/**
 * struct intel_mid_dma_chan - per-channel driver state
 * @chan: embedded dmaengine channel (see to_intel_mid_dma_chan())
 * @ch_regs: MMIO base of this channel's register block
 * @dma_base: MMIO base of the whole controller's register space
 * @ch_id: hardware channel number
 * @lock: protects the descriptor lists below
 * @active_list: descriptors currently submitted to hardware
 * @queue: descriptors waiting behind the active ones
 * @free_list: completed descriptors available for reuse
 * @descs_allocated: number of descriptors allocated for this channel
 * @dma: parent controller device
 * @busy: true while a transfer is in flight
 * @in_use: true while the channel is claimed by a client
 * @raw_tfr: cached copy of the RAW_TFR status for this channel
 * @raw_block: cached copy of the RAW_BLOCK status for this channel
 * @mid_slave: slave (peripheral) configuration for this channel
 */
struct intel_mid_dma_chan {
	struct dma_chan chan;
	void __iomem *ch_regs;
	void __iomem *dma_base;
	int ch_id;
	spinlock_t lock;
	struct list_head active_list;
	struct list_head queue;
	struct list_head free_list;
	unsigned int descs_allocated;
	struct middma_device *dma;
	bool busy;
	bool in_use;
	u32 raw_tfr;
	u32 raw_block;
	struct intel_mid_dma_slave *mid_slave;
};
196
197static inline struct intel_mid_dma_chan *to_intel_mid_dma_chan(
198 struct dma_chan *chan)
199{
200 return container_of(chan, struct intel_mid_dma_chan, chan);
201}
202
/* Power state of the controller, tracked in middma_device.state */
enum intel_mid_dma_state {
	RUNNING = 0,	/* controller is active */
	SUSPENDED,	/* controller is suspended (see dma_resume()) */
};
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
/**
 * struct middma_device - driver state for one DMA controller instance
 * @pdev: backing PCI device
 * @dma_base: MMIO base of the controller registers
 * @dma_pool: PCI pool used for descriptor allocations
 * @common: embedded dmaengine device (see to_middma_device())
 * @tasklet: deferred interrupt processing
 * @ch: per-channel state; MAX_CHAN is defined elsewhere in the driver
 * @pci_id: PCI device id of this controller
 * @intr_mask: mask of channel interrupt bits in use
 * @mask_reg: MMIO address used to mask/unmask controller interrupts
 * @chan_base: index of this controller's first channel
 * @max_chan: number of channels on this controller
 * @block_size: maximum DMA block size supported
 * @pimr_mask: NOTE(review): presumably flags whether a PIMR mask register
 *             exists for this device -- confirm in the driver core
 * @state: RUNNING or SUSPENDED
 */
struct middma_device {
	struct pci_dev *pdev;
	void __iomem *dma_base;
	struct pci_pool *dma_pool;
	struct dma_device common;
	struct tasklet_struct tasklet;
	struct intel_mid_dma_chan ch[MAX_CHAN];
	unsigned int pci_id;
	unsigned int intr_mask;
	void __iomem *mask_reg;
	int chan_base;
	int max_chan;
	int block_size;
	unsigned int pimr_mask;
	enum intel_mid_dma_state state;
};
240
241static inline struct middma_device *to_middma_device(struct dma_device *common)
242{
243 return container_of(common, struct middma_device, common);
244}
245
/**
 * struct intel_mid_dma_desc - software DMA transfer descriptor
 * @block: NOTE(review): presumably a per-descriptor register/block mapping --
 *         confirm against the driver core
 * @desc_node: link into the channel's active/queue/free lists
 * @txd: embedded dmaengine descriptor (see to_intel_mid_dma_desc())
 * @len: transfer length in bytes
 * @sar: source DMA address
 * @dar: destination DMA address
 * @cfg_hi: value programmed into CFG_HIGH
 * @cfg_lo: value programmed into CFG_LOW
 * @ctl_lo: value programmed into CTL_LOW
 * @ctl_hi: value programmed into CTL_HIGH
 * @lli_pool: pool the linked-list items were allocated from
 * @lli: first linked-list item of this transfer
 * @lli_phys: DMA address of @lli (programmed into LLP)
 * @lli_length: number of linked-list items
 * @current_lli: index of the linked-list item in progress
 * @next: DMA address of the next descriptor
 * @dirn: transfer direction (mem-to-dev, dev-to-mem, ...)
 * @status: dmaengine completion status
 * @width: slave bus width for this transfer
 * @cfg_mode: driver-specific transfer mode (enum declared elsewhere)
 */
struct intel_mid_dma_desc {
	void __iomem *block;
	struct list_head desc_node;
	struct dma_async_tx_descriptor txd;
	size_t len;
	dma_addr_t sar;
	dma_addr_t dar;
	u32 cfg_hi;
	u32 cfg_lo;
	u32 ctl_lo;
	u32 ctl_hi;
	struct pci_pool *lli_pool;
	struct intel_mid_dma_lli *lli;
	dma_addr_t lli_phys;
	unsigned int lli_length;
	unsigned int current_lli;
	dma_addr_t next;
	enum dma_transfer_direction dirn;
	enum dma_status status;
	enum dma_slave_buswidth width;
	enum intel_mid_dma_mode cfg_mode;

};
269
/*
 * Hardware linked-list item.  Field order mirrors the SAR/DAR/LLP/CTL
 * channel registers; packed so the in-memory layout has no padding,
 * since these entries are presumably read directly by the DMA engine
 * via the LLP register (see lli_phys in struct intel_mid_dma_desc).
 */
struct intel_mid_dma_lli {
	dma_addr_t sar;		/* source address */
	dma_addr_t dar;		/* destination address */
	dma_addr_t llp;		/* pointer to the next item */
	u32 ctl_lo;		/* CTL_LOW value for this item */
	u32 ctl_hi;		/* CTL_HIGH value for this item */
} __attribute__ ((packed));
277
278static inline int test_ch_en(void __iomem *dma, u32 ch_no)
279{
280 u32 en_reg = ioread32(dma + DMA_CHAN_EN);
281 return (en_reg >> ch_no) & 0x1;
282}
283
284static inline struct intel_mid_dma_desc *to_intel_mid_dma_desc
285 (struct dma_async_tx_descriptor *txd)
286{
287 return container_of(txd, struct intel_mid_dma_desc, txd);
288}
289
290static inline struct intel_mid_dma_slave *to_intel_mid_dma_slave
291 (struct dma_slave_config *slave)
292{
293 return container_of(slave, struct intel_mid_dma_slave, dma_slave);
294}
295
296
297int dma_resume(struct device *dev);
298
299#endif
300