/* Timeout and polling interval, in microseconds. */
#define STANDARD_TIMEOUT_MICROSECOND (1000)
#define ONE_MICROSECOND (1)
/* Update a register field: clear the field with mask, then write value. */
#define UPDATE_REG_BITS(reg, value, offset, mask)                              \
	{                                                                      \
		reg &= ~(mask);                                                \
		reg |= (value << offset);                                      \
	}
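/*
 * Illustrative use of UPDATE_REG_BITS (a sketch against a local
 * uint32_t shadow, not part of the driver):
 *
 *     uint32_t ctrl = 0;
 *     UPDATE_REG_BITS(ctrl, QM_DMA_ADDRESS_INCREMENT,
 *                     QM_DMA_CTL_L_SINC_OFFSET, QM_DMA_CTL_L_SINC_MASK);
 */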
/* Mask covering all supported DMA channels. */
#define CHANNEL_MASK_ALL (BIT(QM_DMA_CHANNEL_NUM) - 1)
/* DMA address increment type. */
typedef enum {
	QM_DMA_ADDRESS_INCREMENT = 0x0,
	QM_DMA_ADDRESS_DECREMENT = 0x1,
	QM_DMA_ADDRESS_NO_CHANGE = 0x2
} qm_dma_address_increment_t;
/* DMA driver private channel configuration. */
typedef struct dma_cfg_prv_t {
	void *callback_context; /* Context returned with the callback. */
	/* Client callback, invoked on transfer completion or error. */
	void (*client_callback)(void *callback_context, uint32_t len,
				int error_code);
	/* Tail of the linked list used for multiblock transfers. */
	qm_dma_linked_list_item_t *lli_tail;
	uint16_t num_blocks_per_buffer;  /* Contiguous blocks per buffer. */
	uint16_t num_blocks_int_pending; /* Blocks with interrupt pending. */
	bool transfer_type_ll_circular;  /* Circular linked-list transfer. */
} dma_cfg_prv_t;
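/*
 * Sketch of a client callback matching the client_callback signature
 * above (the function names here are illustrative, not part of the
 * driver):
 *
 *     static void my_dma_done(void *ctx, uint32_t len, int error_code)
 *     {
 *             if (0 == error_code) {
 *                     process_received_bytes(ctx, len);
 *             }
 *     }
 */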
/* Return the length in bytes of the channel's configured transfer. */
static __inline__ uint32_t
dma_get_transfer_length(const qm_dma_t dma,
			const qm_dma_channel_id_t channel_id,
			const dma_cfg_prv_t *prv_cfg)
{
	uint32_t source_transfer_width;
	uint32_t transfer_length;
	uint32_t ctrl_low, ctrl_high;

	QM_ASSERT(prv_cfg != NULL);
	if (NULL == prv_cfg->lli_tail) {
		/* Contiguous transfer: read the channel registers. */
		volatile qm_dma_chan_reg_t *chan_reg =
		    &QM_DMA[dma]->chan_reg[channel_id];
		ctrl_low = chan_reg->ctrl_low;
		ctrl_high = chan_reg->ctrl_high;
	} else {
		/* Linked-list transfer: read the last configured item. */
		ctrl_low = prv_cfg->lli_tail->ctrl_low;
		ctrl_high = prv_cfg->lli_tail->ctrl_high;
	}

	source_transfer_width = ((ctrl_low & QM_DMA_CTL_L_SRC_TR_WIDTH_MASK) >>
				 QM_DMA_CTL_L_SRC_TR_WIDTH_OFFSET);
	transfer_length = ((ctrl_high & QM_DMA_CTL_H_BLOCK_TS_MASK) >>
			   QM_DMA_CTL_H_BLOCK_TS_OFFSET) *
			  prv_cfg->num_blocks_per_buffer;

	/* The encoded width is log2 of the element size in bytes. */
	return (transfer_length << source_transfer_width);
}
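/*
 * Worked example (illustrative): a 32-bit source width is encoded as
 * 0x2, so a block size of 64 items with num_blocks_per_buffer == 4
 * gives (64 * 4) << 2 == 1024 bytes.
 */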
/* Disable the DMA controller; fails if channels are still active. */
static __inline__ int dma_controller_disable(const qm_dma_t dma)
{
	volatile qm_dma_misc_reg_t *misc_reg = &QM_DMA[dma]->misc_reg;

	/* The enable bit reads back as set while transfers are active. */
	misc_reg->cfg_low = 0;
	if (misc_reg->cfg_low) {
		return -EIO;
	}

	return 0;
}
static __inline__ void dma_controller_enable(const qm_dma_t dma)
{
	QM_DMA[dma]->misc_reg.cfg_low = QM_DMA_MISC_CFG_DMA_EN;
}
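/*
 * Typical bring-up order (sketch): enable the controller once, then
 * enable individual channels as transfers are configured.
 *
 *     dma_controller_enable(QM_DMA_0);
 *     dma_channel_enable(QM_DMA_0, QM_DMA_CHANNEL_0);
 */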
static int dma_channel_disable(const qm_dma_t dma,
			       const qm_dma_channel_id_t channel_id)
{
	uint8_t channel_mask = BIT(channel_id);
	uint16_t timeout_us;
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];
	volatile qm_dma_misc_reg_t *misc_reg = &QM_DMA[dma]->misc_reg;

	/* Return early if the channel is already disabled. */
	if (!(misc_reg->chan_en_low & channel_mask)) {
		return 0;
	}

	/* Suspend the channel and wait until the suspend takes effect. */
	chan_reg->cfg_low |= QM_DMA_CFG_L_CH_SUSP_MASK;
	timeout_us = STANDARD_TIMEOUT_MICROSECOND;
	while ((!(chan_reg->cfg_low & QM_DMA_CFG_L_CH_SUSP_MASK)) &&
	       timeout_us) {
		clk_sys_udelay(ONE_MICROSECOND);
		timeout_us--;
	}
	if (!(chan_reg->cfg_low & QM_DMA_CFG_L_CH_SUSP_MASK)) {
		return -EIO;
	}

	/* Wait for the channel FIFO to drain. */
	timeout_us = STANDARD_TIMEOUT_MICROSECOND;
	while ((!(chan_reg->cfg_low & QM_DMA_CFG_L_FIFO_EMPTY_MASK)) &&
	       timeout_us) {
		clk_sys_udelay(ONE_MICROSECOND);
		timeout_us--;
	}

	/* Disable: write the write-enable bit with the enable bit clear. */
	misc_reg->chan_en_low = (channel_mask << QM_DMA_MISC_CHAN_EN_WE_OFFSET);
	timeout_us = STANDARD_TIMEOUT_MICROSECOND;
	while ((misc_reg->chan_en_low & channel_mask) && timeout_us) {
		clk_sys_udelay(ONE_MICROSECOND);
		timeout_us--;
	}
	if (misc_reg->chan_en_low & channel_mask) {
		return -EIO;
	}

	/* Clear the suspend bit again so the channel can be reused. */
	chan_reg->cfg_low &= ~QM_DMA_CFG_L_CH_SUSP_MASK;

	return 0;
}
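/*
 * Example (illustrative): stop a channel before reprogramming it and
 * propagate a timeout as an I/O error.
 *
 *     if (dma_channel_disable(QM_DMA_0, QM_DMA_CHANNEL_0) < 0) {
 *             return -EIO;
 *     }
 */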
static __inline__ void dma_channel_enable(const qm_dma_t dma,
					  const qm_dma_channel_id_t channel_id)
{
	uint8_t channel_mask = BIT(channel_id);

	/* Write the channel write-enable bit together with the enable bit. */
	QM_DMA[dma]->misc_reg.chan_en_low =
	    (channel_mask << QM_DMA_MISC_CHAN_EN_WE_OFFSET) | channel_mask;
}
static __inline__ void
dma_interrupt_disable(const qm_dma_t dma,
		      const qm_dma_channel_id_t channel_id)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	chan_reg->ctrl_low &= ~QM_DMA_CTL_L_INT_EN_MASK;
}
static __inline__ void
dma_interrupt_enable(const qm_dma_t dma,
		     const qm_dma_channel_id_t channel_id)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	chan_reg->ctrl_low |= QM_DMA_CTL_L_INT_EN_MASK;
}
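/*
 * Example (illustrative): unmask the channel interrupt before kicking
 * off an interrupt-driven transfer.
 *
 *     dma_interrupt_enable(QM_DMA_0, QM_DMA_CHANNEL_0);
 */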
/*
 * Select the channel's multiblock behaviour. Returns 0 on success or
 * -EINVAL for an unsupported type/direction combination.
 */
static __inline__ int
dma_set_transfer_type(const qm_dma_t dma, const qm_dma_channel_id_t channel_id,
		      const qm_dma_transfer_type_t transfer_type,
		      const qm_dma_channel_direction_t channel_direction)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	switch (transfer_type) {
	case QM_DMA_TYPE_SINGLE:
		/* Single block: no linked list and no reload. */
		chan_reg->ctrl_low &= ~QM_DMA_CTL_L_LLP_SRC_EN_MASK;
		chan_reg->ctrl_low &= ~QM_DMA_CTL_L_LLP_DST_EN_MASK;
		chan_reg->cfg_low &= ~QM_DMA_CFG_L_RELOAD_SRC_MASK;
		chan_reg->cfg_low &= ~QM_DMA_CFG_L_RELOAD_DST_MASK;
		break;

	case QM_DMA_TYPE_MULTI_CONT:
		/* Contiguous multiblock: reload the peripheral side. */
		switch (channel_direction) {
		case QM_DMA_PERIPHERAL_TO_MEMORY:
			chan_reg->cfg_low |= QM_DMA_CFG_L_RELOAD_SRC_MASK;
			chan_reg->cfg_low &= ~QM_DMA_CFG_L_RELOAD_DST_MASK;
			break;
		case QM_DMA_MEMORY_TO_PERIPHERAL:
			chan_reg->cfg_low |= QM_DMA_CFG_L_RELOAD_DST_MASK;
			chan_reg->cfg_low &= ~QM_DMA_CFG_L_RELOAD_SRC_MASK;
			break;
		default:
			return -EINVAL;
		}
		chan_reg->ctrl_low &= ~QM_DMA_CTL_L_LLP_SRC_EN_MASK;
		chan_reg->ctrl_low &= ~QM_DMA_CTL_L_LLP_DST_EN_MASK;
		break;

	case QM_DMA_TYPE_MULTI_LL:
	case QM_DMA_TYPE_MULTI_LL_CIRCULAR:
		/* Linked list multiblock: status updates disabled, LLP
		 * enabled on both sides, no reload. */
		chan_reg->cfg_high &= ~QM_DMA_CFG_H_DS_UPD_EN_MASK;
		chan_reg->cfg_high &= ~QM_DMA_CFG_H_SS_UPD_EN_MASK;
		chan_reg->ctrl_low |= QM_DMA_CTL_L_LLP_SRC_EN_MASK;
		chan_reg->cfg_low &= ~QM_DMA_CFG_L_RELOAD_SRC_MASK;
		chan_reg->ctrl_low |= QM_DMA_CTL_L_LLP_DST_EN_MASK;
		chan_reg->cfg_low &= ~QM_DMA_CFG_L_RELOAD_DST_MASK;
		break;

	default:
		return -EINVAL;
	}

	return 0;
}
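/*
 * Example (illustrative): configure channel 0 for contiguous
 * multiblock reception from a peripheral.
 *
 *     int rc = dma_set_transfer_type(QM_DMA_0, QM_DMA_CHANNEL_0,
 *                                    QM_DMA_TYPE_MULTI_CONT,
 *                                    QM_DMA_PERIPHERAL_TO_MEMORY);
 */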
/* Derive the programmed transfer type back from the registers. */
static __inline__ qm_dma_transfer_type_t
dma_get_transfer_type(const qm_dma_t dma,
		      const qm_dma_channel_id_t channel_id,
		      const dma_cfg_prv_t *prv_cfg)
{
	qm_dma_transfer_type_t transfer_type;
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	if (0 == (chan_reg->ctrl_low & (QM_DMA_CTL_L_LLP_SRC_EN_MASK |
					QM_DMA_CTL_L_LLP_DST_EN_MASK))) {
		if (0 == (chan_reg->cfg_low & (QM_DMA_CFG_L_RELOAD_SRC_MASK |
					       QM_DMA_CFG_L_RELOAD_DST_MASK))) {
			transfer_type = QM_DMA_TYPE_SINGLE;
		} else {
			transfer_type = QM_DMA_TYPE_MULTI_CONT;
		}
	} else {
		transfer_type = (prv_cfg->transfer_type_ll_circular)
				    ? QM_DMA_TYPE_MULTI_LL_CIRCULAR
				    : QM_DMA_TYPE_MULTI_LL;
	}

	return transfer_type;
}
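/*
 * Example (illustrative): branch on the programmed type when tearing
 * a channel down.
 *
 *     if (QM_DMA_TYPE_MULTI_LL_CIRCULAR ==
 *         dma_get_transfer_type(QM_DMA_0, QM_DMA_CHANNEL_0, prv_cfg)) {
 *             ... break the circular list before disabling ...
 *     }
 */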
static __inline__ void
dma_set_source_transfer_width(const qm_dma_t dma,
			      const qm_dma_channel_id_t channel_id,
			      const qm_dma_transfer_width_t transfer_width)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	UPDATE_REG_BITS(chan_reg->ctrl_low, transfer_width,
			QM_DMA_CTL_L_SRC_TR_WIDTH_OFFSET,
			QM_DMA_CTL_L_SRC_TR_WIDTH_MASK);
}
static __inline__ void
dma_set_destination_transfer_width(const qm_dma_t dma,
				   const qm_dma_channel_id_t channel_id,
				   const qm_dma_transfer_width_t transfer_width)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	UPDATE_REG_BITS(chan_reg->ctrl_low, transfer_width,
			QM_DMA_CTL_L_DST_TR_WIDTH_OFFSET,
			QM_DMA_CTL_L_DST_TR_WIDTH_MASK);
}
static __inline__ void
dma_set_source_burst_length(const qm_dma_t dma,
			    const qm_dma_channel_id_t channel_id,
			    const qm_dma_burst_length_t burst_length)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	UPDATE_REG_BITS(chan_reg->ctrl_low, burst_length,
			QM_DMA_CTL_L_SRC_MSIZE_OFFSET,
			QM_DMA_CTL_L_SRC_MSIZE_MASK);
}
static __inline__ void
dma_set_destination_burst_length(const qm_dma_t dma,
				 const qm_dma_channel_id_t channel_id,
				 const qm_dma_burst_length_t burst_length)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	UPDATE_REG_BITS(chan_reg->ctrl_low, burst_length,
			QM_DMA_CTL_L_DEST_MSIZE_OFFSET,
			QM_DMA_CTL_L_DEST_MSIZE_MASK);
}
static __inline__ void
dma_set_transfer_direction(const qm_dma_t dma,
			   const qm_dma_channel_id_t channel_id,
			   const qm_dma_channel_direction_t transfer_direction)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	UPDATE_REG_BITS(chan_reg->ctrl_low, transfer_direction,
			QM_DMA_CTL_L_TT_FC_OFFSET, QM_DMA_CTL_L_TT_FC_MASK);
}
static __inline__ void
dma_set_source_increment(const qm_dma_t dma,
			 const qm_dma_channel_id_t channel_id,
			 const qm_dma_address_increment_t address_increment)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	UPDATE_REG_BITS(chan_reg->ctrl_low, address_increment,
			QM_DMA_CTL_L_SINC_OFFSET, QM_DMA_CTL_L_SINC_MASK);
}
static __inline__ void
dma_set_destination_increment(const qm_dma_t dma,
			      const qm_dma_channel_id_t channel_id,
			      const qm_dma_address_increment_t address_increment)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	UPDATE_REG_BITS(chan_reg->ctrl_low, address_increment,
			QM_DMA_CTL_L_DINC_OFFSET, QM_DMA_CTL_L_DINC_MASK);
}
static __inline__ void
dma_set_handshake_interface(const qm_dma_t dma,
			    const qm_dma_channel_id_t channel_id,
			    const qm_dma_handshake_interface_t handshake_interface)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	/* Program both sides; only the peripheral side is used. */
	UPDATE_REG_BITS(chan_reg->cfg_high, handshake_interface,
			QM_DMA_CFG_H_SRC_PER_OFFSET, QM_DMA_CFG_H_SRC_PER_MASK);

	UPDATE_REG_BITS(chan_reg->cfg_high, handshake_interface,
			QM_DMA_CFG_H_DEST_PER_OFFSET,
			QM_DMA_CFG_H_DEST_PER_MASK);
}
static __inline__ void
dma_set_handshake_type(const qm_dma_t dma,
		       const qm_dma_channel_id_t channel_id,
		       const uint8_t handshake_type)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	/* 0 selects hardware handshaking, 1 software handshaking. */
	UPDATE_REG_BITS(chan_reg->cfg_low, handshake_type,
			QM_DMA_CFG_L_HS_SEL_SRC_OFFSET,
			QM_DMA_CFG_L_HS_SEL_SRC_MASK);

	UPDATE_REG_BITS(chan_reg->cfg_low, handshake_type,
			QM_DMA_CFG_L_HS_SEL_DST_OFFSET,
			QM_DMA_CFG_L_HS_SEL_DST_MASK);
}
static __inline__ void
dma_set_handshake_polarity(const qm_dma_t dma,
			   const qm_dma_channel_id_t channel_id,
			   const qm_dma_handshake_polarity_t handshake_polarity)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	UPDATE_REG_BITS(chan_reg->cfg_low, handshake_polarity,
			QM_DMA_CFG_L_SRC_HS_POL_OFFSET,
			QM_DMA_CFG_L_SRC_HS_POL_MASK);

	UPDATE_REG_BITS(chan_reg->cfg_low, handshake_polarity,
			QM_DMA_CFG_L_DST_HS_POL_OFFSET,
			QM_DMA_CFG_L_DST_HS_POL_MASK);
}
static __inline__ void
dma_set_source_address(const qm_dma_t dma,
		       const qm_dma_channel_id_t channel_id,
		       const uint32_t source_address)
{
	QM_DMA[dma]->chan_reg[channel_id].sar_low = source_address;
}
static __inline__ void
dma_set_destination_address(const qm_dma_t dma,
			    const qm_dma_channel_id_t channel_id,
			    const uint32_t destination_address)
{
	QM_DMA[dma]->chan_reg[channel_id].dar_low = destination_address;
}
static __inline__ void dma_set_block_size(const qm_dma_t dma,
					  const qm_dma_channel_id_t channel_id,
					  const uint32_t block_size)
{
	volatile qm_dma_chan_reg_t *chan_reg =
	    &QM_DMA[dma]->chan_reg[channel_id];

	/* Block size is expressed in transfer-width items, not bytes. */
	UPDATE_REG_BITS(chan_reg->ctrl_high, block_size,
			QM_DMA_CTL_H_BLOCK_TS_OFFSET,
			QM_DMA_CTL_H_BLOCK_TS_MASK);
}
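/*
 * End-to-end sketch (illustrative): program a single-block,
 * memory-to-peripheral transfer on channel 0. The values buf,
 * periph_reg and len are placeholders; a real client would take them
 * from its transfer configuration. Note len is in items, not bytes.
 *
 *     dma_set_transfer_type(QM_DMA_0, QM_DMA_CHANNEL_0,
 *                           QM_DMA_TYPE_SINGLE,
 *                           QM_DMA_MEMORY_TO_PERIPHERAL);
 *     dma_set_transfer_direction(QM_DMA_0, QM_DMA_CHANNEL_0,
 *                                QM_DMA_MEMORY_TO_PERIPHERAL);
 *     dma_set_source_increment(QM_DMA_0, QM_DMA_CHANNEL_0,
 *                              QM_DMA_ADDRESS_INCREMENT);
 *     dma_set_destination_increment(QM_DMA_0, QM_DMA_CHANNEL_0,
 *                                   QM_DMA_ADDRESS_NO_CHANGE);
 *     dma_set_source_address(QM_DMA_0, QM_DMA_CHANNEL_0, (uint32_t)buf);
 *     dma_set_destination_address(QM_DMA_0, QM_DMA_CHANNEL_0, periph_reg);
 *     dma_set_block_size(QM_DMA_0, QM_DMA_CHANNEL_0, len);
 *     dma_channel_enable(QM_DMA_0, QM_DMA_CHANNEL_0);
 */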