dma: stm32f4x: Use terminology found in the datasheet

Each controller contains a number of streams.  In turn, each
stream contains a number of channels, which are selectable
via the stream's control register.

Change-Id: Ib75d1377cb8fc38f27e4566b275343b2ffb8da65
Signed-off-by: Lee Jones <lee.jones@linaro.org>
commit 45fc0ac496
parent ec60772103
Author: Lee Jones <lee.jones@linaro.org>
Date:   2017-02-17 08:57:04 +00:00
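
To illustrate the hierarchy the commit message describes: on STM32F4 parts
each DMA controller exposes eight streams, and each stream is routed to one
of eight request channels through the CHSEL field of its control register.
A minimal sketch, assuming the RM0090 layout where CHSEL occupies bits 27:25
of DMA_SxCR (the macro and function names here are illustrative, not taken
from the driver):

    #include <stdint.h>

    #define DMA_SxCR_CHSEL_SHIFT    25                  /* CHSEL[2:0], RM0090 */
    #define DMA_SxCR_CHSEL_MASK     (0x7 << DMA_SxCR_CHSEL_SHIFT)

    /* Route request channel 'channel' (0..7) onto a stream's control register. */
    static inline uint32_t dma_scr_select_channel(uint32_t scr, uint32_t channel)
    {
        scr &= ~DMA_SxCR_CHSEL_MASK;
        scr |= (channel & 0x7) << DMA_SxCR_CHSEL_SHIFT;
        return scr;
    }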


@@ -18,21 +18,21 @@
#include <clock_control/stm32_clock_control.h>
#define DMA_STM32_MAX_CHANNELS 8 /* Number of channels per controller */
#define DMA_STM32_MAX_STREAMS 8 /* Number of streams per controller */
#define DMA_STM32_MAX_DEVS 2 /* Number of controllers */
#define DMA_STM32_1 0 /* First DMA controller */
#define DMA_STM32_2 1 /* Second DMA controller */
#define DMA_STM32_IRQ_PRI CONFIG_DMA_0_IRQ_PRI
struct dma_stm32_chan_reg {
struct dma_stm32_stream_reg {
/* Shared registers */
uint32_t lisr;
uint32_t hisr;
uint32_t lifcr;
uint32_t hifcr;
/* Per channel registers */
/* Per stream registers */
uint32_t scr;
uint32_t sndtr;
uint32_t spar;
@@ -41,21 +41,21 @@ struct dma_stm32_chan_reg {
uint32_t sfcr;
};
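
The DMA_STM32_SCR()/DMA_STM32_SNDTR()/... accessors used throughout the diff
are defined outside this hunk.  For reference, a plausible definition based
on the RM0090 register map, where each stream owns a 0x18-byte block of
registers starting at offset 0x10 from the controller base (the offsets are
an assumption here; the driver's own header is authoritative):

    /* Per-stream register offsets from the controller base (RM0090). */
    #define DMA_STM32_STREAM_OFF(id)  (0x10 + 0x18 * (id))
    #define DMA_STM32_SCR(id)         (DMA_STM32_STREAM_OFF(id) + 0x00) /* config      */
    #define DMA_STM32_SNDTR(id)       (DMA_STM32_STREAM_OFF(id) + 0x04) /* data count  */
    #define DMA_STM32_SPAR(id)        (DMA_STM32_STREAM_OFF(id) + 0x08) /* periph addr */
    #define DMA_STM32_SM0AR(id)       (DMA_STM32_STREAM_OFF(id) + 0x0c) /* mem 0 addr  */
    #define DMA_STM32_SM1AR(id)       (DMA_STM32_STREAM_OFF(id) + 0x10) /* mem 1 addr  */
    #define DMA_STM32_SFCR(id)        (DMA_STM32_STREAM_OFF(id) + 0x14) /* FIFO ctrl   */
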
struct dma_stm32_chan {
struct dma_stm32_stream {
uint32_t id;
uint32_t direction;
struct device *dev;
struct dma_stm32_chan_reg regs;
struct dma_stm32_stream_reg regs;
bool busy;
void (*dma_callback)(struct device *dev, uint32_t channel,
void (*dma_callback)(struct device *dev, uint32_t id,
int error_code);
};
static struct dma_stm32_device {
uint32_t base;
struct device *clk;
struct dma_stm32_chan chan[DMA_STM32_MAX_CHANNELS];
struct dma_stm32_stream stream[DMA_STM32_MAX_STREAMS];
bool mem2mem;
} ddata[DMA_STM32_MAX_DEVS];
@@ -180,16 +180,16 @@ static void dma_stm32_write(struct dma_stm32_device *ddata,
sys_write32(val, ddata->base + reg);
}
static void dma_stm32_dump_reg(struct dma_stm32_device *ddata, uint32_t channel)
static void dma_stm32_dump_reg(struct dma_stm32_device *ddata, uint32_t id)
{
uint32_t scr = dma_stm32_read(ddata, DMA_STM32_SCR(channel));
uint32_t ndtr = dma_stm32_read(ddata, DMA_STM32_SNDTR(channel));
uint32_t spar = dma_stm32_read(ddata, DMA_STM32_SPAR(channel));
uint32_t sm0ar = dma_stm32_read(ddata, DMA_STM32_SM0AR(channel));
uint32_t sm1ar = dma_stm32_read(ddata, DMA_STM32_SM1AR(channel));
uint32_t sfcr = dma_stm32_read(ddata, DMA_STM32_SFCR(channel));
uint32_t scr = dma_stm32_read(ddata, DMA_STM32_SCR(id));
uint32_t ndtr = dma_stm32_read(ddata, DMA_STM32_SNDTR(id));
uint32_t spar = dma_stm32_read(ddata, DMA_STM32_SPAR(id));
uint32_t sm0ar = dma_stm32_read(ddata, DMA_STM32_SM0AR(id));
uint32_t sm1ar = dma_stm32_read(ddata, DMA_STM32_SM1AR(id));
uint32_t sfcr = dma_stm32_read(ddata, DMA_STM32_SFCR(id));
SYS_LOG_INF("Using channel: %d\n", channel);
SYS_LOG_INF("Using stream: %d\n", id);
SYS_LOG_INF("SCR: 0x%x \t(config)\n", scr);
SYS_LOG_INF("NDTR: 0x%x \t(length)\n", ndtr);
SYS_LOG_INF("SPAR: 0x%x \t(source)\n", spar);
@@ -199,84 +199,84 @@ static void dma_stm32_dump_reg(struct dma_stm32_device *ddata, uint32_t channel)
}
static uint32_t dma_stm32_irq_status(struct dma_stm32_device *ddata,
uint32_t channel)
uint32_t id)
{
uint32_t irqs;
if (channel & 4) {
if (id & 4) {
irqs = dma_stm32_read(ddata, DMA_STM32_HISR);
} else {
irqs = dma_stm32_read(ddata, DMA_STM32_LISR);
}
return (irqs >> (((channel & 2) << 3) | ((channel & 1) * 6)));
return (irqs >> (((id & 2) << 3) | ((id & 1) * 6)));
}
static void dma_stm32_irq_clear(struct dma_stm32_device *ddata,
uint32_t channel, uint32_t irqs)
uint32_t id, uint32_t irqs)
{
irqs = irqs << (((channel & 2) << 3) | ((channel & 1) * 6));
irqs = irqs << (((id & 2) << 3) | ((id & 1) * 6));
if (channel & 4) {
if (id & 4) {
dma_stm32_write(ddata, DMA_STM32_HIFCR, irqs);
} else {
dma_stm32_write(ddata, DMA_STM32_LIFCR, irqs);
}
}
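
For context on the shift arithmetic above: LISR/LIFCR cover streams 0-3 and
HISR/HIFCR cover streams 4-7, and within each register the flag group for a
stream sits at bit offset 0, 6, 16 or 22 (RM0090).  The expression
(((id & 2) << 3) | ((id & 1) * 6)) reproduces those offsets; a standalone
host-side check:

    #include <assert.h>
    #include <stdint.h>

    /* Bit offset of the flag group for stream 'id' within LISR/HISR. */
    static uint32_t dma_flag_offset(uint32_t id)
    {
        return ((id & 2) << 3) | ((id & 1) * 6);
    }

    int main(void)
    {
        /* Streams 0..3 land at offsets 0, 6, 16, 22 in LISR; streams 4..7 at
         * the same offsets in HISR (the 'id & 4' test picks the register).
         */
        assert(dma_flag_offset(0) == 0  && dma_flag_offset(4) == 0);
        assert(dma_flag_offset(1) == 6  && dma_flag_offset(5) == 6);
        assert(dma_flag_offset(2) == 16 && dma_flag_offset(6) == 16);
        assert(dma_flag_offset(3) == 22 && dma_flag_offset(7) == 22);
        return 0;
    }
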
static void dma_stm32_irq_handler(void *arg, uint32_t channel)
static void dma_stm32_irq_handler(void *arg, uint32_t id)
{
struct device *dev = arg;
struct dma_stm32_device *ddata = dev->driver_data;
struct dma_stm32_chan *chan = &ddata->chan[channel];
struct dma_stm32_stream *stream = &ddata->stream[id];
uint32_t irqstatus, config, sfcr;
irqstatus = dma_stm32_irq_status(ddata, channel);
config = dma_stm32_read(ddata, DMA_STM32_SCR(channel));
sfcr = dma_stm32_read(ddata, DMA_STM32_SFCR(channel));
irqstatus = dma_stm32_irq_status(ddata, id);
config = dma_stm32_read(ddata, DMA_STM32_SCR(id));
sfcr = dma_stm32_read(ddata, DMA_STM32_SFCR(id));
/* Silently ignore spurious transfer half complete IRQ */
if (irqstatus & DMA_STM32_HTI) {
dma_stm32_irq_clear(ddata, channel, DMA_STM32_HTI);
dma_stm32_irq_clear(ddata, id, DMA_STM32_HTI);
return;
}
chan->busy = false;
stream->busy = false;
if ((irqstatus & DMA_STM32_TCI) && (config & DMA_STM32_SCR_TCIE)) {
dma_stm32_irq_clear(ddata, channel, DMA_STM32_TCI);
dma_stm32_irq_clear(ddata, id, DMA_STM32_TCI);
chan->dma_callback(chan->dev, channel, 0);
stream->dma_callback(stream->dev, id, 0);
} else {
SYS_LOG_ERR("Internal error: IRQ status: 0x%x\n", irqstatus);
dma_stm32_irq_clear(ddata, channel, irqstatus);
dma_stm32_irq_clear(ddata, id, irqstatus);
chan->dma_callback(chan->dev, channel, -EIO);
stream->dma_callback(stream->dev, id, -EIO);
}
}
static int dma_stm32_disable_chan(struct dma_stm32_device *ddata,
uint32_t channel)
static int dma_stm32_disable_stream(struct dma_stm32_device *ddata,
uint32_t id)
{
uint32_t config;
int count = 0;
int ret = 0;
for (;;) {
config = dma_stm32_read(ddata, DMA_STM32_SCR(channel));
/* Channel already disabled */
config = dma_stm32_read(ddata, DMA_STM32_SCR(id));
/* Stream already disabled */
if (!(config & DMA_STM32_SCR_EN)) {
return 0;
}
/* Try to disable channel */
dma_stm32_write(ddata, DMA_STM32_SCR(channel),
/* Try to disable stream */
dma_stm32_write(ddata, DMA_STM32_SCR(id),
config &= ~DMA_STM32_SCR_EN);
/* After trying for 5 seconds, give up */
k_sleep(K_SECONDS(5));
if (count++ > (5 * 1000) / 50) {
SYS_LOG_ERR("DMA error: Channel in use\n");
SYS_LOG_ERR("DMA error: Stream in use\n");
return -EBUSY;
}
}
@@ -284,12 +284,12 @@ static int dma_stm32_disable_chan(struct dma_stm32_device *ddata,
return ret;
}
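
For context on the loop above: RM0090 requires software to clear SxCR.EN and
then wait for it to read back as zero before the stream is reprogrammed,
since the controller may still be draining its FIFO.  A minimal sketch of
that documented sequence with a bounded poll (the helper name, poll interval
and timeout below are illustrative, not the driver's):

    #include <errno.h>
    #include <kernel.h>

    /* Clear SxCR.EN and wait up to ~5 s for the hardware to confirm it. */
    static int stream_force_disable(struct dma_stm32_device *ddata, uint32_t id)
    {
        int remaining_ms = 5 * 1000;
        uint32_t scr = dma_stm32_read(ddata, DMA_STM32_SCR(id));

        dma_stm32_write(ddata, DMA_STM32_SCR(id), scr & ~DMA_STM32_SCR_EN);

        while (dma_stm32_read(ddata, DMA_STM32_SCR(id)) & DMA_STM32_SCR_EN) {
            if (remaining_ms <= 0) {
                return -EBUSY;      /* stream refused to stop */
            }
            k_sleep(50);            /* poll every 50 ms */
            remaining_ms -= 50;
        }

        return 0;
    }
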
static int dma_stm32_config_devcpy(struct device *dev, uint32_t channel,
static int dma_stm32_config_devcpy(struct device *dev, uint32_t id,
struct dma_config *config)
{
struct dma_stm32_device *ddata = dev->driver_data;
struct dma_stm32_chan_reg *regs = &ddata->chan[channel].regs;
struct dma_stm32_stream_reg *regs = &ddata->stream[id].regs;
uint32_t src_bus_width = dma_width_index(config->source_data_size);
uint32_t dst_bus_width = dma_width_index(config->dest_data_size);
uint32_t src_burst_size = dma_burst_index(config->source_burst_length);
@@ -331,10 +331,10 @@ static int dma_stm32_config_devcpy(struct device *dev, uint32_t channel,
return 0;
}
static int dma_stm32_config_memcpy(struct device *dev, uint32_t channel)
static int dma_stm32_config_memcpy(struct device *dev, uint32_t id)
{
struct dma_stm32_device *ddata = dev->driver_data;
struct dma_stm32_chan_reg *regs = &ddata->chan[channel].regs;
struct dma_stm32_stream_reg *regs = &ddata->stream[id].regs;
regs->scr = DMA_STM32_SCR_DIR(DMA_STM32_MEM_TO_MEM) |
DMA_STM32_SCR_MINC | /* Memory increment mode */
@@ -349,15 +349,15 @@ static int dma_stm32_config_memcpy(struct device *dev, uint32_t channel)
return 0;
}
static int dma_stm32_config(struct device *dev, uint32_t channel,
static int dma_stm32_config(struct device *dev, uint32_t id,
struct dma_config *config)
{
struct dma_stm32_device *ddata = dev->driver_data;
struct dma_stm32_chan *chan = &ddata->chan[channel];
struct dma_stm32_chan_reg *regs = &ddata->chan[channel].regs;
struct dma_stm32_stream *stream = &ddata->stream[id];
struct dma_stm32_stream_reg *regs = &ddata->stream[id].regs;
int ret;
if (chan->busy) {
if (stream->busy) {
return -EBUSY;
}
@@ -367,11 +367,11 @@ static int dma_stm32_config(struct device *dev, uint32_t channel,
return -EINVAL;
}
chan->busy = true;
chan->dma_callback = config->dma_callback;
chan->direction = config->channel_direction;
stream->busy = true;
stream->dma_callback = config->dma_callback;
stream->direction = config->channel_direction;
if (chan->direction == MEMORY_TO_PERIPHERAL) {
if (stream->direction == MEMORY_TO_PERIPHERAL) {
regs->sm0ar = (uint32_t)config->head_block->source_address;
regs->spar = (uint32_t)config->head_block->dest_address;
} else {
@@ -379,10 +379,10 @@ static int dma_stm32_config(struct device *dev, uint32_t channel,
regs->sm0ar = (uint32_t)config->head_block->dest_address;
}
if (chan->direction == MEMORY_TO_MEMORY) {
ret = dma_stm32_config_memcpy(dev, stream);
if (stream->direction == MEMORY_TO_MEMORY) {
ret = dma_stm32_config_memcpy(dev, id);
} else {
ret = dma_stm32_config_devcpy(dev, stream, config);
ret = dma_stm32_config_devcpy(dev, id, config);
}
regs->sndtr = config->head_block->block_size;
@@ -390,69 +390,69 @@ static int dma_stm32_config(struct device *dev, uint32_t channel,
return ret;
}
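
A hedged usage sketch for the whole configure/start path: set up a
memory-to-peripheral transfer and kick it off through the generic DMA API
this driver implements.  The device name, stream number, peripheral address
and data sizes are placeholders; the struct fields and the dma_config() and
dma_start() calls follow the Zephyr DMA API of this era as consumed by the
driver above (check include/dma.h for the authoritative definitions).

    #include <device.h>
    #include <dma.h>

    static uint8_t tx_buf[32];

    /* error_code is 0 on transfer complete, -EIO on error (see the IRQ handler). */
    static void xfer_done(struct device *dev, uint32_t id, int error_code)
    {
    }

    void start_tx(void)
    {
        struct device *dma = device_get_binding("DMA_1");  /* placeholder name */
        struct dma_block_config blk = {
            .source_address = (uint32_t)tx_buf,
            .dest_address = 0x40011004,     /* placeholder: peripheral data reg */
            .block_size = sizeof(tx_buf),
        };
        struct dma_config cfg = {
            .channel_direction = MEMORY_TO_PERIPHERAL,
            .source_data_size = 1,          /* assumed units: bytes */
            .dest_data_size = 1,
            .source_burst_length = 1,
            .dest_burst_length = 1,
            .dma_callback = xfer_done,
            .head_block = &blk,
        };

        /* For this driver the second argument selects the stream id (0..7). */
        if (dma_config(dma, 5, &cfg) == 0) {
            dma_start(dma, 5);
        }
    }
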
static int dma_stm32_start(struct device *dev, uint32_t channel)
static int dma_stm32_start(struct device *dev, uint32_t id)
{
struct dma_stm32_device *ddata = dev->driver_data;
struct dma_stm32_chan_reg *regs = &ddata->chan[channel].regs;
struct dma_stm32_stream_reg *regs = &ddata->stream[id].regs;
uint32_t irqstatus;
int ret;
ret = dma_stm32_disable_chan(ddata, channel);
ret = dma_stm32_disable_stream(ddata, id);
if (ret) {
return ret;
}
dma_stm32_write(ddata, DMA_STM32_SCR(channel), regs->scr);
dma_stm32_write(ddata, DMA_STM32_SPAR(channel), regs->spar);
dma_stm32_write(ddata, DMA_STM32_SM0AR(channel), regs->sm0ar);
dma_stm32_write(ddata, DMA_STM32_SFCR(channel), regs->sfcr);
dma_stm32_write(ddata, DMA_STM32_SM1AR(channel), regs->sm1ar);
dma_stm32_write(ddata, DMA_STM32_SNDTR(channel), regs->sndtr);
dma_stm32_write(ddata, DMA_STM32_SCR(id), regs->scr);
dma_stm32_write(ddata, DMA_STM32_SPAR(id), regs->spar);
dma_stm32_write(ddata, DMA_STM32_SM0AR(id), regs->sm0ar);
dma_stm32_write(ddata, DMA_STM32_SFCR(id), regs->sfcr);
dma_stm32_write(ddata, DMA_STM32_SM1AR(id), regs->sm1ar);
dma_stm32_write(ddata, DMA_STM32_SNDTR(id), regs->sndtr);
/* Clear remanent IRQs from previous transfers */
irqstatus = dma_stm32_irq_status(ddata, channel);
irqstatus = dma_stm32_irq_status(ddata, id);
if (irqstatus) {
dma_stm32_irq_clear(ddata, channel, irqstatus);
dma_stm32_irq_clear(ddata, id, irqstatus);
}
dma_stm32_dump_reg(ddata, channel);
dma_stm32_dump_reg(ddata, id);
/* Push the start button */
dma_stm32_write(ddata, DMA_STM32_SCR(channel),
dma_stm32_write(ddata, DMA_STM32_SCR(id),
regs->scr | DMA_STM32_SCR_EN);
return 0;
}
static int dma_stm32_stop(struct device *dev, uint32_t channel)
static int dma_stm32_stop(struct device *dev, uint32_t id)
{
struct dma_stm32_device *ddata = dev->driver_data;
struct dma_stm32_chan *chan = &ddata->chan[stream];
struct dma_stm32_stream *stream = &ddata->stream[id];
uint32_t scr, sfcr, irqstatus;
int ret;
/* Disable all IRQs */
scr = dma_stm32_read(ddata, DMA_STM32_SCR(channel));
scr = dma_stm32_read(ddata, DMA_STM32_SCR(id));
scr &= ~DMA_STM32_SCR_IRQ_MASK;
dma_stm32_write(ddata, DMA_STM32_SCR(channel), scr);
dma_stm32_write(ddata, DMA_STM32_SCR(id), scr);
sfcr = dma_stm32_read(ddata, DMA_STM32_SFCR(channel));
sfcr = dma_stm32_read(ddata, DMA_STM32_SFCR(id));
sfcr &= ~DMA_STM32_SFCR_FEIE;
dma_stm32_write(ddata, DMA_STM32_SFCR(channel), sfcr);
dma_stm32_write(ddata, DMA_STM32_SFCR(id), sfcr);
/* Disable channel */
ret = dma_stm32_disable_chan(ddata, channel);
/* Disable stream */
ret = dma_stm32_disable_stream(ddata, id);
if (ret)
return ret;
/* Clear remanent IRQs from previous transfers */
irqstatus = dma_stm32_irq_status(ddata, stream);
irqstatus = dma_stm32_irq_status(ddata, id);
if (irqstatus) {
dma_stm32_irq_clear(ddata, channel, irqstatus);
dma_stm32_irq_clear(ddata, id, irqstatus);
}
/* Finally, flag channel as free */
chan->busy = false;
/* Finally, flag stream as free */
stream->busy = false;
return 0;
}
@@ -463,10 +463,10 @@ static int dma_stm32_init(struct device *dev)
const struct dma_stm32_config *cdata = dev->config->config_info;
int i;
for (i = 0; i < DMA_STM32_MAX_CHANNELS; i++) {
ddata->chan[i].id = i;
ddata->chan[i].dev = dev;
ddata->chan[i].busy = false;
for (i = 0; i < DMA_STM32_MAX_STREAMS; i++) {
ddata->stream[i].id = i;
ddata->stream[i].dev = dev;
ddata->stream[i].busy = false;
}
/* Enable DMA clock */