@@ -20,10 +20,10 @@
#define RX_BUSY 0
#define TX_BUSY 1

-static struct dw_dma_slave mid_dma_tx = { .dst_id = 1 };
-static struct dw_dma_slave mid_dma_rx = { .src_id = 0 };
+static struct dw_dma_slave pci_dma_tx = { .dst_id = 1 };
+static struct dw_dma_slave pci_dma_rx = { .src_id = 0 };

-static bool mid_spi_dma_chan_filter(struct dma_chan *chan, void *param)
+static bool dw_spi_dma_chan_filter(struct dma_chan *chan, void *param)
{
	struct dw_dma_slave *s = param;
@@ -34,7 +34,7 @@ static bool mid_spi_dma_chan_filter(struct dma_chan *chan, void *param)
	return true;
}

-static int mid_spi_dma_init(struct dw_spi *dws)
+static int dw_spi_pci_dma_init(struct dw_spi *dws)
{
	struct pci_dev *dma_dev;
	struct dw_dma_slave *tx = dws->dma_tx;
@@ -54,14 +54,14 @@ static int mid_spi_dma_init(struct dw_spi *dws)
	/* 1. Init rx channel */
	rx->dma_dev = &dma_dev->dev;
-	dws->rxchan = dma_request_channel(mask, mid_spi_dma_chan_filter, rx);
+	dws->rxchan = dma_request_channel(mask, dw_spi_dma_chan_filter, rx);
	if (!dws->rxchan)
		goto err_exit;
	dws->master->dma_rx = dws->rxchan;

	/* 2. Init tx channel */
	tx->dma_dev = &dma_dev->dev;
-	dws->txchan = dma_request_channel(mask, mid_spi_dma_chan_filter, tx);
+	dws->txchan = dma_request_channel(mask, dw_spi_dma_chan_filter, tx);
	if (!dws->txchan)
		goto free_rxchan;
	dws->master->dma_tx = dws->txchan;
@@ -75,7 +75,7 @@ static int mid_spi_dma_init(struct dw_spi *dws)
	return -EBUSY;
}

-static void mid_spi_dma_exit(struct dw_spi *dws)
+static void dw_spi_dma_exit(struct dw_spi *dws)
{
	if (!dws->dma_inited)
		return;
@@ -103,7 +103,7 @@ static irqreturn_t dma_transfer(struct dw_spi *dws)
	return IRQ_HANDLED;
}

-static bool mid_spi_can_dma(struct spi_controller *master,
+static bool dw_spi_can_dma(struct spi_controller *master,
		struct spi_device *spi, struct spi_transfer *xfer)
{
	struct dw_spi *dws = spi_controller_get_devdata(master);
@@ -215,7 +215,7 @@ static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws,
	return rxdesc;
}

-static int mid_spi_dma_setup(struct dw_spi *dws, struct spi_transfer *xfer)
+static int dw_spi_dma_setup(struct dw_spi *dws, struct spi_transfer *xfer)
{
	u16 dma_ctrl = 0;
@@ -236,7 +236,7 @@ static int mid_spi_dma_setup(struct dw_spi *dws, struct spi_transfer *xfer)
	return 0;
}

-static int mid_spi_dma_transfer(struct dw_spi *dws, struct spi_transfer *xfer)
+static int dw_spi_dma_transfer(struct dw_spi *dws, struct spi_transfer *xfer)
{
	struct dma_async_tx_descriptor *txdesc, *rxdesc;
@@ -262,7 +262,7 @@ static int mid_spi_dma_transfer(struct dw_spi *dws, struct spi_transfer *xfer)
	return 0;
}

-static void mid_spi_dma_stop(struct dw_spi *dws)
+static void dw_spi_dma_stop(struct dw_spi *dws)
{
	if (test_bit(TX_BUSY, &dws->dma_chan_busy)) {
		dmaengine_terminate_sync(dws->txchan);
@@ -274,19 +274,19 @@ static void mid_spi_dma_stop(struct dw_spi *dws)
	}
}

-static const struct dw_spi_dma_ops mid_dma_ops = {
-	.dma_init = mid_spi_dma_init,
-	.dma_exit = mid_spi_dma_exit,
-	.dma_setup = mid_spi_dma_setup,
-	.can_dma = mid_spi_can_dma,
-	.dma_transfer = mid_spi_dma_transfer,
-	.dma_stop = mid_spi_dma_stop,
+static const struct dw_spi_dma_ops dw_spi_pci_dma_ops = {
+	.dma_init = dw_spi_pci_dma_init,
+	.dma_exit = dw_spi_dma_exit,
+	.dma_setup = dw_spi_dma_setup,
+	.can_dma = dw_spi_can_dma,
+	.dma_transfer = dw_spi_dma_transfer,
+	.dma_stop = dw_spi_dma_stop,
};

void dw_spi_pci_dma_setup(struct dw_spi *dws)
{
-	dws->dma_tx = &mid_dma_tx;
-	dws->dma_rx = &mid_dma_rx;
-	dws->dma_ops = &mid_dma_ops;
+	dws->dma_tx = &pci_dma_tx;
+	dws->dma_rx = &pci_dma_rx;
+	dws->dma_ops = &dw_spi_pci_dma_ops;
}
EXPORT_SYMBOL_GPL(dw_spi_pci_dma_setup);