path: root/drivers/tty
author     Fugang Duan <b38611@freescale.com>    2015-02-02 15:35:03 +0800
committer  Nitin Garg <nitin.garg@freescale.com>    2015-04-14 14:02:11 -0500
commit     739d2f5c51e00df8030a171cb6eec5e0da9b9a4a (patch)
tree       cba7270233693edbaf51cdad1f15081cfc67b48e /drivers/tty
parent     06998b7fa3a66f6cba599d372c72c790d3dda111 (diff)
MLK-10194 Revert "MA-6031 tty: serial: imx: reduce the transmit and receive latency"
The patch "MA-6031 tty: serial: imx: reduce the transmit and receive latency" cannot pass stress test, so revert it now. Signed-off-by: Fugang Duan <B38611@freescale.com>
Diffstat (limited to 'drivers/tty')
-rw-r--r--  drivers/tty/serial/imx.c  31
1 file changed, 21 insertions, 10 deletions
diff --git a/drivers/tty/serial/imx.c b/drivers/tty/serial/imx.c
index 97eab99a8f46..d557f1efb405 100644
--- a/drivers/tty/serial/imx.c
+++ b/drivers/tty/serial/imx.c
@@ -244,6 +244,8 @@ struct imx_port {
struct imx_dma_rxbuf rx_buf;
unsigned int tx_bytes;
unsigned int dma_tx_nents;
+ struct delayed_work tsk_dma_tx;
+ struct work_struct tsk_dma_rx;
wait_queue_head_t dma_wait;
unsigned int saved_reg[11];
#define DMA_TX_IS_WORKING 1
@@ -505,8 +507,6 @@ static inline void imx_transmit_buffer(struct imx_port *sport)
imx_stop_tx(&sport->port);
}
-static void imx_dma_tx(struct imx_port *sport);
-
static void dma_tx_callback(void *data)
{
struct imx_port *sport = data;
@@ -522,6 +522,7 @@ static void dma_tx_callback(void *data)
spin_lock_irqsave(&sport->port.lock, flags);
xmit->tail = (xmit->tail + sport->tx_bytes) & (UART_XMIT_SIZE - 1);
sport->port.icount.tx += sport->tx_bytes;
+ spin_unlock_irqrestore(&sport->port.lock, flags);
dev_dbg(sport->port.dev, "we finish the TX DMA.\n");
@@ -529,29 +530,31 @@ static void dma_tx_callback(void *data)
smp_mb__after_clear_bit();
uart_write_wakeup(&sport->port);
+ schedule_delayed_work(&sport->tsk_dma_tx, msecs_to_jiffies(1));
+
if (waitqueue_active(&sport->dma_wait)) {
wake_up(&sport->dma_wait);
dev_dbg(sport->port.dev, "exit in %s.\n", __func__);
return;
}
-
- if (!uart_circ_empty(xmit) && !uart_tx_stopped(&sport->port))
- imx_dma_tx(sport);
- spin_unlock_irqrestore(&sport->port.lock, flags);
}
-static void imx_dma_tx(struct imx_port *sport)
+static void dma_tx_work(struct work_struct *w)
{
+ struct delayed_work *delay_work = to_delayed_work(w);
+ struct imx_port *sport = container_of(delay_work, struct imx_port, tsk_dma_tx);
struct circ_buf *xmit = &sport->port.state->xmit;
struct scatterlist *sgl = sport->tx_sgl;
struct dma_async_tx_descriptor *desc;
struct dma_chan *chan = sport->dma_chan_tx;
struct device *dev = sport->port.dev;
+ unsigned long flags;
int ret;
if (test_and_set_bit(DMA_TX_IS_WORKING, &sport->flags))
return;
+ spin_lock_irqsave(&sport->port.lock, flags);
sport->tx_bytes = uart_circ_chars_pending(xmit);
if (sport->tx_bytes > 0) {
@@ -565,6 +568,7 @@ static void imx_dma_tx(struct imx_port *sport)
sport->dma_tx_nents = 1;
sg_init_one(sgl, xmit->buf + xmit->tail, sport->tx_bytes);
}
+ spin_unlock_irqrestore(&sport->port.lock, flags);
ret = dma_map_sg(dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE);
if (ret == 0) {
@@ -588,6 +592,7 @@ static void imx_dma_tx(struct imx_port *sport)
dma_async_issue_pending(chan);
return;
}
+ spin_unlock_irqrestore(&sport->port.lock, flags);
err_out:
clear_bit(DMA_TX_IS_WORKING, &sport->flags);
smp_mb__after_clear_bit();
@@ -635,7 +640,7 @@ static void imx_start_tx(struct uart_port *port)
}
if (sport->dma_is_enabled) {
- imx_dma_tx(sport);
+ schedule_delayed_work(&sport->tsk_dma_tx, 0);
return;
}
@@ -909,8 +914,9 @@ static void dma_rx_push_data(struct imx_port *sport, struct tty_struct *tty,
}
}
-static void dma_rx_work(struct imx_port *sport)
+static void dma_rx_work(struct work_struct *w)
{
+ struct imx_port *sport = container_of(w, struct imx_port, tsk_dma_rx);
struct tty_struct *tty = sport->port.state->port.tty;
unsigned int cur_idx = sport->rx_buf.cur_idx;
@@ -971,7 +977,7 @@ static void dma_rx_callback(void *data)
dev_err(sport->port.dev, "overwrite!\n");
if (count)
- dma_rx_work(sport);
+ schedule_work(&sport->tsk_dma_rx);
}
static int start_rx_dma(struct imx_port *sport)
@@ -1210,6 +1216,11 @@ static int imx_startup(struct uart_port *port)
&& !sport->dma_is_inited)
imx_uart_dma_init(sport);
+ if (sport->dma_is_inited) {
+ INIT_DELAYED_WORK(&sport->tsk_dma_tx, dma_tx_work);
+ INIT_WORK(&sport->tsk_dma_rx, dma_rx_work);
+ }
+
spin_lock_irqsave(&sport->port.lock, flags);
/*
* Finally, clear and enable interrupts