Lines matching refs: uap (struct uart_amba_port, Linux AMBA PL011 serial driver, drivers/tty/serial/amba-pl011.c)
278 static unsigned int pl011_reg_to_offset(const struct uart_amba_port *uap, in pl011_reg_to_offset() argument
281 return uap->reg_offset[reg]; in pl011_reg_to_offset()
284 static unsigned int pl011_read(const struct uart_amba_port *uap, in pl011_read() argument
287 void __iomem *addr = uap->port.membase + pl011_reg_to_offset(uap, reg); in pl011_read()
289 return (uap->port.iotype == UPIO_MEM32) ? in pl011_read()
293 static void pl011_write(unsigned int val, const struct uart_amba_port *uap, in pl011_write() argument
296 void __iomem *addr = uap->port.membase + pl011_reg_to_offset(uap, reg); in pl011_write()
298 if (uap->port.iotype == UPIO_MEM32) in pl011_write()
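The matches from pl011_reg_to_offset(), pl011_read() and pl011_write() above cover the low-level register accessors almost completely. A minimal sketch of how they fit together, assuming readw_relaxed()/readl_relaxed() and their write counterparts for the actual MMIO accesses (those calls are not part of the matched lines):

static unsigned int pl011_reg_to_offset(const struct uart_amba_port *uap,
					unsigned int reg)
{
	/* Per-vendor table translating the generic REG_* index to a bus offset. */
	return uap->reg_offset[reg];
}

static unsigned int pl011_read(const struct uart_amba_port *uap,
			       unsigned int reg)
{
	void __iomem *addr = uap->port.membase + pl011_reg_to_offset(uap, reg);

	/* Vendors flagged access_32b get 32-bit MMIO, the classic PL011 16-bit. */
	return (uap->port.iotype == UPIO_MEM32) ?
		readl_relaxed(addr) : readw_relaxed(addr);
}

static void pl011_write(unsigned int val, const struct uart_amba_port *uap,
			unsigned int reg)
{
	void __iomem *addr = uap->port.membase + pl011_reg_to_offset(uap, reg);

	if (uap->port.iotype == UPIO_MEM32)
		writel_relaxed(val, addr);
	else
		writew_relaxed(val, addr);
}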
309 static int pl011_fifo_to_tty(struct uart_amba_port *uap) in pl011_fifo_to_tty() argument
316 status = pl011_read(uap, REG_FR); in pl011_fifo_to_tty()
321 ch = pl011_read(uap, REG_DR) | UART_DUMMY_DR_RX; in pl011_fifo_to_tty()
323 uap->port.icount.rx++; in pl011_fifo_to_tty()
328 uap->port.icount.brk++; in pl011_fifo_to_tty()
329 if (uart_handle_break(&uap->port)) in pl011_fifo_to_tty()
332 uap->port.icount.parity++; in pl011_fifo_to_tty()
334 uap->port.icount.frame++; in pl011_fifo_to_tty()
336 uap->port.icount.overrun++; in pl011_fifo_to_tty()
338 ch &= uap->port.read_status_mask; in pl011_fifo_to_tty()
348 spin_unlock(&uap->port.lock); in pl011_fifo_to_tty()
349 sysrq = uart_handle_sysrq_char(&uap->port, ch & 255); in pl011_fifo_to_tty()
350 spin_lock(&uap->port.lock); in pl011_fifo_to_tty()
353 uart_insert_char(&uap->port, ch, UART011_DR_OE, ch, flag); in pl011_fifo_to_tty()
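pl011_fifo_to_tty() drains the RX FIFO into the tty layer; the matches show the error accounting and the sysrq check done with the port lock dropped. A shortened sketch of the drain loop, assuming the driver's UART_DR_ERROR mask and the UART011_DR_* / TTY_* names; the error-classification branch is summarised in a comment:

	int fifotaken = 0;
	u16 status;
	unsigned int ch, flag;
	bool sysrq;

	while (fifotaken != 256) {
		status = pl011_read(uap, REG_FR);
		if (status & UART01x_FR_RXFE)
			break;

		/* Take a character from the FIFO and account for it. */
		ch = pl011_read(uap, REG_DR) | UART_DUMMY_DR_RX;
		flag = TTY_NORMAL;
		uap->port.icount.rx++;
		fifotaken++;

		if (unlikely(ch & UART_DR_ERROR)) {
			/* brk/parity/frame/overrun counters are bumped here,
			 * ch is masked with port.read_status_mask, and flag
			 * becomes TTY_BREAK/TTY_PARITY/TTY_FRAME as needed. */
		}

		/* sysrq handling must run without the port lock held. */
		spin_unlock(&uap->port.lock);
		sysrq = uart_handle_sysrq_char(&uap->port, ch & 255);
		spin_lock(&uap->port.lock);

		if (!sysrq)
			uart_insert_char(&uap->port, ch, UART011_DR_OE, ch, flag);
	}

	return fifotaken;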
398 static void pl011_dma_probe(struct uart_amba_port *uap) in pl011_dma_probe() argument
401 struct amba_pl011_data *plat = dev_get_platdata(uap->port.dev); in pl011_dma_probe()
402 struct device *dev = uap->port.dev; in pl011_dma_probe()
404 .dst_addr = uap->port.mapbase + in pl011_dma_probe()
405 pl011_reg_to_offset(uap, REG_DR), in pl011_dma_probe()
408 .dst_maxburst = uap->fifosize >> 1, in pl011_dma_probe()
414 uap->dma_probed = true; in pl011_dma_probe()
418 uap->dma_probed = false; in pl011_dma_probe()
424 dev_info(uap->port.dev, "no DMA platform data\n"); in pl011_dma_probe()
435 dev_err(uap->port.dev, "no TX DMA channel!\n"); in pl011_dma_probe()
441 uap->dmatx.chan = chan; in pl011_dma_probe()
443 dev_info(uap->port.dev, "DMA channel TX %s\n", in pl011_dma_probe()
444 dma_chan_name(uap->dmatx.chan)); in pl011_dma_probe()
453 dev_err(uap->port.dev, "no RX DMA channel!\n"); in pl011_dma_probe()
460 .src_addr = uap->port.mapbase + in pl011_dma_probe()
461 pl011_reg_to_offset(uap, REG_DR), in pl011_dma_probe()
464 .src_maxburst = uap->fifosize >> 2, in pl011_dma_probe()
478 dev_info(uap->port.dev, in pl011_dma_probe()
484 uap->dmarx.chan = chan; in pl011_dma_probe()
486 uap->dmarx.auto_poll_rate = false; in pl011_dma_probe()
490 uap->dmarx.auto_poll_rate = false; in pl011_dma_probe()
491 uap->dmarx.poll_rate = plat->dma_rx_poll_rate; in pl011_dma_probe()
498 uap->dmarx.auto_poll_rate = true; in pl011_dma_probe()
499 uap->dmarx.poll_rate = 100; in pl011_dma_probe()
503 uap->dmarx.poll_timeout = in pl011_dma_probe()
506 uap->dmarx.poll_timeout = 3000; in pl011_dma_probe()
508 uap->dmarx.auto_poll_rate = of_property_read_bool( in pl011_dma_probe()
510 if (uap->dmarx.auto_poll_rate) { in pl011_dma_probe()
515 uap->dmarx.poll_rate = x; in pl011_dma_probe()
517 uap->dmarx.poll_rate = 100; in pl011_dma_probe()
520 uap->dmarx.poll_timeout = x; in pl011_dma_probe()
522 uap->dmarx.poll_timeout = 3000; in pl011_dma_probe()
525 dev_info(uap->port.dev, "DMA channel RX %s\n", in pl011_dma_probe()
526 dma_chan_name(uap->dmarx.chan)); in pl011_dma_probe()
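pl011_dma_probe() requests the TX and RX channels and programs each with a dma_slave_config pointing at the data register. A sketch of the TX half, assuming the generic dma_request_chan()/dmaengine_slave_config() API; the driver's actual channel lookup and platform-data fallback (visible in the matches above) are more involved:

	struct dma_slave_config tx_conf = {
		.dst_addr = uap->port.mapbase +
				pl011_reg_to_offset(uap, REG_DR),
		.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE,
		.direction = DMA_MEM_TO_DEV,
		.dst_maxburst = uap->fifosize >> 1,	/* burst at half the FIFO */
		.device_fc = false,
	};
	struct dma_chan *chan;

	chan = dma_request_chan(dev, "tx");
	if (IS_ERR(chan)) {
		dev_err(uap->port.dev, "no TX DMA channel!\n");
		return;
	}
	dmaengine_slave_config(chan, &tx_conf);
	uap->dmatx.chan = chan;

	dev_info(uap->port.dev, "DMA channel TX %s\n",
		 dma_chan_name(uap->dmatx.chan));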
530 static void pl011_dma_remove(struct uart_amba_port *uap) in pl011_dma_remove() argument
532 if (uap->dmatx.chan) in pl011_dma_remove()
533 dma_release_channel(uap->dmatx.chan); in pl011_dma_remove()
534 if (uap->dmarx.chan) in pl011_dma_remove()
535 dma_release_channel(uap->dmarx.chan); in pl011_dma_remove()
539 static int pl011_dma_tx_refill(struct uart_amba_port *uap);
540 static void pl011_start_tx_pio(struct uart_amba_port *uap);
548 struct uart_amba_port *uap = data; in pl011_dma_tx_callback() local
549 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_callback()
553 spin_lock_irqsave(&uap->port.lock, flags); in pl011_dma_tx_callback()
554 if (uap->dmatx.queued) in pl011_dma_tx_callback()
558 dmacr = uap->dmacr; in pl011_dma_tx_callback()
559 uap->dmacr = dmacr & ~UART011_TXDMAE; in pl011_dma_tx_callback()
560 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_callback()
571 if (!(dmacr & UART011_TXDMAE) || uart_tx_stopped(&uap->port) || in pl011_dma_tx_callback()
572 uart_circ_empty(&uap->port.state->xmit)) { in pl011_dma_tx_callback()
573 uap->dmatx.queued = false; in pl011_dma_tx_callback()
574 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_dma_tx_callback()
578 if (pl011_dma_tx_refill(uap) <= 0) in pl011_dma_tx_callback()
583 pl011_start_tx_pio(uap); in pl011_dma_tx_callback()
585 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_dma_tx_callback()
596 static int pl011_dma_tx_refill(struct uart_amba_port *uap) in pl011_dma_tx_refill() argument
598 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_refill()
602 struct circ_buf *xmit = &uap->port.state->xmit; in pl011_dma_tx_refill()
612 if (count < (uap->fifosize >> 1)) { in pl011_dma_tx_refill()
613 uap->dmatx.queued = false; in pl011_dma_tx_refill()
645 uap->dmatx.queued = false; in pl011_dma_tx_refill()
646 dev_dbg(uap->port.dev, "unable to map TX DMA\n"); in pl011_dma_tx_refill()
654 uap->dmatx.queued = false; in pl011_dma_tx_refill()
659 dev_dbg(uap->port.dev, "TX DMA busy\n"); in pl011_dma_tx_refill()
665 desc->callback_param = uap; in pl011_dma_tx_refill()
673 uap->dmacr |= UART011_TXDMAE; in pl011_dma_tx_refill()
674 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_refill()
675 uap->dmatx.queued = true; in pl011_dma_tx_refill()
682 uap->port.icount.tx += count; in pl011_dma_tx_refill()
685 uart_write_wakeup(&uap->port); in pl011_dma_tx_refill()
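The tail of pl011_dma_tx_refill() obtains a descriptor for the mapped bounce buffer and arms the engine before setting TXDMAE. A sketch of that sequence, assuming the standard dmaengine prep/submit calls (dmaengine_prep_slave_sg(), dmaengine_submit(), dma_async_issue_pending()) which the matched lines only hint at:

	struct dma_chan *chan = dmatx->chan;
	struct dma_device *dma_dev = chan->device;
	struct dma_async_tx_descriptor *desc;

	desc = dmaengine_prep_slave_sg(chan, &dmatx->sg, 1, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		dma_unmap_sg(dma_dev->dev, &dmatx->sg, 1, DMA_TO_DEVICE);
		uap->dmatx.queued = false;
		/* Descriptor pool exhausted: fall back to interrupt-driven PIO. */
		dev_dbg(uap->port.dev, "TX DMA busy\n");
		return -EBUSY;
	}

	/* Data was copied into the bounce buffer, fire it off. */
	desc->callback = pl011_dma_tx_callback;
	desc->callback_param = uap;

	dmaengine_submit(desc);
	dma_async_issue_pending(chan);

	uap->dmacr |= UART011_TXDMAE;
	pl011_write(uap->dmacr, uap, REG_DMACR);
	uap->dmatx.queued = true;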
698 static bool pl011_dma_tx_irq(struct uart_amba_port *uap) in pl011_dma_tx_irq() argument
700 if (!uap->using_tx_dma) in pl011_dma_tx_irq()
708 if (uap->dmatx.queued) { in pl011_dma_tx_irq()
709 uap->dmacr |= UART011_TXDMAE; in pl011_dma_tx_irq()
710 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_irq()
711 uap->im &= ~UART011_TXIM; in pl011_dma_tx_irq()
712 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_tx_irq()
720 if (pl011_dma_tx_refill(uap) > 0) { in pl011_dma_tx_irq()
721 uap->im &= ~UART011_TXIM; in pl011_dma_tx_irq()
722 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_tx_irq()
732 static inline void pl011_dma_tx_stop(struct uart_amba_port *uap) in pl011_dma_tx_stop() argument
734 if (uap->dmatx.queued) { in pl011_dma_tx_stop()
735 uap->dmacr &= ~UART011_TXDMAE; in pl011_dma_tx_stop()
736 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_stop()
748 static inline bool pl011_dma_tx_start(struct uart_amba_port *uap) in pl011_dma_tx_start() argument
752 if (!uap->using_tx_dma) in pl011_dma_tx_start()
755 if (!uap->port.x_char) { in pl011_dma_tx_start()
759 if (!uap->dmatx.queued) { in pl011_dma_tx_start()
760 if (pl011_dma_tx_refill(uap) > 0) { in pl011_dma_tx_start()
761 uap->im &= ~UART011_TXIM; in pl011_dma_tx_start()
762 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_tx_start()
765 } else if (!(uap->dmacr & UART011_TXDMAE)) { in pl011_dma_tx_start()
766 uap->dmacr |= UART011_TXDMAE; in pl011_dma_tx_start()
767 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_start()
776 dmacr = uap->dmacr; in pl011_dma_tx_start()
777 uap->dmacr &= ~UART011_TXDMAE; in pl011_dma_tx_start()
778 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_start()
780 if (pl011_read(uap, REG_FR) & UART01x_FR_TXFF) { in pl011_dma_tx_start()
789 pl011_write(uap->port.x_char, uap, REG_DR); in pl011_dma_tx_start()
790 uap->port.icount.tx++; in pl011_dma_tx_start()
791 uap->port.x_char = 0; in pl011_dma_tx_start()
794 uap->dmacr = dmacr; in pl011_dma_tx_start()
795 pl011_write(dmacr, uap, REG_DMACR); in pl011_dma_tx_start()
805 __releases(&uap->port.lock) in pl011_dma_flush_buffer()
806 __acquires(&uap->port.lock) in pl011_dma_flush_buffer()
808 struct uart_amba_port *uap = in pl011_dma_flush_buffer() local
811 if (!uap->using_tx_dma) in pl011_dma_flush_buffer()
814 dmaengine_terminate_async(uap->dmatx.chan); in pl011_dma_flush_buffer()
816 if (uap->dmatx.queued) { in pl011_dma_flush_buffer()
817 dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1, in pl011_dma_flush_buffer()
819 uap->dmatx.queued = false; in pl011_dma_flush_buffer()
820 uap->dmacr &= ~UART011_TXDMAE; in pl011_dma_flush_buffer()
821 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_flush_buffer()
827 static int pl011_dma_rx_trigger_dma(struct uart_amba_port *uap) in pl011_dma_rx_trigger_dma() argument
829 struct dma_chan *rxchan = uap->dmarx.chan; in pl011_dma_rx_trigger_dma()
830 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_trigger_dma()
838 sgbuf = uap->dmarx.use_buf_b ? in pl011_dma_rx_trigger_dma()
839 &uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a; in pl011_dma_rx_trigger_dma()
849 uap->dmarx.running = false; in pl011_dma_rx_trigger_dma()
856 desc->callback_param = uap; in pl011_dma_rx_trigger_dma()
860 uap->dmacr |= UART011_RXDMAE; in pl011_dma_rx_trigger_dma()
861 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_rx_trigger_dma()
862 uap->dmarx.running = true; in pl011_dma_rx_trigger_dma()
864 uap->im &= ~UART011_RXIM; in pl011_dma_rx_trigger_dma()
865 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_rx_trigger_dma()
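pl011_dma_rx_trigger_dma() starts a transfer into whichever of the two ping-pong buffers (sgbuf_a/sgbuf_b) is idle, then masks the RX interrupt because DMA now owns the FIFO. A sketch of the core, assuming the same dmaengine prep/submit calls as above and the driver's pl011_sgbuf wrapper around a scatterlist:

	struct dma_async_tx_descriptor *desc;
	struct pl011_sgbuf *sgbuf;

	sgbuf = uap->dmarx.use_buf_b ?
		&uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a;
	desc = dmaengine_prep_slave_sg(rxchan, &sgbuf->sg, 1,
				       DMA_DEV_TO_MEM,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		/* No descriptor: leave RX to the PIO/interrupt path. */
		uap->dmarx.running = false;
		dmaengine_terminate_all(rxchan);
		return -EBUSY;
	}

	desc->callback = pl011_dma_rx_callback;
	desc->callback_param = uap;
	dmarx->cookie = dmaengine_submit(desc);
	dma_async_issue_pending(rxchan);

	uap->dmacr |= UART011_RXDMAE;
	pl011_write(uap->dmacr, uap, REG_DMACR);
	uap->dmarx.running = true;

	/* The RX interrupt is not needed while the DMA job runs. */
	uap->im &= ~UART011_RXIM;
	pl011_write(uap->im, uap, REG_IMSC);

	return 0;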
875 static void pl011_dma_rx_chars(struct uart_amba_port *uap, in pl011_dma_rx_chars() argument
879 struct tty_port *port = &uap->port.state->port; in pl011_dma_rx_chars()
881 &uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a; in pl011_dma_rx_chars()
885 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_chars()
888 if (uap->dmarx.poll_rate) { in pl011_dma_rx_chars()
907 uap->port.icount.rx += dma_count; in pl011_dma_rx_chars()
909 dev_warn(uap->port.dev, in pl011_dma_rx_chars()
914 if (uap->dmarx.poll_rate) in pl011_dma_rx_chars()
924 UART011_FEIS, uap, REG_ICR); in pl011_dma_rx_chars()
937 fifotaken = pl011_fifo_to_tty(uap); in pl011_dma_rx_chars()
940 spin_unlock(&uap->port.lock); in pl011_dma_rx_chars()
941 dev_vdbg(uap->port.dev, in pl011_dma_rx_chars()
945 spin_lock(&uap->port.lock); in pl011_dma_rx_chars()
948 static void pl011_dma_rx_irq(struct uart_amba_port *uap) in pl011_dma_rx_irq() argument
950 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_irq()
964 dev_err(uap->port.dev, "unable to pause DMA transfer\n"); in pl011_dma_rx_irq()
968 dev_err(uap->port.dev, "unable to pause DMA transfer\n"); in pl011_dma_rx_irq()
971 uap->dmacr &= ~UART011_RXDMAE; in pl011_dma_rx_irq()
972 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_rx_irq()
973 uap->dmarx.running = false; in pl011_dma_rx_irq()
984 pl011_dma_rx_chars(uap, pending, dmarx->use_buf_b, true); in pl011_dma_rx_irq()
988 if (pl011_dma_rx_trigger_dma(uap)) { in pl011_dma_rx_irq()
989 dev_dbg(uap->port.dev, "could not retrigger RX DMA job " in pl011_dma_rx_irq()
991 uap->im |= UART011_RXIM; in pl011_dma_rx_irq()
992 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_rx_irq()
998 struct uart_amba_port *uap = data; in pl011_dma_rx_callback() local
999 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_callback()
1015 spin_lock_irq(&uap->port.lock); in pl011_dma_rx_callback()
1026 uap->dmarx.running = false; in pl011_dma_rx_callback()
1028 ret = pl011_dma_rx_trigger_dma(uap); in pl011_dma_rx_callback()
1030 pl011_dma_rx_chars(uap, pending, lastbuf, false); in pl011_dma_rx_callback()
1031 spin_unlock_irq(&uap->port.lock); in pl011_dma_rx_callback()
1037 dev_dbg(uap->port.dev, "could not retrigger RX DMA job " in pl011_dma_rx_callback()
1039 uap->im |= UART011_RXIM; in pl011_dma_rx_callback()
1040 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_rx_callback()
1049 static inline void pl011_dma_rx_stop(struct uart_amba_port *uap) in pl011_dma_rx_stop() argument
1052 uap->dmacr &= ~UART011_RXDMAE; in pl011_dma_rx_stop()
1053 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_rx_stop()
1063 struct uart_amba_port *uap = from_timer(uap, t, dmarx.timer); in pl011_dma_rx_poll() local
1064 struct tty_port *port = &uap->port.state->port; in pl011_dma_rx_poll()
1065 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_poll()
1066 struct dma_chan *rxchan = uap->dmarx.chan; in pl011_dma_rx_poll()
1074 sgbuf = dmarx->use_buf_b ? &uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a; in pl011_dma_rx_poll()
1092 > uap->dmarx.poll_timeout) { in pl011_dma_rx_poll()
1094 spin_lock_irqsave(&uap->port.lock, flags); in pl011_dma_rx_poll()
1095 pl011_dma_rx_stop(uap); in pl011_dma_rx_poll()
1096 uap->im |= UART011_RXIM; in pl011_dma_rx_poll()
1097 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_rx_poll()
1098 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_dma_rx_poll()
1100 uap->dmarx.running = false; in pl011_dma_rx_poll()
1102 del_timer(&uap->dmarx.timer); in pl011_dma_rx_poll()
1104 mod_timer(&uap->dmarx.timer, in pl011_dma_rx_poll()
1105 jiffies + msecs_to_jiffies(uap->dmarx.poll_rate)); in pl011_dma_rx_poll()
1109 static void pl011_dma_startup(struct uart_amba_port *uap) in pl011_dma_startup() argument
1113 if (!uap->dma_probed) in pl011_dma_startup()
1114 pl011_dma_probe(uap); in pl011_dma_startup()
1116 if (!uap->dmatx.chan) in pl011_dma_startup()
1119 uap->dmatx.buf = kmalloc(PL011_DMA_BUFFER_SIZE, GFP_KERNEL | __GFP_DMA); in pl011_dma_startup()
1120 if (!uap->dmatx.buf) { in pl011_dma_startup()
1121 dev_err(uap->port.dev, "no memory for DMA TX buffer\n"); in pl011_dma_startup()
1122 uap->port.fifosize = uap->fifosize; in pl011_dma_startup()
1126 sg_init_one(&uap->dmatx.sg, uap->dmatx.buf, PL011_DMA_BUFFER_SIZE); in pl011_dma_startup()
1129 uap->port.fifosize = PL011_DMA_BUFFER_SIZE; in pl011_dma_startup()
1130 uap->using_tx_dma = true; in pl011_dma_startup()
1132 if (!uap->dmarx.chan) in pl011_dma_startup()
1136 ret = pl011_sgbuf_init(uap->dmarx.chan, &uap->dmarx.sgbuf_a, in pl011_dma_startup()
1139 dev_err(uap->port.dev, "failed to init DMA %s: %d\n", in pl011_dma_startup()
1144 ret = pl011_sgbuf_init(uap->dmarx.chan, &uap->dmarx.sgbuf_b, in pl011_dma_startup()
1147 dev_err(uap->port.dev, "failed to init DMA %s: %d\n", in pl011_dma_startup()
1149 pl011_sgbuf_free(uap->dmarx.chan, &uap->dmarx.sgbuf_a, in pl011_dma_startup()
1154 uap->using_rx_dma = true; in pl011_dma_startup()
1158 uap->dmacr |= UART011_DMAONERR; in pl011_dma_startup()
1159 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_startup()
1166 if (uap->vendor->dma_threshold) in pl011_dma_startup()
1168 uap, REG_ST_DMAWM); in pl011_dma_startup()
1170 if (uap->using_rx_dma) { in pl011_dma_startup()
1171 if (pl011_dma_rx_trigger_dma(uap)) in pl011_dma_startup()
1172 dev_dbg(uap->port.dev, "could not trigger initial " in pl011_dma_startup()
1174 if (uap->dmarx.poll_rate) { in pl011_dma_startup()
1175 timer_setup(&uap->dmarx.timer, pl011_dma_rx_poll, 0); in pl011_dma_startup()
1176 mod_timer(&uap->dmarx.timer, in pl011_dma_startup()
1178 msecs_to_jiffies(uap->dmarx.poll_rate)); in pl011_dma_startup()
1179 uap->dmarx.last_residue = PL011_DMA_BUFFER_SIZE; in pl011_dma_startup()
1180 uap->dmarx.last_jiffies = jiffies; in pl011_dma_startup()
1185 static void pl011_dma_shutdown(struct uart_amba_port *uap) in pl011_dma_shutdown() argument
1187 if (!(uap->using_tx_dma || uap->using_rx_dma)) in pl011_dma_shutdown()
1191 while (pl011_read(uap, REG_FR) & uap->vendor->fr_busy) in pl011_dma_shutdown()
1194 spin_lock_irq(&uap->port.lock); in pl011_dma_shutdown()
1195 uap->dmacr &= ~(UART011_DMAONERR | UART011_RXDMAE | UART011_TXDMAE); in pl011_dma_shutdown()
1196 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_shutdown()
1197 spin_unlock_irq(&uap->port.lock); in pl011_dma_shutdown()
1199 if (uap->using_tx_dma) { in pl011_dma_shutdown()
1201 dmaengine_terminate_all(uap->dmatx.chan); in pl011_dma_shutdown()
1202 if (uap->dmatx.queued) { in pl011_dma_shutdown()
1203 dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1, in pl011_dma_shutdown()
1205 uap->dmatx.queued = false; in pl011_dma_shutdown()
1208 kfree(uap->dmatx.buf); in pl011_dma_shutdown()
1209 uap->using_tx_dma = false; in pl011_dma_shutdown()
1212 if (uap->using_rx_dma) { in pl011_dma_shutdown()
1213 dmaengine_terminate_all(uap->dmarx.chan); in pl011_dma_shutdown()
1215 pl011_sgbuf_free(uap->dmarx.chan, &uap->dmarx.sgbuf_a, DMA_FROM_DEVICE); in pl011_dma_shutdown()
1216 pl011_sgbuf_free(uap->dmarx.chan, &uap->dmarx.sgbuf_b, DMA_FROM_DEVICE); in pl011_dma_shutdown()
1217 if (uap->dmarx.poll_rate) in pl011_dma_shutdown()
1218 del_timer_sync(&uap->dmarx.timer); in pl011_dma_shutdown()
1219 uap->using_rx_dma = false; in pl011_dma_shutdown()
1223 static inline bool pl011_dma_rx_available(struct uart_amba_port *uap) in pl011_dma_rx_available() argument
1225 return uap->using_rx_dma; in pl011_dma_rx_available()
1228 static inline bool pl011_dma_rx_running(struct uart_amba_port *uap) in pl011_dma_rx_running() argument
1230 return uap->using_rx_dma && uap->dmarx.running; in pl011_dma_rx_running()
1235 static inline void pl011_dma_remove(struct uart_amba_port *uap) in pl011_dma_remove() argument
1239 static inline void pl011_dma_startup(struct uart_amba_port *uap) in pl011_dma_startup() argument
1243 static inline void pl011_dma_shutdown(struct uart_amba_port *uap) in pl011_dma_shutdown() argument
1247 static inline bool pl011_dma_tx_irq(struct uart_amba_port *uap) in pl011_dma_tx_irq() argument
1252 static inline void pl011_dma_tx_stop(struct uart_amba_port *uap) in pl011_dma_tx_stop() argument
1256 static inline bool pl011_dma_tx_start(struct uart_amba_port *uap) in pl011_dma_tx_start() argument
1261 static inline void pl011_dma_rx_irq(struct uart_amba_port *uap) in pl011_dma_rx_irq() argument
1265 static inline void pl011_dma_rx_stop(struct uart_amba_port *uap) in pl011_dma_rx_stop() argument
1269 static inline int pl011_dma_rx_trigger_dma(struct uart_amba_port *uap) in pl011_dma_rx_trigger_dma() argument
1274 static inline bool pl011_dma_rx_available(struct uart_amba_port *uap) in pl011_dma_rx_available() argument
1279 static inline bool pl011_dma_rx_running(struct uart_amba_port *uap) in pl011_dma_rx_running() argument
1289 struct uart_amba_port *uap = in pl011_stop_tx() local
1292 uap->im &= ~UART011_TXIM; in pl011_stop_tx()
1293 pl011_write(uap->im, uap, REG_IMSC); in pl011_stop_tx()
1294 pl011_dma_tx_stop(uap); in pl011_stop_tx()
1297 static bool pl011_tx_chars(struct uart_amba_port *uap, bool from_irq);
1300 static void pl011_start_tx_pio(struct uart_amba_port *uap) in pl011_start_tx_pio() argument
1302 if (pl011_tx_chars(uap, false)) { in pl011_start_tx_pio()
1303 uap->im |= UART011_TXIM; in pl011_start_tx_pio()
1304 pl011_write(uap->im, uap, REG_IMSC); in pl011_start_tx_pio()
1310 struct uart_amba_port *uap = in pl011_start_tx() local
1313 if (!pl011_dma_tx_start(uap)) in pl011_start_tx()
1314 pl011_start_tx_pio(uap); in pl011_start_tx()
1319 struct uart_amba_port *uap = in pl011_stop_rx() local
1322 uap->im &= ~(UART011_RXIM|UART011_RTIM|UART011_FEIM| in pl011_stop_rx()
1324 pl011_write(uap->im, uap, REG_IMSC); in pl011_stop_rx()
1326 pl011_dma_rx_stop(uap); in pl011_stop_rx()
1340 struct uart_amba_port *uap = in pl011_enable_ms() local
1343 uap->im |= UART011_RIMIM|UART011_CTSMIM|UART011_DCDMIM|UART011_DSRMIM; in pl011_enable_ms()
1344 pl011_write(uap->im, uap, REG_IMSC); in pl011_enable_ms()
1347 static void pl011_rx_chars(struct uart_amba_port *uap) in pl011_rx_chars() argument
1348 __releases(&uap->port.lock) in pl011_rx_chars()
1349 __acquires(&uap->port.lock) in pl011_rx_chars()
1351 pl011_fifo_to_tty(uap); in pl011_rx_chars()
1353 spin_unlock(&uap->port.lock); in pl011_rx_chars()
1354 tty_flip_buffer_push(&uap->port.state->port); in pl011_rx_chars()
1359 if (pl011_dma_rx_available(uap)) { in pl011_rx_chars()
1360 if (pl011_dma_rx_trigger_dma(uap)) { in pl011_rx_chars()
1361 dev_dbg(uap->port.dev, "could not trigger RX DMA job " in pl011_rx_chars()
1363 uap->im |= UART011_RXIM; in pl011_rx_chars()
1364 pl011_write(uap->im, uap, REG_IMSC); in pl011_rx_chars()
1368 if (uap->dmarx.poll_rate) { in pl011_rx_chars()
1369 uap->dmarx.last_jiffies = jiffies; in pl011_rx_chars()
1370 uap->dmarx.last_residue = PL011_DMA_BUFFER_SIZE; in pl011_rx_chars()
1371 mod_timer(&uap->dmarx.timer, in pl011_rx_chars()
1373 msecs_to_jiffies(uap->dmarx.poll_rate)); in pl011_rx_chars()
1378 spin_lock(&uap->port.lock); in pl011_rx_chars()
1381 static bool pl011_tx_char(struct uart_amba_port *uap, unsigned char c, in pl011_tx_char() argument
1385 pl011_read(uap, REG_FR) & UART01x_FR_TXFF) in pl011_tx_char()
1388 pl011_write(c, uap, REG_DR); in pl011_tx_char()
1389 uap->port.icount.tx++; in pl011_tx_char()
1395 static bool pl011_tx_chars(struct uart_amba_port *uap, bool from_irq) in pl011_tx_chars() argument
1397 struct circ_buf *xmit = &uap->port.state->xmit; in pl011_tx_chars()
1398 int count = uap->fifosize >> 1; in pl011_tx_chars()
1400 if (uap->port.x_char) { in pl011_tx_chars()
1401 if (!pl011_tx_char(uap, uap->port.x_char, from_irq)) in pl011_tx_chars()
1403 uap->port.x_char = 0; in pl011_tx_chars()
1406 if (uart_circ_empty(xmit) || uart_tx_stopped(&uap->port)) { in pl011_tx_chars()
1407 pl011_stop_tx(&uap->port); in pl011_tx_chars()
1412 if (pl011_dma_tx_irq(uap)) in pl011_tx_chars()
1419 if (!pl011_tx_char(uap, xmit->buf[xmit->tail], from_irq)) in pl011_tx_chars()
1426 uart_write_wakeup(&uap->port); in pl011_tx_chars()
1429 pl011_stop_tx(&uap->port); in pl011_tx_chars()
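pl011_tx_chars() pushes at most half a FIFO of characters per TX interrupt and returns whether the TX interrupt should remain enabled. A sketch of the transmit loop, assuming the circ_buf helpers from serial_core (uart_circ_empty(), uart_circ_chars_pending(), WAKEUP_CHARS, UART_XMIT_SIZE):

	do {
		if (likely(from_irq) && count-- == 0)
			break;		/* budget of fifosize/2 per interrupt */

		if (!pl011_tx_char(uap, xmit->buf[xmit->tail], from_irq))
			break;		/* FIFO full */

		xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1);
	} while (!uart_circ_empty(xmit));

	if (uart_circ_chars_pending(xmit) < WAKEUP_CHARS)
		uart_write_wakeup(&uap->port);

	if (uart_circ_empty(xmit)) {
		pl011_stop_tx(&uap->port);
		return false;
	}
	return true;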
1435 static void pl011_modem_status(struct uart_amba_port *uap) in pl011_modem_status() argument
1439 status = pl011_read(uap, REG_FR) & UART01x_FR_MODEM_ANY; in pl011_modem_status()
1441 delta = status ^ uap->old_status; in pl011_modem_status()
1442 uap->old_status = status; in pl011_modem_status()
1448 uart_handle_dcd_change(&uap->port, status & UART01x_FR_DCD); in pl011_modem_status()
1450 if (delta & uap->vendor->fr_dsr) in pl011_modem_status()
1451 uap->port.icount.dsr++; in pl011_modem_status()
1453 if (delta & uap->vendor->fr_cts) in pl011_modem_status()
1454 uart_handle_cts_change(&uap->port, in pl011_modem_status()
1455 status & uap->vendor->fr_cts); in pl011_modem_status()
1457 wake_up_interruptible(&uap->port.state->port.delta_msr_wait); in pl011_modem_status()
1460 static void check_apply_cts_event_workaround(struct uart_amba_port *uap) in check_apply_cts_event_workaround() argument
1462 if (!uap->vendor->cts_event_workaround) in check_apply_cts_event_workaround()
1466 pl011_write(0x00, uap, REG_ICR); in check_apply_cts_event_workaround()
1473 pl011_read(uap, REG_ICR); in check_apply_cts_event_workaround()
1474 pl011_read(uap, REG_ICR); in check_apply_cts_event_workaround()
1479 struct uart_amba_port *uap = dev_id; in pl011_int() local
1484 spin_lock_irqsave(&uap->port.lock, flags); in pl011_int()
1485 status = pl011_read(uap, REG_RIS) & uap->im; in pl011_int()
1488 check_apply_cts_event_workaround(uap); in pl011_int()
1492 uap, REG_ICR); in pl011_int()
1495 if (pl011_dma_rx_running(uap)) in pl011_int()
1496 pl011_dma_rx_irq(uap); in pl011_int()
1498 pl011_rx_chars(uap); in pl011_int()
1502 pl011_modem_status(uap); in pl011_int()
1504 pl011_tx_chars(uap, true); in pl011_int()
1509 status = pl011_read(uap, REG_RIS) & uap->im; in pl011_int()
1514 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_int()
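pl011_int() reads the masked raw interrupt status and dispatches to the RX, modem-status and TX paths, looping a bounded number of times so a busy port cannot starve other interrupts. A condensed sketch, assuming the AMBA_ISR_PASS_LIMIT loop bound conventionally used by these AMBA UART drivers:

	unsigned long flags;
	unsigned int status, pass_counter = AMBA_ISR_PASS_LIMIT;
	int handled = 0;

	spin_lock_irqsave(&uap->port.lock, flags);
	status = pl011_read(uap, REG_RIS) & uap->im;
	if (status) {
		do {
			check_apply_cts_event_workaround(uap);

			/* Ack everything except the self-clearing RX/TX bits. */
			pl011_write(status & ~(UART011_TXIS | UART011_RTIS |
					       UART011_RXIS),
				    uap, REG_ICR);

			if (status & (UART011_RTIS | UART011_RXIS)) {
				if (pl011_dma_rx_running(uap))
					pl011_dma_rx_irq(uap);
				else
					pl011_rx_chars(uap);
			}
			if (status & (UART011_DSRMIS | UART011_DCDMIS |
				      UART011_CTSMIS | UART011_RIMIS))
				pl011_modem_status(uap);
			if (status & UART011_TXIS)
				pl011_tx_chars(uap, true);

			if (pass_counter-- == 0)
				break;

			status = pl011_read(uap, REG_RIS) & uap->im;
		} while (status != 0);
		handled = 1;
	}
	spin_unlock_irqrestore(&uap->port.lock, flags);

	return IRQ_RETVAL(handled);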
1521 struct uart_amba_port *uap = in pl011_tx_empty() local
1525 unsigned int status = pl011_read(uap, REG_FR) ^ uap->vendor->inv_fr; in pl011_tx_empty()
1527 return status & (uap->vendor->fr_busy | UART01x_FR_TXFF) ? in pl011_tx_empty()
1533 struct uart_amba_port *uap = in pl011_get_mctrl() local
1536 unsigned int status = pl011_read(uap, REG_FR); in pl011_get_mctrl()
1543 TIOCMBIT(uap->vendor->fr_dsr, TIOCM_DSR); in pl011_get_mctrl()
1544 TIOCMBIT(uap->vendor->fr_cts, TIOCM_CTS); in pl011_get_mctrl()
1545 TIOCMBIT(uap->vendor->fr_ri, TIOCM_RNG); in pl011_get_mctrl()
1552 struct uart_amba_port *uap = in pl011_set_mctrl() local
1556 cr = pl011_read(uap, REG_CR); in pl011_set_mctrl()
1576 pl011_write(cr, uap, REG_CR); in pl011_set_mctrl()
1581 struct uart_amba_port *uap = in pl011_break_ctl() local
1586 spin_lock_irqsave(&uap->port.lock, flags); in pl011_break_ctl()
1587 lcr_h = pl011_read(uap, REG_LCRH_TX); in pl011_break_ctl()
1592 pl011_write(lcr_h, uap, REG_LCRH_TX); in pl011_break_ctl()
1593 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_break_ctl()
1600 struct uart_amba_port *uap = in pl011_quiesce_irqs() local
1603 pl011_write(pl011_read(uap, REG_MIS), uap, REG_ICR); in pl011_quiesce_irqs()
1617 pl011_write(pl011_read(uap, REG_IMSC) & ~UART011_TXIM, uap, in pl011_quiesce_irqs()
1623 struct uart_amba_port *uap = in pl011_get_poll_char() local
1633 status = pl011_read(uap, REG_FR); in pl011_get_poll_char()
1637 return pl011_read(uap, REG_DR); in pl011_get_poll_char()
1643 struct uart_amba_port *uap = in pl011_put_poll_char() local
1646 while (pl011_read(uap, REG_FR) & UART01x_FR_TXFF) in pl011_put_poll_char()
1649 pl011_write(ch, uap, REG_DR); in pl011_put_poll_char()
1656 struct uart_amba_port *uap = in pl011_hwinit() local
1666 retval = clk_prepare_enable(uap->clk); in pl011_hwinit()
1670 uap->port.uartclk = clk_get_rate(uap->clk); in pl011_hwinit()
1675 uap, REG_ICR); in pl011_hwinit()
1681 uap->im = pl011_read(uap, REG_IMSC); in pl011_hwinit()
1682 pl011_write(UART011_RTIM | UART011_RXIM, uap, REG_IMSC); in pl011_hwinit()
1684 if (dev_get_platdata(uap->port.dev)) { in pl011_hwinit()
1687 plat = dev_get_platdata(uap->port.dev); in pl011_hwinit()
1694 static bool pl011_split_lcrh(const struct uart_amba_port *uap) in pl011_split_lcrh() argument
1696 return pl011_reg_to_offset(uap, REG_LCRH_RX) != in pl011_split_lcrh()
1697 pl011_reg_to_offset(uap, REG_LCRH_TX); in pl011_split_lcrh()
1700 static void pl011_write_lcr_h(struct uart_amba_port *uap, unsigned int lcr_h) in pl011_write_lcr_h() argument
1702 pl011_write(lcr_h, uap, REG_LCRH_RX); in pl011_write_lcr_h()
1703 if (pl011_split_lcrh(uap)) { in pl011_write_lcr_h()
1710 pl011_write(0xff, uap, REG_MIS); in pl011_write_lcr_h()
1711 pl011_write(lcr_h, uap, REG_LCRH_TX); in pl011_write_lcr_h()
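When REG_LCRH_RX and REG_LCRH_TX sit at different offsets (pl011_split_lcrh()), the line-control value must be written twice with a short delay in between, which the driver gets by repeatedly writing a read-only register. A sketch of pl011_write_lcr_h(); the iteration count is assumed, the 0xff write to REG_MIS appears in the matches:

static void pl011_write_lcr_h(struct uart_amba_port *uap, unsigned int lcr_h)
{
	pl011_write(lcr_h, uap, REG_LCRH_RX);
	if (pl011_split_lcrh(uap)) {
		int i;
		/*
		 * Wait 10 PCLKs before writing LCRH_TX; approximate the
		 * delay with dummy writes to the read-only MIS register.
		 */
		for (i = 0; i < 10; ++i)
			pl011_write(0xff, uap, REG_MIS);
		pl011_write(lcr_h, uap, REG_LCRH_TX);
	}
}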
1715 static int pl011_allocate_irq(struct uart_amba_port *uap) in pl011_allocate_irq() argument
1717 pl011_write(uap->im, uap, REG_IMSC); in pl011_allocate_irq()
1719 return request_irq(uap->port.irq, pl011_int, IRQF_SHARED, "uart-pl011", uap); in pl011_allocate_irq()
1727 static void pl011_enable_interrupts(struct uart_amba_port *uap) in pl011_enable_interrupts() argument
1732 spin_lock_irqsave(&uap->port.lock, flags); in pl011_enable_interrupts()
1735 pl011_write(UART011_RTIS | UART011_RXIS, uap, REG_ICR); in pl011_enable_interrupts()
1743 for (i = 0; i < uap->fifosize * 2; ++i) { in pl011_enable_interrupts()
1744 if (pl011_read(uap, REG_FR) & UART01x_FR_RXFE) in pl011_enable_interrupts()
1747 pl011_read(uap, REG_DR); in pl011_enable_interrupts()
1750 uap->im = UART011_RTIM; in pl011_enable_interrupts()
1751 if (!pl011_dma_rx_running(uap)) in pl011_enable_interrupts()
1752 uap->im |= UART011_RXIM; in pl011_enable_interrupts()
1753 pl011_write(uap->im, uap, REG_IMSC); in pl011_enable_interrupts()
1754 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_enable_interrupts()
1759 struct uart_amba_port *uap = container_of(port, struct uart_amba_port, port); in pl011_unthrottle_rx() local
1761 pl011_enable_interrupts(uap); in pl011_unthrottle_rx()
1766 struct uart_amba_port *uap = in pl011_startup() local
1775 retval = pl011_allocate_irq(uap); in pl011_startup()
1779 pl011_write(uap->vendor->ifls, uap, REG_IFLS); in pl011_startup()
1781 spin_lock_irq(&uap->port.lock); in pl011_startup()
1784 cr = uap->old_cr & (UART011_CR_RTS | UART011_CR_DTR); in pl011_startup()
1786 pl011_write(cr, uap, REG_CR); in pl011_startup()
1788 spin_unlock_irq(&uap->port.lock); in pl011_startup()
1793 uap->old_status = pl011_read(uap, REG_FR) & UART01x_FR_MODEM_ANY; in pl011_startup()
1796 pl011_dma_startup(uap); in pl011_startup()
1798 pl011_enable_interrupts(uap); in pl011_startup()
1803 clk_disable_unprepare(uap->clk); in pl011_startup()
1809 struct uart_amba_port *uap = in sbsa_uart_startup() local
1817 retval = pl011_allocate_irq(uap); in sbsa_uart_startup()
1822 uap->old_status = 0; in sbsa_uart_startup()
1824 pl011_enable_interrupts(uap); in sbsa_uart_startup()
1829 static void pl011_shutdown_channel(struct uart_amba_port *uap, in pl011_shutdown_channel() argument
1834 val = pl011_read(uap, lcrh); in pl011_shutdown_channel()
1836 pl011_write(val, uap, lcrh); in pl011_shutdown_channel()
1844 static void pl011_disable_uart(struct uart_amba_port *uap) in pl011_disable_uart() argument
1848 uap->port.status &= ~(UPSTAT_AUTOCTS | UPSTAT_AUTORTS); in pl011_disable_uart()
1849 spin_lock_irq(&uap->port.lock); in pl011_disable_uart()
1850 cr = pl011_read(uap, REG_CR); in pl011_disable_uart()
1851 uap->old_cr = cr; in pl011_disable_uart()
1854 pl011_write(cr, uap, REG_CR); in pl011_disable_uart()
1855 spin_unlock_irq(&uap->port.lock); in pl011_disable_uart()
1860 pl011_shutdown_channel(uap, REG_LCRH_RX); in pl011_disable_uart()
1861 if (pl011_split_lcrh(uap)) in pl011_disable_uart()
1862 pl011_shutdown_channel(uap, REG_LCRH_TX); in pl011_disable_uart()
1865 static void pl011_disable_interrupts(struct uart_amba_port *uap) in pl011_disable_interrupts() argument
1867 spin_lock_irq(&uap->port.lock); in pl011_disable_interrupts()
1870 uap->im = 0; in pl011_disable_interrupts()
1871 pl011_write(uap->im, uap, REG_IMSC); in pl011_disable_interrupts()
1872 pl011_write(0xffff, uap, REG_ICR); in pl011_disable_interrupts()
1874 spin_unlock_irq(&uap->port.lock); in pl011_disable_interrupts()
1879 struct uart_amba_port *uap = in pl011_shutdown() local
1882 pl011_disable_interrupts(uap); in pl011_shutdown()
1884 pl011_dma_shutdown(uap); in pl011_shutdown()
1886 free_irq(uap->port.irq, uap); in pl011_shutdown()
1888 pl011_disable_uart(uap); in pl011_shutdown()
1893 clk_disable_unprepare(uap->clk); in pl011_shutdown()
1897 if (dev_get_platdata(uap->port.dev)) { in pl011_shutdown()
1900 plat = dev_get_platdata(uap->port.dev); in pl011_shutdown()
1905 if (uap->port.ops->flush_buffer) in pl011_shutdown()
1906 uap->port.ops->flush_buffer(port); in pl011_shutdown()
1911 struct uart_amba_port *uap = in sbsa_uart_shutdown() local
1914 pl011_disable_interrupts(uap); in sbsa_uart_shutdown()
1916 free_irq(uap->port.irq, uap); in sbsa_uart_shutdown()
1918 if (uap->port.ops->flush_buffer) in sbsa_uart_shutdown()
1919 uap->port.ops->flush_buffer(port); in sbsa_uart_shutdown()
1958 struct uart_amba_port *uap = in pl011_set_termios() local
1964 if (uap->vendor->oversampling) in pl011_set_termios()
1978 if (uap->dmarx.auto_poll_rate) in pl011_set_termios()
1979 uap->dmarx.poll_rate = DIV_ROUND_UP(10000000, baud); in pl011_set_termios()
2010 if (uap->fifosize > 1) in pl011_set_termios()
2026 old_cr = pl011_read(uap, REG_CR); in pl011_set_termios()
2027 pl011_write(0, uap, REG_CR); in pl011_set_termios()
2040 if (uap->vendor->oversampling) { in pl011_set_termios()
2053 if (uap->vendor->oversampling) { in pl011_set_termios()
2060 pl011_write(quot & 0x3f, uap, REG_FBRD); in pl011_set_termios()
2061 pl011_write(quot >> 6, uap, REG_IBRD); in pl011_set_termios()
2069 pl011_write_lcr_h(uap, lcr_h); in pl011_set_termios()
2070 pl011_write(old_cr, uap, REG_CR); in pl011_set_termios()
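pl011_set_termios() computes one combined divisor and splits it across IBRD and FBRD, with the fractional part in the low six bits; the FBRD/IBRD writes appear in the matches above. A sketch of the divisor math, with the oversampling and vendor special cases handled elsewhere in the function omitted; port here is the uart_port argument:

	unsigned int quot;

	/* quot is the 16x-oversampling divisor scaled by 64: IBRD*64 + FBRD. */
	if (baud > port->uartclk / 16)
		quot = DIV_ROUND_CLOSEST(port->uartclk * 8, baud);
	else
		quot = DIV_ROUND_CLOSEST(port->uartclk * 4, baud);

	pl011_write(quot & 0x3f, uap, REG_FBRD);	/* fractional: low 6 bits */
	pl011_write(quot >> 6, uap, REG_IBRD);		/* integer part */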
2079 struct uart_amba_port *uap = in sbsa_uart_set_termios() local
2083 tty_termios_encode_baud_rate(termios, uap->fixed_baud, uap->fixed_baud); in sbsa_uart_set_termios()
2091 uart_update_timeout(port, CS8, uap->fixed_baud); in sbsa_uart_set_termios()
2098 struct uart_amba_port *uap = in pl011_type() local
2100 return uap->port.type == PORT_AMBA ? uap->type : NULL; in pl011_type()
2189 struct uart_amba_port *uap = in pl011_console_putchar() local
2192 while (pl011_read(uap, REG_FR) & UART01x_FR_TXFF) in pl011_console_putchar()
2194 pl011_write(ch, uap, REG_DR); in pl011_console_putchar()
2200 struct uart_amba_port *uap = amba_ports[co->index]; in pl011_console_write() local
2205 clk_enable(uap->clk); in pl011_console_write()
2208 if (uap->port.sysrq) in pl011_console_write()
2211 locked = spin_trylock(&uap->port.lock); in pl011_console_write()
2213 spin_lock(&uap->port.lock); in pl011_console_write()
2218 if (!uap->vendor->always_enabled) { in pl011_console_write()
2219 old_cr = pl011_read(uap, REG_CR); in pl011_console_write()
2222 pl011_write(new_cr, uap, REG_CR); in pl011_console_write()
2225 uart_console_write(&uap->port, s, count, pl011_console_putchar); in pl011_console_write()
2232 while ((pl011_read(uap, REG_FR) ^ uap->vendor->inv_fr) in pl011_console_write()
2233 & uap->vendor->fr_busy) in pl011_console_write()
2235 if (!uap->vendor->always_enabled) in pl011_console_write()
2236 pl011_write(old_cr, uap, REG_CR); in pl011_console_write()
2239 spin_unlock(&uap->port.lock); in pl011_console_write()
2242 clk_disable(uap->clk); in pl011_console_write()
2245 static void pl011_console_get_options(struct uart_amba_port *uap, int *baud, in pl011_console_get_options() argument
2248 if (pl011_read(uap, REG_CR) & UART01x_CR_UARTEN) { in pl011_console_get_options()
2251 lcr_h = pl011_read(uap, REG_LCRH_TX); in pl011_console_get_options()
2266 ibrd = pl011_read(uap, REG_IBRD); in pl011_console_get_options()
2267 fbrd = pl011_read(uap, REG_FBRD); in pl011_console_get_options()
2269 *baud = uap->port.uartclk * 4 / (64 * ibrd + fbrd); in pl011_console_get_options()
2271 if (uap->vendor->oversampling) { in pl011_console_get_options()
2272 if (pl011_read(uap, REG_CR) in pl011_console_get_options()
2281 struct uart_amba_port *uap; in pl011_console_setup() local
2295 uap = amba_ports[co->index]; in pl011_console_setup()
2296 if (!uap) in pl011_console_setup()
2300 pinctrl_pm_select_default_state(uap->port.dev); in pl011_console_setup()
2302 ret = clk_prepare(uap->clk); in pl011_console_setup()
2306 if (dev_get_platdata(uap->port.dev)) { in pl011_console_setup()
2309 plat = dev_get_platdata(uap->port.dev); in pl011_console_setup()
2314 uap->port.uartclk = clk_get_rate(uap->clk); in pl011_console_setup()
2316 if (uap->vendor->fixed_options) { in pl011_console_setup()
2317 baud = uap->fixed_baud; in pl011_console_setup()
2323 pl011_console_get_options(uap, &baud, &parity, &bits); in pl011_console_setup()
2326 return uart_set_options(&uap->port, co, baud, parity, bits, flow); in pl011_console_setup()
2564 static void pl011_unregister_port(struct uart_amba_port *uap) in pl011_unregister_port() argument
2570 if (amba_ports[i] == uap) in pl011_unregister_port()
2575 pl011_dma_remove(uap); in pl011_unregister_port()
2591 static int pl011_setup_port(struct device *dev, struct uart_amba_port *uap, in pl011_setup_port() argument
2602 uap->old_cr = 0; in pl011_setup_port()
2603 uap->port.dev = dev; in pl011_setup_port()
2604 uap->port.mapbase = mmiobase->start; in pl011_setup_port()
2605 uap->port.membase = base; in pl011_setup_port()
2606 uap->port.fifosize = uap->fifosize; in pl011_setup_port()
2607 uap->port.has_sysrq = IS_ENABLED(CONFIG_SERIAL_AMBA_PL011_CONSOLE); in pl011_setup_port()
2608 uap->port.flags = UPF_BOOT_AUTOCONF; in pl011_setup_port()
2609 uap->port.line = index; in pl011_setup_port()
2611 amba_ports[index] = uap; in pl011_setup_port()
2616 static int pl011_register_port(struct uart_amba_port *uap) in pl011_register_port() argument
2621 pl011_write(0, uap, REG_IMSC); in pl011_register_port()
2622 pl011_write(0xffff, uap, REG_ICR); in pl011_register_port()
2627 dev_err(uap->port.dev, in pl011_register_port()
2630 if (amba_ports[i] == uap) in pl011_register_port()
2636 ret = uart_add_one_port(&amba_reg, &uap->port); in pl011_register_port()
2638 pl011_unregister_port(uap); in pl011_register_port()
2645 struct uart_amba_port *uap; in pl011_probe() local
2653 uap = devm_kzalloc(&dev->dev, sizeof(struct uart_amba_port), in pl011_probe()
2655 if (!uap) in pl011_probe()
2658 uap->clk = devm_clk_get(&dev->dev, NULL); in pl011_probe()
2659 if (IS_ERR(uap->clk)) in pl011_probe()
2660 return PTR_ERR(uap->clk); in pl011_probe()
2662 uap->reg_offset = vendor->reg_offset; in pl011_probe()
2663 uap->vendor = vendor; in pl011_probe()
2664 uap->fifosize = vendor->get_fifosize(dev); in pl011_probe()
2665 uap->port.iotype = vendor->access_32b ? UPIO_MEM32 : UPIO_MEM; in pl011_probe()
2666 uap->port.irq = dev->irq[0]; in pl011_probe()
2667 uap->port.ops = &amba_pl011_pops; in pl011_probe()
2669 snprintf(uap->type, sizeof(uap->type), "PL011 rev%u", amba_rev(dev)); in pl011_probe()
2671 ret = pl011_setup_port(&dev->dev, uap, &dev->res, portnr); in pl011_probe()
2675 amba_set_drvdata(dev, uap); in pl011_probe()
2677 return pl011_register_port(uap); in pl011_probe()
2682 struct uart_amba_port *uap = amba_get_drvdata(dev); in pl011_remove() local
2684 uart_remove_one_port(&amba_reg, &uap->port); in pl011_remove()
2685 pl011_unregister_port(uap); in pl011_remove()
2691 struct uart_amba_port *uap = dev_get_drvdata(dev); in pl011_suspend() local
2693 if (!uap) in pl011_suspend()
2696 return uart_suspend_port(&amba_reg, &uap->port); in pl011_suspend()
2701 struct uart_amba_port *uap = dev_get_drvdata(dev); in pl011_resume() local
2703 if (!uap) in pl011_resume()
2706 return uart_resume_port(&amba_reg, &uap->port); in pl011_resume()
2714 struct uart_amba_port *uap; in sbsa_uart_probe() local
2737 uap = devm_kzalloc(&pdev->dev, sizeof(struct uart_amba_port), in sbsa_uart_probe()
2739 if (!uap) in sbsa_uart_probe()
2745 uap->port.irq = ret; in sbsa_uart_probe()
2750 uap->vendor = &vendor_qdt_qdf2400_e44; in sbsa_uart_probe()
2753 uap->vendor = &vendor_sbsa; in sbsa_uart_probe()
2755 uap->reg_offset = uap->vendor->reg_offset; in sbsa_uart_probe()
2756 uap->fifosize = 32; in sbsa_uart_probe()
2757 uap->port.iotype = uap->vendor->access_32b ? UPIO_MEM32 : UPIO_MEM; in sbsa_uart_probe()
2758 uap->port.ops = &sbsa_uart_pops; in sbsa_uart_probe()
2759 uap->fixed_baud = baudrate; in sbsa_uart_probe()
2761 snprintf(uap->type, sizeof(uap->type), "SBSA"); in sbsa_uart_probe()
2765 ret = pl011_setup_port(&pdev->dev, uap, r, portnr); in sbsa_uart_probe()
2769 platform_set_drvdata(pdev, uap); in sbsa_uart_probe()
2771 return pl011_register_port(uap); in sbsa_uart_probe()
2776 struct uart_amba_port *uap = platform_get_drvdata(pdev); in sbsa_uart_remove() local
2778 uart_remove_one_port(&amba_reg, &uap->port); in sbsa_uart_remove()
2779 pl011_unregister_port(uap); in sbsa_uart_remove()