Lines matching refs: indio_dev (each entry shows the source line number, the matching code, and the enclosing function in the at91 ADC driver)

405 	struct iio_dev *indio_dev;  member
467 static int at91_adc_chan_xlate(struct iio_dev *indio_dev, int chan) in at91_adc_chan_xlate() argument
471 for (i = 0; i < indio_dev->num_channels; i++) { in at91_adc_chan_xlate()
472 if (indio_dev->channels[i].scan_index == chan) in at91_adc_chan_xlate()
479 at91_adc_chan_get(struct iio_dev *indio_dev, int chan) in at91_adc_chan_get() argument
481 int index = at91_adc_chan_xlate(indio_dev, chan); in at91_adc_chan_get()
485 return indio_dev->channels + index; in at91_adc_chan_get()
488 static inline int at91_adc_of_xlate(struct iio_dev *indio_dev, in at91_adc_of_xlate() argument
491 return at91_adc_chan_xlate(indio_dev, iiospec->args[0]); in at91_adc_of_xlate()
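
at91_adc_chan_xlate() maps a scan index (as used in the active scan mask and in firmware channel specifiers) to an index in the channels array, and at91_adc_chan_get() turns that index into a channel pointer. A minimal sketch of the same lookup folded into one helper; the example_ name is illustrative only:

#include <linux/iio/iio.h>

/*
 * Sketch only: return the iio_chan_spec whose scan_index matches 'chan',
 * or NULL when no channel uses that scan slot. The driver above splits
 * this into an index lookup (chan_xlate) and a pointer helper (chan_get).
 */
static const struct iio_chan_spec *
example_chan_by_scan_index(struct iio_dev *indio_dev, int chan)
{
        int i;

        for (i = 0; i < indio_dev->num_channels; i++)
                if (indio_dev->channels[i].scan_index == chan)
                        return &indio_dev->channels[i];

        return NULL;
}
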
494 static unsigned int at91_adc_active_scan_mask_to_reg(struct iio_dev *indio_dev) in at91_adc_active_scan_mask_to_reg() argument
499 for_each_set_bit(bit, indio_dev->active_scan_mask, in at91_adc_active_scan_mask_to_reg()
500 indio_dev->num_channels) { in at91_adc_active_scan_mask_to_reg()
502 at91_adc_chan_get(indio_dev, bit); in at91_adc_active_scan_mask_to_reg()
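
at91_adc_active_scan_mask_to_reg() walks the active scan mask and accumulates a hardware channel-enable mask from the channels it finds. A hedged sketch of that loop; the BIT(chan->channel) mapping is an assumption about the register layout, not taken from the driver:

#include <linux/bitops.h>
#include <linux/iio/iio.h>

static unsigned int example_scan_mask_to_reg(struct iio_dev *indio_dev)
{
        unsigned int mask = 0;
        unsigned int bit;

        for_each_set_bit(bit, indio_dev->active_scan_mask,
                         indio_dev->num_channels) {
                const struct iio_chan_spec *chan =
                        at91_adc_chan_get(indio_dev, bit);

                if (chan)
                        mask |= BIT(chan->channel);  /* assumed bit layout */
        }

        return mask;
}
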
647 dev_dbg(&st->indio_dev->dev, "pos is 0\n"); in at91_adc_touch_pos()
653 dev_err(&st->indio_dev->dev, "scale is 0\n"); in at91_adc_touch_pos()
794 struct iio_dev *indio_dev = data; in at91_dma_buffer_done() local
796 iio_trigger_poll_chained(indio_dev->trig); in at91_dma_buffer_done()
799 static int at91_adc_dma_start(struct iio_dev *indio_dev) in at91_adc_dma_start() argument
801 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_dma_start()
819 for_each_set_bit(bit, indio_dev->active_scan_mask, in at91_adc_dma_start()
820 indio_dev->num_channels) { in at91_adc_dma_start()
822 at91_adc_chan_get(indio_dev, bit); in at91_adc_dma_start()
839 dev_err(&indio_dev->dev, "cannot prepare DMA cyclic\n"); in at91_adc_dma_start()
844 desc->callback_param = indio_dev; in at91_adc_dma_start()
849 dev_err(&indio_dev->dev, "cannot submit DMA cyclic\n"); in at91_adc_dma_start()
860 st->dma_st.dma_ts = iio_get_time_ns(indio_dev); in at91_adc_dma_start()
862 dev_dbg(&indio_dev->dev, "DMA cyclic started\n"); in at91_adc_dma_start()
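
at91_adc_dma_start() sets up a cyclic dmaengine transfer from the ADC into a DMA buffer, installs at91_dma_buffer_done() (which just re-polls the device trigger) as the completion callback, submits the descriptor, and records a start timestamp. A condensed sketch of that sequence; the channel, buffer address and period arguments are placeholders supplied by the caller:

#include <linux/dmaengine.h>
#include <linux/iio/iio.h>
#include <linux/iio/trigger.h>

static void example_dma_buffer_done(void *data)
{
        struct iio_dev *indio_dev = data;

        /* as in the listing: hand the completed block to the trigger path */
        iio_trigger_poll_chained(indio_dev->trig);
}

static int example_dma_start(struct iio_dev *indio_dev, struct dma_chan *chan,
                             dma_addr_t buf, size_t buf_len, size_t period)
{
        struct dma_async_tx_descriptor *desc;
        dma_cookie_t cookie;

        desc = dmaengine_prep_dma_cyclic(chan, buf, buf_len, period,
                                         DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
        if (!desc) {
                dev_err(&indio_dev->dev, "cannot prepare DMA cyclic\n");
                return -EBUSY;
        }

        desc->callback = example_dma_buffer_done;
        desc->callback_param = indio_dev;

        cookie = dmaengine_submit(desc);
        if (dma_submit_error(cookie)) {
                dev_err(&indio_dev->dev, "cannot submit DMA cyclic\n");
                return -EBUSY;
        }

        dma_async_issue_pending(chan);
        dev_dbg(&indio_dev->dev, "DMA cyclic started\n");
        return 0;
}
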
879 static bool at91_adc_current_chan_is_touch(struct iio_dev *indio_dev) in at91_adc_current_chan_is_touch() argument
881 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_current_chan_is_touch()
883 return !!bitmap_subset(indio_dev->active_scan_mask, in at91_adc_current_chan_is_touch()
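
at91_adc_current_chan_is_touch() only has to decide whether every enabled scan bit belongs to the touchscreen channel set, which reduces to a bitmap_subset() test. Sketch, with the touch bitmap passed in explicitly since the state layout is not shown in the listing:

#include <linux/bitmap.h>
#include <linux/iio/iio.h>

static bool example_scan_is_touch(struct iio_dev *indio_dev,
                                  const unsigned long *touch_mask)
{
        return bitmap_subset(indio_dev->active_scan_mask, touch_mask,
                             indio_dev->num_channels);
}
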
888 static int at91_adc_buffer_prepare(struct iio_dev *indio_dev) in at91_adc_buffer_prepare() argument
892 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_buffer_prepare()
895 if (at91_adc_current_chan_is_touch(indio_dev)) in at91_adc_buffer_prepare()
899 if (!(indio_dev->currentmode & INDIO_ALL_TRIGGERED_MODES)) in at91_adc_buffer_prepare()
903 ret = at91_adc_dma_start(indio_dev); in at91_adc_buffer_prepare()
905 dev_err(&indio_dev->dev, "buffer prepare failed\n"); in at91_adc_buffer_prepare()
909 for_each_set_bit(bit, indio_dev->active_scan_mask, in at91_adc_buffer_prepare()
910 indio_dev->num_channels) { in at91_adc_buffer_prepare()
912 at91_adc_chan_get(indio_dev, bit); in at91_adc_buffer_prepare()
936 if (at91_adc_buffer_check_use_irq(indio_dev, st)) in at91_adc_buffer_prepare()
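
at91_adc_buffer_prepare() splits into three cases: touchscreen scans go to a dedicated setup path, plain software buffering needs no hardware work, and triggered capture starts DMA and then enables every channel from the active scan mask (the driver also decides between IRQ- and DMA-driven capture via at91_adc_buffer_check_use_irq(), omitted here). A rough control-flow sketch, with the per-channel register write left as a comment:

#include <linux/iio/iio.h>

static int example_buffer_prepare(struct iio_dev *indio_dev)
{
        unsigned int bit;
        int ret;

        /* touchscreen scans use a dedicated setup path in the driver */
        if (at91_adc_current_chan_is_touch(indio_dev))
                return 0;

        /* software (non-triggered) buffering needs no hardware setup here */
        if (!(indio_dev->currentmode & INDIO_ALL_TRIGGERED_MODES))
                return 0;

        ret = at91_adc_dma_start(indio_dev);
        if (ret) {
                dev_err(&indio_dev->dev, "buffer prepare failed\n");
                return ret;
        }

        for_each_set_bit(bit, indio_dev->active_scan_mask,
                         indio_dev->num_channels) {
                const struct iio_chan_spec *chan =
                        at91_adc_chan_get(indio_dev, bit);

                if (!chan)
                        continue;
                /* enable the channel in hardware (register write omitted) */
        }

        return 0;
}
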
942 static int at91_adc_buffer_postdisable(struct iio_dev *indio_dev) in at91_adc_buffer_postdisable() argument
944 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_buffer_postdisable()
948 if (at91_adc_current_chan_is_touch(indio_dev)) in at91_adc_buffer_postdisable()
952 if (!(indio_dev->currentmode & INDIO_ALL_TRIGGERED_MODES)) in at91_adc_buffer_postdisable()
961 for_each_set_bit(bit, indio_dev->active_scan_mask, in at91_adc_buffer_postdisable()
962 indio_dev->num_channels) { in at91_adc_buffer_postdisable()
964 at91_adc_chan_get(indio_dev, bit); in at91_adc_buffer_postdisable()
979 if (at91_adc_buffer_check_use_irq(indio_dev, st)) in at91_adc_buffer_postdisable()
1032 static void at91_adc_trigger_handler_nodma(struct iio_dev *indio_dev, in at91_adc_trigger_handler_nodma() argument
1035 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_trigger_handler_nodma()
1039 u32 mask = at91_adc_active_scan_mask_to_reg(indio_dev); in at91_adc_trigger_handler_nodma()
1056 for_each_set_bit(bit, indio_dev->active_scan_mask, in at91_adc_trigger_handler_nodma()
1057 indio_dev->num_channels) { in at91_adc_trigger_handler_nodma()
1059 at91_adc_chan_get(indio_dev, bit); in at91_adc_trigger_handler_nodma()
1083 iio_push_to_buffers_with_timestamp(indio_dev, st->buffer, in at91_adc_trigger_handler_nodma()
1087 static void at91_adc_trigger_handler_dma(struct iio_dev *indio_dev) in at91_adc_trigger_handler_dma() argument
1089 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_trigger_handler_dma()
1091 s64 ns = iio_get_time_ns(indio_dev); in at91_adc_trigger_handler_dma()
1099 indio_dev->name); in at91_adc_trigger_handler_dma()
1120 iio_push_to_buffers_with_timestamp(indio_dev, in at91_adc_trigger_handler_dma()
1133 st->dma_st.dma_ts = iio_get_time_ns(indio_dev); in at91_adc_trigger_handler_dma()
1139 struct iio_dev *indio_dev = pf->indio_dev; in at91_adc_trigger_handler() local
1140 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_trigger_handler()
1146 if (iio_trigger_validate_own_device(indio_dev->trig, indio_dev)) in at91_adc_trigger_handler()
1150 at91_adc_trigger_handler_dma(indio_dev); in at91_adc_trigger_handler()
1152 at91_adc_trigger_handler_nodma(indio_dev, pf); in at91_adc_trigger_handler()
1154 iio_trigger_notify_done(indio_dev->trig); in at91_adc_trigger_handler()
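
at91_adc_trigger_handler() is the pollfunc body: it validates that the trigger belongs to this device, dispatches to the DMA or non-DMA path, and finally calls iio_trigger_notify_done(). The non-DMA path reads each enabled channel and pushes one scan plus timestamp. A generic sketch of that combined shape; the conversion read itself is omitted:

#include <linux/bitops.h>
#include <linux/interrupt.h>
#include <linux/iio/iio.h>
#include <linux/iio/buffer.h>
#include <linux/iio/trigger_consumer.h>

static irqreturn_t example_trigger_handler(int irq, void *p)
{
        struct iio_poll_func *pf = p;
        struct iio_dev *indio_dev = pf->indio_dev;
        struct {
                u16 chans[16];
                s64 ts __aligned(8);
        } scan = { };
        unsigned int bit;
        int i = 0;

        for_each_set_bit(bit, indio_dev->active_scan_mask,
                         indio_dev->num_channels) {
                const struct iio_chan_spec *chan =
                        at91_adc_chan_get(indio_dev, bit);

                if (!chan)
                        continue;
                /* the driver reads the channel data register here */
                scan.chans[i++] = 0;
        }

        iio_push_to_buffers_with_timestamp(indio_dev, &scan, pf->timestamp);
        iio_trigger_notify_done(indio_dev->trig);
        return IRQ_HANDLED;
}
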
1190 static void at91_adc_setup_samp_freq(struct iio_dev *indio_dev, unsigned freq) in at91_adc_setup_samp_freq() argument
1192 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_setup_samp_freq()
1207 dev_dbg(&indio_dev->dev, "freq: %u, startup: %u, prescal: %u\n", in at91_adc_setup_samp_freq()
1217 static void at91_adc_touch_data_handler(struct iio_dev *indio_dev) in at91_adc_touch_data_handler() argument
1219 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_touch_data_handler()
1224 for_each_set_bit(bit, indio_dev->active_scan_mask, in at91_adc_touch_data_handler()
1227 at91_adc_chan_get(indio_dev, bit); in at91_adc_touch_data_handler()
1261 static void at91_adc_no_pen_detect_interrupt(struct iio_dev *indio_dev) in at91_adc_no_pen_detect_interrupt() argument
1263 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_no_pen_detect_interrupt()
1272 at91_adc_touch_data_handler(indio_dev); in at91_adc_no_pen_detect_interrupt()
1283 struct iio_dev *indio_dev = st->indio_dev; in at91_adc_workq_handler() local
1285 iio_push_to_buffers(indio_dev, st->buffer); in at91_adc_workq_handler()
1335 static int at91_adc_read_info_raw(struct iio_dev *indio_dev, in at91_adc_read_info_raw() argument
1338 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_read_info_raw()
1348 ret = iio_device_claim_direct_mode(indio_dev); in at91_adc_read_info_raw()
1359 iio_device_release_direct_mode(indio_dev); in at91_adc_read_info_raw()
1364 ret = iio_device_claim_direct_mode(indio_dev); in at91_adc_read_info_raw()
1375 iio_device_release_direct_mode(indio_dev); in at91_adc_read_info_raw()
1382 ret = iio_device_claim_direct_mode(indio_dev); in at91_adc_read_info_raw()
1421 iio_device_release_direct_mode(indio_dev); in at91_adc_read_info_raw()
1425 static int at91_adc_read_raw(struct iio_dev *indio_dev, in at91_adc_read_raw() argument
1429 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_read_raw()
1433 return at91_adc_read_info_raw(indio_dev, chan, val); in at91_adc_read_raw()
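
at91_adc_read_info_raw() brackets each one-shot conversion with iio_device_claim_direct_mode()/iio_device_release_direct_mode() so that a raw read cannot race with buffered capture. The general shape of such a read_raw callback, with the actual conversion left as a comment:

#include <linux/iio/iio.h>

static int example_read_raw(struct iio_dev *indio_dev,
                            struct iio_chan_spec const *chan,
                            int *val, int *val2, long mask)
{
        int ret;

        switch (mask) {
        case IIO_CHAN_INFO_RAW:
                ret = iio_device_claim_direct_mode(indio_dev);
                if (ret)
                        return ret;

                /* start a single conversion and read the result (omitted) */
                *val = 0;

                iio_device_release_direct_mode(indio_dev);
                return IIO_VAL_INT;
        default:
                return -EINVAL;
        }
}
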
1454 static int at91_adc_write_raw(struct iio_dev *indio_dev, in at91_adc_write_raw() argument
1458 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_write_raw()
1480 at91_adc_setup_samp_freq(indio_dev, val); in at91_adc_write_raw()
1490 struct iio_dev *indio_dev = platform_get_drvdata(pdev); in at91_adc_dma_init() local
1491 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_dma_init()
1551 struct iio_dev *indio_dev = platform_get_drvdata(pdev); in at91_adc_dma_disable() local
1552 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_dma_disable()
1572 static int at91_adc_set_watermark(struct iio_dev *indio_dev, unsigned int val) in at91_adc_set_watermark() argument
1574 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_set_watermark()
1581 dev_dbg(&indio_dev->dev, "we need hw trigger for DMA\n"); in at91_adc_set_watermark()
1585 dev_dbg(&indio_dev->dev, "new watermark is %u\n", val); in at91_adc_set_watermark()
1595 at91_adc_dma_disable(to_platform_device(&indio_dev->dev)); in at91_adc_set_watermark()
1597 at91_adc_dma_init(to_platform_device(&indio_dev->dev)); in at91_adc_set_watermark()
1603 ret = at91_adc_buffer_prepare(indio_dev); in at91_adc_set_watermark()
1605 at91_adc_dma_disable(to_platform_device(&indio_dev->dev)); in at91_adc_set_watermark()
1610 static int at91_adc_update_scan_mode(struct iio_dev *indio_dev, in at91_adc_update_scan_mode() argument
1613 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_update_scan_mode()
1628 static void at91_adc_hw_init(struct iio_dev *indio_dev) in at91_adc_hw_init() argument
1630 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_hw_init()
1641 at91_adc_setup_samp_freq(indio_dev, st->soc_info.min_sample_rate); in at91_adc_hw_init()
1650 struct iio_dev *indio_dev = dev_get_drvdata(dev); in at91_adc_get_fifo_state() local
1651 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_get_fifo_state()
1659 struct iio_dev *indio_dev = dev_get_drvdata(dev); in at91_adc_get_watermark() local
1660 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_get_watermark()
1706 struct iio_dev *indio_dev; in at91_adc_probe() local
1712 indio_dev = devm_iio_device_alloc(&pdev->dev, sizeof(*st)); in at91_adc_probe()
1713 if (!indio_dev) in at91_adc_probe()
1716 indio_dev->name = dev_name(&pdev->dev); in at91_adc_probe()
1717 indio_dev->modes = INDIO_DIRECT_MODE | INDIO_BUFFER_SOFTWARE; in at91_adc_probe()
1718 indio_dev->info = &at91_adc_info; in at91_adc_probe()
1719 indio_dev->channels = at91_adc_channels; in at91_adc_probe()
1720 indio_dev->num_channels = ARRAY_SIZE(at91_adc_channels); in at91_adc_probe()
1722 st = iio_priv(indio_dev); in at91_adc_probe()
1723 st->indio_dev = indio_dev; in at91_adc_probe()
1813 pdev->dev.driver->name, indio_dev); in at91_adc_probe()
1831 at91_adc_hw_init(indio_dev); in at91_adc_probe()
1837 platform_set_drvdata(pdev, indio_dev); in at91_adc_probe()
1839 ret = at91_adc_buffer_init(indio_dev); in at91_adc_probe()
1846 ret = at91_adc_trigger_init(indio_dev); in at91_adc_probe()
1857 iio_buffer_set_attrs(indio_dev->buffer, in at91_adc_probe()
1861 if (dma_coerce_mask_and_coherent(&indio_dev->dev, DMA_BIT_MASK(32))) in at91_adc_probe()
1864 ret = iio_device_register(indio_dev); in at91_adc_probe()
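
The probe lines above follow the standard IIO skeleton: allocate the device and private state together with devm_iio_device_alloc(), fill in name, modes, info and channel table, store the drvdata, and register. A trimmed sketch of just that skeleton; the clock, IRQ, DMA, buffer and trigger setup shown in the listing is omitted:

#include <linux/platform_device.h>
#include <linux/iio/iio.h>

static int example_adc_probe(struct platform_device *pdev)
{
        struct iio_dev *indio_dev;
        struct at91_adc_state *st;

        indio_dev = devm_iio_device_alloc(&pdev->dev, sizeof(*st));
        if (!indio_dev)
                return -ENOMEM;

        indio_dev->name = dev_name(&pdev->dev);
        indio_dev->modes = INDIO_DIRECT_MODE | INDIO_BUFFER_SOFTWARE;
        indio_dev->info = &at91_adc_info;
        indio_dev->channels = at91_adc_channels;
        indio_dev->num_channels = ARRAY_SIZE(at91_adc_channels);

        st = iio_priv(indio_dev);
        st->indio_dev = indio_dev;

        at91_adc_hw_init(indio_dev);
        platform_set_drvdata(pdev, indio_dev);

        return iio_device_register(indio_dev);
}
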
1890 struct iio_dev *indio_dev = platform_get_drvdata(pdev); in at91_adc_remove() local
1891 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_remove()
1893 iio_device_unregister(indio_dev); in at91_adc_remove()
1907 struct iio_dev *indio_dev = dev_get_drvdata(dev); in at91_adc_suspend() local
1908 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_suspend()
1910 if (iio_buffer_enabled(indio_dev)) in at91_adc_suspend()
1911 at91_adc_buffer_postdisable(indio_dev); in at91_adc_suspend()
1930 struct iio_dev *indio_dev = dev_get_drvdata(dev); in at91_adc_resume() local
1931 struct at91_adc_state *st = iio_priv(indio_dev); in at91_adc_resume()
1950 at91_adc_hw_init(indio_dev); in at91_adc_resume()
1953 if (!iio_buffer_enabled(indio_dev)) in at91_adc_resume()
1956 ret = at91_adc_buffer_prepare(indio_dev); in at91_adc_resume()
1967 dev_err(&indio_dev->dev, "failed to resume\n"); in at91_adc_resume()
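
Suspend and resume mirror each other: if buffered capture is active, suspend runs the postdisable teardown, and resume re-runs the hardware init followed by the buffer prepare, reporting failure otherwise. A compressed sketch of that pairing; the driver's clock and regulator handling is elided:

#include <linux/iio/iio.h>
#include <linux/iio/buffer.h>

static int example_adc_suspend(struct device *dev)
{
        struct iio_dev *indio_dev = dev_get_drvdata(dev);

        if (iio_buffer_enabled(indio_dev))
                at91_adc_buffer_postdisable(indio_dev);

        /* clocks and regulators would be released here */
        return 0;
}

static int example_adc_resume(struct device *dev)
{
        struct iio_dev *indio_dev = dev_get_drvdata(dev);
        int ret;

        /* clocks and regulators would be re-enabled here */
        at91_adc_hw_init(indio_dev);

        if (!iio_buffer_enabled(indio_dev))
                return 0;

        ret = at91_adc_buffer_prepare(indio_dev);
        if (ret) {
                dev_err(&indio_dev->dev, "failed to resume\n");
                return ret;
        }

        return 0;
}
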