Lines matching refs: indio_dev. Each entry gives the source line number, the matching line, the enclosing function, and whether indio_dev is a function argument or a local variable at that reference.
46 static int iio_buffer_flush_hwfifo(struct iio_dev *indio_dev, in iio_buffer_flush_hwfifo() argument
49 if (!indio_dev->info->hwfifo_flush_to_buffer) in iio_buffer_flush_hwfifo()
52 return indio_dev->info->hwfifo_flush_to_buffer(indio_dev, required); in iio_buffer_flush_hwfifo()
55 static bool iio_buffer_ready(struct iio_dev *indio_dev, struct iio_buffer *buf, in iio_buffer_ready() argument
62 if (!indio_dev->info) in iio_buffer_ready()
76 iio_buffer_flush_hwfifo(indio_dev, buf, in iio_buffer_ready()
82 flushed = iio_buffer_flush_hwfifo(indio_dev, buf, in iio_buffer_ready()
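The two hwfifo hooks referenced above are optional members of struct iio_info; the core only calls them when the driver provides them, flushing the hardware FIFO into the software buffer when a reader needs more samples than are queued. A minimal sketch of how a driver with a hardware FIFO might wire them up (the mydrv_* names and the empty bodies are illustrative, not taken from this file):

#include <linux/iio/iio.h>

/* Hypothetical callbacks; a real driver would talk to its FIFO registers. */
static int mydrv_hwfifo_set_watermark(struct iio_dev *indio_dev, unsigned int val)
{
        /* program the device to interrupt once 'val' samples are queued */
        return 0;
}

static int mydrv_hwfifo_flush_to_buffer(struct iio_dev *indio_dev, unsigned int count)
{
        /*
         * Drain up to 'count' samples from the hardware FIFO into the IIO
         * buffers (typically via iio_push_to_buffers()) and return how many
         * samples were flushed, or a negative errno.
         */
        return 0;
}

static const struct iio_info mydrv_info = {
        .hwfifo_set_watermark   = mydrv_hwfifo_set_watermark,
        .hwfifo_flush_to_buffer = mydrv_hwfifo_flush_to_buffer,
};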
111 struct iio_dev *indio_dev = ib->indio_dev; in iio_buffer_read() local
117 if (!indio_dev->info) in iio_buffer_read()
142 if (!indio_dev->info) { in iio_buffer_read()
147 if (!iio_buffer_ready(indio_dev, rb, to_wait, n / datum_size)) { in iio_buffer_read()
180 struct iio_dev *indio_dev = ib->indio_dev; in iio_buffer_write() local
185 if (!indio_dev->info) in iio_buffer_write()
197 if (indio_dev->info == NULL) in iio_buffer_write()
240 struct iio_dev *indio_dev = ib->indio_dev; in iio_buffer_poll() local
242 if (!indio_dev->info || rb == NULL) in iio_buffer_poll()
249 if (iio_buffer_ready(indio_dev, rb, rb->watermark, 0)) in iio_buffer_poll()
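iio_buffer_read() and iio_buffer_poll() back the buffer character device that userspace reads scans from. A small userspace sketch, assuming the device node is /dev/iio:device0 and the buffer has already been configured and enabled through sysfs; reads must cover at least one full scan, so the 64-byte buffer here is just an assumed upper bound:

#include <fcntl.h>
#include <poll.h>
#include <stdio.h>
#include <unistd.h>

int main(void)
{
        char scan[64];          /* assumed to be >= one full scan */
        int fd = open("/dev/iio:device0", O_RDONLY);

        if (fd < 0)
                return 1;

        for (;;) {
                struct pollfd pfd = { .fd = fd, .events = POLLIN };

                /* poll() wakes once at least 'watermark' scans are queued */
                if (poll(&pfd, 1, -1) <= 0)
                        break;

                ssize_t n = read(fd, scan, sizeof(scan));
                if (n <= 0)
                        break;
                printf("read %zd bytes\n", n);
        }
        close(fd);
        return 0;
}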
307 void iio_buffer_wakeup_poll(struct iio_dev *indio_dev) in iio_buffer_wakeup_poll() argument
309 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_wakeup_poll()
339 void iio_device_detach_buffers(struct iio_dev *indio_dev) in iio_device_detach_buffers() argument
341 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_device_detach_buffers()
426 static bool iio_validate_scan_mask(struct iio_dev *indio_dev, in iio_validate_scan_mask() argument
429 if (!indio_dev->setup_ops->validate_scan_mask) in iio_validate_scan_mask()
432 return indio_dev->setup_ops->validate_scan_mask(indio_dev, mask); in iio_validate_scan_mask()
445 static int iio_scan_mask_set(struct iio_dev *indio_dev, in iio_scan_mask_set() argument
451 if (!indio_dev->masklength) { in iio_scan_mask_set()
456 trialmask = bitmap_alloc(indio_dev->masklength, GFP_KERNEL); in iio_scan_mask_set()
459 bitmap_copy(trialmask, buffer->scan_mask, indio_dev->masklength); in iio_scan_mask_set()
462 if (!iio_validate_scan_mask(indio_dev, trialmask)) in iio_scan_mask_set()
465 if (indio_dev->available_scan_masks) { in iio_scan_mask_set()
466 mask = iio_scan_mask_match(indio_dev->available_scan_masks, in iio_scan_mask_set()
467 indio_dev->masklength, in iio_scan_mask_set()
472 bitmap_copy(buffer->scan_mask, trialmask, indio_dev->masklength); in iio_scan_mask_set()
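iio_scan_mask_set() copies the requested mask into a trial bitmap, lets the driver validate it, and, when indio_dev->available_scan_masks is set, replaces it with the first hardware-supported mask that covers it (via iio_scan_mask_match()). A hedged driver-side sketch of providing such a list; the channel layout is made up:

#include <linux/bits.h>
#include <linux/iio/iio.h>

/*
 * Zero-terminated list of scan masks the hardware can stream:
 * either channel 0 alone, or channels 0 and 1 together.
 */
static const unsigned long mydrv_scan_masks[] = {
        BIT(0),
        BIT(0) | BIT(1),
        0,
};

/* Fragment of a hypothetical probe() */
static void mydrv_set_scan_masks(struct iio_dev *indio_dev)
{
        indio_dev->available_scan_masks = mydrv_scan_masks;
}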
489 static int iio_scan_mask_query(struct iio_dev *indio_dev, in iio_scan_mask_query() argument
492 if (bit > indio_dev->masklength) in iio_scan_mask_query()
509 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_scan_el_store() local
516 mutex_lock(&indio_dev->mlock); in iio_scan_el_store()
521 ret = iio_scan_mask_query(indio_dev, buffer, this_attr->address); in iio_scan_el_store()
529 ret = iio_scan_mask_set(indio_dev, buffer, this_attr->address); in iio_scan_el_store()
535 mutex_unlock(&indio_dev->mlock); in iio_scan_el_store()
556 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_scan_el_ts_store() local
564 mutex_lock(&indio_dev->mlock); in iio_scan_el_ts_store()
571 mutex_unlock(&indio_dev->mlock); in iio_scan_el_ts_store()
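iio_scan_el_store() and iio_scan_el_ts_store() run when userspace writes the scan_elements/*_en attributes; together with buffer/length and buffer/enable they form the usual configuration sequence. A userspace sketch of that sequence (the iio:device0 path and the in_voltage0 channel name are assumptions for illustration):

#include <stdio.h>

/* Write a string to one sysfs attribute; returns 0 on success. */
static int sysfs_write(const char *path, const char *val)
{
        FILE *f = fopen(path, "w");

        if (!f)
                return -1;
        fputs(val, f);
        return fclose(f);
}

int main(void)
{
        const char *dev = "/sys/bus/iio/devices/iio:device0";
        char path[256];

        /* 1. select the channels (and timestamp) to include in each scan */
        snprintf(path, sizeof(path), "%s/scan_elements/in_voltage0_en", dev);
        sysfs_write(path, "1");
        snprintf(path, sizeof(path), "%s/scan_elements/in_timestamp_en", dev);
        sysfs_write(path, "1");

        /* 2. size the software buffer, then start streaming */
        snprintf(path, sizeof(path), "%s/buffer/length", dev);
        sysfs_write(path, "128");
        snprintf(path, sizeof(path), "%s/buffer/enable", dev);
        sysfs_write(path, "1");
        return 0;
}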
576 static int iio_buffer_add_channel_sysfs(struct iio_dev *indio_dev, in iio_buffer_add_channel_sysfs() argument
588 &indio_dev->dev, in iio_buffer_add_channel_sysfs()
600 &indio_dev->dev, in iio_buffer_add_channel_sysfs()
613 &indio_dev->dev, in iio_buffer_add_channel_sysfs()
623 &indio_dev->dev, in iio_buffer_add_channel_sysfs()
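The per-channel en/index/type attributes created by iio_buffer_add_channel_sysfs() are generated from each channel's scan_index and scan_type. A sketch of the driver-side channel table they are derived from (all values illustrative):

#include <linux/iio/iio.h>

static const struct iio_chan_spec mydrv_channels[] = {
        {
                .type = IIO_VOLTAGE,
                .indexed = 1,
                .channel = 0,
                .scan_index = 0,        /* position of this channel in the scan */
                .scan_type = {
                        .sign = 's',
                        .realbits = 12,
                        .storagebits = 16,
                        .shift = 4,
                        .endianness = IIO_BE,
                },
        },
        IIO_CHAN_SOFT_TIMESTAMP(1),
};

With this description the in_voltage0 type attribute would read back as be:s12/16>>4: big-endian, signed, 12 valid bits stored in 16 bits, shifted right by 4.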
644 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in length_store() local
656 mutex_lock(&indio_dev->mlock); in length_store()
668 mutex_unlock(&indio_dev->mlock); in length_store()
681 static unsigned int iio_storage_bytes_for_si(struct iio_dev *indio_dev, in iio_storage_bytes_for_si() argument
687 ch = iio_find_channel_from_si(indio_dev, scan_index); in iio_storage_bytes_for_si()
694 static unsigned int iio_storage_bytes_for_timestamp(struct iio_dev *indio_dev) in iio_storage_bytes_for_timestamp() argument
696 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_storage_bytes_for_timestamp()
698 return iio_storage_bytes_for_si(indio_dev, in iio_storage_bytes_for_timestamp()
702 static int iio_compute_scan_bytes(struct iio_dev *indio_dev, in iio_compute_scan_bytes() argument
710 indio_dev->masklength) { in iio_compute_scan_bytes()
711 length = iio_storage_bytes_for_si(indio_dev, i); in iio_compute_scan_bytes()
718 length = iio_storage_bytes_for_timestamp(indio_dev); in iio_compute_scan_bytes()
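iio_compute_scan_bytes() lays out each enabled channel at an offset aligned to its own storage size, with the timestamp (when enabled) placed last and aligned the same way. A worked layout for the illustrative channel table above, assuming one 16-bit sample plus the 64-bit timestamp:

/*
 * offset  0..1  : in_voltage0 sample (2 bytes of storage)
 * offset  2..7  : padding so the timestamp lands on an 8-byte boundary
 * offset  8..15 : 64-bit timestamp
 *
 * scan_bytes = 16, which becomes the datum size used by iio_buffer_read()
 * on the character device.
 */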
728 static void iio_buffer_activate(struct iio_dev *indio_dev, in iio_buffer_activate() argument
731 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_activate()
744 static void iio_buffer_deactivate_all(struct iio_dev *indio_dev) in iio_buffer_deactivate_all() argument
746 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_deactivate_all()
755 struct iio_dev *indio_dev) in iio_buffer_enable() argument
759 return buffer->access->enable(buffer, indio_dev); in iio_buffer_enable()
763 struct iio_dev *indio_dev) in iio_buffer_disable() argument
767 return buffer->access->disable(buffer, indio_dev); in iio_buffer_disable()
770 static void iio_buffer_update_bytes_per_datum(struct iio_dev *indio_dev, in iio_buffer_update_bytes_per_datum() argument
778 bytes = iio_compute_scan_bytes(indio_dev, buffer->scan_mask, in iio_buffer_update_bytes_per_datum()
784 static int iio_buffer_request_update(struct iio_dev *indio_dev, in iio_buffer_request_update() argument
789 iio_buffer_update_bytes_per_datum(indio_dev, buffer); in iio_buffer_request_update()
793 dev_dbg(&indio_dev->dev, in iio_buffer_request_update()
803 static void iio_free_scan_mask(struct iio_dev *indio_dev, in iio_free_scan_mask() argument
807 if (!indio_dev->available_scan_masks) in iio_free_scan_mask()
819 static int iio_verify_update(struct iio_dev *indio_dev, in iio_verify_update() argument
823 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_verify_update()
832 bitmap_empty(insert_buffer->scan_mask, indio_dev->masklength)) { in iio_verify_update()
833 dev_dbg(&indio_dev->dev, in iio_verify_update()
849 modes = indio_dev->modes; in iio_verify_update()
865 if ((modes & INDIO_BUFFER_TRIGGERED) && indio_dev->trig) { in iio_verify_update()
880 if (indio_dev->modes & INDIO_BUFFER_TRIGGERED) in iio_verify_update()
881 dev_dbg(&indio_dev->dev, "Buffer not started: no trigger\n"); in iio_verify_update()
886 compound_mask = bitmap_zalloc(indio_dev->masklength, GFP_KERNEL); in iio_verify_update()
896 indio_dev->masklength); in iio_verify_update()
902 insert_buffer->scan_mask, indio_dev->masklength); in iio_verify_update()
906 if (indio_dev->available_scan_masks) { in iio_verify_update()
907 scan_mask = iio_scan_mask_match(indio_dev->available_scan_masks, in iio_verify_update()
908 indio_dev->masklength, in iio_verify_update()
918 config->scan_bytes = iio_compute_scan_bytes(indio_dev, in iio_verify_update()
971 static int iio_buffer_update_demux(struct iio_dev *indio_dev, in iio_buffer_update_demux() argument
984 if (bitmap_equal(indio_dev->active_scan_mask, in iio_buffer_update_demux()
986 indio_dev->masklength)) in iio_buffer_update_demux()
992 indio_dev->masklength) { in iio_buffer_update_demux()
993 in_ind = find_next_bit(indio_dev->active_scan_mask, in iio_buffer_update_demux()
994 indio_dev->masklength, in iio_buffer_update_demux()
997 length = iio_storage_bytes_for_si(indio_dev, in_ind); in iio_buffer_update_demux()
1000 in_ind = find_next_bit(indio_dev->active_scan_mask, in iio_buffer_update_demux()
1001 indio_dev->masklength, in iio_buffer_update_demux()
1004 length = iio_storage_bytes_for_si(indio_dev, in_ind); in iio_buffer_update_demux()
1015 length = iio_storage_bytes_for_timestamp(indio_dev); in iio_buffer_update_demux()
1036 static int iio_update_demux(struct iio_dev *indio_dev) in iio_update_demux() argument
1038 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_update_demux()
1043 ret = iio_buffer_update_demux(indio_dev, buffer); in iio_update_demux()
1056 static int iio_enable_buffers(struct iio_dev *indio_dev, in iio_enable_buffers() argument
1059 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_enable_buffers()
1063 indio_dev->active_scan_mask = config->scan_mask; in iio_enable_buffers()
1064 indio_dev->scan_timestamp = config->scan_timestamp; in iio_enable_buffers()
1065 indio_dev->scan_bytes = config->scan_bytes; in iio_enable_buffers()
1068 iio_update_demux(indio_dev); in iio_enable_buffers()
1071 if (indio_dev->setup_ops->preenable) { in iio_enable_buffers()
1072 ret = indio_dev->setup_ops->preenable(indio_dev); in iio_enable_buffers()
1074 dev_dbg(&indio_dev->dev, in iio_enable_buffers()
1080 if (indio_dev->info->update_scan_mode) { in iio_enable_buffers()
1081 ret = indio_dev->info in iio_enable_buffers()
1082 ->update_scan_mode(indio_dev, in iio_enable_buffers()
1083 indio_dev->active_scan_mask); in iio_enable_buffers()
1085 dev_dbg(&indio_dev->dev, in iio_enable_buffers()
1092 if (indio_dev->info->hwfifo_set_watermark) in iio_enable_buffers()
1093 indio_dev->info->hwfifo_set_watermark(indio_dev, in iio_enable_buffers()
1097 ret = iio_buffer_enable(buffer, indio_dev); in iio_enable_buffers()
1105 ret = iio_trigger_attach_poll_func(indio_dev->trig, in iio_enable_buffers()
1106 indio_dev->pollfunc); in iio_enable_buffers()
1111 if (indio_dev->setup_ops->postenable) { in iio_enable_buffers()
1112 ret = indio_dev->setup_ops->postenable(indio_dev); in iio_enable_buffers()
1114 dev_dbg(&indio_dev->dev, in iio_enable_buffers()
1124 iio_trigger_detach_poll_func(indio_dev->trig, in iio_enable_buffers()
1125 indio_dev->pollfunc); in iio_enable_buffers()
1131 iio_buffer_disable(buffer, indio_dev); in iio_enable_buffers()
1133 if (indio_dev->setup_ops->postdisable) in iio_enable_buffers()
1134 indio_dev->setup_ops->postdisable(indio_dev); in iio_enable_buffers()
1137 indio_dev->active_scan_mask = NULL; in iio_enable_buffers()
1142 static int iio_disable_buffers(struct iio_dev *indio_dev) in iio_disable_buffers() argument
1144 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_disable_buffers()
1160 if (indio_dev->setup_ops->predisable) { in iio_disable_buffers()
1161 ret2 = indio_dev->setup_ops->predisable(indio_dev); in iio_disable_buffers()
1167 iio_trigger_detach_poll_func(indio_dev->trig, in iio_disable_buffers()
1168 indio_dev->pollfunc); in iio_disable_buffers()
1172 ret2 = iio_buffer_disable(buffer, indio_dev); in iio_disable_buffers()
1177 if (indio_dev->setup_ops->postdisable) { in iio_disable_buffers()
1178 ret2 = indio_dev->setup_ops->postdisable(indio_dev); in iio_disable_buffers()
1183 iio_free_scan_mask(indio_dev, indio_dev->active_scan_mask); in iio_disable_buffers()
1184 indio_dev->active_scan_mask = NULL; in iio_disable_buffers()
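iio_enable_buffers() and iio_disable_buffers() call the driver's iio_buffer_setup_ops hooks in a fixed order: preenable, update_scan_mode, hwfifo_set_watermark, per-buffer enable, trigger attach and postenable on the way up; predisable, trigger detach, per-buffer disable and postdisable on the way down. A minimal, hypothetical setup_ops skeleton a driver might hang power sequencing on:

#include <linux/iio/iio.h>

static int mydrv_preenable(struct iio_dev *indio_dev)
{
        /* e.g. power up the front end before streaming starts */
        return 0;
}

static int mydrv_postdisable(struct iio_dev *indio_dev)
{
        /* undo whatever preenable did */
        return 0;
}

static const struct iio_buffer_setup_ops mydrv_setup_ops = {
        .preenable   = mydrv_preenable,
        .postdisable = mydrv_postdisable,
};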
1190 static int __iio_update_buffers(struct iio_dev *indio_dev, in __iio_update_buffers() argument
1194 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in __iio_update_buffers()
1198 ret = iio_verify_update(indio_dev, insert_buffer, remove_buffer, in __iio_update_buffers()
1204 ret = iio_buffer_request_update(indio_dev, insert_buffer); in __iio_update_buffers()
1209 ret = iio_disable_buffers(indio_dev); in __iio_update_buffers()
1216 iio_buffer_activate(indio_dev, insert_buffer); in __iio_update_buffers()
1222 ret = iio_enable_buffers(indio_dev, &new_config); in __iio_update_buffers()
1237 iio_buffer_deactivate_all(indio_dev); in __iio_update_buffers()
1240 iio_free_scan_mask(indio_dev, new_config.scan_mask); in __iio_update_buffers()
1244 int iio_update_buffers(struct iio_dev *indio_dev, in iio_update_buffers() argument
1248 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_update_buffers()
1259 mutex_lock(&indio_dev->mlock); in iio_update_buffers()
1272 if (indio_dev->info == NULL) { in iio_update_buffers()
1277 ret = __iio_update_buffers(indio_dev, insert_buffer, remove_buffer); in iio_update_buffers()
1280 mutex_unlock(&indio_dev->mlock); in iio_update_buffers()
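iio_update_buffers() is the exported entry point for in-kernel users that need to attach or detach an additional buffer at runtime: a buffer passed as the second argument is inserted, one passed as the third is removed, and the sysfs enable attribute below goes through the same __iio_update_buffers() helper. A hedged usage sketch (the 'extra' buffer is assumed to have been allocated by the caller):

#include <linux/iio/buffer.h>
#include <linux/iio/iio.h>

static int attach_extra_buffer(struct iio_dev *indio_dev,
                               struct iio_buffer *extra)
{
        return iio_update_buffers(indio_dev, extra, NULL);
}

static int detach_extra_buffer(struct iio_dev *indio_dev,
                               struct iio_buffer *extra)
{
        return iio_update_buffers(indio_dev, NULL, extra);
}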
1287 void iio_disable_all_buffers(struct iio_dev *indio_dev) in iio_disable_all_buffers() argument
1289 iio_disable_buffers(indio_dev); in iio_disable_all_buffers()
1290 iio_buffer_deactivate_all(indio_dev); in iio_disable_all_buffers()
1298 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in enable_store() local
1306 mutex_lock(&indio_dev->mlock); in enable_store()
1315 ret = __iio_update_buffers(indio_dev, buffer, NULL); in enable_store()
1317 ret = __iio_update_buffers(indio_dev, NULL, buffer); in enable_store()
1320 mutex_unlock(&indio_dev->mlock); in enable_store()
1336 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in watermark_store() local
1347 mutex_lock(&indio_dev->mlock); in watermark_store()
1361 mutex_unlock(&indio_dev->mlock); in watermark_store()
1439 static int iio_buffer_register_legacy_sysfs_groups(struct iio_dev *indio_dev, in iio_buffer_register_legacy_sysfs_groups() argument
1444 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_register_legacy_sysfs_groups()
1459 ret = iio_device_register_sysfs_group(indio_dev, group); in iio_buffer_register_legacy_sysfs_groups()
1476 ret = iio_device_register_sysfs_group(indio_dev, group); in iio_buffer_register_legacy_sysfs_groups()
1490 static void iio_buffer_unregister_legacy_sysfs_groups(struct iio_dev *indio_dev) in iio_buffer_unregister_legacy_sysfs_groups() argument
1492 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_unregister_legacy_sysfs_groups()
1501 struct iio_dev *indio_dev = ib->indio_dev; in iio_buffer_chrdev_release() local
1508 iio_device_put(indio_dev); in iio_buffer_chrdev_release()
1522 static long iio_device_buffer_getfd(struct iio_dev *indio_dev, unsigned long arg) in iio_device_buffer_getfd() argument
1524 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_device_buffer_getfd()
1536 iio_device_get(indio_dev); in iio_device_buffer_getfd()
1551 ib->indio_dev = indio_dev; in iio_device_buffer_getfd()
1582 iio_device_put(indio_dev); in iio_device_buffer_getfd()
1586 static long iio_device_buffer_ioctl(struct iio_dev *indio_dev, struct file *filp, in iio_device_buffer_ioctl() argument
1591 return iio_device_buffer_getfd(indio_dev, arg); in iio_device_buffer_ioctl()
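iio_device_buffer_getfd() implements the IIO_BUFFER_GET_FD_IOCTL issued on the device character device; it hands userspace a dedicated file descriptor for one of the device's buffers. A userspace sketch, assuming the convention that the int passed to the ioctl carries the buffer index on entry and is overwritten with the new fd on success:

#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/iio/buffer.h>   /* IIO_BUFFER_GET_FD_IOCTL */

int main(void)
{
        int dev_fd = open("/dev/iio:device0", O_RDONLY);
        int buf = 1;            /* index of the wanted buffer, e.g. buffer1 */

        if (dev_fd < 0)
                return 1;

        if (ioctl(dev_fd, IIO_BUFFER_GET_FD_IOCTL, &buf) < 0) {
                perror("IIO_BUFFER_GET_FD_IOCTL");
                return 1;
        }
        /* 'buf' now holds a descriptor that reads from that buffer only */
        printf("buffer fd = %d\n", buf);
        close(buf);
        close(dev_fd);
        return 0;
}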
1598 struct iio_dev *indio_dev, in __iio_buffer_alloc_sysfs_and_mask() argument
1601 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in __iio_buffer_alloc_sysfs_and_mask()
1615 channels = indio_dev->channels; in __iio_buffer_alloc_sysfs_and_mask()
1618 for (i = 0; i < indio_dev->num_channels; i++) { in __iio_buffer_alloc_sysfs_and_mask()
1626 dev_err(&indio_dev->dev, in __iio_buffer_alloc_sysfs_and_mask()
1635 ret = iio_buffer_add_channel_sysfs(indio_dev, buffer, in __iio_buffer_alloc_sysfs_and_mask()
1644 if (indio_dev->masklength && buffer->scan_mask == NULL) { in __iio_buffer_alloc_sysfs_and_mask()
1645 buffer->scan_mask = bitmap_zalloc(indio_dev->masklength, in __iio_buffer_alloc_sysfs_and_mask()
1696 ret = iio_device_register_sysfs_group(indio_dev, &buffer->buffer_group); in __iio_buffer_alloc_sysfs_and_mask()
1704 ret = iio_buffer_register_legacy_sysfs_groups(indio_dev, attr, in __iio_buffer_alloc_sysfs_and_mask()
1725 struct iio_dev *indio_dev, in __iio_buffer_free_sysfs_and_mask() argument
1729 iio_buffer_unregister_legacy_sysfs_groups(indio_dev); in __iio_buffer_free_sysfs_and_mask()
1736 int iio_buffers_alloc_sysfs_and_mask(struct iio_dev *indio_dev) in iio_buffers_alloc_sysfs_and_mask() argument
1738 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffers_alloc_sysfs_and_mask()
1744 channels = indio_dev->channels; in iio_buffers_alloc_sysfs_and_mask()
1746 int ml = indio_dev->masklength; in iio_buffers_alloc_sysfs_and_mask()
1748 for (i = 0; i < indio_dev->num_channels; i++) in iio_buffers_alloc_sysfs_and_mask()
1750 indio_dev->masklength = ml; in iio_buffers_alloc_sysfs_and_mask()
1758 ret = __iio_buffer_alloc_sysfs_and_mask(buffer, indio_dev, idx); in iio_buffers_alloc_sysfs_and_mask()
1771 iio_device_ioctl_handler_register(indio_dev, in iio_buffers_alloc_sysfs_and_mask()
1779 __iio_buffer_free_sysfs_and_mask(buffer, indio_dev, idx); in iio_buffers_alloc_sysfs_and_mask()
1784 void iio_buffers_free_sysfs_and_mask(struct iio_dev *indio_dev) in iio_buffers_free_sysfs_and_mask() argument
1786 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffers_free_sysfs_and_mask()
1798 __iio_buffer_free_sysfs_and_mask(buffer, indio_dev, i); in iio_buffers_free_sysfs_and_mask()
1811 bool iio_validate_scan_mask_onehot(struct iio_dev *indio_dev, in iio_validate_scan_mask_onehot() argument
1814 return bitmap_weight(mask, indio_dev->masklength) == 1; in iio_validate_scan_mask_onehot()
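iio_validate_scan_mask_onehot() is an exported helper for hardware that can only stream one channel at a time; rather than open-coding the check, a driver points its setup_ops at it. A minimal sketch:

#include <linux/iio/buffer.h>
#include <linux/iio/iio.h>

static const struct iio_buffer_setup_ops mydrv_onehot_setup_ops = {
        /* reject any scan mask with more than one channel selected */
        .validate_scan_mask = iio_validate_scan_mask_onehot,
};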
1854 int iio_push_to_buffers(struct iio_dev *indio_dev, const void *data) in iio_push_to_buffers() argument
1856 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_push_to_buffers()
1883 int iio_push_to_buffers_with_ts_unaligned(struct iio_dev *indio_dev, in iio_push_to_buffers_with_ts_unaligned() argument
1888 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_push_to_buffers_with_ts_unaligned()
1897 data_sz = min_t(size_t, indio_dev->scan_bytes, data_sz); in iio_push_to_buffers_with_ts_unaligned()
1898 if (iio_dev_opaque->bounce_buffer_size != indio_dev->scan_bytes) { in iio_push_to_buffers_with_ts_unaligned()
1901 bb = devm_krealloc(&indio_dev->dev, in iio_push_to_buffers_with_ts_unaligned()
1903 indio_dev->scan_bytes, GFP_KERNEL); in iio_push_to_buffers_with_ts_unaligned()
1907 iio_dev_opaque->bounce_buffer_size = indio_dev->scan_bytes; in iio_push_to_buffers_with_ts_unaligned()
1910 return iio_push_to_buffers_with_timestamp(indio_dev, in iio_push_to_buffers_with_ts_unaligned()
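iio_push_to_buffers() and its with_timestamp/with_ts_unaligned variants are how drivers feed completed scans into every attached buffer; the unaligned variant above bounces the data through a devm_krealloc()ed scan-sized buffer first so the timestamp insertion has room and alignment. A hedged sketch of the usual triggered-buffer handler, with the hardware read left out:

#include <linux/interrupt.h>
#include <linux/types.h>
#include <linux/iio/iio.h>
#include <linux/iio/buffer.h>
#include <linux/iio/trigger.h>
#include <linux/iio/trigger_consumer.h>

static irqreturn_t mydrv_trigger_handler(int irq, void *p)
{
        struct iio_poll_func *pf = p;
        struct iio_dev *indio_dev = pf->indio_dev;
        struct {
                s16 sample;
                s64 timestamp __aligned(8);
        } scan = { };

        /* read the enabled channel from the hardware here (omitted) */

        iio_push_to_buffers_with_timestamp(indio_dev, &scan,
                                           iio_get_time_ns(indio_dev));

        iio_trigger_notify_done(indio_dev->trig);
        return IRQ_HANDLED;
}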
1971 int iio_device_attach_buffer(struct iio_dev *indio_dev, in iio_device_attach_buffer() argument
1974 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_device_attach_buffer()
1988 if (!indio_dev->buffer) in iio_device_attach_buffer()
1989 indio_dev->buffer = buffer; in iio_device_attach_buffer()
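iio_device_attach_buffer() is rarely called directly by drivers; the usual path is one of the setup helpers, which allocate a buffer, attach it and register the pollfunc in one step. A sketch using the triggered-buffer helper, reusing the hypothetical mydrv_trigger_handler and mydrv_setup_ops from the sketches above:

#include <linux/device.h>
#include <linux/iio/iio.h>
#include <linux/iio/triggered_buffer.h>

static int mydrv_setup_buffer(struct device *dev, struct iio_dev *indio_dev)
{
        /*
         * Allocates a kfifo-backed buffer, attaches it to indio_dev and
         * wires up the pollfunc; no top-half handler is needed here.
         */
        return devm_iio_triggered_buffer_setup(dev, indio_dev, NULL,
                                               mydrv_trigger_handler,
                                               &mydrv_setup_ops);
}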