Lines matching refs: iio_dev_opaque
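
Every hit below reaches the private half of a struct iio_dev through to_iio_dev_opaque() before touching fields such as mlock, buffer_list, attached_buffers or currentmode. As orientation for the listing, here is a minimal sketch of that accessor, assuming (as in current trees, see include/linux/iio/iio-opaque.h) that struct iio_dev_opaque embeds the public struct iio_dev as a member named indio_dev; the sketch uses a distinct name so it is not mistaken for the in-tree helper.

#include <linux/container_of.h>
#include <linux/iio/iio.h>
#include <linux/iio/iio-opaque.h>

/* Sketch only: the in-tree accessor is provided by the IIO core; this
 * illustrates the container_of() conversion the references below rely on. */
static inline struct iio_dev_opaque *
example_to_iio_dev_opaque(struct iio_dev *indio_dev)
{
        return container_of(indio_dev, struct iio_dev_opaque, indio_dev);
}
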
312 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_wakeup_poll() local
316 for (i = 0; i < iio_dev_opaque->attached_buffers_cnt; i++) { in iio_buffer_wakeup_poll()
317 buffer = iio_dev_opaque->attached_buffers[i]; in iio_buffer_wakeup_poll()
344 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_device_detach_buffers() local
348 for (i = 0; i < iio_dev_opaque->attached_buffers_cnt; i++) { in iio_device_detach_buffers()
349 buffer = iio_dev_opaque->attached_buffers[i]; in iio_device_detach_buffers()
353 kfree(iio_dev_opaque->attached_buffers); in iio_device_detach_buffers()
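
iio_buffer_wakeup_poll() and iio_device_detach_buffers() both walk the attached_buffers array by index, bounded by attached_buffers_cnt. A hedged sketch of that walk; example_for_each_attached_buffer() and its callback are illustrative helpers, not kernel API.

/* Hypothetical helper: visit every buffer attached to the device, in the
 * same way the two functions above iterate attached_buffers[]. */
static void example_for_each_attached_buffer(struct iio_dev *indio_dev,
                                             void (*action)(struct iio_buffer *buffer))
{
        struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
        unsigned int i;

        for (i = 0; i < iio_dev_opaque->attached_buffers_cnt; i++)
                action(iio_dev_opaque->attached_buffers[i]);
}
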
513 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_scan_el_store() local
520 mutex_lock(&iio_dev_opaque->mlock); in iio_scan_el_store()
539 mutex_unlock(&iio_dev_opaque->mlock); in iio_scan_el_store()
560 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_scan_el_ts_store() local
568 mutex_lock(&iio_dev_opaque->mlock); in iio_scan_el_ts_store()
575 mutex_unlock(&iio_dev_opaque->mlock); in iio_scan_el_ts_store()
649 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in length_store() local
661 mutex_lock(&iio_dev_opaque->mlock); in length_store()
673 mutex_unlock(&iio_dev_opaque->mlock); in length_store()
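
iio_scan_el_store(), iio_scan_el_ts_store() and length_store() all follow the same shape: parse the user input, take iio_dev_opaque->mlock so the buffer cannot be enabled or disabled underneath the update, apply (or reject) the change, and drop the lock. A minimal sketch of that pattern; example_store() and its bool payload are illustrative, not lifted from the file.

static ssize_t example_store(struct device *dev, struct device_attribute *attr,
                             const char *buf, size_t len)
{
        struct iio_dev *indio_dev = dev_to_iio_dev(dev);
        struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
        bool state;
        int ret;

        ret = kstrtobool(buf, &state);
        if (ret < 0)
                return ret;

        /* Serialize against buffer enable/disable, as the stores above do. */
        mutex_lock(&iio_dev_opaque->mlock);
        /* ... check whether the buffer is active and apply the setting ... */
        mutex_unlock(&iio_dev_opaque->mlock);

        return len;
}
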
701 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_storage_bytes_for_timestamp() local
704 iio_dev_opaque->scan_index_timestamp); in iio_storage_bytes_for_timestamp()
736 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_activate() local
739 list_add(&buffer->buffer_list, &iio_dev_opaque->buffer_list); in iio_buffer_activate()
751 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_deactivate_all() local
755 &iio_dev_opaque->buffer_list, buffer_list) in iio_buffer_deactivate_all()
829 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_verify_update() local
852 list_is_singular(&iio_dev_opaque->buffer_list)) in iio_verify_update()
857 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) { in iio_verify_update()
878 if (insert_buffer && !list_empty(&iio_dev_opaque->buffer_list)) in iio_verify_update()
898 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) { in iio_verify_update()
1044 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_update_demux() local
1048 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) { in iio_update_demux()
1056 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) in iio_update_demux()
1065 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_enable_buffers() local
1072 iio_dev_opaque->currentmode = config->mode; in iio_enable_buffers()
1102 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) { in iio_enable_buffers()
1110 if (iio_dev_opaque->currentmode == INDIO_BUFFER_TRIGGERED) { in iio_enable_buffers()
1129 if (iio_dev_opaque->currentmode == INDIO_BUFFER_TRIGGERED) { in iio_enable_buffers()
1134 buffer = list_prepare_entry(tmp, &iio_dev_opaque->buffer_list, buffer_list); in iio_enable_buffers()
1135 list_for_each_entry_continue_reverse(buffer, &iio_dev_opaque->buffer_list, in iio_enable_buffers()
1142 iio_dev_opaque->currentmode = INDIO_DIRECT_MODE; in iio_enable_buffers()
1150 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_disable_buffers() local
1156 if (list_empty(&iio_dev_opaque->buffer_list)) in iio_disable_buffers()
1172 if (iio_dev_opaque->currentmode == INDIO_BUFFER_TRIGGERED) { in iio_disable_buffers()
1177 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) { in iio_disable_buffers()
1191 iio_dev_opaque->currentmode = INDIO_DIRECT_MODE; in iio_disable_buffers()
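
iio_enable_buffers() and iio_disable_buffers() show the other recurring pattern: record the mode in iio_dev_opaque->currentmode, walk the active buffers on iio_dev_opaque->buffer_list, and only touch the trigger path when the device runs in INDIO_BUFFER_TRIGGERED mode, falling back to INDIO_DIRECT_MODE on teardown or error. A heavily reduced, hypothetical sketch of the enable side, with the per-buffer and trigger steps left as comments:

/* Hypothetical, reduced enable path illustrating the pattern above. */
static int example_enable(struct iio_dev *indio_dev, int mode)
{
        struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
        struct iio_buffer *buffer;

        iio_dev_opaque->currentmode = mode;

        list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) {
                /* per-buffer enable hook would run here; on failure the real
                 * code walks back with list_for_each_entry_continue_reverse() */
        }

        if (iio_dev_opaque->currentmode == INDIO_BUFFER_TRIGGERED) {
                /* trigger/poll-function attachment would run here */
        }

        return 0;
}
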
1200 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in __iio_update_buffers() local
1225 if (list_empty(&iio_dev_opaque->buffer_list)) in __iio_update_buffers()
1254 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_update_buffers() local
1264 mutex_lock(&iio_dev_opaque->info_exist_lock); in iio_update_buffers()
1265 mutex_lock(&iio_dev_opaque->mlock); in iio_update_buffers()
1286 mutex_unlock(&iio_dev_opaque->mlock); in iio_update_buffers()
1287 mutex_unlock(&iio_dev_opaque->info_exist_lock); in iio_update_buffers()
1305 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in enable_store() local
1313 mutex_lock(&iio_dev_opaque->mlock); in enable_store()
1327 mutex_unlock(&iio_dev_opaque->mlock); in enable_store()
1344 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in watermark_store() local
1355 mutex_lock(&iio_dev_opaque->mlock); in watermark_store()
1369 mutex_unlock(&iio_dev_opaque->mlock); in watermark_store()
1452 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_register_legacy_sysfs_groups() local
1463 group = &iio_dev_opaque->legacy_buffer_group; in iio_buffer_register_legacy_sysfs_groups()
1480 group = &iio_dev_opaque->legacy_scan_el_group; in iio_buffer_register_legacy_sysfs_groups()
1491 kfree(iio_dev_opaque->legacy_scan_el_group.attrs); in iio_buffer_register_legacy_sysfs_groups()
1493 kfree(iio_dev_opaque->legacy_buffer_group.attrs); in iio_buffer_register_legacy_sysfs_groups()
1500 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_unregister_legacy_sysfs_groups() local
1502 kfree(iio_dev_opaque->legacy_buffer_group.attrs); in iio_buffer_unregister_legacy_sysfs_groups()
1503 kfree(iio_dev_opaque->legacy_scan_el_group.attrs); in iio_buffer_unregister_legacy_sysfs_groups()
1532 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_device_buffer_getfd() local
1541 if (idx >= iio_dev_opaque->attached_buffers_cnt) in iio_device_buffer_getfd()
1546 buffer = iio_dev_opaque->attached_buffers[idx]; in iio_device_buffer_getfd()
1609 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in __iio_buffer_alloc_sysfs_and_mask() local
1651 iio_dev_opaque->scan_index_timestamp = in __iio_buffer_alloc_sysfs_and_mask()
1749 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffers_alloc_sysfs_and_mask() local
1764 if (!iio_dev_opaque->attached_buffers_cnt) in iio_buffers_alloc_sysfs_and_mask()
1767 for (idx = 0; idx < iio_dev_opaque->attached_buffers_cnt; idx++) { in iio_buffers_alloc_sysfs_and_mask()
1768 buffer = iio_dev_opaque->attached_buffers[idx]; in iio_buffers_alloc_sysfs_and_mask()
1774 sz = sizeof(*iio_dev_opaque->buffer_ioctl_handler); in iio_buffers_alloc_sysfs_and_mask()
1775 iio_dev_opaque->buffer_ioctl_handler = kzalloc(sz, GFP_KERNEL); in iio_buffers_alloc_sysfs_and_mask()
1776 if (!iio_dev_opaque->buffer_ioctl_handler) { in iio_buffers_alloc_sysfs_and_mask()
1781 iio_dev_opaque->buffer_ioctl_handler->ioctl = iio_device_buffer_ioctl; in iio_buffers_alloc_sysfs_and_mask()
1783 iio_dev_opaque->buffer_ioctl_handler); in iio_buffers_alloc_sysfs_and_mask()
1789 buffer = iio_dev_opaque->attached_buffers[idx]; in iio_buffers_alloc_sysfs_and_mask()
1797 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffers_free_sysfs_and_mask() local
1801 if (!iio_dev_opaque->attached_buffers_cnt) in iio_buffers_free_sysfs_and_mask()
1804 iio_device_ioctl_handler_unregister(iio_dev_opaque->buffer_ioctl_handler); in iio_buffers_free_sysfs_and_mask()
1805 kfree(iio_dev_opaque->buffer_ioctl_handler); in iio_buffers_free_sysfs_and_mask()
1807 for (i = iio_dev_opaque->attached_buffers_cnt - 1; i >= 0; i--) { in iio_buffers_free_sysfs_and_mask()
1808 buffer = iio_dev_opaque->attached_buffers[i]; in iio_buffers_free_sysfs_and_mask()
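
iio_buffers_alloc_sysfs_and_mask() allocates a single buffer_ioctl_handler with kzalloc(), points its ->ioctl hook at iio_device_buffer_ioctl() and registers it; iio_buffers_free_sysfs_and_mask() undoes this and then tears the attached buffers down from the last index back to zero. A sketch of that teardown ordering, using only the calls visible above; example_buffers_teardown() is an illustrative name.

/* Hypothetical teardown mirroring iio_buffers_free_sysfs_and_mask() above:
 * unregister and free the ioctl handler first, then release the per-buffer
 * state in reverse attachment order. */
static void example_buffers_teardown(struct iio_dev *indio_dev)
{
        struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
        int i;

        if (!iio_dev_opaque->attached_buffers_cnt)
                return;

        iio_device_ioctl_handler_unregister(iio_dev_opaque->buffer_ioctl_handler);
        kfree(iio_dev_opaque->buffer_ioctl_handler);

        for (i = iio_dev_opaque->attached_buffers_cnt - 1; i >= 0; i--) {
                /* per-buffer sysfs cleanup for attached_buffers[i] goes here */
        }
}
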
1867 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_push_to_buffers() local
1871 list_for_each_entry(buf, &iio_dev_opaque->buffer_list, buffer_list) { in iio_push_to_buffers()
1899 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_push_to_buffers_with_ts_unaligned() local
1909 if (iio_dev_opaque->bounce_buffer_size != indio_dev->scan_bytes) { in iio_push_to_buffers_with_ts_unaligned()
1913 iio_dev_opaque->bounce_buffer, in iio_push_to_buffers_with_ts_unaligned()
1917 iio_dev_opaque->bounce_buffer = bb; in iio_push_to_buffers_with_ts_unaligned()
1918 iio_dev_opaque->bounce_buffer_size = indio_dev->scan_bytes; in iio_push_to_buffers_with_ts_unaligned()
1920 memcpy(iio_dev_opaque->bounce_buffer, data, data_sz); in iio_push_to_buffers_with_ts_unaligned()
1922 iio_dev_opaque->bounce_buffer, in iio_push_to_buffers_with_ts_unaligned()
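
iio_push_to_buffers_with_ts_unaligned() keeps a per-device bounce buffer in iio_dev_opaque and regrows it whenever its size no longer matches indio_dev->scan_bytes, then copies the unaligned sample data into it before pushing. A simplified sketch of that resize-on-demand step; the plain krealloc() is an assumption here, and the in-tree code may manage the allocation differently.

/* Hypothetical regrow step for the bounce-buffer pattern above. */
static int example_resize_bounce_buffer(struct iio_dev *indio_dev)
{
        struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
        void *bb;

        if (iio_dev_opaque->bounce_buffer_size == indio_dev->scan_bytes)
                return 0;

        bb = krealloc(iio_dev_opaque->bounce_buffer, indio_dev->scan_bytes,
                      GFP_KERNEL);
        if (!bb)
                return -ENOMEM;

        iio_dev_opaque->bounce_buffer = bb;
        iio_dev_opaque->bounce_buffer_size = indio_dev->scan_bytes;
        return 0;
}
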
1985 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_device_attach_buffer() local
1986 struct iio_buffer **new, **old = iio_dev_opaque->attached_buffers; in iio_device_attach_buffer()
1987 unsigned int cnt = iio_dev_opaque->attached_buffers_cnt; in iio_device_attach_buffer()
1994 iio_dev_opaque->attached_buffers = new; in iio_device_attach_buffer()
2002 iio_dev_opaque->attached_buffers[cnt - 1] = buffer; in iio_device_attach_buffer()
2003 iio_dev_opaque->attached_buffers_cnt = cnt; in iio_device_attach_buffer()
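
Finally, iio_device_attach_buffer() grows the attached_buffers array by one slot, stores the new buffer in the last slot and bumps attached_buffers_cnt. A hedged reconstruction of that resize step, using only the names visible above; example_attach_buffer() is illustrative and the per-buffer setup is omitted.

/* Sketch of the array-grow step in iio_device_attach_buffer(); error
 * handling and buffer initialization are trimmed. */
static int example_attach_buffer(struct iio_dev *indio_dev,
                                 struct iio_buffer *buffer)
{
        struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
        struct iio_buffer **new, **old = iio_dev_opaque->attached_buffers;
        unsigned int cnt = iio_dev_opaque->attached_buffers_cnt;

        cnt++;
        new = krealloc(old, sizeof(*new) * cnt, GFP_KERNEL);
        if (!new)
                return -ENOMEM;
        iio_dev_opaque->attached_buffers = new;

        iio_dev_opaque->attached_buffers[cnt - 1] = buffer;
        iio_dev_opaque->attached_buffers_cnt = cnt;

        return 0;
}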