Lines Matching refs:iio_dev_opaque

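Every hit below reaches core-private state through the same accessor: the public struct iio_dev handed to drivers is embedded inside struct iio_dev_opaque, and to_iio_dev_opaque() recovers the wrapper via container_of(). A minimal sketch of that access pattern, assuming the definitions in include/linux/iio/iio-opaque.h (example_access() is a hypothetical caller):

/* Assumed to mirror include/linux/iio/iio-opaque.h. */
#define to_iio_dev_opaque(_indio_dev) \
	container_of((_indio_dev), struct iio_dev_opaque, indio_dev)

static void example_access(struct iio_dev *indio_dev)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);

	/* Core-private fields such as attached_buffers_cnt are only visible here. */
	pr_debug("attached buffers: %u\n", iio_dev_opaque->attached_buffers_cnt);
}
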
309 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_wakeup_poll() local
313 for (i = 0; i < iio_dev_opaque->attached_buffers_cnt; i++) { in iio_buffer_wakeup_poll()
314 buffer = iio_dev_opaque->attached_buffers[i]; in iio_buffer_wakeup_poll()
341 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_device_detach_buffers() local
345 for (i = 0; i < iio_dev_opaque->attached_buffers_cnt; i++) { in iio_device_detach_buffers()
346 buffer = iio_dev_opaque->attached_buffers[i]; in iio_device_detach_buffers()
350 kfree(iio_dev_opaque->attached_buffers); in iio_device_detach_buffers()
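A hedged reconstruction of the teardown around lines 341-350: walk the attached-buffer array, drop each reference, then free the array itself. The iio_buffer_put() call and the NULL reset are assumptions filled in from context, not shown in the hits above.

static void iio_device_detach_buffers(struct iio_dev *indio_dev)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
	struct iio_buffer *buffer;
	unsigned int i;

	for (i = 0; i < iio_dev_opaque->attached_buffers_cnt; i++) {
		buffer = iio_dev_opaque->attached_buffers[i];
		iio_buffer_put(buffer);		/* assumed: drop the per-buffer reference */
	}

	kfree(iio_dev_opaque->attached_buffers);
	iio_dev_opaque->attached_buffers = NULL;	/* assumed */
}
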
699 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_storage_bytes_for_timestamp() local
702 iio_dev_opaque->scan_index_timestamp); in iio_storage_bytes_for_timestamp()
734 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_activate() local
737 list_add(&buffer->buffer_list, &iio_dev_opaque->buffer_list); in iio_buffer_activate()
749 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_deactivate_all() local
753 &iio_dev_opaque->buffer_list, buffer_list) in iio_buffer_deactivate_all()
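The set of currently active buffers lives on iio_dev_opaque->buffer_list. A sketch of the two list operations around lines 734-753; iio_buffer_get() and iio_buffer_deactivate() are assumed from context rather than quoted.

static void iio_buffer_activate(struct iio_dev *indio_dev,
				struct iio_buffer *buffer)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);

	iio_buffer_get(buffer);		/* assumed: hold a reference while active */
	list_add(&buffer->buffer_list, &iio_dev_opaque->buffer_list);
}

static void iio_buffer_deactivate_all(struct iio_dev *indio_dev)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
	struct iio_buffer *buffer, *_buffer;

	/* _safe iteration: each entry is unlinked as it is deactivated. */
	list_for_each_entry_safe(buffer, _buffer,
				 &iio_dev_opaque->buffer_list, buffer_list)
		iio_buffer_deactivate(buffer);	/* assumed helper */
}
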
826 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_verify_update() local
849 list_is_singular(&iio_dev_opaque->buffer_list)) in iio_verify_update()
854 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) { in iio_verify_update()
875 if (insert_buffer && !list_empty(&iio_dev_opaque->buffer_list)) in iio_verify_update()
895 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) { in iio_verify_update()
1040 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_update_demux() local
1044 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) { in iio_update_demux()
1052 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) in iio_update_demux()
1061 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_enable_buffers() local
1068 iio_dev_opaque->currentmode = config->mode; in iio_enable_buffers()
1098 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) { in iio_enable_buffers()
1106 if (iio_dev_opaque->currentmode == INDIO_BUFFER_TRIGGERED) { in iio_enable_buffers()
1125 if (iio_dev_opaque->currentmode == INDIO_BUFFER_TRIGGERED) { in iio_enable_buffers()
1130 buffer = list_prepare_entry(tmp, &iio_dev_opaque->buffer_list, buffer_list); in iio_enable_buffers()
1131 list_for_each_entry_continue_reverse(buffer, &iio_dev_opaque->buffer_list, in iio_enable_buffers()
1138 iio_dev_opaque->currentmode = INDIO_DIRECT_MODE; in iio_enable_buffers()
1146 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_disable_buffers() local
1152 if (list_empty(&iio_dev_opaque->buffer_list)) in iio_disable_buffers()
1168 if (iio_dev_opaque->currentmode == INDIO_BUFFER_TRIGGERED) { in iio_disable_buffers()
1173 list_for_each_entry(buffer, &iio_dev_opaque->buffer_list, buffer_list) { in iio_disable_buffers()
1187 iio_dev_opaque->currentmode = INDIO_DIRECT_MODE; in iio_disable_buffers()
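iio_dev_opaque->currentmode records whether the device is streaming: it is set from the verified configuration while buffers are enabled (line 1068) and dropped back to INDIO_DIRECT_MODE on the error path and on disable (lines 1138, 1187). A simplified, hedged sketch of that bookkeeping; the postenable/predisable callbacks, error propagation, and the file-local struct iio_device_config are only assumed here.

static int iio_enable_buffers_sketch(struct iio_dev *indio_dev,
				     struct iio_device_config *config)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
	int ret;

	iio_dev_opaque->currentmode = config->mode;

	/* Assumed: only triggered modes need the pollfunc attached. */
	if (iio_dev_opaque->currentmode == INDIO_BUFFER_TRIGGERED) {
		ret = iio_trigger_attach_poll_func(indio_dev->trig,
						   indio_dev->pollfunc);
		if (ret) {
			iio_dev_opaque->currentmode = INDIO_DIRECT_MODE;
			return ret;
		}
	}

	return 0;
}

static void iio_disable_buffers_sketch(struct iio_dev *indio_dev)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);

	if (iio_dev_opaque->currentmode == INDIO_BUFFER_TRIGGERED)
		iio_trigger_detach_poll_func(indio_dev->trig,
					     indio_dev->pollfunc);

	iio_dev_opaque->currentmode = INDIO_DIRECT_MODE;
}
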
1196 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in __iio_update_buffers() local
1221 if (list_empty(&iio_dev_opaque->buffer_list)) in __iio_update_buffers()
1250 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_update_buffers() local
1260 mutex_lock(&iio_dev_opaque->info_exist_lock); in iio_update_buffers()
1283 mutex_unlock(&iio_dev_opaque->info_exist_lock); in iio_update_buffers()
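iio_update_buffers() can be called by drivers after registration, so it must guard against the device's info ops vanishing underneath it; the info_exist_lock references at lines 1260 and 1283 bracket exactly that window. A hedged, trimmed sketch of the locking shape (the additional device lock and argument sanity checks are omitted):

int iio_update_buffers(struct iio_dev *indio_dev,
		       struct iio_buffer *insert_buffer,
		       struct iio_buffer *remove_buffer)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
	int ret;

	mutex_lock(&iio_dev_opaque->info_exist_lock);

	/* Assumed: bail out if the driver has already been unregistered. */
	if (!indio_dev->info) {
		ret = -ENODEV;
		goto out_unlock;
	}

	ret = __iio_update_buffers(indio_dev, insert_buffer, remove_buffer);

out_unlock:
	mutex_unlock(&iio_dev_opaque->info_exist_lock);
	return ret;
}
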
1457 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_register_legacy_sysfs_groups() local
1468 group = &iio_dev_opaque->legacy_buffer_group; in iio_buffer_register_legacy_sysfs_groups()
1485 group = &iio_dev_opaque->legacy_scan_el_group; in iio_buffer_register_legacy_sysfs_groups()
1496 kfree(iio_dev_opaque->legacy_scan_el_group.attrs); in iio_buffer_register_legacy_sysfs_groups()
1498 kfree(iio_dev_opaque->legacy_buffer_group.attrs); in iio_buffer_register_legacy_sysfs_groups()
1505 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffer_unregister_legacy_sysfs_groups() local
1507 kfree(iio_dev_opaque->legacy_buffer_group.attrs); in iio_buffer_unregister_legacy_sysfs_groups()
1508 kfree(iio_dev_opaque->legacy_scan_el_group.attrs); in iio_buffer_unregister_legacy_sysfs_groups()
1537 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_device_buffer_getfd() local
1546 if (idx >= iio_dev_opaque->attached_buffers_cnt) in iio_device_buffer_getfd()
1551 buffer = iio_dev_opaque->attached_buffers[idx]; in iio_device_buffer_getfd()
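The buffer-getfd ioctl validates the user-supplied index against attached_buffers_cnt before indexing the array (lines 1546-1551). A trimmed sketch; the copy_from_user() of the index, the error codes, and the anon-inode fd creation are assumptions from context:

static long iio_device_buffer_getfd(struct iio_dev *indio_dev,
				    unsigned long arg)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
	int __user *ival = (int __user *)arg;
	struct iio_buffer *buffer;
	int idx;

	if (copy_from_user(&idx, ival, sizeof(idx)))
		return -EFAULT;

	/* Reject indices beyond the attached-buffer array. */
	if (idx >= iio_dev_opaque->attached_buffers_cnt)
		return -ENODEV;		/* assumed errno */

	buffer = iio_dev_opaque->attached_buffers[idx];

	/* ... assumed: wrap the buffer in an anonymous fd and return it ... */
	return 0;
}
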
1614 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in __iio_buffer_alloc_sysfs_and_mask() local
1654 iio_dev_opaque->scan_index_timestamp = in __iio_buffer_alloc_sysfs_and_mask()
1751 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffers_alloc_sysfs_and_mask() local
1766 if (!iio_dev_opaque->attached_buffers_cnt) in iio_buffers_alloc_sysfs_and_mask()
1769 for (idx = 0; idx < iio_dev_opaque->attached_buffers_cnt; idx++) { in iio_buffers_alloc_sysfs_and_mask()
1770 buffer = iio_dev_opaque->attached_buffers[idx]; in iio_buffers_alloc_sysfs_and_mask()
1776 sz = sizeof(*(iio_dev_opaque->buffer_ioctl_handler)); in iio_buffers_alloc_sysfs_and_mask()
1777 iio_dev_opaque->buffer_ioctl_handler = kzalloc(sz, GFP_KERNEL); in iio_buffers_alloc_sysfs_and_mask()
1778 if (!iio_dev_opaque->buffer_ioctl_handler) { in iio_buffers_alloc_sysfs_and_mask()
1783 iio_dev_opaque->buffer_ioctl_handler->ioctl = iio_device_buffer_ioctl; in iio_buffers_alloc_sysfs_and_mask()
1785 iio_dev_opaque->buffer_ioctl_handler); in iio_buffers_alloc_sysfs_and_mask()
1791 buffer = iio_dev_opaque->attached_buffers[idx]; in iio_buffers_alloc_sysfs_and_mask()
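Lines 1776-1785 allocate and register the per-device buffer ioctl handler. A hedged sketch of that setup, assuming iio_device_ioctl_handler_register() and the iio_device_buffer_ioctl() dispatcher as they appear in the IIO core:

static int iio_buffer_ioctl_setup_sketch(struct iio_dev *indio_dev)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
	struct iio_ioctl_handler *h;

	h = kzalloc(sizeof(*h), GFP_KERNEL);
	if (!h)
		return -ENOMEM;

	/* Assumed: iio_device_buffer_ioctl() dispatches the buffer ioctls. */
	h->ioctl = iio_device_buffer_ioctl;
	iio_device_ioctl_handler_register(indio_dev, h);
	iio_dev_opaque->buffer_ioctl_handler = h;

	return 0;
}
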
1799 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_buffers_free_sysfs_and_mask() local
1803 if (!iio_dev_opaque->attached_buffers_cnt) in iio_buffers_free_sysfs_and_mask()
1806 iio_device_ioctl_handler_unregister(iio_dev_opaque->buffer_ioctl_handler); in iio_buffers_free_sysfs_and_mask()
1807 kfree(iio_dev_opaque->buffer_ioctl_handler); in iio_buffers_free_sysfs_and_mask()
1809 for (i = iio_dev_opaque->attached_buffers_cnt - 1; i >= 0; i--) { in iio_buffers_free_sysfs_and_mask()
1810 buffer = iio_dev_opaque->attached_buffers[i]; in iio_buffers_free_sysfs_and_mask()
1869 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_push_to_buffers() local
1873 list_for_each_entry(buf, &iio_dev_opaque->buffer_list, buffer_list) { in iio_push_to_buffers()
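iio_push_to_buffers() fans a scan out to every active buffer on the list (lines 1869-1873). A hedged reconstruction; iio_push_to_buffer() is assumed to be the per-buffer helper that stores one scan and returns a negative errno on failure:

int iio_push_to_buffers(struct iio_dev *indio_dev, const void *data)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
	struct iio_buffer *buf;
	int ret;

	list_for_each_entry(buf, &iio_dev_opaque->buffer_list, buffer_list) {
		ret = iio_push_to_buffer(buf, data);	/* assumed helper */
		if (ret < 0)
			return ret;
	}

	return 0;
}
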
1901 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_push_to_buffers_with_ts_unaligned() local
1911 if (iio_dev_opaque->bounce_buffer_size != indio_dev->scan_bytes) { in iio_push_to_buffers_with_ts_unaligned()
1915 iio_dev_opaque->bounce_buffer, in iio_push_to_buffers_with_ts_unaligned()
1919 iio_dev_opaque->bounce_buffer = bb; in iio_push_to_buffers_with_ts_unaligned()
1920 iio_dev_opaque->bounce_buffer_size = indio_dev->scan_bytes; in iio_push_to_buffers_with_ts_unaligned()
1922 memcpy(iio_dev_opaque->bounce_buffer, data, data_sz); in iio_push_to_buffers_with_ts_unaligned()
1924 iio_dev_opaque->bounce_buffer, in iio_push_to_buffers_with_ts_unaligned()
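The unaligned push path keeps a lazily grown bounce buffer in the opaque struct so callers may pass short or unaligned scans (lines 1911-1924). A hedged reconstruction; the devm_krealloc() sizing against indio_dev->scan_bytes and the final iio_push_to_buffers_with_timestamp() call are assumed from context:

int iio_push_to_buffers_with_ts_unaligned(struct iio_dev *indio_dev,
					  const void *data, size_t data_sz,
					  int64_t timestamp)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);

	/* Never copy more than one full scan's worth of data. */
	data_sz = min_t(size_t, indio_dev->scan_bytes, data_sz);

	/* Grow the bounce buffer the first time, or whenever scan_bytes changes. */
	if (iio_dev_opaque->bounce_buffer_size != indio_dev->scan_bytes) {
		void *bb;

		bb = devm_krealloc(&indio_dev->dev,
				   iio_dev_opaque->bounce_buffer,
				   indio_dev->scan_bytes, GFP_KERNEL);
		if (!bb)
			return -ENOMEM;
		iio_dev_opaque->bounce_buffer = bb;
		iio_dev_opaque->bounce_buffer_size = indio_dev->scan_bytes;
	}

	memcpy(iio_dev_opaque->bounce_buffer, data, data_sz);
	return iio_push_to_buffers_with_timestamp(indio_dev,
						  iio_dev_opaque->bounce_buffer,
						  timestamp);
}
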
1987 struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev); in iio_device_attach_buffer() local
1988 struct iio_buffer **new, **old = iio_dev_opaque->attached_buffers; in iio_device_attach_buffer()
1989 unsigned int cnt = iio_dev_opaque->attached_buffers_cnt; in iio_device_attach_buffer()
1996 iio_dev_opaque->attached_buffers = new; in iio_device_attach_buffer()
2004 iio_dev_opaque->attached_buffers[cnt - 1] = buffer; in iio_device_attach_buffer()
2005 iio_dev_opaque->attached_buffers_cnt = cnt; in iio_device_attach_buffer()
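iio_device_attach_buffer() grows the attached_buffers array by one with krealloc() and appends the new buffer (lines 1987-2005). A hedged reconstruction; the reference grab via iio_buffer_get() and the legacy indio_dev->buffer assignment are assumptions filled in from context:

int iio_device_attach_buffer(struct iio_dev *indio_dev,
			     struct iio_buffer *buffer)
{
	struct iio_dev_opaque *iio_dev_opaque = to_iio_dev_opaque(indio_dev);
	struct iio_buffer **new, **old = iio_dev_opaque->attached_buffers;
	unsigned int cnt = iio_dev_opaque->attached_buffers_cnt;

	cnt++;

	/* Grow the pointer array in place; old stays valid on failure. */
	new = krealloc(old, sizeof(*new) * cnt, GFP_KERNEL);
	if (!new)
		return -ENOMEM;
	iio_dev_opaque->attached_buffers = new;

	buffer = iio_buffer_get(buffer);	/* assumed: take a reference */

	/* Assumed: the first attached buffer also serves the legacy interface. */
	if (!indio_dev->buffer)
		indio_dev->buffer = buffer;

	iio_dev_opaque->attached_buffers[cnt - 1] = buffer;
	iio_dev_opaque->attached_buffers_cnt = cnt;

	return 0;
}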