Lines matching refs: hv_dev
48 static struct device *hv_dev; variable
85 if (hv_dev == NULL) in vmbus_exists()
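The first two hits are the file-scope root-device pointer (line 48) and the NULL test inside vmbus_exists() (line 85). A minimal sketch of that guard, assuming the usual -ENODEV return value (the listing only shows the comparison):

static struct device *hv_dev;	/* VMBus root device, set once the ACPI/platform node probes */

static int vmbus_exists(void)
{
	if (hv_dev == NULL)
		return -ENODEV;	/* assumed return value; only the test appears above */

	return 0;
}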
130 struct hv_device *hv_dev = device_to_hv_device(dev); in id_show() local
132 if (!hv_dev->channel) in id_show()
134 return sprintf(buf, "%d\n", hv_dev->channel->offermsg.child_relid); in id_show()
141 struct hv_device *hv_dev = device_to_hv_device(dev); in state_show() local
143 if (!hv_dev->channel) in state_show()
145 return sprintf(buf, "%d\n", hv_dev->channel->state); in state_show()
152 struct hv_device *hv_dev = device_to_hv_device(dev); in monitor_id_show() local
154 if (!hv_dev->channel) in monitor_id_show()
156 return sprintf(buf, "%d\n", hv_dev->channel->offermsg.monitorid); in monitor_id_show()
163 struct hv_device *hv_dev = device_to_hv_device(dev); in class_id_show() local
165 if (!hv_dev->channel) in class_id_show()
168 &hv_dev->channel->offermsg.offer.if_type); in class_id_show()
175 struct hv_device *hv_dev = device_to_hv_device(dev); in device_id_show() local
177 if (!hv_dev->channel) in device_id_show()
180 &hv_dev->channel->offermsg.offer.if_instance); in device_id_show()
187 struct hv_device *hv_dev = device_to_hv_device(dev); in modalias_show() local
189 return sprintf(buf, "vmbus:%*phN\n", UUID_SIZE, &hv_dev->dev_type); in modalias_show()
197 struct hv_device *hv_dev = device_to_hv_device(dev); in numa_node_show() local
199 if (!hv_dev->channel) in numa_node_show()
202 return sprintf(buf, "%d\n", cpu_to_node(hv_dev->channel->target_cpu)); in numa_node_show()
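Lines 130 through 202 are all sysfs show callbacks that follow one pattern: convert the generic struct device back to a struct hv_device, refuse to report anything once the channel is gone, and print a single field of the channel or its offer message. A sketch of that pattern, modeled on id_show (line 130); the -ENODEV return for a missing channel is an assumption, since the listing omits that branch's body:

#include <linux/device.h>
#include <linux/hyperv.h>

static ssize_t id_show(struct device *dev, struct device_attribute *dev_attr,
		       char *buf)
{
	struct hv_device *hv_dev = device_to_hv_device(dev);

	if (!hv_dev->channel)
		return -ENODEV;		/* assumed: channel already revoked */

	return sprintf(buf, "%d\n", hv_dev->channel->offermsg.child_relid);
}
static DEVICE_ATTR_RO(id);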
211 struct hv_device *hv_dev = device_to_hv_device(dev); in server_monitor_pending_show() local
213 if (!hv_dev->channel) in server_monitor_pending_show()
216 channel_pending(hv_dev->channel, in server_monitor_pending_show()
225 struct hv_device *hv_dev = device_to_hv_device(dev); in client_monitor_pending_show() local
227 if (!hv_dev->channel) in client_monitor_pending_show()
230 channel_pending(hv_dev->channel, in client_monitor_pending_show()
239 struct hv_device *hv_dev = device_to_hv_device(dev); in server_monitor_latency_show() local
241 if (!hv_dev->channel) in server_monitor_latency_show()
244 channel_latency(hv_dev->channel, in server_monitor_latency_show()
253 struct hv_device *hv_dev = device_to_hv_device(dev); in client_monitor_latency_show() local
255 if (!hv_dev->channel) in client_monitor_latency_show()
258 channel_latency(hv_dev->channel, in client_monitor_latency_show()
267 struct hv_device *hv_dev = device_to_hv_device(dev); in server_monitor_conn_id_show() local
269 if (!hv_dev->channel) in server_monitor_conn_id_show()
272 channel_conn_id(hv_dev->channel, in server_monitor_conn_id_show()
281 struct hv_device *hv_dev = device_to_hv_device(dev); in client_monitor_conn_id_show() local
283 if (!hv_dev->channel) in client_monitor_conn_id_show()
286 channel_conn_id(hv_dev->channel, in client_monitor_conn_id_show()
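The monitor attributes (lines 211 through 286) extend the same pattern by one step: the channel is combined with one of the shared monitor pages through the local helpers channel_pending(), channel_latency() and channel_conn_id(). A sketch of the server-side pending attribute; indexing the monitor page out of vmbus_connection.monitor_pages[] is an assumption, since the listing cuts off after the channel argument:

static ssize_t server_monitor_pending_show(struct device *dev,
					   struct device_attribute *dev_attr,
					   char *buf)
{
	struct hv_device *hv_dev = device_to_hv_device(dev);

	if (!hv_dev->channel)
		return -ENODEV;

	/* monitor_pages[0] as the server-side page is assumed, not shown above */
	return sprintf(buf, "%d\n",
		       channel_pending(hv_dev->channel,
				       vmbus_connection.monitor_pages[0]));
}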
294 struct hv_device *hv_dev = device_to_hv_device(dev); in out_intr_mask_show() local
298 if (!hv_dev->channel) in out_intr_mask_show()
301 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->outbound, in out_intr_mask_show()
313 struct hv_device *hv_dev = device_to_hv_device(dev); in out_read_index_show() local
317 if (!hv_dev->channel) in out_read_index_show()
320 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->outbound, in out_read_index_show()
332 struct hv_device *hv_dev = device_to_hv_device(dev); in out_write_index_show() local
336 if (!hv_dev->channel) in out_write_index_show()
339 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->outbound, in out_write_index_show()
351 struct hv_device *hv_dev = device_to_hv_device(dev); in out_read_bytes_avail_show() local
355 if (!hv_dev->channel) in out_read_bytes_avail_show()
358 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->outbound, in out_read_bytes_avail_show()
370 struct hv_device *hv_dev = device_to_hv_device(dev); in out_write_bytes_avail_show() local
374 if (!hv_dev->channel) in out_write_bytes_avail_show()
377 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->outbound, in out_write_bytes_avail_show()
388 struct hv_device *hv_dev = device_to_hv_device(dev); in in_intr_mask_show() local
392 if (!hv_dev->channel) in in_intr_mask_show()
395 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->inbound, &inbound); in in_intr_mask_show()
406 struct hv_device *hv_dev = device_to_hv_device(dev); in in_read_index_show() local
410 if (!hv_dev->channel) in in_read_index_show()
413 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->inbound, &inbound); in in_read_index_show()
424 struct hv_device *hv_dev = device_to_hv_device(dev); in in_write_index_show() local
428 if (!hv_dev->channel) in in_write_index_show()
431 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->inbound, &inbound); in in_write_index_show()
443 struct hv_device *hv_dev = device_to_hv_device(dev); in in_read_bytes_avail_show() local
447 if (!hv_dev->channel) in in_read_bytes_avail_show()
450 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->inbound, &inbound); in in_read_bytes_avail_show()
462 struct hv_device *hv_dev = device_to_hv_device(dev); in in_write_bytes_avail_show() local
466 if (!hv_dev->channel) in in_write_bytes_avail_show()
469 ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->inbound, &inbound); in in_write_bytes_avail_show()
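The ring-buffer attributes (lines 294 through 469) add an error path: a struct hv_ring_buffer_debug_info snapshot is taken with hv_ringbuffer_get_debuginfo(), its return value is propagated, and one field of the snapshot is printed. A sketch modeled on out_intr_mask_show (line 294); the current_interrupt_mask field name and the error handling are assumptions filled in around the lines shown:

static ssize_t out_intr_mask_show(struct device *dev,
				  struct device_attribute *dev_attr, char *buf)
{
	struct hv_device *hv_dev = device_to_hv_device(dev);
	struct hv_ring_buffer_debug_info outbound;
	int ret;

	if (!hv_dev->channel)
		return -ENODEV;

	ret = hv_ringbuffer_get_debuginfo(&hv_dev->channel->outbound,
					  &outbound);
	if (ret < 0)
		return ret;

	return sprintf(buf, "%d\n", outbound.current_interrupt_mask);
}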
481 struct hv_device *hv_dev = device_to_hv_device(dev); in channel_vp_mapping_show() local
482 struct vmbus_channel *channel = hv_dev->channel, *cur_sc; in channel_vp_mapping_show()
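channel_vp_mapping_show (lines 481-482) is the one attribute that walks more than the primary channel: it reports the relid-to-CPU mapping for the primary channel and each sub-channel on its sc_list. A hedged sketch of that walk; the "relid:cpu" output format and the sc_list iteration are assumptions built around the two declarations shown, and the sketch omits any serialization against concurrent sub-channel changes:

static ssize_t channel_vp_mapping_show(struct device *dev,
				       struct device_attribute *dev_attr,
				       char *buf)
{
	struct hv_device *hv_dev = device_to_hv_device(dev);
	struct vmbus_channel *channel = hv_dev->channel, *cur_sc;
	int n_written;

	if (!channel)
		return -ENODEV;

	/* Assumed format: one "relid:target_cpu" pair per line. */
	n_written = sprintf(buf, "%u:%u\n",
			    channel->offermsg.child_relid, channel->target_cpu);

	list_for_each_entry(cur_sc, &channel->sc_list, sc_list) {
		n_written += sprintf(buf + n_written, "%u:%u\n",
				     cur_sc->offermsg.child_relid,
				     cur_sc->target_cpu);
	}

	return n_written;
}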
517 struct hv_device *hv_dev = device_to_hv_device(dev); in vendor_show() local
519 return sprintf(buf, "0x%x\n", hv_dev->vendor_id); in vendor_show()
527 struct hv_device *hv_dev = device_to_hv_device(dev); in device_show() local
529 return sprintf(buf, "0x%x\n", hv_dev->device_id); in device_show()
537 struct hv_device *hv_dev = device_to_hv_device(dev); in driver_override_store() local
540 ret = driver_set_override(dev, &hv_dev->driver_override, buf, count); in driver_override_store()
550 struct hv_device *hv_dev = device_to_hv_device(dev); in driver_override_show() local
554 len = snprintf(buf, PAGE_SIZE, "%s\n", hv_dev->driver_override); in driver_override_show()
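vendor, device and driver_override (lines 517 through 554) are per-device rather than per-channel attributes. driver_override follows the generic driver-core convention: the store path delegates to driver_set_override(), and the show path prints the stored string. A sketch of both halves; taking the device lock while printing is an assumption, since the listing shows only the snprintf() line:

static ssize_t driver_override_store(struct device *dev,
				     struct device_attribute *attr,
				     const char *buf, size_t count)
{
	struct hv_device *hv_dev = device_to_hv_device(dev);
	int ret;

	ret = driver_set_override(dev, &hv_dev->driver_override, buf, count);
	if (ret)
		return ret;

	return count;
}

static ssize_t driver_override_show(struct device *dev,
				    struct device_attribute *attr, char *buf)
{
	struct hv_device *hv_dev = device_to_hv_device(dev);
	ssize_t len;

	device_lock(dev);	/* assumed: keep driver_override stable while printing */
	len = snprintf(buf, PAGE_SIZE, "%s\n", hv_dev->driver_override);
	device_unlock(dev);

	return len;
}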
603 const struct hv_device *hv_dev = device_to_hv_device(dev); in vmbus_dev_attr_is_visible() local
606 if (!hv_dev->channel->offermsg.monitor_allocated && in vmbus_dev_attr_is_visible()
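vmbus_dev_attr_is_visible (lines 603-606) is the attribute group's is_visible callback: when the channel's offer did not allocate a monitor, the monitor-related attributes are hidden. A sketch of that filter; the exact list of hidden attributes is an assumption inferred from the monitor attributes listed above:

static umode_t vmbus_dev_attr_is_visible(struct kobject *kobj,
					 struct attribute *attr, int idx)
{
	struct device *dev = kobj_to_dev(kobj);
	const struct hv_device *hv_dev = device_to_hv_device(dev);

	/* Hide monitor attributes when the offer has no monitor allocated. */
	if (!hv_dev->channel->offermsg.monitor_allocated &&
	    (attr == &dev_attr_monitor_id.attr ||
	     attr == &dev_attr_server_monitor_pending.attr ||
	     attr == &dev_attr_client_monitor_pending.attr ||
	     attr == &dev_attr_server_monitor_latency.attr ||
	     attr == &dev_attr_client_monitor_latency.attr ||
	     attr == &dev_attr_server_monitor_conn_id.attr ||
	     attr == &dev_attr_client_monitor_conn_id.attr))
		return 0;

	return attr->mode;
}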
825 struct hv_device *hv_dev = device_to_hv_device(device); in vmbus_match() local
828 if (is_hvsock_channel(hv_dev->channel)) in vmbus_match()
831 if (hv_vmbus_get_id(drv, hv_dev)) in vmbus_match()
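vmbus_match (lines 825-831) is the bus-level match callback: hv_sock channels are never bound through the normal ID tables, and every other device is matched by looking its GUID up in the driver's id_table via hv_vmbus_get_id(). A sketch; returning the driver's hvsock flag for hv_sock channels is an assumption, the listing only shows the is_hvsock_channel() test:

static int vmbus_match(struct device *device, struct device_driver *driver)
{
	struct hv_driver *drv = drv_to_hv_drv(driver);
	struct hv_device *hv_dev = device_to_hv_device(device);

	/* hv_sock offers are handled by the hv_sock transport only. */
	if (is_hvsock_channel(hv_dev->channel))
		return drv->hvsock;	/* assumed: only hvsock-aware drivers match */

	if (hv_vmbus_get_id(drv, hv_dev))
		return 1;

	return 0;
}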
874 device_get_dma_attr(hv_dev) == DEV_DMA_COHERENT); in vmbus_dma_configure()
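Line 874 is the single reference inside vmbus_dma_configure(): each child device inherits its DMA coherence from the root hv_dev. A heavily hedged sketch; hv_setup_dma_ops() as the consumer of the comparison is an assumption, only the device_get_dma_attr() test appears above:

static int vmbus_dma_configure(struct device *child_device)
{
	/*
	 * Propagate the root device's coherence attribute to the child;
	 * this mainly matters on arm64, where coherence is not implicit.
	 * hv_setup_dma_ops() is assumed to be the helper consuming it.
	 */
	hv_setup_dma_ops(child_device,
			 device_get_dma_attr(hv_dev) == DEV_DMA_COHERENT);

	return 0;
}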
960 struct hv_device *hv_dev = device_to_hv_device(device); in vmbus_device_release() local
961 struct vmbus_channel *channel = hv_dev->channel; in vmbus_device_release()
963 hv_debug_rm_dev_dir(hv_dev); in vmbus_device_release()
968 kfree(hv_dev); in vmbus_device_release()
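vmbus_device_release (lines 960-968) is the struct device release callback: it removes the per-device debugfs directory and frees the struct hv_device. A sketch of the teardown; the channel-removal step under the connection's channel mutex is an assumption filled in between the two lines shown:

static void vmbus_device_release(struct device *device)
{
	struct hv_device *hv_dev = device_to_hv_device(device);
	struct vmbus_channel *channel = hv_dev->channel;

	hv_debug_rm_dev_dir(hv_dev);	/* drop the per-device debugfs entries */

	/* Assumed: the primary channel is released before the device memory. */
	mutex_lock(&vmbus_connection.channel_mutex);
	hv_process_channel_removal(channel);
	mutex_unlock(&vmbus_connection.channel_mutex);

	kfree(hv_dev);
}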
1905 child_device_obj->device.parent = hv_dev; in vmbus_device_register()
2264 hv_dev = &device->dev; in vmbus_acpi_add()
2325 hv_dev = &pdev->dev; in vmbus_device_add()
2638 if (!hv_dev) { in hv_acpi_init()
2669 hv_dev = NULL; in hv_acpi_init()
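The remaining hits trace the lifetime of the file-scope hv_dev: it is set to the ACPI device (line 2264) or platform device (line 2325) representing the VMBus root, becomes the parent of every child device registered on the bus (line 1905), gates module initialization (line 2638), and is cleared on the failure path (line 2669). A condensed sketch of that lifecycle; vmbus_root_probe() and vmbus_root_driver are hypothetical stand-ins for the real probe path, and only the hv_dev assignments mirror the listing:

#include <linux/init.h>
#include <linux/platform_device.h>

static int vmbus_root_probe(struct platform_device *pdev)
{
	hv_dev = &pdev->dev;		/* lines 2264 / 2325: remember the VMBus root */
	return 0;
}

static struct platform_driver vmbus_root_driver = {
	.driver	= { .name = "vmbus-root-sketch" },	/* hypothetical name */
	.probe	= vmbus_root_probe,
};

static int __init hv_acpi_init(void)
{
	int ret = platform_driver_register(&vmbus_root_driver);

	if (ret)
		return ret;

	if (!hv_dev) {			/* line 2638: the root device never probed */
		ret = -ENODEV;
		goto cleanup;
	}

	return 0;

cleanup:
	platform_driver_unregister(&vmbus_root_driver);
	hv_dev = NULL;			/* line 2669: reset so vmbus_exists() keeps failing */
	return ret;
}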