Lines matching the full identifier 'private' in the vfio-ccw subchannel driver (drivers/s390/cio/vfio_ccw_drv.c in the Linux kernel). Each entry gives the matched source line number, the matching line, the enclosing function, and whether 'private' is a local variable or a function argument at that site.
41 struct vfio_ccw_private *private = dev_get_drvdata(&sch->dev); in vfio_ccw_sch_quiesce() local
67 private->completion = &completion; in vfio_ccw_sch_quiesce()
73 private->completion = NULL; in vfio_ccw_sch_quiesce()
79 private->state = VFIO_CCW_STATE_NOT_OPER; in vfio_ccw_sch_quiesce()
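
A note on the pattern above: vfio_ccw_sch_quiesce() publishes a completion through private->completion (line 67), the surrounding code (not matched here) waits on it while outstanding I/O is cancelled, the pointer is cleared again (line 73), and the subchannel finally goes to VFIO_CCW_STATE_NOT_OPER (line 79). Below is a minimal user-space sketch of that completion handshake, using POSIX threads in place of the kernel's complete()/wait_for_completion(); every name and the timing in it are invented for illustration and are not driver code.

    /* Build with: cc -pthread completion_demo.c */
    #include <pthread.h>
    #include <stdio.h>
    #include <unistd.h>

    /* User-space stand-in for the kernel's struct completion. */
    struct completion {
        pthread_mutex_t lock;
        pthread_cond_t  cond;
        int             done;
    };

    static void init_completion(struct completion *c)
    {
        pthread_mutex_init(&c->lock, NULL);
        pthread_cond_init(&c->cond, NULL);
        c->done = 0;
    }

    static void complete(struct completion *c)
    {
        pthread_mutex_lock(&c->lock);
        c->done = 1;
        pthread_cond_signal(&c->cond);
        pthread_mutex_unlock(&c->lock);
    }

    static void wait_for_completion(struct completion *c)
    {
        pthread_mutex_lock(&c->lock);
        while (!c->done)
            pthread_cond_wait(&c->cond, &c->lock);
        pthread_mutex_unlock(&c->lock);
    }

    /* Pretend interrupt handler: the last I/O finishes, then it signals
     * whoever parked a completion pointer for it (cf. lines 67/73). */
    static void *irq_thread(void *arg)
    {
        sleep(1);
        complete(arg);
        return NULL;
    }

    int main(void)
    {
        struct completion done;
        pthread_t tid;

        init_completion(&done);
        pthread_create(&tid, NULL, irq_thread, &done);

        wait_for_completion(&done);   /* quiesce: block until I/O has drained */
        pthread_join(tid, NULL);
        printf("quiesced, state -> NOT_OPER\n");
        return 0;
    }
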
86 struct vfio_ccw_private *private; in vfio_ccw_sch_io_todo() local
90 private = container_of(work, struct vfio_ccw_private, io_work); in vfio_ccw_sch_io_todo()
91 irb = &private->irb; in vfio_ccw_sch_io_todo()
96 cp_update_scsw(&private->cp, &irb->scsw); in vfio_ccw_sch_io_todo()
97 if (is_final && private->state == VFIO_CCW_STATE_CP_PENDING) in vfio_ccw_sch_io_todo()
98 cp_free(&private->cp); in vfio_ccw_sch_io_todo()
100 mutex_lock(&private->io_mutex); in vfio_ccw_sch_io_todo()
101 memcpy(private->io_region->irb_area, irb, sizeof(*irb)); in vfio_ccw_sch_io_todo()
102 mutex_unlock(&private->io_mutex); in vfio_ccw_sch_io_todo()
104 if (private->mdev && is_final) in vfio_ccw_sch_io_todo()
105 private->state = VFIO_CCW_STATE_IDLE; in vfio_ccw_sch_io_todo()
107 if (private->io_trigger) in vfio_ccw_sch_io_todo()
108 eventfd_signal(private->io_trigger, 1); in vfio_ccw_sch_io_todo()
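
Lines 90 and 115 recover the owning vfio_ccw_private from an embedded work_struct via container_of(), which simply subtracts the member's offset within the containing type from the member pointer. The self-contained illustration below uses the same arithmetic; the structure and handler are invented for the demo and only mimic the shape of the driver's io_work handling.

    #include <stddef.h>
    #include <stdio.h>

    /* Same arithmetic as the kernel macro: member pointer minus offset. */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct work_struct { int pending; };

    struct demo_private {
        int                state;
        struct work_struct io_work;   /* embedded member, as in the driver */
    };

    /* The work handler only receives a pointer to the embedded member. */
    static void io_todo(struct work_struct *work)
    {
        struct demo_private *priv =
            container_of(work, struct demo_private, io_work);

        printf("state = %d\n", priv->state);
    }

    int main(void)
    {
        struct demo_private p = { .state = 7 };

        io_todo(&p.io_work);   /* prints "state = 7" */
        return 0;
    }
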
113 struct vfio_ccw_private *private; in vfio_ccw_crw_todo() local
115 private = container_of(work, struct vfio_ccw_private, crw_work); in vfio_ccw_crw_todo()
117 if (!list_empty(&private->crw) && private->crw_trigger) in vfio_ccw_crw_todo()
118 eventfd_signal(private->crw_trigger, 1); in vfio_ccw_crw_todo()
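
Lines 108 and 118 notify user space by incrementing an eventfd counter with eventfd_signal(); io_trigger and crw_trigger are eventfds that user space (for example QEMU) registered beforehand through VFIO's irq ioctls and then polls. From user space's point of view the kernel-internal signal amounts to a write of 1 to that eventfd. The purely user-space demo below shows those counter semantics with the eventfd(2) syscall; it is an analogy, not driver code.

    /* Linux-only: eventfd(2). Build with: cc eventfd_demo.c */
    #include <stdint.h>
    #include <stdio.h>
    #include <sys/eventfd.h>
    #include <unistd.h>

    int main(void)
    {
        uint64_t add = 1, val;
        int efd = eventfd(0, 0);

        if (efd < 0) {
            perror("eventfd");
            return 1;
        }

        /* Producer: the user-space equivalent of eventfd_signal(trigger, 1). */
        write(efd, &add, sizeof(add));
        write(efd, &add, sizeof(add));

        /* Consumer (the role QEMU plays): read returns and resets the count. */
        read(efd, &val, sizeof(val));
        printf("events pending: %llu\n", (unsigned long long)val);   /* 2 */

        close(efd);
        return 0;
    }
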
126 struct vfio_ccw_private *private = dev_get_drvdata(&sch->dev); in vfio_ccw_sch_irq() local
129 vfio_ccw_fsm_event(private, VFIO_CCW_EVENT_INTERRUPT); in vfio_ccw_sch_irq()
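
The interrupt handler itself (lines 126-129) only translates the hardware interrupt into VFIO_CCW_EVENT_INTERRUPT and hands it to vfio_ccw_fsm_event(), where the driver dispatches through a table of handlers indexed by the current state and the event. The stand-alone sketch below shows that table-driven FSM style with invented, heavily simplified states, events and handlers; it is not the driver's actual state machine.

    #include <stdio.h>

    enum state { ST_IDLE, ST_BUSY, NR_STATES };
    enum event { EV_IO_REQ, EV_INTERRUPT, NR_EVENTS };

    struct device { enum state state; };

    static void fsm_start_io(struct device *d) { d->state = ST_BUSY; puts("start I/O"); }
    static void fsm_irq_done(struct device *d) { d->state = ST_IDLE; puts("I/O done"); }
    static void fsm_nop(struct device *d)      { (void)d; puts("event ignored"); }

    /* Jump table: one handler per (state, event) pair. */
    static void (*const fsm[NR_STATES][NR_EVENTS])(struct device *) = {
        [ST_IDLE] = { [EV_IO_REQ] = fsm_start_io, [EV_INTERRUPT] = fsm_nop      },
        [ST_BUSY] = { [EV_IO_REQ] = fsm_nop,      [EV_INTERRUPT] = fsm_irq_done },
    };

    static void fsm_event(struct device *d, enum event e)
    {
        fsm[d->state][e](d);
    }

    int main(void)
    {
        struct device dev = { .state = ST_IDLE };

        fsm_event(&dev, EV_IO_REQ);      /* IDLE + I/O request -> BUSY */
        fsm_event(&dev, EV_INTERRUPT);   /* BUSY + interrupt   -> IDLE */
        return 0;
    }
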
132 static void vfio_ccw_free_regions(struct vfio_ccw_private *private) in vfio_ccw_free_regions() argument
134 if (private->crw_region) in vfio_ccw_free_regions()
135 kmem_cache_free(vfio_ccw_crw_region, private->crw_region); in vfio_ccw_free_regions()
136 if (private->schib_region) in vfio_ccw_free_regions()
137 kmem_cache_free(vfio_ccw_schib_region, private->schib_region); in vfio_ccw_free_regions()
138 if (private->cmd_region) in vfio_ccw_free_regions()
139 kmem_cache_free(vfio_ccw_cmd_region, private->cmd_region); in vfio_ccw_free_regions()
140 if (private->io_region) in vfio_ccw_free_regions()
141 kmem_cache_free(vfio_ccw_io_region, private->io_region); in vfio_ccw_free_regions()
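
vfio_ccw_free_regions() releases the four regions newest-first and only when the corresponding pointer is non-NULL, so the same helper is safe to call from the probe error path (where only some regions may exist yet) and from remove. Unlike kfree(), kmem_cache_free() is not generally guaranteed to tolerate a NULL pointer, hence the explicit checks. Below is a plain C sketch of the same guarded, reverse-order teardown idiom, with invented names and malloc/free standing in for the kmem caches.

    #include <stdlib.h>

    struct demo_regions {
        void *io, *cmd, *schib, *crw;   /* allocated in this order */
    };

    /* Safe with any subset allocated: every pointer is checked and the
     * teardown runs in the reverse order of allocation. */
    static void free_regions(struct demo_regions *r)
    {
        if (r->crw)
            free(r->crw);
        if (r->schib)
            free(r->schib);
        if (r->cmd)
            free(r->cmd);
        if (r->io)
            free(r->io);
    }

    int main(void)
    {
        /* e.g. a probe that failed half-way: schib/crw never allocated */
        struct demo_regions r = { .io = malloc(64), .cmd = malloc(64) };

        free_regions(&r);
        return 0;
    }
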
147 struct vfio_ccw_private *private; in vfio_ccw_sch_probe() local
156 private = kzalloc(sizeof(*private), GFP_KERNEL | GFP_DMA); in vfio_ccw_sch_probe()
157 if (!private) in vfio_ccw_sch_probe()
160 private->cp.guest_cp = kcalloc(CCWCHAIN_LEN_MAX, sizeof(struct ccw1), in vfio_ccw_sch_probe()
162 if (!private->cp.guest_cp) in vfio_ccw_sch_probe()
165 private->io_region = kmem_cache_zalloc(vfio_ccw_io_region, in vfio_ccw_sch_probe()
167 if (!private->io_region) in vfio_ccw_sch_probe()
170 private->cmd_region = kmem_cache_zalloc(vfio_ccw_cmd_region, in vfio_ccw_sch_probe()
172 if (!private->cmd_region) in vfio_ccw_sch_probe()
175 private->schib_region = kmem_cache_zalloc(vfio_ccw_schib_region, in vfio_ccw_sch_probe()
178 if (!private->schib_region) in vfio_ccw_sch_probe()
181 private->crw_region = kmem_cache_zalloc(vfio_ccw_crw_region, in vfio_ccw_sch_probe()
184 if (!private->crw_region) in vfio_ccw_sch_probe()
187 private->sch = sch; in vfio_ccw_sch_probe()
188 dev_set_drvdata(&sch->dev, private); in vfio_ccw_sch_probe()
189 mutex_init(&private->io_mutex); in vfio_ccw_sch_probe()
192 private->state = VFIO_CCW_STATE_NOT_OPER; in vfio_ccw_sch_probe()
199 INIT_LIST_HEAD(&private->crw); in vfio_ccw_sch_probe()
200 INIT_WORK(&private->io_work, vfio_ccw_sch_io_todo); in vfio_ccw_sch_probe()
201 INIT_WORK(&private->crw_work, vfio_ccw_crw_todo); in vfio_ccw_sch_probe()
202 atomic_set(&private->avail, 1); in vfio_ccw_sch_probe()
203 private->state = VFIO_CCW_STATE_STANDBY; in vfio_ccw_sch_probe()
223 vfio_ccw_free_regions(private); in vfio_ccw_sch_probe()
224 kfree(private->cp.guest_cp); in vfio_ccw_sch_probe()
225 kfree(private); in vfio_ccw_sch_probe()
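
The probe fragments (lines 147-225) form the usual kernel allocation ladder: each failed allocation jumps to an unwind tail that frees whatever was set up so far, in reverse order, ending in vfio_ccw_free_regions(), kfree(private->cp.guest_cp) and kfree(private). Here is a compact stand-alone illustration of that goto-based unwind style, reduced to two invented allocations.

    #include <stdio.h>
    #include <stdlib.h>

    struct demo_private {
        void *guest_cp;
        void *io_region;
    };

    static struct demo_private *demo_probe(void)
    {
        struct demo_private *priv;

        priv = calloc(1, sizeof(*priv));
        if (!priv)
            return NULL;

        priv->guest_cp = calloc(256, 8);
        if (!priv->guest_cp)
            goto out_free_priv;

        priv->io_region = calloc(1, 128);
        if (!priv->io_region)
            goto out_free_cp;

        return priv;                   /* success: everything allocated */

    out_free_cp:                       /* unwind in reverse order */
        free(priv->guest_cp);
    out_free_priv:
        free(priv);
        return NULL;
    }

    int main(void)
    {
        struct demo_private *p = demo_probe();

        if (!p)
            return 1;
        /* normal teardown mirrors the error path, newest allocation first */
        free(p->io_region);
        free(p->guest_cp);
        free(p);
        return 0;
    }
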
231 struct vfio_ccw_private *private = dev_get_drvdata(&sch->dev); in vfio_ccw_sch_remove() local
236 list_for_each_entry_safe(crw, temp, &private->crw, next) { in vfio_ccw_sch_remove()
245 vfio_ccw_free_regions(private); in vfio_ccw_sch_remove()
246 kfree(private->cp.guest_cp); in vfio_ccw_sch_remove()
247 kfree(private); in vfio_ccw_sch_remove()
272 struct vfio_ccw_private *private = dev_get_drvdata(&sch->dev); in vfio_ccw_sch_event() local
284 vfio_ccw_fsm_event(private, VFIO_CCW_EVENT_NOT_OPER); in vfio_ccw_sch_event()
289 private = dev_get_drvdata(&sch->dev); in vfio_ccw_sch_event()
290 if (private->state == VFIO_CCW_STATE_NOT_OPER) { in vfio_ccw_sch_event()
291 private->state = private->mdev ? VFIO_CCW_STATE_IDLE : in vfio_ccw_sch_event()
302 static void vfio_ccw_queue_crw(struct vfio_ccw_private *private, in vfio_ccw_queue_crw() argument
325 list_add_tail(&crw->next, &private->crw); in vfio_ccw_queue_crw()
326 queue_work(vfio_ccw_work_q, &private->crw_work); in vfio_ccw_queue_crw()
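
vfio_ccw_queue_crw() appends the newly allocated CRW to the intrusive list anchored at private->crw (line 325) and schedules crw_work (line 326); the work handler signals user space only when that list is non-empty (line 117), and remove later drains it with list_for_each_entry_safe() (line 236). The sketch below re-creates just enough of an intrusive list_head API (list_add_tail(), list_empty(), plus container_of() for traversal) to show how an embedded node carries its payload structure; the demo_crw type and the single-threaded drain loop are invented for illustration.

    #include <stddef.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    /* Minimal re-creation of an intrusive, circular doubly linked list. */
    struct list_head { struct list_head *prev, *next; };

    static void INIT_LIST_HEAD(struct list_head *h)   { h->prev = h->next = h; }
    static int  list_empty(const struct list_head *h) { return h->next == h; }

    static void list_add_tail(struct list_head *node, struct list_head *head)
    {
        node->prev = head->prev;
        node->next = head;
        head->prev->next = node;
        head->prev = node;
    }

    static void list_del(struct list_head *node)
    {
        node->prev->next = node->next;
        node->next->prev = node->prev;
        node->prev = node->next = node;
    }

    /* Queued CRW: payload plus an embedded list node named "next",
     * mirroring the usage visible at lines 236 and 325. */
    struct demo_crw {
        unsigned int     erc;
        struct list_head next;
    };

    int main(void)
    {
        struct list_head queue;
        struct demo_crw *crw;
        unsigned int i;

        INIT_LIST_HEAD(&queue);

        for (i = 0; i < 3; i++) {          /* producer: queue_crw() side */
            crw = calloc(1, sizeof(*crw));
            if (!crw)
                break;
            crw->erc = i;
            list_add_tail(&crw->next, &queue);
        }

        while (!list_empty(&queue)) {      /* consumer: drain, delete, free */
            crw = container_of(queue.next, struct demo_crw, next);
            printf("crw erc=%u\n", crw->erc);
            list_del(&crw->next);
            free(crw);
        }
        return 0;
    }
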
332 struct vfio_ccw_private *private = dev_get_drvdata(&sch->dev); in vfio_ccw_chp_event() local
336 if (!private || !mask) in vfio_ccw_chp_event()
339 trace_vfio_ccw_chp_event(private->sch->schid, mask, event); in vfio_ccw_chp_event()
341 mdev_uuid(private->mdev), sch->schid.cssid, in vfio_ccw_chp_event()
360 vfio_ccw_queue_crw(private, CRW_RSC_CPATH, CRW_ERC_PERRN, in vfio_ccw_chp_event()
371 vfio_ccw_queue_crw(private, CRW_RSC_CPATH, CRW_ERC_INIT, in vfio_ccw_chp_event()