Lines Matching refs:mdev_state
133 struct mdev_state { struct
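The struct reference above anchors everything that follows. Pieced together from the member accesses in this listing, the per-device state looks roughly like the sketch below; the struct serial_port and mdev_region_info type names and the member order are assumptions, not taken verbatim from the source, and the kernel types (struct mutex, struct eventfd_ctx, ...) come from the usual headers.

	/*
	 * Reconstructed sketch of struct mdev_state, assembled from the field
	 * uses in this listing; exact member order and types in mtty.c may differ.
	 */
	struct mdev_state {
		int irq_fd;
		struct eventfd_ctx *intx_evtfd;	/* INTx eventfd from mtty_set_irqs() */
		struct eventfd_ctx *msi_evtfd;	/* MSI eventfd */
		int irq_index;
		u8 *vconfig;			/* virtual PCI config space */
		struct mutex ops_lock;
		struct mdev_device *mdev;
		struct mdev_region_info region_info[VFIO_PCI_NUM_REGIONS];
		u32 bar_mask[VFIO_PCI_NUM_REGIONS];
		struct list_head next;		/* entry in mdev_devices_list */
		struct serial_port s[2];	/* per-port UART emulation state */
		struct mutex rxtx_lock;		/* protects the rxtx FIFOs */
		struct vfio_device_info dev_info;
		int nr_ports;
	};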
162 static struct mdev_state *find_mdev_state_by_uuid(uuid_le uuid) in find_mdev_state_by_uuid() argument
164 struct mdev_state *mds; in find_mdev_state_by_uuid()
188 static void mtty_create_config_space(struct mdev_state *mdev_state) in mtty_create_config_space() argument
191 STORE_LE32((u32 *) &mdev_state->vconfig[0x0], 0x32534348); in mtty_create_config_space()
194 STORE_LE16((u16 *) &mdev_state->vconfig[0x4], 0x0001); in mtty_create_config_space()
197 STORE_LE16((u16 *) &mdev_state->vconfig[0x6], 0x0200); in mtty_create_config_space()
200 mdev_state->vconfig[0x8] = 0x10; in mtty_create_config_space()
203 mdev_state->vconfig[0x9] = 0x02; in mtty_create_config_space()
206 mdev_state->vconfig[0xa] = 0x00; in mtty_create_config_space()
209 mdev_state->vconfig[0xb] = 0x07; in mtty_create_config_space()
213 STORE_LE32((u32 *) &mdev_state->vconfig[0x10], 0x000001); in mtty_create_config_space()
214 mdev_state->bar_mask[0] = ~(MTTY_IO_BAR_SIZE) + 1; in mtty_create_config_space()
216 if (mdev_state->nr_ports == 2) { in mtty_create_config_space()
218 STORE_LE32((u32 *) &mdev_state->vconfig[0x14], 0x000001); in mtty_create_config_space()
219 mdev_state->bar_mask[1] = ~(MTTY_IO_BAR_SIZE) + 1; in mtty_create_config_space()
223 STORE_LE32((u32 *) &mdev_state->vconfig[0x2c], 0x32534348); in mtty_create_config_space()
225 mdev_state->vconfig[0x34] = 0x00; /* Cap Ptr */ in mtty_create_config_space()
226 mdev_state->vconfig[0x3d] = 0x01; /* interrupt pin (INTA#) */ in mtty_create_config_space()
229 mdev_state->vconfig[0x40] = 0x23; in mtty_create_config_space()
230 mdev_state->vconfig[0x43] = 0x80; in mtty_create_config_space()
231 mdev_state->vconfig[0x44] = 0x23; in mtty_create_config_space()
232 mdev_state->vconfig[0x48] = 0x23; in mtty_create_config_space()
233 mdev_state->vconfig[0x4c] = 0x23; in mtty_create_config_space()
235 mdev_state->vconfig[0x60] = 0x50; in mtty_create_config_space()
236 mdev_state->vconfig[0x61] = 0x43; in mtty_create_config_space()
237 mdev_state->vconfig[0x62] = 0x49; in mtty_create_config_space()
238 mdev_state->vconfig[0x63] = 0x20; in mtty_create_config_space()
239 mdev_state->vconfig[0x64] = 0x53; in mtty_create_config_space()
240 mdev_state->vconfig[0x65] = 0x65; in mtty_create_config_space()
241 mdev_state->vconfig[0x66] = 0x72; in mtty_create_config_space()
242 mdev_state->vconfig[0x67] = 0x69; in mtty_create_config_space()
243 mdev_state->vconfig[0x68] = 0x61; in mtty_create_config_space()
244 mdev_state->vconfig[0x69] = 0x6c; in mtty_create_config_space()
245 mdev_state->vconfig[0x6a] = 0x2f; in mtty_create_config_space()
246 mdev_state->vconfig[0x6b] = 0x55; in mtty_create_config_space()
247 mdev_state->vconfig[0x6c] = 0x41; in mtty_create_config_space()
248 mdev_state->vconfig[0x6d] = 0x52; in mtty_create_config_space()
249 mdev_state->vconfig[0x6e] = 0x54; in mtty_create_config_space()
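The mtty_create_config_space() lines above fill in a virtual PCI configuration space byte by byte. STORE_LE16/STORE_LE32 are assumed to be plain little-endian store macros in the sample; 0x32534348 stored at offset 0 is the ASCII byte sequence "HCS2", and the bytes written at 0x60..0x6e spell "PCI Serial/UART". A minimal user-space sketch of the store helpers, assuming a little-endian host:

	#include <stdint.h>
	#include <stdio.h>
	#include <string.h>

	/* Assumed to match the plain-cast macros in the mtty sample. */
	#define STORE_LE16(addr, val)	(*(uint16_t *)(addr) = (val))
	#define STORE_LE32(addr, val)	(*(uint32_t *)(addr) = (val))

	int main(void)
	{
		uint8_t vconfig[0x70];	/* stand-in for mdev_state->vconfig */

		memset(vconfig, 0, sizeof(vconfig));

		STORE_LE32((uint32_t *)&vconfig[0x0], 0x32534348); /* vendor/device ID dword */
		STORE_LE16((uint16_t *)&vconfig[0x4], 0x0001);	   /* command: I/O space enable */

		/* 0x32534348 stored little-endian is the byte sequence 'H' 'C' 'S' '2'. */
		printf("%.4s\n", (char *)&vconfig[0x0]);
		return 0;
	}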
252 static void handle_pci_cfg_write(struct mdev_state *mdev_state, u16 offset, in handle_pci_cfg_write() argument
263 mdev_state->vconfig[0x3c] = buf[0]; in handle_pci_cfg_write()
278 if ((mdev_state->nr_ports == 1) && (bar_index == 1)) { in handle_pci_cfg_write()
279 STORE_LE32(&mdev_state->vconfig[offset], 0); in handle_pci_cfg_write()
287 bar_mask = mdev_state->bar_mask[bar_index]; in handle_pci_cfg_write()
291 cfg_addr |= (mdev_state->vconfig[offset] & 0x3ul); in handle_pci_cfg_write()
292 STORE_LE32(&mdev_state->vconfig[offset], cfg_addr); in handle_pci_cfg_write()
297 STORE_LE32(&mdev_state->vconfig[offset], 0); in handle_pci_cfg_write()
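handle_pci_cfg_write() implements the usual BAR sizing handshake: the guest writes all-ones to a BAR, the driver masks the value with the bar_mask computed in mtty_create_config_space(), keeps the low indicator bits, and a single-port device hardwires BAR1 to zero. A stand-alone sketch of that arithmetic, assuming MTTY_IO_BAR_SIZE is 8 (one 8-register UART window):

	#include <stdint.h>
	#include <stdio.h>

	#define MTTY_IO_BAR_SIZE 8u	/* assumption: size of the emulated I/O BAR */

	int main(void)
	{
		uint32_t bar_mask = ~(MTTY_IO_BAR_SIZE) + 1;	/* 0xfffffff8 */
		uint32_t guest_write = 0xffffffff;		/* BAR size probe */
		uint32_t lo_flags = 0x1;			/* I/O space indicator bit */
		uint32_t cfg_addr = (guest_write & bar_mask) | (lo_flags & 0x3u);

		/* The guest reads back 0xfffffff9 and infers an 8-byte I/O region. */
		printf("readback = 0x%08x\n", cfg_addr);
		return 0;
	}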
306 static void handle_bar_write(unsigned int index, struct mdev_state *mdev_state, in handle_bar_write() argument
315 if (mdev_state->s[index].dlab) { in handle_bar_write()
316 mdev_state->s[index].divisor |= data; in handle_bar_write()
320 mutex_lock(&mdev_state->rxtx_lock); in handle_bar_write()
323 if (mdev_state->s[index].rxtx.count < in handle_bar_write()
324 mdev_state->s[index].max_fifo_size) { in handle_bar_write()
325 mdev_state->s[index].rxtx.fifo[ in handle_bar_write()
326 mdev_state->s[index].rxtx.head] = data; in handle_bar_write()
327 mdev_state->s[index].rxtx.count++; in handle_bar_write()
328 CIRCULAR_BUF_INC_IDX(mdev_state->s[index].rxtx.head); in handle_bar_write()
329 mdev_state->s[index].overrun = false; in handle_bar_write()
335 if ((mdev_state->s[index].uart_reg[UART_IER] & in handle_bar_write()
337 (mdev_state->s[index].rxtx.count == in handle_bar_write()
338 mdev_state->s[index].intr_trigger_level)) { in handle_bar_write()
345 mdev_uuid(mdev_state->mdev)); in handle_bar_write()
351 mdev_state->s[index].overrun = true; in handle_bar_write()
357 if (mdev_state->s[index].uart_reg[UART_IER] & in handle_bar_write()
360 mdev_uuid(mdev_state->mdev)); in handle_bar_write()
362 mutex_unlock(&mdev_state->rxtx_lock); in handle_bar_write()
367 if (mdev_state->s[index].dlab) in handle_bar_write()
368 mdev_state->s[index].divisor |= (u16)data << 8; in handle_bar_write()
370 mdev_state->s[index].uart_reg[offset] = data; in handle_bar_write()
371 mutex_lock(&mdev_state->rxtx_lock); in handle_bar_write()
373 (mdev_state->s[index].rxtx.head == in handle_bar_write()
374 mdev_state->s[index].rxtx.tail)) { in handle_bar_write()
380 mdev_uuid(mdev_state->mdev)); in handle_bar_write()
383 mutex_unlock(&mdev_state->rxtx_lock); in handle_bar_write()
389 mdev_state->s[index].fcr = data; in handle_bar_write()
391 mutex_lock(&mdev_state->rxtx_lock); in handle_bar_write()
394 mdev_state->s[index].rxtx.count = 0; in handle_bar_write()
395 mdev_state->s[index].rxtx.head = 0; in handle_bar_write()
396 mdev_state->s[index].rxtx.tail = 0; in handle_bar_write()
398 mutex_unlock(&mdev_state->rxtx_lock); in handle_bar_write()
402 mdev_state->s[index].intr_trigger_level = 1; in handle_bar_write()
406 mdev_state->s[index].intr_trigger_level = 4; in handle_bar_write()
410 mdev_state->s[index].intr_trigger_level = 8; in handle_bar_write()
414 mdev_state->s[index].intr_trigger_level = 14; in handle_bar_write()
423 mdev_state->s[index].intr_trigger_level = 1; in handle_bar_write()
425 mdev_state->s[index].max_fifo_size = MAX_FIFO_SIZE; in handle_bar_write()
427 mdev_state->s[index].max_fifo_size = 1; in handle_bar_write()
428 mdev_state->s[index].intr_trigger_level = 1; in handle_bar_write()
435 mdev_state->s[index].dlab = true; in handle_bar_write()
436 mdev_state->s[index].divisor = 0; in handle_bar_write()
438 mdev_state->s[index].dlab = false; in handle_bar_write()
440 mdev_state->s[index].uart_reg[offset] = data; in handle_bar_write()
444 mdev_state->s[index].uart_reg[offset] = data; in handle_bar_write()
446 if ((mdev_state->s[index].uart_reg[UART_IER] & UART_IER_MSI) && in handle_bar_write()
451 mtty_trigger_interrupt(mdev_uuid(mdev_state->mdev)); in handle_bar_write()
454 if ((mdev_state->s[index].uart_reg[UART_IER] & UART_IER_MSI) && in handle_bar_write()
459 mtty_trigger_interrupt(mdev_uuid(mdev_state->mdev)); in handle_bar_write()
469 mdev_state->s[index].uart_reg[offset] = data; in handle_bar_write()
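handle_bar_write() funnels THR writes into a per-port circular FIFO protected by rxtx_lock and fires an interrupt once rxtx.count reaches the FCR-programmed trigger level (1, 4, 8 or 14 bytes). A minimal user-space model of the ring arithmetic, assuming a 16-byte FIFO and a wrap macro equivalent to the sample's CIRCULAR_BUF_INC_IDX:

	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	#define MAX_FIFO_SIZE 16	/* assumption: power-of-two FIFO depth */
	#define CIRCULAR_BUF_INC_IDX(idx) ((idx) = ((idx) + 1) & (MAX_FIFO_SIZE - 1))

	struct rxtx {
		uint8_t fifo[MAX_FIFO_SIZE];
		unsigned int head, tail, count;
	};

	/* Enqueue one byte; returns false on overrun, mirroring the THR write path. */
	static bool fifo_put(struct rxtx *r, unsigned int max_fifo_size, uint8_t data)
	{
		if (r->count >= max_fifo_size)
			return false;		/* caller would set s[index].overrun */
		r->fifo[r->head] = data;
		r->count++;
		CIRCULAR_BUF_INC_IDX(r->head);
		return true;
	}

	int main(void)
	{
		struct rxtx r = { .head = 0, .tail = 0, .count = 0 };
		unsigned int intr_trigger_level = 4;	/* FCR trigger level of 4 bytes */

		for (uint8_t c = 'a'; c <= 'f'; c++) {
			fifo_put(&r, MAX_FIFO_SIZE, c);
			if (r.count == intr_trigger_level)
				printf("would trigger RDI interrupt at count %u\n", r.count);
		}
		return 0;
	}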
477 static void handle_bar_read(unsigned int index, struct mdev_state *mdev_state, in handle_bar_read() argument
484 if (mdev_state->s[index].dlab) { in handle_bar_read()
485 *buf = (u8)mdev_state->s[index].divisor; in handle_bar_read()
489 mutex_lock(&mdev_state->rxtx_lock); in handle_bar_read()
491 if (mdev_state->s[index].rxtx.head != in handle_bar_read()
492 mdev_state->s[index].rxtx.tail) { in handle_bar_read()
493 *buf = mdev_state->s[index].rxtx.fifo[ in handle_bar_read()
494 mdev_state->s[index].rxtx.tail]; in handle_bar_read()
495 mdev_state->s[index].rxtx.count--; in handle_bar_read()
496 CIRCULAR_BUF_INC_IDX(mdev_state->s[index].rxtx.tail); in handle_bar_read()
499 if (mdev_state->s[index].rxtx.head == in handle_bar_read()
500 mdev_state->s[index].rxtx.tail) { in handle_bar_read()
508 if (mdev_state->s[index].uart_reg[UART_IER] & in handle_bar_read()
511 mdev_uuid(mdev_state->mdev)); in handle_bar_read()
513 mutex_unlock(&mdev_state->rxtx_lock); in handle_bar_read()
518 if (mdev_state->s[index].dlab) { in handle_bar_read()
519 *buf = (u8)(mdev_state->s[index].divisor >> 8); in handle_bar_read()
522 *buf = mdev_state->s[index].uart_reg[offset] & 0x0f; in handle_bar_read()
527 u8 ier = mdev_state->s[index].uart_reg[UART_IER]; in handle_bar_read()
530 mutex_lock(&mdev_state->rxtx_lock); in handle_bar_read()
532 if ((ier & UART_IER_RLSI) && mdev_state->s[index].overrun) in handle_bar_read()
537 (mdev_state->s[index].rxtx.count >= in handle_bar_read()
538 mdev_state->s[index].intr_trigger_level)) in handle_bar_read()
543 (mdev_state->s[index].rxtx.head == in handle_bar_read()
544 mdev_state->s[index].rxtx.tail)) in handle_bar_read()
549 (mdev_state->s[index].uart_reg[UART_MCR] & in handle_bar_read()
559 mutex_unlock(&mdev_state->rxtx_lock); in handle_bar_read()
565 *buf = mdev_state->s[index].uart_reg[offset]; in handle_bar_read()
572 mutex_lock(&mdev_state->rxtx_lock); in handle_bar_read()
574 if (mdev_state->s[index].rxtx.head != in handle_bar_read()
575 mdev_state->s[index].rxtx.tail) in handle_bar_read()
579 if (mdev_state->s[index].overrun) in handle_bar_read()
583 if (mdev_state->s[index].rxtx.head == in handle_bar_read()
584 mdev_state->s[index].rxtx.tail) in handle_bar_read()
587 mutex_unlock(&mdev_state->rxtx_lock); in handle_bar_read()
594 mutex_lock(&mdev_state->rxtx_lock); in handle_bar_read()
596 if (mdev_state->s[index].uart_reg[UART_MCR] & in handle_bar_read()
598 if (mdev_state->s[index].rxtx.count < in handle_bar_read()
599 mdev_state->s[index].max_fifo_size) in handle_bar_read()
603 mutex_unlock(&mdev_state->rxtx_lock); in handle_bar_read()
608 *buf = mdev_state->s[index].uart_reg[offset]; in handle_bar_read()
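handle_bar_read() synthesizes the UART status registers from the same ring state: RBR pops a byte and clears the trigger condition, LSR reports data-ready/overrun/transmitter-empty, and MSR reflects MCR in loopback mode. Below is a sketch of the LSR composition; the bit values mirror <linux/serial_reg.h>, and the exact composition in mtty.c may differ slightly, so treat it as an illustration only:

	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	#define UART_LSR_DR	0x01	/* receiver data ready */
	#define UART_LSR_OE	0x02	/* overrun error */
	#define UART_LSR_THRE	0x20	/* transmit-hold register empty */
	#define UART_LSR_TEMT	0x40	/* transmitter empty */

	static uint8_t build_lsr(unsigned int head, unsigned int tail, bool overrun)
	{
		uint8_t lsr = 0;

		if (head != tail)		/* bytes waiting in the rx FIFO */
			lsr |= UART_LSR_DR;
		if (overrun)
			lsr |= UART_LSR_OE;
		if (head == tail)		/* nothing queued: tx side looks idle */
			lsr |= UART_LSR_TEMT | UART_LSR_THRE;
		return lsr;
	}

	int main(void)
	{
		printf("empty fifo -> LSR 0x%02x\n", build_lsr(0, 0, false));
		printf("one byte   -> LSR 0x%02x\n", build_lsr(1, 0, false));
		printf("overrun    -> LSR 0x%02x\n", build_lsr(0, 0, true));
		return 0;
	}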
616 static void mdev_read_base(struct mdev_state *mdev_state) in mdev_read_base() argument
626 if (!mdev_state->region_info[index].size) in mdev_read_base()
629 start_lo = (*(u32 *)(mdev_state->vconfig + pos)) & in mdev_read_base()
631 mem_type = (*(u32 *)(mdev_state->vconfig + pos)) & in mdev_read_base()
636 start_hi = (*(u32 *)(mdev_state->vconfig + pos + 4)); in mdev_read_base()
648 mdev_state->region_info[index].start = ((u64)start_hi << 32) | in mdev_read_base()
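mdev_read_base() recovers the guest-programmed BAR bases from vconfig: the low dword is masked down to its address bits, the high dword is read only for 64-bit memory BARs, and the two halves are combined into region_info[index].start. A worked sketch of that reassembly, with the mask values taken from <linux/pci_regs.h> and the buffer standing in for mdev_state->vconfig:

	#include <stdint.h>
	#include <stdio.h>
	#include <string.h>

	#define PCI_BASE_ADDRESS_MEM_MASK	(~0x0fu)
	#define PCI_BASE_ADDRESS_MEM_TYPE_64	0x04u

	int main(void)
	{
		uint8_t vconfig[0x20] = { 0 };
		uint32_t lo = 0x80000004;	/* 64-bit memory BAR, base 0x1_8000_0000 */
		uint32_t hi = 0x00000001;
		uint64_t start;

		memcpy(&vconfig[0x10], &lo, sizeof(lo));
		memcpy(&vconfig[0x14], &hi, sizeof(hi));

		uint32_t start_lo = (*(uint32_t *)(vconfig + 0x10)) &
				    PCI_BASE_ADDRESS_MEM_MASK;
		uint32_t mem_type = (*(uint32_t *)(vconfig + 0x10)) &
				    PCI_BASE_ADDRESS_MEM_TYPE_64;
		uint32_t start_hi = mem_type ? *(uint32_t *)(vconfig + 0x14) : 0;

		start = ((uint64_t)start_hi << 32) | start_lo;
		printf("BAR0 base = 0x%llx\n", (unsigned long long)start);
		return 0;
	}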
656 struct mdev_state *mdev_state; in mdev_access() local
664 mdev_state = mdev_get_drvdata(mdev); in mdev_access()
665 if (!mdev_state) { in mdev_access()
670 mutex_lock(&mdev_state->ops_lock); in mdev_access()
683 handle_pci_cfg_write(mdev_state, offset, buf, count); in mdev_access()
685 memcpy(buf, (mdev_state->vconfig + offset), count); in mdev_access()
692 if (!mdev_state->region_info[index].start) in mdev_access()
693 mdev_read_base(mdev_state); in mdev_access()
701 (u8)*buf, mdev_state->s[index].dlab); in mdev_access()
703 handle_bar_write(index, mdev_state, offset, buf, count); in mdev_access()
705 handle_bar_read(index, mdev_state, offset, buf, count); in mdev_access()
711 (u8)*buf, mdev_state->s[index].dlab); in mdev_access()
725 mutex_unlock(&mdev_state->ops_lock); in mdev_access()
732 struct mdev_state *mdev_state; in mtty_create() local
751 mdev_state = kzalloc(sizeof(struct mdev_state), GFP_KERNEL); in mtty_create()
752 if (mdev_state == NULL) in mtty_create()
755 mdev_state->nr_ports = nr_ports; in mtty_create()
756 mdev_state->irq_index = -1; in mtty_create()
757 mdev_state->s[0].max_fifo_size = MAX_FIFO_SIZE; in mtty_create()
758 mdev_state->s[1].max_fifo_size = MAX_FIFO_SIZE; in mtty_create()
759 mutex_init(&mdev_state->rxtx_lock); in mtty_create()
760 mdev_state->vconfig = kzalloc(MTTY_CONFIG_SPACE_SIZE, GFP_KERNEL); in mtty_create()
762 if (mdev_state->vconfig == NULL) { in mtty_create()
763 kfree(mdev_state); in mtty_create()
767 mutex_init(&mdev_state->ops_lock); in mtty_create()
768 mdev_state->mdev = mdev; in mtty_create()
769 mdev_set_drvdata(mdev, mdev_state); in mtty_create()
771 mtty_create_config_space(mdev_state); in mtty_create()
774 list_add(&mdev_state->next, &mdev_devices_list); in mtty_create()
782 struct mdev_state *mds, *tmp_mds; in mtty_remove()
783 struct mdev_state *mdev_state = mdev_get_drvdata(mdev); in mtty_remove() local
788 if (mdev_state == mds) { in mtty_remove()
789 list_del(&mdev_state->next); in mtty_remove()
791 kfree(mdev_state->vconfig); in mtty_remove()
792 kfree(mdev_state); in mtty_remove()
804 struct mdev_state *mdev_state; in mtty_reset() local
809 mdev_state = mdev_get_drvdata(mdev); in mtty_reset()
810 if (!mdev_state) in mtty_reset()
939 struct mdev_state *mdev_state; in mtty_set_irqs() local
944 mdev_state = mdev_get_drvdata(mdev); in mtty_set_irqs()
945 if (!mdev_state) in mtty_set_irqs()
948 mutex_lock(&mdev_state->ops_lock); in mtty_set_irqs()
959 if (mdev_state->intx_evtfd) in mtty_set_irqs()
960 eventfd_ctx_put(mdev_state->intx_evtfd); in mtty_set_irqs()
975 mdev_state->intx_evtfd = evt; in mtty_set_irqs()
976 mdev_state->irq_fd = fd; in mtty_set_irqs()
977 mdev_state->irq_index = index; in mtty_set_irqs()
992 if (mdev_state->msi_evtfd) in mtty_set_irqs()
993 eventfd_ctx_put(mdev_state->msi_evtfd); in mtty_set_irqs()
995 mdev_state->irq_index = VFIO_PCI_INTX_IRQ_INDEX; in mtty_set_irqs()
1005 if (mdev_state->msi_evtfd) in mtty_set_irqs()
1013 mdev_state->msi_evtfd = evt; in mtty_set_irqs()
1014 mdev_state->irq_fd = fd; in mtty_set_irqs()
1015 mdev_state->irq_index = index; in mtty_set_irqs()
1031 mutex_unlock(&mdev_state->ops_lock); in mtty_set_irqs()
1038 struct mdev_state *mdev_state; in mtty_trigger_interrupt() local
1040 mdev_state = find_mdev_state_by_uuid(uuid); in mtty_trigger_interrupt()
1042 if (!mdev_state) { in mtty_trigger_interrupt()
1047 if ((mdev_state->irq_index == VFIO_PCI_MSI_IRQ_INDEX) && in mtty_trigger_interrupt()
1048 (!mdev_state->msi_evtfd)) in mtty_trigger_interrupt()
1050 else if ((mdev_state->irq_index == VFIO_PCI_INTX_IRQ_INDEX) && in mtty_trigger_interrupt()
1051 (!mdev_state->intx_evtfd)) { in mtty_trigger_interrupt()
1056 if (mdev_state->irq_index == VFIO_PCI_MSI_IRQ_INDEX) in mtty_trigger_interrupt()
1057 ret = eventfd_signal(mdev_state->msi_evtfd, 1); in mtty_trigger_interrupt()
1059 ret = eventfd_signal(mdev_state->intx_evtfd, 1); in mtty_trigger_interrupt()
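mtty_trigger_interrupt() looks the device up by UUID and signals whichever eventfd was registered through mtty_set_irqs(), preferring MSI over INTx; eventfd_signal() is the kernel-side producer that kicks the VFIO/QEMU consumer. The user-space sketch below shows the same eventfd handshake with both ends in one process, purely to illustrate the mechanism:

	#include <stdint.h>
	#include <stdio.h>
	#include <sys/eventfd.h>
	#include <unistd.h>

	int main(void)
	{
		int efd = eventfd(0, 0);
		uint64_t val = 1, out = 0;

		if (efd < 0)
			return 1;

		/* "Interrupt" raised: the driver equivalent is eventfd_signal(ctx, 1). */
		if (write(efd, &val, sizeof(val)) != sizeof(val))
			return 1;

		/* Consumer (e.g. QEMU's irqfd handler) observes the counter. */
		if (read(efd, &out, sizeof(out)) != sizeof(out))
			return 1;

		printf("eventfd counter delivered: %llu\n", (unsigned long long)out);
		close(efd);
		return 0;
	}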
1075 struct mdev_state *mdev_state; in mtty_get_region_info() local
1081 mdev_state = mdev_get_drvdata(mdev); in mtty_get_region_info()
1082 if (!mdev_state) in mtty_get_region_info()
1089 mutex_lock(&mdev_state->ops_lock); in mtty_get_region_info()
1099 if (mdev_state->nr_ports == 2) in mtty_get_region_info()
1107 mdev_state->region_info[bar_index].size = size; in mtty_get_region_info()
1108 mdev_state->region_info[bar_index].vfio_offset = in mtty_get_region_info()
1115 mutex_unlock(&mdev_state->ops_lock); in mtty_get_region_info()
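mtty_get_region_info() advertises each region's size together with a synthetic file offset, packing the region index into the high bits so that mdev_access() can recover it from the read/write position. A sketch of that encoding; the 40-bit shift mirrors the sample's MTTY_VFIO_PCI_OFFSET_* macros and is an assumption here:

	#include <stdint.h>
	#include <stdio.h>

	#define OFFSET_SHIFT	40	/* assumption: high bits carry the region index */
	#define INDEX_TO_OFFSET(index)	((uint64_t)(index) << OFFSET_SHIFT)
	#define OFFSET_TO_INDEX(off)	((off) >> OFFSET_SHIFT)
	#define OFFSET_MASK		(((uint64_t)1 << OFFSET_SHIFT) - 1)

	int main(void)
	{
		uint64_t region_base = INDEX_TO_OFFSET(1);	/* e.g. BAR1 in VFIO index space */
		uint64_t pos = region_base + 0x3;		/* access to register offset 3 (LCR) */

		printf("region index = %llu, register offset = 0x%llx\n",
		       (unsigned long long)OFFSET_TO_INDEX(pos),
		       (unsigned long long)(pos & OFFSET_MASK));
		return 0;
	}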
1158 struct mdev_state *mdev_state; in mtty_ioctl() local
1163 mdev_state = mdev_get_drvdata(mdev); in mtty_ioctl()
1164 if (!mdev_state) in mtty_ioctl()
1184 memcpy(&mdev_state->dev_info, &info, sizeof(info)); in mtty_ioctl()
1226 (info.index >= mdev_state->dev_info.num_irqs)) in mtty_ioctl()
1250 mdev_state->dev_info.num_irqs, in mtty_ioctl()
1361 struct mdev_state *mds; in available_instances_show()