Lines Matching full:data

in ivshmem_configure_msi_x_interrupts():
    40      struct ivshmem *data = dev->data;
    48      n_vectors = pcie_msi_vectors_allocate(data->pcie->bdf,
    50              data->vectors,
    61      data->params[i].dev = dev;
    62      data->params[i].vector = i;
    64      if (!pcie_msi_vector_connect(data->pcie->bdf,
    65              &data->vectors[i],
    67              &data->params[i], 0)) {
    75      if (!pcie_msi_enable(data->pcie->bdf, data->vectors, n_vectors, 0)) {
    80      data->n_vectors = n_vectors;
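The fragments above follow the usual Zephyr MSI-X sequence: allocate vectors for the endpoint, connect a handler and a per-vector parameter to each vector, then enable the whole set. The following is a minimal sketch of that flow, not the driver's code; it assumes the pcie_msi_vectors_allocate()/pcie_msi_vector_connect()/pcie_msi_enable() calls as used above, and MY_N_VECTORS, my_msix_isr and the priority value 0 are placeholders.

#include <errno.h>
#include <zephyr/kernel.h>
#include <zephyr/drivers/pcie/pcie.h>
#include <zephyr/drivers/pcie/msi.h>

#define MY_N_VECTORS 2 /* placeholder vector count */

static msi_vector_t my_vectors[MY_N_VECTORS];

static void my_msix_isr(const void *param)
{
	/* param is whatever was handed to pcie_msi_vector_connect() */
	ARG_UNUSED(param);
}

static int my_setup_msi_x(pcie_bdf_t bdf)
{
	uint8_t n_vectors;

	/* Reserve up to MY_N_VECTORS MSI-X vectors for this endpoint */
	n_vectors = pcie_msi_vectors_allocate(bdf, 0, my_vectors, MY_N_VECTORS);
	if (n_vectors == 0) {
		return -EIO;
	}

	/* Attach an ISR (and optionally a per-vector parameter) to each vector */
	for (uint8_t i = 0; i < n_vectors; i++) {
		if (!pcie_msi_vector_connect(bdf, &my_vectors[i], my_msix_isr, NULL, 0)) {
			return -EIO;
		}
	}

	/* Program the MSI-X table and set the enable bit in the capability */
	if (!pcie_msi_enable(bdf, my_vectors, n_vectors, 0)) {
		return -EIO;
	}

	return 0;
}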
in ivshmem_configure_int_x_interrupts():
    96      struct ivshmem *data = dev->data;
    98      uint32_t cfg_int = pcie_conf_read(data->pcie->bdf, PCIE_CONF_INTR);
    107     pcie_set_cmd(data->pcie->bdf, PCIE_CONF_CMDSTAT_INTX_DISABLE, false);
    111     data->params[0].dev = dev;
    112     data->params[0].vector = 0;
    117     data->pcie->bdf, intx->irq, intx->priority,
    118     ivshmem_doorbell, &data->params[0], intx->flags)) {
    123     data->n_vectors = 1;
    125     pcie_irq_enable(data->pcie->bdf, intx->irq);
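The legacy INTx fall-back reads the interrupt line from PCI config space, clears the command-register "INTx disable" bit, hooks an ISR, and unmasks the line at the device. The sketch below shows that pattern under the assumption that the call whose arguments appear on lines 117-118 is Zephyr's dynamic IRQ connect helper pcie_connect_dynamic_irq(); the IRQ number and priority, which the driver takes from devicetree, are plain parameters here.

#include <errno.h>
#include <zephyr/kernel.h>
#include <zephyr/drivers/pcie/pcie.h>

static void my_intx_isr(const void *param)
{
	ARG_UNUSED(param);
}

static int my_setup_intx(pcie_bdf_t bdf, unsigned int irq, unsigned int priority)
{
	uint32_t cfg_int = pcie_conf_read(bdf, PCIE_CONF_INTR);

	/* Bail out if no interrupt pin is routed to this function */
	if (PCIE_CONF_INTR_IRQ(cfg_int) == PCIE_CONF_INTR_IRQ_NONE) {
		return -ENODEV;
	}

	/* Clear the "INTx disable" bit in the PCI command register */
	pcie_set_cmd(bdf, PCIE_CONF_CMDSTAT_INTX_DISABLE, false);

	/* Hook the ISR at run time, then unmask the line at the device */
	if (!pcie_connect_dynamic_irq(bdf, irq, priority, my_intx_isr, NULL, 0)) {
		return -EIO;
	}

	pcie_irq_enable(bdf, irq);

	return 0;
}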
in register_signal():
    135     struct ivshmem *data = dev->data;
    137     data->params[vector].signal = signal;
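register_signal() stores the caller's k_poll_signal in the per-vector parameter block so the doorbell ISR can raise it. From the application side the flow is roughly the sketch below; it assumes the public ivshmem_register_handler() call, an already-bound device pointer passed in by the caller, and vector 0 as an arbitrary choice.

#include <zephyr/kernel.h>
#include <zephyr/drivers/virtualization/ivshmem.h>

static struct k_poll_signal doorbell_sig;

static void my_wait_for_doorbell(const struct device *ivshmem_dev)
{
	struct k_poll_event ev = K_POLL_EVENT_INITIALIZER(K_POLL_TYPE_SIGNAL,
							  K_POLL_MODE_NOTIFY_ONLY,
							  &doorbell_sig);

	k_poll_signal_init(&doorbell_sig);

	/* Ask the driver to raise doorbell_sig whenever vector 0 fires */
	ivshmem_register_handler(ivshmem_dev, &doorbell_sig, 0);

	/* Block until a peer rings the doorbell */
	k_poll(&ev, 1, K_FOREVER);
}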
in ivshmem_configure():
    160     struct ivshmem *data = dev->data;
    163     if (!pcie_get_mbar(data->pcie->bdf, IVSHMEM_PCIE_REG_BAR_IDX, &mbar_regs)) {
    165     IF_ENABLED(CONFIG_IVSHMEM_V2, (|| data->ivshmem_v2))) {
    173     pcie_set_cmd(data->pcie->bdf, PCIE_CONF_CMDSTAT_MEM |
    181     data->pcie->bdf, IVSHMEM_PCIE_MSI_X_BAR_IDX, &mbar_msi_x);
    183     data->pcie->bdf, IVSHMEM_PCIE_SHMEM_BAR_IDX, &mbar_shmem);
    191     if (data->ivshmem_v2) {
    200     data->max_peers = regs->max_peers;
    201     if (!IN_RANGE(data->max_peers, 2, CONFIG_IVSHMEM_V2_MAX_PEERS)) {
    202     LOG_ERR("Invalid max peers %u", data->max_peers);
    206     uint32_t vendor_cap = pcie_get_cap(data->pcie->bdf, PCI_CAP_ID_VNDR);
    211     shmem_phys_addr = pcie_conf_read_u64(data->pcie->bdf, cap_pos);
    216     size_t state_table_size = pcie_conf_read(data->pcie->bdf, cap_pos);
    218     if (state_table_size < sizeof(uint32_t) * data->max_peers) {
    222     k_mem_map_phys_bare((uint8_t **)&data->state_table_shmem,
    228     data->rw_section_size = pcie_conf_read_u64(data->pcie->bdf, cap_pos);
    230     LOG_INF("RW section size 0x%zX", data->rw_section_size);
    231     if (data->rw_section_size > 0) {
    232     k_mem_map_phys_bare((uint8_t **)&data->rw_section_shmem,
    234     data->rw_section_size,
    240     data->output_section_size = pcie_conf_read_u64(data->pcie->bdf, cap_pos);
    241     size_t output_section_offset = rw_section_offset + data->rw_section_size;
    242     LOG_INF("Output section size 0x%zX", data->output_section_size);
    243     for (uint32_t i = 0; i < data->max_peers; i++) {
    246     (data->output_section_size * i);
    253     k_mem_map_phys_bare((uint8_t **)&data->output_section_shmem[i],
    254     phys_addr, data->output_section_size, flags);
    257     data->size = output_section_offset +
    258     data->output_section_size * data->max_peers;
    262     uint32_t cfg_priv_cntl = pcie_conf_read(data->pcie->bdf, cap_pos);
    266     pcie_conf_write(data->pcie->bdf, cap_pos, cfg_priv_cntl);
    275     data->size = mbar_shmem.size;
    277     k_mem_map_phys_bare((uint8_t **)&data->shmem,
    278     shmem_phys_addr, data->size,
    289     else if (data->ivshmem_v2) {
    301     data->size, shmem_phys_addr, data->shmem);
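ivshmem_configure() resolves the shared-memory region either from the shared-memory BAR or, for ivshmem-v2, from the vendor-specific capability, and then maps it into the kernel's address space with k_mem_map_phys_bare(). Below is a reduced sketch of the plain (non-v2) path; it assumes the pcie_get_mbar()/pcie_set_cmd()/k_mem_map_phys_bare() calls seen above. The BAR index 2, the my_map_shmem name, the cache/permission flags and the <zephyr/kernel/mm.h> include (the header providing k_mem_map_phys_bare() varies between Zephyr versions) are illustrative assumptions.

#include <errno.h>
#include <zephyr/kernel.h>
#include <zephyr/kernel/mm.h>
#include <zephyr/drivers/pcie/pcie.h>

static int my_map_shmem(pcie_bdf_t bdf, uintptr_t *vaddr, size_t *size)
{
	struct pcie_bar bar;

	/* BAR 2 conventionally holds the ivshmem shared-memory region */
	if (!pcie_get_mbar(bdf, 2, &bar)) {
		return -ENODEV;
	}

	/* Enable memory-space decoding so BAR accesses reach the device */
	pcie_set_cmd(bdf, PCIE_CONF_CMDSTAT_MEM, true);

	/* Map the physical region read/write into the kernel's address space */
	k_mem_map_phys_bare((uint8_t **)vaddr, bar.phys_addr, bar.size,
			    K_MEM_CACHE_RAM | K_MEM_PERM_RW);

	*size = bar.size;

	return 0;
}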
in ivshmem_api_get_mem():
    309     struct ivshmem *data = dev->data;
    312     if (data->ivshmem_v2) {
    318     *memmap = data->shmem;
    320     return data->size;
in ivshmem_api_get_id():
    328     struct ivshmem *data = dev->data;
    330     if (data->ivshmem_v2) {

in ivshmem_api_get_vectors():
    350     struct ivshmem *data = dev->data;
    352     return data->n_vectors;

in ivshmem_api_int_peer():
    362     struct ivshmem *data = dev->data;
    366     if (vector >= data->n_vectors) {
    371     if (data->ivshmem_v2 && peer_id >= data->max_peers) {
    375     if (data->ivshmem_v2) {

in ivshmem_api_register_handler():
    401     struct ivshmem *data = dev->data;
    403     if (vector >= data->n_vectors) {
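These wrappers back the public ivshmem v1 API (shared memory pointer and size, own peer ID, doorbell vector count, and peer notification). A short usage sketch combining them, assuming a ready device pointer obtained elsewhere; the peer ID and vector passed to ivshmem_int_peer() are placeholders.

#include <string.h>
#include <zephyr/kernel.h>
#include <zephyr/drivers/virtualization/ivshmem.h>

static void my_ivshmem_hello(const struct device *ivshmem_dev)
{
	uintptr_t mem;
	size_t size = ivshmem_get_mem(ivshmem_dev, &mem);    /* mapped size and vaddr */
	uint32_t id = ivshmem_get_id(ivshmem_dev);            /* our own peer ID */
	uint16_t vectors = ivshmem_get_vectors(ivshmem_dev);  /* doorbell vector count */

	printk("ivshmem: id %u, %zu bytes at %lx, %u vectors\n",
	       id, size, (unsigned long)mem, vectors);

	if (size >= 16) {
		/* Once mapped, the region behaves like ordinary memory */
		memset((void *)mem, 0, 16);
	}

	if (vectors > 0) {
		/* Ring vector 0 of peer 0 (placeholder peer ID) */
		ivshmem_int_peer(ivshmem_dev, 0, 0);
	}
}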
in ivshmem_api_get_rw_mem_section():
    420     struct ivshmem *data = dev->data;
    422     if (!data->ivshmem_v2) {
    427     *memmap = data->rw_section_shmem;
    429     return data->rw_section_size;

in ivshmem_api_get_output_mem_section():
    436     struct ivshmem *data = dev->data;
    438     if (!data->ivshmem_v2 || peer_id >= data->max_peers) {
    443     *memmap = data->output_section_shmem[peer_id];
    445     return data->output_section_size;

in ivshmem_api_get_state():
    451     struct ivshmem *data = dev->data;
    453     if (!data->ivshmem_v2 || peer_id >= data->max_peers) {
    458     (const volatile uint32_t *)data->state_table_shmem;

in ivshmem_api_set_state():
    466     struct ivshmem *data = dev->data;
    468     if (!data->ivshmem_v2) {

in ivshmem_api_get_max_peers():
    482     struct ivshmem *data = dev->data;
    484     if (!data->ivshmem_v2) {
    488     return data->max_peers;

in ivshmem_api_get_protocol():
    493     struct ivshmem *data = dev->data;
    495     if (!data->ivshmem_v2) {
    499     uint16_t protocol = (data->pcie->class_rev >> 8) & 0xFFFF;

in ivshmem_api_enable_interrupts():
    507     struct ivshmem *data = dev->data;
    509     if (!data->ivshmem_v2) {
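All of these accessors reject the call when the instance is not an ivshmem-v2 device, so they are only meaningful with CONFIG_IVSHMEM_V2 enabled. The sketch below shows the corresponding v2-side application calls under the assumption that the public names mirror the ivshmem_api_* wrappers above (ivshmem_get_rw_mem_section(), ivshmem_get_output_mem_section(), ivshmem_get_state(), ivshmem_set_state(), ivshmem_get_max_peers(), ivshmem_get_protocol(), ivshmem_enable_interrupts()); peer 0 and the state value 1 are placeholders.

#include <zephyr/kernel.h>
#include <zephyr/drivers/virtualization/ivshmem.h>

static void my_ivshmem_v2_info(const struct device *ivshmem_dev)
{
	uintptr_t rw, out;
	size_t rw_size, out_size;
	uint32_t peers = (uint32_t)ivshmem_get_max_peers(ivshmem_dev);
	uint32_t proto = (uint32_t)ivshmem_get_protocol(ivshmem_dev);

	rw_size  = ivshmem_get_rw_mem_section(ivshmem_dev, &rw);
	out_size = ivshmem_get_output_mem_section(ivshmem_dev, 0, &out);

	printk("peers: %u, protocol: %u\n", peers, proto);
	printk("rw section: %zu bytes, peer 0 output section: %zu bytes\n",
	       rw_size, out_size);

	/* Announce ourselves in the state table and unmask doorbell interrupts */
	ivshmem_set_state(ivshmem_dev, 1);
	ivshmem_enable_interrupts(ivshmem_dev, true);

	/* Read back the state peer 0 has published */
	printk("peer 0 state: %u\n", (uint32_t)ivshmem_get_state(ivshmem_dev, 0));
}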
in ivshmem_init():
    542     struct ivshmem *data = dev->data;
    544     if (data->pcie->bdf == PCIE_BDF_NONE) {
    550     data->pcie->id, data->pcie->bdf, data->pcie->class_rev);
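ivshmem_init() fails when no matching PCIe endpoint was enumerated (the BDF stays PCIE_BDF_NONE), so applications should check device readiness before calling into the API. A minimal sketch, assuming the instance's devicetree compatible is "qemu,ivshmem" (an assumption; adjust DEVICE_DT_GET_ONE() to the actual binding).

#include <errno.h>
#include <zephyr/kernel.h>
#include <zephyr/device.h>

/* Assumed: a single devicetree instance with compatible "qemu,ivshmem" */
static const struct device *const ivshmem_dev = DEVICE_DT_GET_ONE(qemu_ivshmem);

int main(void)
{
	if (!device_is_ready(ivshmem_dev)) {
		/* init failed, e.g. the PCIe endpoint was not found */
		printk("ivshmem device not ready\n");
		return -ENODEV;
	}

	/* Safe to use the ivshmem_* API from here on */
	return 0;
}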