Searched refs:ipa (Results 1 – 25 of 75) sorted by relevance

/Linux-v5.10/drivers/net/ipa/
ipa_main.c
83 static void ipa_suspend_handler(struct ipa *ipa, enum ipa_irq_id irq_id) in ipa_suspend_handler() argument
89 if (!test_and_set_bit(IPA_FLAG_RESUMED, ipa->flags)) in ipa_suspend_handler()
90 pm_wakeup_dev_event(&ipa->pdev->dev, 0, true); in ipa_suspend_handler()
93 ipa_interrupt_suspend_clear_all(ipa->interrupt); in ipa_suspend_handler()
107 int ipa_setup(struct ipa *ipa) in ipa_setup() argument
111 struct device *dev = &ipa->pdev->dev; in ipa_setup()
115 ret = gsi_setup(&ipa->gsi, ipa->version == IPA_VERSION_3_5_1); in ipa_setup()
119 ipa->interrupt = ipa_interrupt_setup(ipa); in ipa_setup()
120 if (IS_ERR(ipa->interrupt)) { in ipa_setup()
121 ret = PTR_ERR(ipa->interrupt); in ipa_setup()
[all …]

ipa_table.c
153 ipa_table_valid_one(struct ipa *ipa, bool route, bool ipv6, bool hashed) in ipa_table_valid_one() argument
155 struct device *dev = &ipa->pdev->dev; in ipa_table_valid_one()
161 mem = hashed ? &ipa->mem[IPA_MEM_V6_ROUTE_HASHED] in ipa_table_valid_one()
162 : &ipa->mem[IPA_MEM_V6_ROUTE]; in ipa_table_valid_one()
164 mem = hashed ? &ipa->mem[IPA_MEM_V4_ROUTE_HASHED] in ipa_table_valid_one()
165 : &ipa->mem[IPA_MEM_V4_ROUTE]; in ipa_table_valid_one()
169 mem = hashed ? &ipa->mem[IPA_MEM_V6_FILTER_HASHED] in ipa_table_valid_one()
170 : &ipa->mem[IPA_MEM_V6_FILTER]; in ipa_table_valid_one()
172 mem = hashed ? &ipa->mem[IPA_MEM_V4_FILTER_HASHED] in ipa_table_valid_one()
173 : &ipa->mem[IPA_MEM_V4_FILTER]; in ipa_table_valid_one()
[all …]

ipa_mem.c
33 struct ipa *ipa = container_of(trans->gsi, struct ipa, gsi); in ipa_mem_zero_region_add() local
34 dma_addr_t addr = ipa->zero_addr; in ipa_mem_zero_region_add()
58 int ipa_mem_setup(struct ipa *ipa) in ipa_mem_setup() argument
60 dma_addr_t addr = ipa->zero_addr; in ipa_mem_setup()
68 trans = ipa_cmd_trans_alloc(ipa, 4); in ipa_mem_setup()
70 dev_err(&ipa->pdev->dev, "no transaction for memory setup\n"); in ipa_mem_setup()
77 offset = ipa->mem[IPA_MEM_MODEM_HEADER].offset; in ipa_mem_setup()
78 size = ipa->mem[IPA_MEM_MODEM_HEADER].size; in ipa_mem_setup()
79 size += ipa->mem[IPA_MEM_AP_HEADER].size; in ipa_mem_setup()
83 ipa_mem_zero_region_add(trans, &ipa->mem[IPA_MEM_MODEM_PROC_CTX]); in ipa_mem_setup()
[all …]
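
Several of these results (ipa_mem.c above, and ipa_gsi.c and ipa_qmi.c further down) recover the enclosing struct ipa from a pointer to one of its embedded members via container_of(). Below is a minimal standalone sketch of that idiom; the demo_* types and the simplified macro are illustrative stand-ins, not the driver's real definitions.

/*
 * Standalone illustration (not driver code): container_of() recovers the
 * enclosing structure from a pointer to one of its members, as in
 * ipa_mem_zero_region_add() above. The macro mirrors the kernel idiom.
 */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct demo_gsi { int channel_count; };

struct demo_ipa {
        int version;
        struct demo_gsi gsi;    /* embedded member, like gsi in struct ipa */
};

int main(void)
{
        struct demo_ipa ipa = { .version = 45, .gsi = { .channel_count = 23 } };
        struct demo_gsi *gsi = &ipa.gsi;

        /* Recover the outer demo_ipa from the embedded gsi pointer. */
        struct demo_ipa *outer = container_of(gsi, struct demo_ipa, gsi);

        printf("version %d, channels %d\n", outer->version, outer->gsi.channel_count);
        return 0;
}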

ipa_modem.c
36 struct ipa *ipa; member
43 struct ipa *ipa = priv->ipa; in ipa_open() local
46 ret = ipa_endpoint_enable_one(ipa->name_map[IPA_ENDPOINT_AP_MODEM_TX]); in ipa_open()
49 ret = ipa_endpoint_enable_one(ipa->name_map[IPA_ENDPOINT_AP_MODEM_RX]); in ipa_open()
58 ipa_endpoint_disable_one(ipa->name_map[IPA_ENDPOINT_AP_MODEM_TX]); in ipa_open()
67 struct ipa *ipa = priv->ipa; in ipa_stop() local
71 ipa_endpoint_disable_one(ipa->name_map[IPA_ENDPOINT_AP_MODEM_RX]); in ipa_stop()
72 ipa_endpoint_disable_one(ipa->name_map[IPA_ENDPOINT_AP_MODEM_TX]); in ipa_stop()
90 struct ipa *ipa = priv->ipa; in ipa_start_xmit() local
97 endpoint = ipa->name_map[IPA_ENDPOINT_AP_MODEM_TX]; in ipa_start_xmit()
[all …]

ipa_table.h
11 struct ipa;
30 bool ipa_table_valid(struct ipa *ipa);
38 bool ipa_filter_map_valid(struct ipa *ipa, u32 filter_mask);
42 static inline bool ipa_table_valid(struct ipa *ipa) in ipa_table_valid() argument
47 static inline bool ipa_filter_map_valid(struct ipa *ipa, u32 filter_mask) in ipa_filter_map_valid() argument
59 void ipa_table_reset(struct ipa *ipa, bool modem);
65 int ipa_table_hash_flush(struct ipa *ipa);
71 int ipa_table_setup(struct ipa *ipa);
77 void ipa_table_teardown(struct ipa *ipa);
83 void ipa_table_config(struct ipa *ipa);
[all …]

ipa_endpoint.h
19 struct ipa;
48 struct ipa *ipa; member
70 void ipa_endpoint_modem_hol_block_clear_all(struct ipa *ipa);
72 void ipa_endpoint_modem_pause_all(struct ipa *ipa, bool enable);
74 int ipa_endpoint_modem_exception_reset_all(struct ipa *ipa);
86 void ipa_endpoint_suspend(struct ipa *ipa);
87 void ipa_endpoint_resume(struct ipa *ipa);
89 void ipa_endpoint_setup(struct ipa *ipa);
90 void ipa_endpoint_teardown(struct ipa *ipa);
92 int ipa_endpoint_config(struct ipa *ipa);
[all …]

ipa_uc.c
117 static struct ipa_uc_mem_area *ipa_uc_shared(struct ipa *ipa) in ipa_uc_shared() argument
119 u32 offset = ipa->mem_offset + ipa->mem[IPA_MEM_UC_SHARED].offset; in ipa_uc_shared()
121 return ipa->mem_virt + offset; in ipa_uc_shared()
125 static void ipa_uc_event_handler(struct ipa *ipa, enum ipa_irq_id irq_id) in ipa_uc_event_handler() argument
127 struct ipa_uc_mem_area *shared = ipa_uc_shared(ipa); in ipa_uc_event_handler()
128 struct device *dev = &ipa->pdev->dev; in ipa_uc_event_handler()
138 static void ipa_uc_response_hdlr(struct ipa *ipa, enum ipa_irq_id irq_id) in ipa_uc_response_hdlr() argument
140 struct ipa_uc_mem_area *shared = ipa_uc_shared(ipa); in ipa_uc_response_hdlr()
152 ipa->uc_loaded = true; in ipa_uc_response_hdlr()
153 ipa_clock_put(ipa); in ipa_uc_response_hdlr()
[all …]

ipa_interrupt.c
39 struct ipa *ipa; member
55 struct ipa *ipa = interrupt->ipa; in ipa_interrupt_process() local
62 iowrite32(mask, ipa->reg_virt + IPA_REG_IRQ_CLR_OFFSET); in ipa_interrupt_process()
65 interrupt->handler[irq_id](interrupt->ipa, irq_id); in ipa_interrupt_process()
72 iowrite32(mask, ipa->reg_virt + IPA_REG_IRQ_CLR_OFFSET); in ipa_interrupt_process()
78 struct ipa *ipa = interrupt->ipa; in ipa_interrupt_process_all() local
86 mask = ioread32(ipa->reg_virt + IPA_REG_IRQ_STTS_OFFSET); in ipa_interrupt_process_all()
95 mask = ioread32(ipa->reg_virt + IPA_REG_IRQ_STTS_OFFSET); in ipa_interrupt_process_all()
104 ipa_clock_get(interrupt->ipa); in ipa_isr_thread()
108 ipa_clock_put(interrupt->ipa); in ipa_isr_thread()
[all …]
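
ipa_interrupt_process() above dispatches each interrupt through an array of handler function pointers indexed by IRQ id, with the ipa_irq_handler_t signature declared in ipa_interrupt.h near the end of this list. A standalone sketch of that dispatch-by-table pattern follows; the demo_* names and enum values are illustrative, not the driver's own.

/*
 * Standalone sketch: one handler slot per interrupt id, NULL where nothing
 * is registered, dispatched like interrupt->handler[irq_id](...) above.
 */
#include <stdio.h>

struct demo_ipa { const char *name; };

enum demo_irq_id {
        DEMO_IRQ_UC,
        DEMO_IRQ_TX_SUSPEND,
        DEMO_IRQ_COUNT,
};

typedef void (*demo_irq_handler_t)(struct demo_ipa *ipa, enum demo_irq_id irq_id);

static void demo_suspend_handler(struct demo_ipa *ipa, enum demo_irq_id irq_id)
{
        printf("%s: handling irq %d\n", ipa->name, irq_id);
}

static demo_irq_handler_t handlers[DEMO_IRQ_COUNT] = {
        [DEMO_IRQ_TX_SUSPEND] = demo_suspend_handler,
};

static void process_irq(struct demo_ipa *ipa, enum demo_irq_id irq_id)
{
        if (handlers[irq_id])
                handlers[irq_id](ipa, irq_id);
}

int main(void)
{
        struct demo_ipa ipa = { .name = "demo-ipa" };

        process_irq(&ipa, DEMO_IRQ_TX_SUSPEND);
        return 0;
}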

ipa_endpoint.c
102 static bool ipa_endpoint_data_valid_one(struct ipa *ipa, u32 count, in ipa_endpoint_data_valid_one() argument
107 struct device *dev = &ipa->pdev->dev; in ipa_endpoint_data_valid_one()
183 static bool ipa_endpoint_data_valid(struct ipa *ipa, u32 count, in ipa_endpoint_data_valid() argument
187 struct device *dev = &ipa->pdev->dev; in ipa_endpoint_data_valid()
217 if (!ipa_endpoint_data_valid_one(ipa, count, data, dp)) in ipa_endpoint_data_valid()
225 static bool ipa_endpoint_data_valid(struct ipa *ipa, u32 count, in ipa_endpoint_data_valid() argument
237 struct gsi *gsi = &endpoint->ipa->gsi; in ipa_endpoint_trans_alloc()
253 struct ipa *ipa = endpoint->ipa; in ipa_endpoint_init_ctrl() local
268 val = ioread32(ipa->reg_virt + offset); in ipa_endpoint_init_ctrl()
273 iowrite32(val, ipa->reg_virt + offset); in ipa_endpoint_init_ctrl()
[all …]
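
The ipa_endpoint.c excerpt ends with ipa_endpoint_init_ctrl() reading a register value with ioread32() and writing an updated value back with iowrite32(). Here is a standalone sketch of that read-modify-write pattern under stated assumptions: ENDP_SUSPEND_FMASK is a hypothetical bit mask, and a plain variable stands in for the MMIO register at ipa->reg_virt + offset.

/*
 * Standalone sketch: read a 32-bit register, update one flag bit,
 * write the value back.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ENDP_SUSPEND_FMASK      (1u << 0)       /* hypothetical field mask */

static uint32_t fake_reg = 0x10;                /* stand-in for the MMIO register */

static void endpoint_set_suspend(bool enable)
{
        uint32_t val = fake_reg;                /* ioread32(ipa->reg_virt + offset) */

        if (enable)
                val |= ENDP_SUSPEND_FMASK;
        else
                val &= ~ENDP_SUSPEND_FMASK;

        fake_reg = val;                         /* iowrite32(val, ipa->reg_virt + offset) */
}

int main(void)
{
        endpoint_set_suspend(true);
        printf("reg = 0x%08x\n", (unsigned int)fake_reg);       /* bit 0 set */
        endpoint_set_suspend(false);
        printf("reg = 0x%08x\n", (unsigned int)fake_reg);       /* bit 0 cleared */
        return 0;
}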

ipa_qmi.c
86 struct ipa *ipa = container_of(ipa_qmi, struct ipa, qmi); in ipa_server_init_complete() local
99 dev_err(&ipa->pdev->dev, in ipa_server_init_complete()
128 struct ipa *ipa = container_of(ipa_qmi, struct ipa, qmi); in ipa_qmi_ready() local
148 ipa = container_of(ipa_qmi, struct ipa, qmi); in ipa_qmi_ready()
149 ret = ipa_modem_start(ipa); in ipa_qmi_ready()
151 dev_err(&ipa->pdev->dev, "error %d starting modem\n", ret); in ipa_qmi_ready()
186 struct ipa *ipa; in ipa_server_indication_register() local
190 ipa = container_of(ipa_qmi, struct ipa, qmi); in ipa_server_indication_register()
202 dev_err(&ipa->pdev->dev, in ipa_server_indication_register()
215 struct ipa *ipa; in ipa_server_driver_init_complete() local
[all …]

ipa_cmd.c
168 bool ipa_cmd_table_valid(struct ipa *ipa, const struct ipa_mem *mem, in ipa_cmd_table_valid() argument
171 struct device *dev = &ipa->pdev->dev; in ipa_cmd_table_valid()
177 ipa->mem_offset > offset_max - mem->offset) { in ipa_cmd_table_valid()
182 ipa->mem_offset, mem->offset, offset_max); in ipa_cmd_table_valid()
186 if (mem->offset > ipa->mem_size || in ipa_cmd_table_valid()
187 mem->size > ipa->mem_size - mem->offset) { in ipa_cmd_table_valid()
192 mem->offset, mem->size, ipa->mem_size); in ipa_cmd_table_valid()
200 static bool ipa_cmd_header_valid(struct ipa *ipa) in ipa_cmd_header_valid() argument
202 const struct ipa_mem *mem = &ipa->mem[IPA_MEM_MODEM_HEADER]; in ipa_cmd_header_valid()
203 struct device *dev = &ipa->pdev->dev; in ipa_cmd_header_valid()
[all …]
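
The checks in ipa_cmd_table_valid() above bound a region's offset and size against the total memory size without unsigned overflow: rather than testing offset + size against mem_size, they first confirm the offset fits and then compare the size against the remaining space. A standalone sketch of that shape follows; region_fits() is a hypothetical helper, not a driver function.

/*
 * Standalone sketch of the overflow-safe bounds check: check the offset
 * first, then compare the size against the remaining space, so the sum
 * offset + size is never computed and cannot wrap.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool region_fits(uint32_t offset, uint32_t size, uint32_t mem_size)
{
        if (offset > mem_size)
                return false;                   /* offset alone is out of range */

        return size <= mem_size - offset;       /* subtraction form avoids wrap near UINT32_MAX */
}

int main(void)
{
        /* A 64 KiB area: a region that fits, and one that runs past the end. */
        printf("fits: %d\n", region_fits(0xF000, 0x0800, 0x10000));     /* 1 */
        printf("fits: %d\n", region_fits(0xF000, 0x2000, 0x10000));     /* 0 */
        return 0;
}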

ipa_modem.h
9 struct ipa;
14 int ipa_modem_start(struct ipa *ipa);
15 int ipa_modem_stop(struct ipa *ipa);
22 int ipa_modem_init(struct ipa *ipa, bool modem_init);
23 void ipa_modem_exit(struct ipa *ipa);
25 int ipa_modem_config(struct ipa *ipa);
26 void ipa_modem_deconfig(struct ipa *ipa);
28 int ipa_modem_setup(struct ipa *ipa);
29 void ipa_modem_teardown(struct ipa *ipa);

ipa_clock.c
114 static int ipa_interconnect_enable(struct ipa *ipa) in ipa_interconnect_enable() argument
116 struct ipa_clock *clock = ipa->clock; in ipa_interconnect_enable()
142 static int ipa_interconnect_disable(struct ipa *ipa) in ipa_interconnect_disable() argument
144 struct ipa_clock *clock = ipa->clock; in ipa_interconnect_disable()
170 static int ipa_clock_enable(struct ipa *ipa) in ipa_clock_enable() argument
174 ret = ipa_interconnect_enable(ipa); in ipa_clock_enable()
178 ret = clk_prepare_enable(ipa->clock->core); in ipa_clock_enable()
180 ipa_interconnect_disable(ipa); in ipa_clock_enable()
186 static void ipa_clock_disable(struct ipa *ipa) in ipa_clock_disable() argument
188 clk_disable_unprepare(ipa->clock->core); in ipa_clock_disable()
[all …]
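
ipa_clock_enable() above enables the interconnects first, then the core clock, and disables the interconnects again if the clock fails to start. Below is a standalone sketch of that enable-then-unwind pattern; enable_interconnects() and enable_core_clock() are hypothetical stand-ins for ipa_interconnect_enable() and clk_prepare_enable().

/*
 * Standalone sketch: enable resource A, then resource B, and roll A back
 * if B fails, returning the error to the caller.
 */
#include <errno.h>
#include <stdio.h>

static int enable_interconnects(void)   { return 0; }          /* pretend success */
static void disable_interconnects(void) { }
static int enable_core_clock(void)      { return -EIO; }       /* pretend failure */

static int clock_enable(void)
{
        int ret;

        ret = enable_interconnects();
        if (ret)
                return ret;

        ret = enable_core_clock();
        if (ret)
                disable_interconnects();        /* undo the first step on failure */

        return ret;
}

int main(void)
{
        int ret = clock_enable();

        if (ret)
                fprintf(stderr, "clock enable failed: %d\n", ret);
        return ret ? 1 : 0;
}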

ipa_mem.h
9 struct ipa;
80 int ipa_mem_config(struct ipa *ipa);
81 void ipa_mem_deconfig(struct ipa *ipa);
83 int ipa_mem_setup(struct ipa *ipa);
84 void ipa_mem_teardown(struct ipa *ipa);
86 int ipa_mem_zero_modem(struct ipa *ipa);
88 int ipa_mem_init(struct ipa *ipa, const struct ipa_mem_data *mem_data);
89 void ipa_mem_exit(struct ipa *ipa);

ipa_smp2p.c
60 struct ipa *ipa; member
92 smp2p->clock_on = ipa_clock_get_additional(smp2p->ipa); in ipa_smp2p_notify()
127 ipa_uc_panic_notifier(smp2p->ipa); in ipa_smp2p_panic_notifier()
158 ret = ipa_setup(smp2p->ipa); in ipa_smp2p_modem_setup_ready_isr()
160 dev_err(&smp2p->ipa->pdev->dev, in ipa_smp2p_modem_setup_ready_isr()
174 struct device *dev = &smp2p->ipa->pdev->dev; in ipa_smp2p_irq_init()
178 ret = platform_get_irq_byname(smp2p->ipa->pdev, name); in ipa_smp2p_irq_init()
201 static void ipa_smp2p_clock_release(struct ipa *ipa) in ipa_smp2p_clock_release() argument
203 if (!ipa->smp2p->clock_on) in ipa_smp2p_clock_release()
206 ipa_clock_put(ipa); in ipa_smp2p_clock_release()
[all …]

ipa_gsi.c
17 struct ipa *ipa = container_of(trans->gsi, struct ipa, gsi); in ipa_gsi_trans_complete() local
19 ipa_endpoint_trans_complete(ipa->channel_map[trans->channel_id], trans); in ipa_gsi_trans_complete()
24 struct ipa *ipa = container_of(trans->gsi, struct ipa, gsi); in ipa_gsi_trans_release() local
26 ipa_endpoint_trans_release(ipa->channel_map[trans->channel_id], trans); in ipa_gsi_trans_release()
32 struct ipa *ipa = container_of(gsi, struct ipa, gsi); in ipa_gsi_channel_tx_queued() local
35 endpoint = ipa->channel_map[channel_id]; in ipa_gsi_channel_tx_queued()
43 struct ipa *ipa = container_of(gsi, struct ipa, gsi); in ipa_gsi_channel_tx_completed() local
46 endpoint = ipa->channel_map[channel_id]; in ipa_gsi_channel_tx_completed()

ipa_cmd.h
15 struct ipa;
66 bool ipa_cmd_table_valid(struct ipa *ipa, const struct ipa_mem *mem,
75 bool ipa_cmd_data_valid(struct ipa *ipa);
79 static inline bool ipa_cmd_table_valid(struct ipa *ipa, in ipa_cmd_table_valid() argument
86 static inline bool ipa_cmd_data_valid(struct ipa *ipa) in ipa_cmd_data_valid() argument
180 void ipa_cmd_tag_process(struct ipa *ipa);
190 struct gsi_trans *ipa_cmd_trans_alloc(struct ipa *ipa, u32 tre_count);

ipa_reg.c
12 int ipa_reg_init(struct ipa *ipa) in ipa_reg_init() argument
14 struct device *dev = &ipa->pdev->dev; in ipa_reg_init()
18 res = platform_get_resource_byname(ipa->pdev, IORESOURCE_MEM, in ipa_reg_init()
25 ipa->reg_virt = ioremap(res->start, resource_size(res)); in ipa_reg_init()
26 if (!ipa->reg_virt) { in ipa_reg_init()
30 ipa->reg_addr = res->start; in ipa_reg_init()
35 void ipa_reg_exit(struct ipa *ipa) in ipa_reg_exit() argument
37 iounmap(ipa->reg_virt); in ipa_reg_exit()

ipa_smp2p.h
11 struct ipa;
21 int ipa_smp2p_init(struct ipa *ipa, bool modem_init);
27 void ipa_smp2p_exit(struct ipa *ipa);
36 void ipa_smp2p_disable(struct ipa *ipa);
46 void ipa_smp2p_notify_reset(struct ipa *ipa);

ipa_clock.h
11 struct ipa;
19 u32 ipa_clock_rate(struct ipa *ipa);
41 void ipa_clock_get(struct ipa *ipa);
49 bool ipa_clock_get_additional(struct ipa *ipa);
59 void ipa_clock_put(struct ipa *ipa);

ipa_uc.h
9 struct ipa;
15 void ipa_uc_setup(struct ipa *ipa);
21 void ipa_uc_teardown(struct ipa *ipa);
30 void ipa_uc_panic_notifier(struct ipa *ipa);

ipa_qmi.h
12 struct ipa;
38 int ipa_qmi_setup(struct ipa *ipa);
39 void ipa_qmi_teardown(struct ipa *ipa);

ipa_interrupt.h
12 struct ipa;
39 typedef void (*ipa_irq_handler_t)(struct ipa *ipa, enum ipa_irq_id irq_id);
109 struct ipa_interrupt *ipa_interrupt_setup(struct ipa *ipa);

/Linux-v5.10/arch/arm64/kvm/
pvtime.c
84 u64 ipa; in kvm_arm_pvtime_set_attr() local
92 if (get_user(ipa, user)) in kvm_arm_pvtime_set_attr()
94 if (!IS_ALIGNED(ipa, 64)) in kvm_arm_pvtime_set_attr()
101 if (kvm_is_error_hva(gfn_to_hva(kvm, ipa >> PAGE_SHIFT))) in kvm_arm_pvtime_set_attr()
106 vcpu->arch.steal.base = ipa; in kvm_arm_pvtime_set_attr()
115 u64 ipa; in kvm_arm_pvtime_get_attr() local
121 ipa = vcpu->arch.steal.base; in kvm_arm_pvtime_get_attr()
123 if (put_user(ipa, user)) in kvm_arm_pvtime_get_attr()
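
kvm_arm_pvtime_set_attr() above rejects a stolen-time base address that is not 64-byte aligned, then converts the guest physical address (IPA) to a frame number with ipa >> PAGE_SHIFT before validating it via gfn_to_hva(). Here is a standalone sketch of those two steps; the IS_ALIGNED macro mirrors the kernel's power-of-two alignment test, and PAGE_SHIFT = 12 (4 KiB pages) is an assumption for the example.

/*
 * Standalone sketch: 64-byte alignment check, then shift out the page
 * offset to get the guest frame number that gfn_to_hva() would look up.
 */
#include <stdint.h>
#include <stdio.h>

#define IS_ALIGNED(x, a)        (((x) & ((a) - 1)) == 0)
#define PAGE_SHIFT              12

int main(void)
{
        uint64_t ipa = 0x80001040;      /* example guest physical address */

        if (!IS_ALIGNED(ipa, 64)) {
                fprintf(stderr, "IPA must be 64-byte aligned\n");
                return 1;
        }

        printf("gfn = 0x%llx\n", (unsigned long long)(ipa >> PAGE_SHIFT));
        return 0;
}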

trace_arm.h
55 unsigned long long ipa),
56 TP_ARGS(vcpu_pc, hsr, hxfar, ipa),
62 __field( unsigned long long, ipa )
69 __entry->ipa = ipa;
73 __entry->ipa, __entry->hsr,
78 TP_PROTO(unsigned long ipa),
79 TP_ARGS(ipa),
82 __field( unsigned long, ipa )
86 __entry->ipa = ipa;
89 TP_printk("IPA: %lx", __entry->ipa)
