Lines matching refs:irq_ptr (references to irq_ptr in the s390 qdio driver, drivers/s390/cio/qdio_main.c)

121 		nr += q->irq_ptr->nr_input_qs;  in qdio_do_eqbs()
123 ccq = do_eqbs(q->irq_ptr->sch_token, state, nr, &tmp_start, &tmp_count, in qdio_do_eqbs()
134 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "EQBS part:%02x", in qdio_do_eqbs()
139 DBF_DEV_EVENT(DBF_WARN, q->irq_ptr, "EQBS again:%2d", ccq); in qdio_do_eqbs()
145 q->handler(q->irq_ptr->cdev, QDIO_ERROR_GET_BUF_STATE, q->nr, in qdio_do_eqbs()
146 q->first_to_check, count, q->irq_ptr->int_parm); in qdio_do_eqbs()
174 nr += q->irq_ptr->nr_input_qs; in qdio_do_sqbs()
176 ccq = do_sqbs(q->irq_ptr->sch_token, state, nr, &tmp_start, &tmp_count); in qdio_do_sqbs()
186 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "SQBS again:%2d", ccq); in qdio_do_sqbs()
193 q->handler(q->irq_ptr->cdev, QDIO_ERROR_SET_BUF_STATE, q->nr, in qdio_do_sqbs()
194 q->first_to_check, count, q->irq_ptr->int_parm); in qdio_do_sqbs()
278 static void qdio_init_buf_states(struct qdio_irq *irq_ptr) in qdio_init_buf_states() argument
283 for_each_input_queue(irq_ptr, q, i) in qdio_init_buf_states()
286 for_each_output_queue(irq_ptr, q, i) in qdio_init_buf_states()
294 unsigned long schid = *((u32 *) &q->irq_ptr->schid); in qdio_siga_sync()
298 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "siga-s:%1d", q->nr); in qdio_siga_sync()
302 schid = q->irq_ptr->sch_token; in qdio_siga_sync()
323 unsigned long schid = *((u32 *) &q->irq_ptr->schid); in qdio_siga_output()
336 schid = q->irq_ptr->sch_token; in qdio_siga_output()
354 DBF_DEV_EVENT(DBF_WARN, q->irq_ptr, in qdio_siga_output()
356 DBF_DEV_EVENT(DBF_WARN, q->irq_ptr, "count:%u", retries); in qdio_siga_output()
363 unsigned long schid = *((u32 *) &q->irq_ptr->schid); in qdio_siga_input()
367 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "siga-r:%1d", q->nr); in qdio_siga_input()
371 schid = q->irq_ptr->sch_token; in qdio_siga_input()
387 if (pci_out_supported(q->irq_ptr)) in qdio_sync_queues()
429 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "OUTFULL FTC:%02x", start); in process_buffer_error()
474 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "in prim:%1d %02x", q->nr, in get_inbound_buffer_frontier()
480 if (q->irq_ptr->perf_stat_enabled) in get_inbound_buffer_frontier()
484 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "in err:%1d %02x", q->nr, in get_inbound_buffer_frontier()
491 if (q->irq_ptr->perf_stat_enabled) in get_inbound_buffer_frontier()
495 if (q->irq_ptr->perf_stat_enabled) in get_inbound_buffer_frontier()
497 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "in nop:%1d %#02x", in get_inbound_buffer_frontier()
504 dev_WARN_ONCE(&q->irq_ptr->cdev->dev, 1, in get_inbound_buffer_frontier()
556 if (unlikely(q->irq_ptr->state != QDIO_IRQ_STATE_ACTIVE)) in qdio_kick_handler()
561 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "kih s:%02x c:%02x", start, count); in qdio_kick_handler()
564 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "koh: s:%02x c:%02x", in qdio_kick_handler()
568 q->handler(q->irq_ptr->cdev, q->qdio_error, q->nr, start, count, in qdio_kick_handler()
569 q->irq_ptr->int_parm); in qdio_kick_handler()
577 if (likely(q->irq_ptr->state == QDIO_IRQ_STATE_ACTIVE)) { in qdio_tasklet_schedule()
645 !pci_out_supported(q->irq_ptr)) || in get_outbound_buffer_frontier()
662 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, in get_outbound_buffer_frontier()
666 if (q->irq_ptr->perf_stat_enabled) in get_outbound_buffer_frontier()
672 if (q->irq_ptr->perf_stat_enabled) in get_outbound_buffer_frontier()
677 if (q->irq_ptr->perf_stat_enabled) in get_outbound_buffer_frontier()
679 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "out primed:%1d", in get_outbound_buffer_frontier()
687 dev_WARN_ONCE(&q->irq_ptr->cdev->dev, 1, in get_outbound_buffer_frontier()
707 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "out moved:%1d", q->nr); in qdio_outbound_q_moved()
729 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "siga-w:%1d", q->nr); in qdio_kick_outbound_q()
746 DBF_DEV_EVENT(DBF_INFO, q->irq_ptr, "siga-w cc2:%1d", q->nr); in qdio_kick_outbound_q()
777 if (queue_type(q) == QDIO_ZFCP_QFMT && !pci_out_supported(q->irq_ptr) && in __qdio_outbound_processing()
793 likely(q->irq_ptr->state == QDIO_IRQ_STATE_ACTIVE)) in __qdio_outbound_processing()
836 qdio_check_outbound_pci_queues(q->irq_ptr); in tiqdio_inbound_processing()
841 static inline void qdio_set_state(struct qdio_irq *irq_ptr, in qdio_set_state() argument
844 DBF_DEV_EVENT(DBF_INFO, irq_ptr, "newstate: %1d", state); in qdio_set_state()
846 irq_ptr->state = state; in qdio_set_state()
850 static void qdio_irq_check_sense(struct qdio_irq *irq_ptr, struct irb *irb) in qdio_irq_check_sense() argument
853 DBF_ERROR("%4x sense:", irq_ptr->schid.sch_no); in qdio_irq_check_sense()
860 static void qdio_int_handler_pci(struct qdio_irq *irq_ptr) in qdio_int_handler_pci() argument
865 if (unlikely(irq_ptr->state != QDIO_IRQ_STATE_ACTIVE)) in qdio_int_handler_pci()
868 if (irq_ptr->irq_poll) { in qdio_int_handler_pci()
869 if (!test_and_set_bit(QDIO_IRQ_DISABLED, &irq_ptr->poll_state)) in qdio_int_handler_pci()
870 irq_ptr->irq_poll(irq_ptr->cdev, irq_ptr->int_parm); in qdio_int_handler_pci()
872 QDIO_PERF_STAT_INC(irq_ptr, int_discarded); in qdio_int_handler_pci()
874 for_each_input_queue(irq_ptr, q, i) in qdio_int_handler_pci()
878 if (!pci_out_supported(irq_ptr) || !irq_ptr->scan_threshold) in qdio_int_handler_pci()
881 for_each_output_queue(irq_ptr, q, i) { in qdio_int_handler_pci()
890 static void qdio_handle_activate_check(struct qdio_irq *irq_ptr, in qdio_handle_activate_check() argument
896 DBF_ERROR("%4x ACT CHECK", irq_ptr->schid.sch_no); in qdio_handle_activate_check()
900 if (irq_ptr->nr_input_qs) { in qdio_handle_activate_check()
901 q = irq_ptr->input_qs[0]; in qdio_handle_activate_check()
902 } else if (irq_ptr->nr_output_qs) { in qdio_handle_activate_check()
903 q = irq_ptr->output_qs[0]; in qdio_handle_activate_check()
909 q->handler(q->irq_ptr->cdev, QDIO_ERROR_ACTIVATE, in qdio_handle_activate_check()
910 q->nr, q->first_to_check, 0, irq_ptr->int_parm); in qdio_handle_activate_check()
912 qdio_set_state(irq_ptr, QDIO_IRQ_STATE_STOPPED); in qdio_handle_activate_check()
920 static void qdio_establish_handle_irq(struct qdio_irq *irq_ptr, int cstat, in qdio_establish_handle_irq() argument
923 DBF_DEV_EVENT(DBF_INFO, irq_ptr, "qest irq"); in qdio_establish_handle_irq()
931 qdio_set_state(irq_ptr, QDIO_IRQ_STATE_ESTABLISHED); in qdio_establish_handle_irq()
935 DBF_ERROR("%4x EQ:error", irq_ptr->schid.sch_no); in qdio_establish_handle_irq()
937 qdio_set_state(irq_ptr, QDIO_IRQ_STATE_ERR); in qdio_establish_handle_irq()
944 struct qdio_irq *irq_ptr = cdev->private->qdio_data; in qdio_int_handler() local
948 if (!intparm || !irq_ptr) { in qdio_int_handler()
954 if (irq_ptr->perf_stat_enabled) in qdio_int_handler()
955 irq_ptr->perf_stat.qdio_int++; in qdio_int_handler()
958 DBF_ERROR("%4x IO error", irq_ptr->schid.sch_no); in qdio_int_handler()
959 qdio_set_state(irq_ptr, QDIO_IRQ_STATE_ERR); in qdio_int_handler()
963 qdio_irq_check_sense(irq_ptr, irb); in qdio_int_handler()
967 switch (irq_ptr->state) { in qdio_int_handler()
969 qdio_establish_handle_irq(irq_ptr, cstat, dstat); in qdio_int_handler()
972 qdio_set_state(irq_ptr, QDIO_IRQ_STATE_INACTIVE); in qdio_int_handler()
977 qdio_int_handler_pci(irq_ptr); in qdio_int_handler()
981 qdio_handle_activate_check(irq_ptr, intparm, cstat, in qdio_int_handler()
1014 static void qdio_shutdown_queues(struct qdio_irq *irq_ptr) in qdio_shutdown_queues() argument
1019 for_each_input_queue(irq_ptr, q, i) in qdio_shutdown_queues()
1022 for_each_output_queue(irq_ptr, q, i) { in qdio_shutdown_queues()
1035 struct qdio_irq *irq_ptr = cdev->private->qdio_data; in qdio_shutdown() local
1039 if (!irq_ptr) in qdio_shutdown()
1046 mutex_lock(&irq_ptr->setup_mutex); in qdio_shutdown()
1051 if (irq_ptr->state == QDIO_IRQ_STATE_INACTIVE) { in qdio_shutdown()
1052 mutex_unlock(&irq_ptr->setup_mutex); in qdio_shutdown()
1060 qdio_set_state(irq_ptr, QDIO_IRQ_STATE_STOPPED); in qdio_shutdown()
1062 tiqdio_remove_device(irq_ptr); in qdio_shutdown()
1063 qdio_shutdown_queues(irq_ptr); in qdio_shutdown()
1064 qdio_shutdown_debug_entries(irq_ptr); in qdio_shutdown()
1068 qdio_set_state(irq_ptr, QDIO_IRQ_STATE_CLEANUP); in qdio_shutdown()
1076 DBF_ERROR("%4x SHUTD ERR", irq_ptr->schid.sch_no); in qdio_shutdown()
1082 irq_ptr->state == QDIO_IRQ_STATE_INACTIVE || in qdio_shutdown()
1083 irq_ptr->state == QDIO_IRQ_STATE_ERR, in qdio_shutdown()
1087 qdio_shutdown_thinint(irq_ptr); in qdio_shutdown()
1088 qdio_shutdown_irq(irq_ptr); in qdio_shutdown()
1090 qdio_set_state(irq_ptr, QDIO_IRQ_STATE_INACTIVE); in qdio_shutdown()
1091 mutex_unlock(&irq_ptr->setup_mutex); in qdio_shutdown()
1104 struct qdio_irq *irq_ptr = cdev->private->qdio_data; in qdio_free() local
1107 if (!irq_ptr) in qdio_free()
1112 DBF_DEV_EVENT(DBF_ERR, irq_ptr, "dbf abandoned"); in qdio_free()
1113 mutex_lock(&irq_ptr->setup_mutex); in qdio_free()
1115 irq_ptr->debug_area = NULL; in qdio_free()
1117 mutex_unlock(&irq_ptr->setup_mutex); in qdio_free()
1119 qdio_free_async_data(irq_ptr); in qdio_free()
1120 qdio_free_queues(irq_ptr); in qdio_free()
1121 free_page((unsigned long) irq_ptr->qdr); in qdio_free()
1122 free_page(irq_ptr->chsc_page); in qdio_free()
1123 free_page((unsigned long) irq_ptr); in qdio_free()
1138 struct qdio_irq *irq_ptr; in qdio_allocate() local
1149 irq_ptr = (void *) get_zeroed_page(GFP_KERNEL | GFP_DMA); in qdio_allocate()
1150 if (!irq_ptr) in qdio_allocate()
1153 irq_ptr->cdev = cdev; in qdio_allocate()
1154 mutex_init(&irq_ptr->setup_mutex); in qdio_allocate()
1155 if (qdio_allocate_dbf(irq_ptr)) in qdio_allocate()
1158 DBF_DEV_EVENT(DBF_ERR, irq_ptr, "alloc niq:%1u noq:%1u", no_input_qs, in qdio_allocate()
1167 irq_ptr->chsc_page = get_zeroed_page(GFP_KERNEL); in qdio_allocate()
1168 if (!irq_ptr->chsc_page) in qdio_allocate()
1172 irq_ptr->qdr = (struct qdr *) get_zeroed_page(GFP_KERNEL | GFP_DMA); in qdio_allocate()
1173 if (!irq_ptr->qdr) in qdio_allocate()
1176 rc = qdio_allocate_qs(irq_ptr, no_input_qs, no_output_qs); in qdio_allocate()
1180 INIT_LIST_HEAD(&irq_ptr->entry); in qdio_allocate()
1181 cdev->private->qdio_data = irq_ptr; in qdio_allocate()
1182 qdio_set_state(irq_ptr, QDIO_IRQ_STATE_INACTIVE); in qdio_allocate()
1186 free_page((unsigned long) irq_ptr->qdr); in qdio_allocate()
1188 free_page(irq_ptr->chsc_page); in qdio_allocate()
1191 free_page((unsigned long) irq_ptr); in qdio_allocate()
1196 static void qdio_detect_hsicq(struct qdio_irq *irq_ptr) in qdio_detect_hsicq() argument
1198 struct qdio_q *q = irq_ptr->input_qs[0]; in qdio_detect_hsicq()
1201 if (irq_ptr->nr_input_qs > 1 && queue_type(q) == QDIO_IQDIO_QFMT) in qdio_detect_hsicq()
1204 for_each_output_queue(irq_ptr, q, i) { in qdio_detect_hsicq()
1244 struct qdio_irq *irq_ptr = cdev->private->qdio_data; in qdio_establish() local
1251 if (!irq_ptr) in qdio_establish()
1254 if (init_data->no_input_qs > irq_ptr->max_input_qs || in qdio_establish()
1255 init_data->no_output_qs > irq_ptr->max_output_qs) in qdio_establish()
1266 mutex_lock(&irq_ptr->setup_mutex); in qdio_establish()
1267 qdio_trace_init_data(irq_ptr, init_data); in qdio_establish()
1268 qdio_setup_irq(irq_ptr, init_data); in qdio_establish()
1270 rc = qdio_establish_thinint(irq_ptr); in qdio_establish()
1272 qdio_shutdown_irq(irq_ptr); in qdio_establish()
1273 mutex_unlock(&irq_ptr->setup_mutex); in qdio_establish()
1278 irq_ptr->ccw.cmd_code = irq_ptr->equeue.cmd; in qdio_establish()
1279 irq_ptr->ccw.flags = CCW_FLAG_SLI; in qdio_establish()
1280 irq_ptr->ccw.count = irq_ptr->equeue.count; in qdio_establish()
1281 irq_ptr->ccw.cda = (u32)((addr_t)irq_ptr->qdr); in qdio_establish()
1286 rc = ccw_device_start(cdev, &irq_ptr->ccw, QDIO_DOING_ESTABLISH, 0, 0); in qdio_establish()
1289 DBF_ERROR("%4x est IO ERR", irq_ptr->schid.sch_no); in qdio_establish()
1291 qdio_shutdown_thinint(irq_ptr); in qdio_establish()
1292 qdio_shutdown_irq(irq_ptr); in qdio_establish()
1293 mutex_unlock(&irq_ptr->setup_mutex); in qdio_establish()
1298 irq_ptr->state == QDIO_IRQ_STATE_ESTABLISHED || in qdio_establish()
1299 irq_ptr->state == QDIO_IRQ_STATE_ERR, HZ); in qdio_establish()
1301 if (irq_ptr->state != QDIO_IRQ_STATE_ESTABLISHED) { in qdio_establish()
1302 mutex_unlock(&irq_ptr->setup_mutex); in qdio_establish()
1307 qdio_setup_ssqd_info(irq_ptr); in qdio_establish()
1309 qdio_detect_hsicq(irq_ptr); in qdio_establish()
1312 qdio_init_buf_states(irq_ptr); in qdio_establish()
1314 mutex_unlock(&irq_ptr->setup_mutex); in qdio_establish()
1315 qdio_print_subchannel_info(irq_ptr); in qdio_establish()
1316 qdio_setup_debug_entries(irq_ptr); in qdio_establish()
1327 struct qdio_irq *irq_ptr = cdev->private->qdio_data; in qdio_activate() local
1334 if (!irq_ptr) in qdio_activate()
1337 mutex_lock(&irq_ptr->setup_mutex); in qdio_activate()
1338 if (irq_ptr->state == QDIO_IRQ_STATE_INACTIVE) { in qdio_activate()
1343 irq_ptr->ccw.cmd_code = irq_ptr->aqueue.cmd; in qdio_activate()
1344 irq_ptr->ccw.flags = CCW_FLAG_SLI; in qdio_activate()
1345 irq_ptr->ccw.count = irq_ptr->aqueue.count; in qdio_activate()
1346 irq_ptr->ccw.cda = 0; in qdio_activate()
1351 rc = ccw_device_start(cdev, &irq_ptr->ccw, QDIO_DOING_ACTIVATE, in qdio_activate()
1355 DBF_ERROR("%4x act IO ERR", irq_ptr->schid.sch_no); in qdio_activate()
1360 if (is_thinint_irq(irq_ptr)) in qdio_activate()
1361 tiqdio_add_device(irq_ptr); in qdio_activate()
1366 switch (irq_ptr->state) { in qdio_activate()
1372 qdio_set_state(irq_ptr, QDIO_IRQ_STATE_ACTIVE); in qdio_activate()
1376 mutex_unlock(&irq_ptr->setup_mutex); in qdio_activate()
1422 const unsigned int scan_threshold = q->irq_ptr->scan_threshold; in handle_outbound()
1468 likely(q->irq_ptr->state == QDIO_IRQ_STATE_ACTIVE)) in handle_outbound()
1484 struct qdio_irq *irq_ptr = cdev->private->qdio_data; in do_QDIO() local
1489 if (!irq_ptr) in do_QDIO()
1492 DBF_DEV_EVENT(DBF_INFO, irq_ptr, in do_QDIO()
1495 if (irq_ptr->state != QDIO_IRQ_STATE_ACTIVE) in do_QDIO()
1500 return handle_inbound(irq_ptr->input_qs[q_nr], in do_QDIO()
1503 return handle_outbound(irq_ptr->output_qs[q_nr], in do_QDIO()
1520 struct qdio_irq *irq_ptr = cdev->private->qdio_data; in qdio_start_irq() local
1523 if (!irq_ptr) in qdio_start_irq()
1526 for_each_input_queue(irq_ptr, q, i) in qdio_start_irq()
1529 clear_bit(QDIO_IRQ_DISABLED, &irq_ptr->poll_state); in qdio_start_irq()
1535 if (test_nonshared_ind(irq_ptr)) in qdio_start_irq()
1538 for_each_input_queue(irq_ptr, q, i) { in qdio_start_irq()
1546 if (test_and_set_bit(QDIO_IRQ_DISABLED, &irq_ptr->poll_state)) in qdio_start_irq()
1578 struct qdio_irq *irq_ptr = cdev->private->qdio_data; in qdio_inspect_queue() local
1581 if (!irq_ptr) in qdio_inspect_queue()
1583 q = is_input ? irq_ptr->input_qs[nr] : irq_ptr->output_qs[nr]; in qdio_inspect_queue()
1608 struct qdio_irq *irq_ptr = cdev->private->qdio_data; in qdio_get_next_buffers() local
1610 if (!irq_ptr) in qdio_get_next_buffers()
1612 q = irq_ptr->input_qs[nr]; in qdio_get_next_buffers()
1621 qdio_check_outbound_pci_queues(irq_ptr); in qdio_get_next_buffers()
1624 if (unlikely(q->irq_ptr->state != QDIO_IRQ_STATE_ACTIVE)) in qdio_get_next_buffers()
1641 struct qdio_irq *irq_ptr = cdev->private->qdio_data; in qdio_stop_irq() local
1643 if (!irq_ptr) in qdio_stop_irq()
1646 if (test_and_set_bit(QDIO_IRQ_DISABLED, &irq_ptr->poll_state)) in qdio_stop_irq()