Searched refs:work_tx (Results 1 – 6 of 6) sorted by relevance
347 INIT_DELAYED_WORK(&udev->work_tx, do_tx); in init_usb()
558 schedule_work(&udev->work_tx.work); in gdm_usb_send_complete()
637 container_of(work, struct lte_udev, work_tx.work); in do_tx()
749 schedule_work(&udev->work_tx.work); in gdm_usb_sdu_send()
786 schedule_work(&udev->work_tx.work); in gdm_usb_hci_send()
929 cancel_work_sync(&udev->work_tx.work); in gdm_usb_suspend()
969 schedule_work(&udev->work_tx.work); in gdm_usb_resume()
88 struct delayed_work work_tx; member
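The two results above belong together: a `struct delayed_work work_tx` member plus an init/schedule/cancel lifecycle around it. Below is a minimal sketch of that pattern, assuming a hypothetical `struct my_dev` and hypothetical function names; only the workqueue calls themselves are taken from the hits.

/* Sketch of the delayed-work TX pattern the hits above suggest;
 * struct my_dev and the my_* names are hypothetical stand-ins. */
#include <linux/workqueue.h>

struct my_dev {
	struct delayed_work work_tx;	/* mirrors the 'struct delayed_work work_tx' member */
	/* ... driver state ... */
};

static void my_do_tx(struct work_struct *work)
{
	/* Recover the owning device from the embedded work item,
	 * matching the container_of(..., work_tx.work) hit. */
	struct my_dev *dev = container_of(work, struct my_dev, work_tx.work);

	/* ... drain the TX queue for dev ... */
	(void)dev;
}

static void my_init(struct my_dev *dev)
{
	/* Bind the handler once at init time, as in init_usb(). */
	INIT_DELAYED_WORK(&dev->work_tx, my_do_tx);
}

static void my_kick_tx(struct my_dev *dev)
{
	/* Scheduling the inner .work runs the handler with no delay,
	 * matching the schedule_work(&udev->work_tx.work) call sites. */
	schedule_work(&dev->work_tx.work);
}

static void my_suspend(struct my_dev *dev)
{
	/* Wait for any in-flight TX work before suspending, as in gdm_usb_suspend(). */
	cancel_work_sync(&dev->work_tx.work);
}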
523 unsigned long work_tx; member
1297 for_each_set_bit(queue_id, &fep->work_tx, FEC_ENET_MAX_TX_QS) { in fec_enet_tx()
1298 clear_bit(queue_id, &fep->work_tx); in fec_enet_tx()
1581 fep->work_tx |= (1 << 2); in fec_enet_collect_events()
1583 fep->work_tx |= (1 << 0); in fec_enet_collect_events()
1585 fep->work_tx |= (1 << 1); in fec_enet_collect_events()
1602 if ((fep->work_tx || fep->work_rx) && fep->link) { in fec_enet_interrupt()
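Here `work_tx` is not a work item at all but an `unsigned long` bitmask (declared in the result above the hits) with one bit per TX queue: bits are set in the interrupt-collection path and consumed with for_each_set_bit()/clear_bit(). A sketch under that reading; `struct my_priv`, `MY_MAX_TX_QS`, and the event-bit constants are hypothetical.

/* Sketch of per-queue TX bookkeeping via an unsigned long bitmask;
 * the names and event-bit layout here are hypothetical. */
#include <linux/bitops.h>
#include <linux/types.h>

#define MY_MAX_TX_QS	3

struct my_priv {
	unsigned long work_tx;	/* bit n set: TX queue n has pending completions */
	unsigned long work_rx;
	bool link;
};

/* IRQ side: record which queues raised a TX event, as in fec_enet_collect_events(). */
static void my_collect_events(struct my_priv *priv, u32 int_events)
{
	if (int_events & BIT(0))	/* hypothetical per-queue cause bits */
		priv->work_tx |= 1 << 0;
	if (int_events & BIT(1))
		priv->work_tx |= 1 << 1;
	if (int_events & BIT(2))
		priv->work_tx |= 1 << 2;
}

/* NAPI side: walk the pending queues and clear each bit, as in fec_enet_tx(). */
static void my_tx(struct my_priv *priv)
{
	unsigned int queue_id;

	for_each_set_bit(queue_id, &priv->work_tx, MY_MAX_TX_QS) {
		clear_bit(queue_id, &priv->work_tx);
		/* ... reclaim completed descriptors on queue_id ... */
	}
}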
150 struct work_struct work_tx; member
592 schedule_work(&s->work_tx); in sci_start_tx()
1207 schedule_work(&s->work_tx); in sci_dma_tx_complete()
1325 cancel_work_sync(&s->work_tx); in sci_tx_dma_release()
1379 struct sci_port *s = container_of(work, struct sci_port, work_tx); in work_fn_tx()
1580 INIT_WORK(&s->work_tx, work_fn_tx); in sci_request_dma()
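This result uses a plain `struct work_struct`: INIT_WORK() once at DMA setup, schedule_work() from the start-TX and DMA-completion paths, container_of() in the handler, and cancel_work_sync() on teardown. A minimal sketch of that lifecycle, assuming a hypothetical `struct my_port`; only the workqueue calls mirror the hits.

/* Sketch of the plain work_struct TX pattern the sh-sci hits suggest;
 * struct my_port and the my_* names are hypothetical stand-ins. */
#include <linux/workqueue.h>

struct my_port {
	struct work_struct work_tx;
	/* ... DMA and UART state ... */
};

static void my_work_fn_tx(struct work_struct *work)
{
	/* Map back to the owning port, as in work_fn_tx(). */
	struct my_port *port = container_of(work, struct my_port, work_tx);

	/* ... submit the next DMA TX descriptor for port ... */
	(void)port;
}

static void my_request_dma(struct my_port *port)
{
	INIT_WORK(&port->work_tx, my_work_fn_tx);	/* as in sci_request_dma() */
}

static void my_start_tx(struct my_port *port)
{
	schedule_work(&port->work_tx);	/* as in sci_start_tx() */
}

static void my_release_dma(struct my_port *port)
{
	/* Ensure no TX work is still running before tearing down DMA,
	 * as in sci_tx_dma_release(). */
	cancel_work_sync(&port->work_tx);
}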
395 u8 work_tx; member
1137 mp->work_tx &= ~(1 << txq->index); in txq_reclaim()
2187 mp->work_tx |= int_cause_ext & INT_EXT_TX; in mv643xx_eth_collect_events()
2283 queue_mask = mp->work_tx | mp->work_tx_end | mp->work_rx; in mv643xx_eth_poll()
2302 } else if (mp->work_tx & queue_mask) { in mv643xx_eth_poll()
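The last result again treats `work_tx` as a bitmask, this time a `u8`: it is OR-ed with latched interrupt cause bits in the event collector and cleared one queue at a time during reclaim. A sketch of that bookkeeping; the struct, the mask constant, and the queue-selection step are hypothetical (the original poll loop selects the queue differently).

/* Sketch of u8-bitmask TX bookkeeping in a poll loop;
 * MY_INT_EXT_TX, struct my_priv and the helpers are hypothetical. */
#include <linux/bitops.h>
#include <linux/types.h>

#define MY_INT_EXT_TX	0x00ff	/* hypothetical per-queue TX-done cause bits */

struct my_priv {
	u8 work_tx;		/* bit n set: TX queue n has completions to reclaim */
	u8 work_tx_end;
	u8 work_rx;
};

/* IRQ side: latch the TX cause bits, as in mv643xx_eth_collect_events(). */
static void my_collect_events(struct my_priv *priv, u32 int_cause_ext)
{
	priv->work_tx |= int_cause_ext & MY_INT_EXT_TX;
}

/* Poll side: pick one pending TX queue and clear its bit once reclaimed,
 * loosely following mv643xx_eth_poll() and txq_reclaim(). */
static void my_poll_one(struct my_priv *priv)
{
	u8 queue_mask = priv->work_tx | priv->work_tx_end | priv->work_rx;

	if (priv->work_tx & queue_mask) {
		int queue = ffs(priv->work_tx) - 1;	/* lowest pending queue; safe, work_tx != 0 here */

		/* ... reclaim completed descriptors on 'queue' ... */
		priv->work_tx &= ~(1 << queue);		/* as in txq_reclaim() */
	}
}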