Searched refs: work_tx (Results 1 – 6 of 6, sorted by relevance)
 347  INIT_DELAYED_WORK(&udev->work_tx, do_tx);                           in init_usb()
 558  schedule_work(&udev->work_tx.work);                                 in gdm_usb_send_complete()
 637  container_of(work, struct lte_udev, work_tx.work);                  in do_tx()
 749  schedule_work(&udev->work_tx.work);                                 in gdm_usb_sdu_send()
 786  schedule_work(&udev->work_tx.work);                                 in gdm_usb_hci_send()
 929  cancel_work_sync(&udev->work_tx.work);                              in gdm_usb_suspend()
 969  schedule_work(&udev->work_tx.work);                                 in gdm_usb_resume()
  88  struct delayed_work work_tx;                                        member
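The hits above come from the gdm724x USB LTE driver (the struct lte_udev and gdm_usb_* identifiers make that clear): work_tx is declared as a struct delayed_work, but every call site schedules or cancels the embedded .work member directly, so the delay capability goes unused. A minimal sketch of that pattern, using hypothetical my_udev/my_* names; only the workqueue API calls are taken from the results above:

#include <linux/kernel.h>
#include <linux/workqueue.h>

/* Hypothetical container struct mirroring struct lte_udev. */
struct my_udev {
	struct delayed_work work_tx;
};

static void do_tx(struct work_struct *work)
{
	/* The work_struct lives inside the delayed_work, hence .work here. */
	struct my_udev *udev =
		container_of(work, struct my_udev, work_tx.work);

	/* ... drain udev's TX queue ... */
	(void)udev;
}

static void my_init(struct my_udev *udev)
{
	INIT_DELAYED_WORK(&udev->work_tx, do_tx);
}

static void my_kick_tx(struct my_udev *udev)
{
	/* Scheduling the inner .work runs the handler without any delay,
	 * exactly as every gdm_usb_* call site above does. */
	schedule_work(&udev->work_tx.work);
}

static void my_stop_tx(struct my_udev *udev)
{
	cancel_work_sync(&udev->work_tx.work);
}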
 523  unsigned long work_tx;                                              member
1300  for_each_set_bit(queue_id, &fep->work_tx, FEC_ENET_MAX_TX_QS) {    in fec_enet_tx()
1301  clear_bit(queue_id, &fep->work_tx);                                 in fec_enet_tx()
1584  fep->work_tx |= (1 << 2);                                           in fec_enet_collect_events()
1586  fep->work_tx |= (1 << 0);                                           in fec_enet_collect_events()
1588  fep->work_tx |= (1 << 1);                                           in fec_enet_collect_events()
1605  if ((fep->work_tx || fep->work_rx) && fep->link) {                  in fec_enet_interrupt()
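These two results are from the Freescale FEC ethernet driver, where work_tx is not a work item at all but an unsigned long bitmask: one bit per TX queue, set in the interrupt path and consumed with for_each_set_bit()/clear_bit() in the completion path. A rough sketch of that event-bitmask pattern, with hypothetical my_* names and a made-up cause-bit mapping standing in for the real one (only FEC_ENET_MAX_TX_QS-style bounds and the bit operations are taken from the results above):

#include <linux/bitops.h>
#include <linux/types.h>

#define MY_MAX_TX_QS 3	/* stand-in for FEC_ENET_MAX_TX_QS */

/* Hypothetical private struct mirroring the fec fields above. */
struct my_fec_priv {
	unsigned long work_tx;	/* bit n set: TX queue n has work pending */
	unsigned long work_rx;
	bool link;
};

/* Interrupt path: translate cause bits into per-queue work bits.
 * The real driver maps three distinct cause bits to queues 0..2. */
static void my_collect_events(struct my_fec_priv *fep, u32 int_events)
{
	if (int_events & BIT(0))
		fep->work_tx |= (1 << 0);
	if (int_events & BIT(1))
		fep->work_tx |= (1 << 1);
	if (int_events & BIT(2))
		fep->work_tx |= (1 << 2);
}

/* Completion path: service and clear each pending queue. */
static void my_tx(struct my_fec_priv *fep)
{
	unsigned int queue_id;

	for_each_set_bit(queue_id, &fep->work_tx, MY_MAX_TX_QS) {
		clear_bit(queue_id, &fep->work_tx);
		/* ... reclaim completed descriptors on queue_id ... */
	}
}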
 150  struct work_struct work_tx;                                         member
 592  schedule_work(&s->work_tx);                                         in sci_start_tx()
1195  schedule_work(&s->work_tx);                                         in sci_dma_tx_complete()
1338  cancel_work_sync(&s->work_tx);                                      in sci_dma_tx_release()
1392  struct sci_port *s = container_of(work, struct sci_port, work_tx);  in sci_dma_tx_work_fn()
1601  INIT_WORK(&s->work_tx, sci_dma_tx_work_fn);                         in sci_request_dma()
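This result is from the SuperH SCI serial driver, where work_tx is a plain struct work_struct used to defer DMA TX descriptor setup out of the start-TX path, and the work function recovers its sci_port via container_of(). A minimal sketch with hypothetical my_port/my_* names, assuming only the workqueue calls visible above:

#include <linux/kernel.h>
#include <linux/workqueue.h>

/* Hypothetical port struct mirroring struct sci_port. */
struct my_port {
	struct work_struct work_tx;
};

static void my_dma_tx_work_fn(struct work_struct *work)
{
	struct my_port *s = container_of(work, struct my_port, work_tx);

	/* ... map the transmit buffer and submit a DMA descriptor ... */
	(void)s;
}

static void my_request_dma(struct my_port *s)
{
	INIT_WORK(&s->work_tx, my_dma_tx_work_fn);
}

static void my_start_tx(struct my_port *s)
{
	/* Defer descriptor setup; start_tx may run in atomic context. */
	schedule_work(&s->work_tx);
}

static void my_dma_tx_release(struct my_port *s)
{
	/* Make sure no work is still running before tearing down the channel. */
	cancel_work_sync(&s->work_tx);
}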
 383  u8 work_tx;                                                         member
1125  mp->work_tx &= ~(1 << txq->index);                                  in txq_reclaim()
2168  mp->work_tx |= int_cause_ext & INT_EXT_TX;                          in mv643xx_eth_collect_events()
2264  queue_mask = mp->work_tx | mp->work_tx_end | mp->work_rx;           in mv643xx_eth_poll()
2283  } else if (mp->work_tx & queue_mask) {                              in mv643xx_eth_poll()
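The last result, from the Marvell mv643xx ethernet driver, uses the same event-bitmask idea as the FEC driver but packed into a u8: the interrupt collector ORs TX cause bits in, txq_reclaim() clears a queue's bit once it is drained, and the poll loop combines the masks to decide what still needs service. A hedged sketch with hypothetical my_* names; MY_INT_EXT_TX stands in for the driver's INT_EXT_TX cause mask:

#include <linux/types.h>

#define MY_INT_EXT_TX 0xff	/* stand-in for INT_EXT_TX */

/* Hypothetical private struct mirroring the mv643xx_eth fields above. */
struct my_eth_priv {
	u8 work_tx;	/* bit n set: TX queue n has completions to reclaim */
	u8 work_tx_end;
	u8 work_rx;
};

/* Interrupt path: latch TX cause bits into the work mask. */
static void my_collect_events(struct my_eth_priv *mp, u32 int_cause_ext)
{
	mp->work_tx |= int_cause_ext & MY_INT_EXT_TX;
}

/* Reclaim path: drop a queue's bit once it is fully drained. */
static void my_txq_done(struct my_eth_priv *mp, int queue_index)
{
	mp->work_tx &= ~(1 << queue_index);
}

/* Poll loop: one combined mask says whether anything is pending and
 * lets TX reclaim be prioritized over RX, as in the real poll loop. */
static int my_poll_once(struct my_eth_priv *mp)
{
	u8 queue_mask = mp->work_tx | mp->work_tx_end | mp->work_rx;

	if (!queue_mask)
		return 0;	/* nothing left to service */
	if (mp->work_tx & queue_mask) {
		/* ... reclaim one pending TX queue ... */
	}
	return 1;
}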