Lines Matching full:c

2 /*	(c) Copyright 1998 Alan Cox <alan@lxorguk.ukuu.org.uk>
3 * (c) Copyright 2000, 2001 Red Hat Inc
103 static void z8530_rx_done(struct z8530_channel *c);
104 static void z8530_tx_done(struct z8530_channel *c);
108 * @c: Z8530 channel to read from (2 per chip)
117 static inline u8 read_zsreg(struct z8530_channel *c, u8 reg) in read_zsreg() argument
120 z8530_write_port(c->ctrlio, reg); in read_zsreg()
121 return z8530_read_port(c->ctrlio); in read_zsreg()
126 * @c: The Z8530 channel to read the data port from
132 static inline u8 read_zsdata(struct z8530_channel *c) in read_zsdata() argument
136 r = z8530_read_port(c->dataio); in read_zsdata()
142 * @c: The Z8530 channel
150 * Assumes c->lock is held.
152 static inline void write_zsreg(struct z8530_channel *c, u8 reg, u8 val) in write_zsreg() argument
155 z8530_write_port(c->ctrlio, reg); in write_zsreg()
156 z8530_write_port(c->ctrlio, val); in write_zsreg()
161 * @c: The Z8530 channel
167 static inline void write_zsctrl(struct z8530_channel *c, u8 val) in write_zsctrl() argument
169 z8530_write_port(c->ctrlio, val); in write_zsctrl()
174 * @c: The Z8530 channel
179 static inline void write_zsdata(struct z8530_channel *c, u8 val) in write_zsdata() argument
181 z8530_write_port(c->dataio, val); in write_zsdata()
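The read_zsreg()/write_zsreg() lines above show the Z8530 access discipline: control registers are reached indirectly (write the register index to the control port, then read or write the value on that same port), while the data FIFO has its own directly-addressed port. A minimal user-space sketch of that pattern; the port_in()/port_out() stubs and the zs_* names are illustrative stand-ins, not the driver's z8530_read_port()/z8530_write_port():

    #include <stdio.h>

    typedef unsigned char u8;

    /* Stand-ins for the real port accessors; here they only log the access. */
    static u8 port_in(int port)           { printf("in  0x%x\n", port); return 0; }
    static void port_out(int port, u8 v)  { printf("out 0x%x <- 0x%x\n", port, v); }

    struct zs_channel {
        int ctrlio;    /* control/status port */
        int dataio;    /* data port */
    };

    /* Indirect register read: select the register, then read its value. */
    static u8 zs_read_reg(struct zs_channel *c, u8 reg)
    {
        port_out(c->ctrlio, reg);
        return port_in(c->ctrlio);
    }

    /* Indirect register write: select the register, then write the value. */
    static void zs_write_reg(struct zs_channel *c, u8 reg, u8 val)
    {
        port_out(c->ctrlio, reg);
        port_out(c->ctrlio, val);
    }

    /* The data FIFO is a single directly-addressed port. */
    static u8 zs_read_data(struct zs_channel *c)
    {
        return port_in(c->dataio);
    }

    int main(void)
    {
        struct zs_channel ch = { .ctrlio = 0x200, .dataio = 0x201 };

        zs_write_reg(&ch, 5, 0x60);   /* e.g. program WR5 */
        (void)zs_read_reg(&ch, 0);    /* e.g. poll RR0 status */
        (void)zs_read_data(&ch);
        return 0;
    }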
244 * @c: Channel to flush
254 static void z8530_flush_fifo(struct z8530_channel *c) in z8530_flush_fifo() argument
256 read_zsreg(c, R1); in z8530_flush_fifo()
257 read_zsreg(c, R1); in z8530_flush_fifo()
258 read_zsreg(c, R1); in z8530_flush_fifo()
259 read_zsreg(c, R1); in z8530_flush_fifo()
260 if (c->dev->type == Z85230) { in z8530_flush_fifo()
261 read_zsreg(c, R1); in z8530_flush_fifo()
262 read_zsreg(c, R1); in z8530_flush_fifo()
263 read_zsreg(c, R1); in z8530_flush_fifo()
264 read_zsreg(c, R1); in z8530_flush_fifo()
270 * @c: The Z8530 channel to control
279 static void z8530_rtsdtr(struct z8530_channel *c, int set) in z8530_rtsdtr() argument
282 c->regs[5] |= (RTS | DTR); in z8530_rtsdtr()
284 c->regs[5] &= ~(RTS | DTR); in z8530_rtsdtr()
285 write_zsreg(c, R5, c->regs[5]); in z8530_rtsdtr()
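z8530_rtsdtr() illustrates the driver's shadow-register convention: the SCC's write registers cannot be read back, so a software copy is kept in c->regs[], modified, and then written out. A small sketch of that read-modify-write-through-shadow pattern (the bit values below are illustrative, not necessarily the real WR5 layout):

    #include <stdio.h>

    typedef unsigned char u8;

    enum { R5 = 5, NREGS = 16 };

    #define RTS 0x02    /* illustrative WR5 bit values */
    #define DTR 0x80

    struct zs_channel {
        u8 regs[NREGS];    /* shadow copies of the write-only registers */
    };

    /* Stub: in the driver this is the indirect write_zsreg() shown above. */
    static void zs_write_reg(struct zs_channel *c, u8 reg, u8 val)
    {
        (void)c;
        printf("WR%d <- 0x%02x\n", reg, val);
    }

    /* Read-modify-write through the shadow copy, as z8530_rtsdtr() does. */
    static void zs_rtsdtr(struct zs_channel *c, int set)
    {
        if (set)
            c->regs[R5] |= (RTS | DTR);
        else
            c->regs[R5] &= ~(RTS | DTR);
        zs_write_reg(c, R5, c->regs[R5]);
    }

    int main(void)
    {
        struct zs_channel ch = { { 0 } };

        zs_rtsdtr(&ch, 1);    /* raise the modem control lines */
        zs_rtsdtr(&ch, 0);    /* drop them again */
        return 0;
    }

The same shadow-then-write pattern recurs throughout the listing for R1, R3, R10 and R14.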
290 * @c: Z8530 channel to process
312 static void z8530_rx(struct z8530_channel *c) in z8530_rx() argument
318 if (!(read_zsreg(c, R0) & 1)) in z8530_rx()
320 ch = read_zsdata(c); in z8530_rx()
321 stat = read_zsreg(c, R1); in z8530_rx()
325 if (c->count < c->max) { in z8530_rx()
326 *c->dptr++ = ch; in z8530_rx()
327 c->count++; in z8530_rx()
335 if (c->skb) in z8530_rx()
336 c->dptr = c->skb->data; in z8530_rx()
337 c->count = 0; in z8530_rx()
339 pr_warn("%s: overrun\n", c->dev->name); in z8530_rx()
340 c->rx_overrun++; in z8530_rx()
343 c->rx_crc_err++; in z8530_rx()
351 z8530_rx_done(c); in z8530_rx()
352 write_zsctrl(c, RES_Rx_CRC); in z8530_rx()
358 write_zsctrl(c, ERR_RES); in z8530_rx()
359 write_zsctrl(c, RES_H_IUS); in z8530_rx()
364 * @c: Z8530 channel to process
372 static void z8530_tx(struct z8530_channel *c) in z8530_tx() argument
374 while (c->txcount) { in z8530_tx()
376 if (!(read_zsreg(c, R0) & 4)) in z8530_tx()
378 c->txcount--; in z8530_tx()
381 write_zsreg(c, R8, *c->tx_ptr++); in z8530_tx()
382 write_zsctrl(c, RES_H_IUS); in z8530_tx()
384 if (c->txcount == 0) { in z8530_tx()
385 write_zsctrl(c, RES_EOM_L); in z8530_tx()
386 write_zsreg(c, R10, c->regs[10] & ~ABUNDER); in z8530_tx()
393 write_zsctrl(c, RES_Tx_P); in z8530_tx()
395 z8530_tx_done(c); in z8530_tx()
396 write_zsctrl(c, RES_H_IUS); in z8530_tx()
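The z8530_tx() fragment above is the PIO transmit path: bytes are pushed into the data register only while RR0 reports the transmit buffer empty, and the end-of-message handling runs once the byte count reaches zero. A compressed model of that drain loop, with stubbed status/data accessors standing in for the register helpers (names are illustrative):

    #include <stdio.h>

    typedef unsigned char u8;

    #define Tx_BUF_EMP 0x04    /* RR0 bit: transmit buffer empty */

    /* Stubs standing in for the register helpers shown earlier in the file. */
    static u8 rr0_status(void)     { return Tx_BUF_EMP; }
    static void tx_data(u8 b)      { printf("tx 0x%02x\n", b); }
    static void end_of_frame(void) { printf("end of frame\n"); }

    /* Drain a PIO transmit buffer the way z8530_tx() does: push a byte only
     * while the SCC reports its transmit buffer empty, and finish the frame
     * (reset EOM latch, re-arm underrun abort) when the count hits zero. */
    static void pio_tx(const u8 *buf, int count)
    {
        while (count) {
            if (!(rr0_status() & Tx_BUF_EMP))
                break;    /* FIFO full: wait for the next Tx interrupt */
            tx_data(*buf++);
            if (--count == 0)
                end_of_frame();
        }
    }

    int main(void)
    {
        u8 frame[] = { 0x7e, 0x01, 0x02, 0x7e };

        pio_tx(frame, (int)sizeof(frame));
        return 0;
    }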
565 * @c: Z8530 channel to shut up
572 static void z8530_rx_clear(struct z8530_channel *c) in z8530_rx_clear() argument
578 read_zsdata(c); in z8530_rx_clear()
579 stat = read_zsreg(c, R1); in z8530_rx_clear()
582 write_zsctrl(c, RES_Rx_CRC); in z8530_rx_clear()
585 write_zsctrl(c, ERR_RES); in z8530_rx_clear()
586 write_zsctrl(c, RES_H_IUS); in z8530_rx_clear()
591 * @c: Z8530 channel to shut up
598 static void z8530_tx_clear(struct z8530_channel *c) in z8530_tx_clear() argument
600 write_zsctrl(c, RES_Tx_P); in z8530_tx_clear()
601 write_zsctrl(c, RES_H_IUS); in z8530_tx_clear()
643 * channel). c->lock for both channels points to dev->lock
718 * @c: The Z8530 channel to open in synchronous PIO mode
723 int z8530_sync_open(struct net_device *dev, struct z8530_channel *c) in z8530_sync_open() argument
727 spin_lock_irqsave(c->lock, flags); in z8530_sync_open()
729 c->sync = 1; in z8530_sync_open()
730 c->mtu = dev->mtu + 64; in z8530_sync_open()
731 c->count = 0; in z8530_sync_open()
732 c->skb = NULL; in z8530_sync_open()
733 c->skb2 = NULL; in z8530_sync_open()
734 c->irqs = &z8530_sync; in z8530_sync_open()
737 z8530_rx_done(c); /* Load the frame ring */ in z8530_sync_open()
738 z8530_rx_done(c); /* Load the backup frame */ in z8530_sync_open()
739 z8530_rtsdtr(c, 1); in z8530_sync_open()
740 c->dma_tx = 0; in z8530_sync_open()
741 c->regs[R1] |= TxINT_ENAB; in z8530_sync_open()
742 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_open()
743 write_zsreg(c, R3, c->regs[R3] | RxENABLE); in z8530_sync_open()
745 spin_unlock_irqrestore(c->lock, flags); in z8530_sync_open()
753 * @c: Z8530 channel to disassociate and move to idle
758 int z8530_sync_close(struct net_device *dev, struct z8530_channel *c) in z8530_sync_close() argument
763 spin_lock_irqsave(c->lock, flags); in z8530_sync_close()
764 c->irqs = &z8530_nop; in z8530_sync_close()
765 c->max = 0; in z8530_sync_close()
766 c->sync = 0; in z8530_sync_close()
768 chk = read_zsreg(c, R0); in z8530_sync_close()
769 write_zsreg(c, R3, c->regs[R3]); in z8530_sync_close()
770 z8530_rtsdtr(c, 0); in z8530_sync_close()
772 spin_unlock_irqrestore(c->lock, flags); in z8530_sync_close()
780 * @c: The Z8530 channel to configure in sync DMA mode.
786 int z8530_sync_dma_open(struct net_device *dev, struct z8530_channel *c) in z8530_sync_dma_open() argument
790 c->sync = 1; in z8530_sync_dma_open()
791 c->mtu = dev->mtu + 64; in z8530_sync_dma_open()
792 c->count = 0; in z8530_sync_dma_open()
793 c->skb = NULL; in z8530_sync_dma_open()
794 c->skb2 = NULL; in z8530_sync_dma_open()
798 c->rxdma_on = 0; in z8530_sync_dma_open()
799 c->txdma_on = 0; in z8530_sync_dma_open()
806 if (c->mtu > PAGE_SIZE / 2) in z8530_sync_dma_open()
809 c->rx_buf[0] = (void *)get_zeroed_page(GFP_KERNEL | GFP_DMA); in z8530_sync_dma_open()
810 if (!c->rx_buf[0]) in z8530_sync_dma_open()
812 c->rx_buf[1] = c->rx_buf[0] + PAGE_SIZE / 2; in z8530_sync_dma_open()
814 c->tx_dma_buf[0] = (void *)get_zeroed_page(GFP_KERNEL | GFP_DMA); in z8530_sync_dma_open()
815 if (!c->tx_dma_buf[0]) { in z8530_sync_dma_open()
816 free_page((unsigned long)c->rx_buf[0]); in z8530_sync_dma_open()
817 c->rx_buf[0] = NULL; in z8530_sync_dma_open()
820 c->tx_dma_buf[1] = c->tx_dma_buf[0] + PAGE_SIZE / 2; in z8530_sync_dma_open()
822 c->tx_dma_used = 0; in z8530_sync_dma_open()
823 c->dma_tx = 1; in z8530_sync_dma_open()
824 c->dma_num = 0; in z8530_sync_dma_open()
825 c->dma_ready = 1; in z8530_sync_dma_open()
830 spin_lock_irqsave(c->lock, cflags); in z8530_sync_dma_open()
835 c->regs[R14] |= DTRREQ; in z8530_sync_dma_open()
836 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_dma_open()
838 c->regs[R1] &= ~TxINT_ENAB; in z8530_sync_dma_open()
839 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_open()
844 c->regs[R1] |= WT_FN_RDYFN; in z8530_sync_dma_open()
845 c->regs[R1] |= WT_RDY_RT; in z8530_sync_dma_open()
846 c->regs[R1] |= INT_ERR_Rx; in z8530_sync_dma_open()
847 c->regs[R1] &= ~TxINT_ENAB; in z8530_sync_dma_open()
848 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_open()
849 c->regs[R1] |= WT_RDY_ENAB; in z8530_sync_dma_open()
850 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_open()
860 disable_dma(c->rxdma); in z8530_sync_dma_open()
861 clear_dma_ff(c->rxdma); in z8530_sync_dma_open()
862 set_dma_mode(c->rxdma, DMA_MODE_READ | 0x10); in z8530_sync_dma_open()
863 set_dma_addr(c->rxdma, virt_to_bus(c->rx_buf[0])); in z8530_sync_dma_open()
864 set_dma_count(c->rxdma, c->mtu); in z8530_sync_dma_open()
865 enable_dma(c->rxdma); in z8530_sync_dma_open()
867 disable_dma(c->txdma); in z8530_sync_dma_open()
868 clear_dma_ff(c->txdma); in z8530_sync_dma_open()
869 set_dma_mode(c->txdma, DMA_MODE_WRITE); in z8530_sync_dma_open()
870 disable_dma(c->txdma); in z8530_sync_dma_open()
877 c->rxdma_on = 1; in z8530_sync_dma_open()
878 c->txdma_on = 1; in z8530_sync_dma_open()
879 c->tx_dma_used = 1; in z8530_sync_dma_open()
881 c->irqs = &z8530_dma_sync; in z8530_sync_dma_open()
882 z8530_rtsdtr(c, 1); in z8530_sync_dma_open()
883 write_zsreg(c, R3, c->regs[R3] | RxENABLE); in z8530_sync_dma_open()
885 spin_unlock_irqrestore(c->lock, cflags); in z8530_sync_dma_open()
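z8530_sync_dma_open() programs the receive side of a legacy ISA DMA controller with the usual sequence: disable the channel, clear the address flip-flop, then set mode, buffer address and count before enabling it again (the 0x10 ORed into the mode is the controller's auto-initialise bit). A user-space model of that ordering; the *_chan() stubs stand in for the <asm/dma.h> helpers the driver actually calls:

    #include <stdio.h>

    #define DMA_MODE_READ  0x44    /* device-to-memory, single transfer */
    #define DMA_AUTOINIT   0x10    /* auto-initialise after each pass */

    /* Stand-ins for the <asm/dma.h> helpers used in the driver. */
    static void disable_dma_chan(int ch)                    { printf("disable dma %d\n", ch); }
    static void clear_dma_flipflop(int ch)                  { printf("clear flip-flop %d\n", ch); }
    static void set_dma_chan_mode(int ch, int mode)         { printf("mode %d = 0x%x\n", ch, mode); }
    static void set_dma_chan_addr(int ch, unsigned long a)  { printf("addr %d = 0x%lx\n", ch, a); }
    static void set_dma_chan_count(int ch, int n)           { printf("count %d = %d\n", ch, n); }
    static void enable_dma_chan(int ch)                     { printf("enable dma %d\n", ch); }

    /* The ordering used in z8530_sync_dma_open(): quiesce the channel, clear
     * the address flip-flop, then program mode, buffer address and transfer
     * count before re-enabling the channel. */
    static void program_rx_dma(int ch, unsigned long buf, int len)
    {
        disable_dma_chan(ch);
        clear_dma_flipflop(ch);
        set_dma_chan_mode(ch, DMA_MODE_READ | DMA_AUTOINIT);
        set_dma_chan_addr(ch, buf);
        set_dma_chan_count(ch, len);
        enable_dma_chan(ch);
    }

    int main(void)
    {
        program_rx_dma(3, 0x10000UL, 1564);    /* illustrative channel, address and size */
        return 0;
    }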
894 * @c: Z8530 channel to move into discard mode
899 int z8530_sync_dma_close(struct net_device *dev, struct z8530_channel *c) in z8530_sync_dma_close() argument
904 c->irqs = &z8530_nop; in z8530_sync_dma_close()
905 c->max = 0; in z8530_sync_dma_close()
906 c->sync = 0; in z8530_sync_dma_close()
912 disable_dma(c->rxdma); in z8530_sync_dma_close()
913 clear_dma_ff(c->rxdma); in z8530_sync_dma_close()
915 c->rxdma_on = 0; in z8530_sync_dma_close()
917 disable_dma(c->txdma); in z8530_sync_dma_close()
918 clear_dma_ff(c->txdma); in z8530_sync_dma_close()
921 c->txdma_on = 0; in z8530_sync_dma_close()
922 c->tx_dma_used = 0; in z8530_sync_dma_close()
924 spin_lock_irqsave(c->lock, flags); in z8530_sync_dma_close()
929 c->regs[R1] &= ~WT_RDY_ENAB; in z8530_sync_dma_close()
930 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_close()
931 c->regs[R1] &= ~(WT_RDY_RT | WT_FN_RDYFN | INT_ERR_Rx); in z8530_sync_dma_close()
932 c->regs[R1] |= INT_ALL_Rx; in z8530_sync_dma_close()
933 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_close()
934 c->regs[R14] &= ~DTRREQ; in z8530_sync_dma_close()
935 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_dma_close()
937 if (c->rx_buf[0]) { in z8530_sync_dma_close()
938 free_page((unsigned long)c->rx_buf[0]); in z8530_sync_dma_close()
939 c->rx_buf[0] = NULL; in z8530_sync_dma_close()
941 if (c->tx_dma_buf[0]) { in z8530_sync_dma_close()
942 free_page((unsigned long)c->tx_dma_buf[0]); in z8530_sync_dma_close()
943 c->tx_dma_buf[0] = NULL; in z8530_sync_dma_close()
945 chk = read_zsreg(c, R0); in z8530_sync_dma_close()
946 write_zsreg(c, R3, c->regs[R3]); in z8530_sync_dma_close()
947 z8530_rtsdtr(c, 0); in z8530_sync_dma_close()
949 spin_unlock_irqrestore(c->lock, flags); in z8530_sync_dma_close()
958 * @c: The Z8530 channel to configure in sync DMA mode.
965 int z8530_sync_txdma_open(struct net_device *dev, struct z8530_channel *c) in z8530_sync_txdma_open() argument
970 c->sync = 1; in z8530_sync_txdma_open()
971 c->mtu = dev->mtu + 64; in z8530_sync_txdma_open()
972 c->count = 0; in z8530_sync_txdma_open()
973 c->skb = NULL; in z8530_sync_txdma_open()
974 c->skb2 = NULL; in z8530_sync_txdma_open()
981 if (c->mtu > PAGE_SIZE / 2) in z8530_sync_txdma_open()
984 c->tx_dma_buf[0] = (void *)get_zeroed_page(GFP_KERNEL | GFP_DMA); in z8530_sync_txdma_open()
985 if (!c->tx_dma_buf[0]) in z8530_sync_txdma_open()
988 c->tx_dma_buf[1] = c->tx_dma_buf[0] + PAGE_SIZE / 2; in z8530_sync_txdma_open()
990 spin_lock_irqsave(c->lock, cflags); in z8530_sync_txdma_open()
995 z8530_rx_done(c); in z8530_sync_txdma_open()
996 z8530_rx_done(c); in z8530_sync_txdma_open()
1001 c->rxdma_on = 0; in z8530_sync_txdma_open()
1002 c->txdma_on = 0; in z8530_sync_txdma_open()
1004 c->tx_dma_used = 0; in z8530_sync_txdma_open()
1005 c->dma_num = 0; in z8530_sync_txdma_open()
1006 c->dma_ready = 1; in z8530_sync_txdma_open()
1007 c->dma_tx = 1; in z8530_sync_txdma_open()
1014 c->regs[R14] |= DTRREQ; in z8530_sync_txdma_open()
1015 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_txdma_open()
1017 c->regs[R1] &= ~TxINT_ENAB; in z8530_sync_txdma_open()
1018 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_txdma_open()
1025 disable_dma(c->txdma); in z8530_sync_txdma_open()
1026 clear_dma_ff(c->txdma); in z8530_sync_txdma_open()
1027 set_dma_mode(c->txdma, DMA_MODE_WRITE); in z8530_sync_txdma_open()
1028 disable_dma(c->txdma); in z8530_sync_txdma_open()
1035 c->rxdma_on = 0; in z8530_sync_txdma_open()
1036 c->txdma_on = 1; in z8530_sync_txdma_open()
1037 c->tx_dma_used = 1; in z8530_sync_txdma_open()
1039 c->irqs = &z8530_txdma_sync; in z8530_sync_txdma_open()
1040 z8530_rtsdtr(c, 1); in z8530_sync_txdma_open()
1041 write_zsreg(c, R3, c->regs[R3] | RxENABLE); in z8530_sync_txdma_open()
1042 spin_unlock_irqrestore(c->lock, cflags); in z8530_sync_txdma_open()
1051 * @c: Z8530 channel to move into discard mode
1057 int z8530_sync_txdma_close(struct net_device *dev, struct z8530_channel *c) in z8530_sync_txdma_close() argument
1062 spin_lock_irqsave(c->lock, cflags); in z8530_sync_txdma_close()
1064 c->irqs = &z8530_nop; in z8530_sync_txdma_close()
1065 c->max = 0; in z8530_sync_txdma_close()
1066 c->sync = 0; in z8530_sync_txdma_close()
1073 disable_dma(c->txdma); in z8530_sync_txdma_close()
1074 clear_dma_ff(c->txdma); in z8530_sync_txdma_close()
1075 c->txdma_on = 0; in z8530_sync_txdma_close()
1076 c->tx_dma_used = 0; in z8530_sync_txdma_close()
1083 c->regs[R1] &= ~WT_RDY_ENAB; in z8530_sync_txdma_close()
1084 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_txdma_close()
1085 c->regs[R1] &= ~(WT_RDY_RT | WT_FN_RDYFN | INT_ERR_Rx); in z8530_sync_txdma_close()
1086 c->regs[R1] |= INT_ALL_Rx; in z8530_sync_txdma_close()
1087 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_txdma_close()
1088 c->regs[R14] &= ~DTRREQ; in z8530_sync_txdma_close()
1089 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_txdma_close()
1091 if (c->tx_dma_buf[0]) { in z8530_sync_txdma_close()
1092 free_page((unsigned long)c->tx_dma_buf[0]); in z8530_sync_txdma_close()
1093 c->tx_dma_buf[0] = NULL; in z8530_sync_txdma_close()
1095 chk = read_zsreg(c, R0); in z8530_sync_txdma_close()
1096 write_zsreg(c, R3, c->regs[R3]); in z8530_sync_txdma_close()
1097 z8530_rtsdtr(c, 0); in z8530_sync_txdma_close()
1099 spin_unlock_irqrestore(c->lock, cflags); in z8530_sync_txdma_close()
1258 * @c: Z8530 channel to configure
1267 int z8530_channel_load(struct z8530_channel *c, u8 *rtable) in z8530_channel_load() argument
1271 spin_lock_irqsave(c->lock, flags); in z8530_channel_load()
1277 write_zsreg(c, R15, c->regs[15] | 1); in z8530_channel_load()
1278 write_zsreg(c, reg & 0x0F, *rtable); in z8530_channel_load()
1280 write_zsreg(c, R15, c->regs[15] & ~1); in z8530_channel_load()
1281 c->regs[reg] = *rtable++; in z8530_channel_load()
1283 c->rx_function = z8530_null_rx; in z8530_channel_load()
1284 c->skb = NULL; in z8530_channel_load()
1285 c->tx_skb = NULL; in z8530_channel_load()
1286 c->tx_next_skb = NULL; in z8530_channel_load()
1287 c->mtu = 1500; in z8530_channel_load()
1288 c->max = 0; in z8530_channel_load()
1289 c->count = 0; in z8530_channel_load()
1290 c->status = read_zsreg(c, R0); in z8530_channel_load()
1291 c->sync = 1; in z8530_channel_load()
1292 write_zsreg(c, R3, c->regs[R3] | RxENABLE); in z8530_channel_load()
1294 spin_unlock_irqrestore(c->lock, flags); in z8530_channel_load()
1301 * @c: The Z8530 channel to kick
1313 static void z8530_tx_begin(struct z8530_channel *c) in z8530_tx_begin() argument
1317 if (c->tx_skb) in z8530_tx_begin()
1320 c->tx_skb = c->tx_next_skb; in z8530_tx_begin()
1321 c->tx_next_skb = NULL; in z8530_tx_begin()
1322 c->tx_ptr = c->tx_next_ptr; in z8530_tx_begin()
1324 if (!c->tx_skb) { in z8530_tx_begin()
1326 if (c->dma_tx) { in z8530_tx_begin()
1328 disable_dma(c->txdma); in z8530_tx_begin()
1331 if (get_dma_residue(c->txdma)) { in z8530_tx_begin()
1332 c->netdevice->stats.tx_dropped++; in z8530_tx_begin()
1333 c->netdevice->stats.tx_fifo_errors++; in z8530_tx_begin()
1337 c->txcount = 0; in z8530_tx_begin()
1339 c->txcount = c->tx_skb->len; in z8530_tx_begin()
1341 if (c->dma_tx) { in z8530_tx_begin()
1349 disable_dma(c->txdma); in z8530_tx_begin()
1354 if (c->dev->type != Z85230) { in z8530_tx_begin()
1355 write_zsctrl(c, RES_Tx_CRC); in z8530_tx_begin()
1356 write_zsctrl(c, RES_EOM_L); in z8530_tx_begin()
1358 write_zsreg(c, R10, c->regs[10] & ~ABUNDER); in z8530_tx_begin()
1359 clear_dma_ff(c->txdma); in z8530_tx_begin()
1360 set_dma_addr(c->txdma, virt_to_bus(c->tx_ptr)); in z8530_tx_begin()
1361 set_dma_count(c->txdma, c->txcount); in z8530_tx_begin()
1362 enable_dma(c->txdma); in z8530_tx_begin()
1364 write_zsctrl(c, RES_EOM_L); in z8530_tx_begin()
1365 write_zsreg(c, R5, c->regs[R5] | TxENAB); in z8530_tx_begin()
1368 write_zsreg(c, R10, c->regs[10]); in z8530_tx_begin()
1369 write_zsctrl(c, RES_Tx_CRC); in z8530_tx_begin()
1371 while (c->txcount && (read_zsreg(c, R0) & Tx_BUF_EMP)) { in z8530_tx_begin()
1372 write_zsreg(c, R8, *c->tx_ptr++); in z8530_tx_begin()
1373 c->txcount--; in z8530_tx_begin()
1379 netif_wake_queue(c->netdevice); in z8530_tx_begin()
1384 * @c: The channel that completed a transmit.
1393 static void z8530_tx_done(struct z8530_channel *c) in z8530_tx_done() argument
1398 if (!c->tx_skb) in z8530_tx_done()
1401 skb = c->tx_skb; in z8530_tx_done()
1402 c->tx_skb = NULL; in z8530_tx_done()
1403 z8530_tx_begin(c); in z8530_tx_done()
1404 c->netdevice->stats.tx_packets++; in z8530_tx_done()
1405 c->netdevice->stats.tx_bytes += skb->len; in z8530_tx_done()
1411 * @c: The channel the packet arrived on
1417 void z8530_null_rx(struct z8530_channel *c, struct sk_buff *skb) in z8530_null_rx() argument
1425 * @c: The channel that completed a receive
1435 static void z8530_rx_done(struct z8530_channel *c) in z8530_rx_done() argument
1442 if (c->rxdma_on) { in z8530_rx_done()
1446 int ready = c->dma_ready; in z8530_rx_done()
1447 unsigned char *rxb = c->rx_buf[c->dma_num]; in z8530_rx_done()
1454 disable_dma(c->rxdma); in z8530_rx_done()
1455 clear_dma_ff(c->rxdma); in z8530_rx_done()
1456 c->rxdma_on = 0; in z8530_rx_done()
1457 ct = c->mtu - get_dma_residue(c->rxdma); in z8530_rx_done()
1460 c->dma_ready = 0; in z8530_rx_done()
1467 c->dma_num ^= 1; in z8530_rx_done()
1468 set_dma_mode(c->rxdma, DMA_MODE_READ | 0x10); in z8530_rx_done()
1469 set_dma_addr(c->rxdma, virt_to_bus(c->rx_buf[c->dma_num])); in z8530_rx_done()
1470 set_dma_count(c->rxdma, c->mtu); in z8530_rx_done()
1471 c->rxdma_on = 1; in z8530_rx_done()
1472 enable_dma(c->rxdma); in z8530_rx_done()
1476 write_zsreg(c, R0, RES_Rx_CRC); in z8530_rx_done()
1481 netdev_warn(c->netdevice, "DMA flip overrun!\n"); in z8530_rx_done()
1495 c->netdevice->stats.rx_dropped++; in z8530_rx_done()
1496 netdev_warn(c->netdevice, "Memory squeeze\n"); in z8530_rx_done()
1500 c->netdevice->stats.rx_packets++; in z8530_rx_done()
1501 c->netdevice->stats.rx_bytes += ct; in z8530_rx_done()
1503 c->dma_ready = 1; in z8530_rx_done()
1506 skb = c->skb; in z8530_rx_done()
1519 ct = c->count; in z8530_rx_done()
1521 c->skb = c->skb2; in z8530_rx_done()
1522 c->count = 0; in z8530_rx_done()
1523 c->max = c->mtu; in z8530_rx_done()
1524 if (c->skb) { in z8530_rx_done()
1525 c->dptr = c->skb->data; in z8530_rx_done()
1526 c->max = c->mtu; in z8530_rx_done()
1528 c->count = 0; in z8530_rx_done()
1529 c->max = 0; in z8530_rx_done()
1533 c->skb2 = dev_alloc_skb(c->mtu); in z8530_rx_done()
1534 if (c->skb2) in z8530_rx_done()
1535 skb_put(c->skb2, c->mtu); in z8530_rx_done()
1537 c->netdevice->stats.rx_packets++; in z8530_rx_done()
1538 c->netdevice->stats.rx_bytes += ct; in z8530_rx_done()
1544 c->rx_function(c, skb); in z8530_rx_done()
1546 c->netdevice->stats.rx_dropped++; in z8530_rx_done()
1547 netdev_err(c->netdevice, "Lost a frame\n"); in z8530_rx_done()
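On the DMA receive path, z8530_rx_done() ping-pongs between the two halves of rx_buf: the completed frame's length is the MTU minus the DMA residue, dma_num is flipped, the controller is re-pointed at the other half, and only then is the finished half copied into an skb. A small buffer-only model of that flip (no hardware, illustrative names and sizes):

    #include <stdio.h>

    #define MTU 1518    /* illustrative; the driver sizes this from dev->mtu */

    struct rx_state {
        unsigned char buf[2][MTU];    /* the two halves of the driver's rx_buf page */
        int dma_num;                  /* which half the DMA engine is filling */
    };

    /* Illustrative stand-in: re-point the (stubbed) DMA channel at a buffer. */
    static void restart_dma(unsigned char *buf, int len)
    {
        printf("dma -> %p, %d bytes\n", (void *)buf, len);
    }

    /* Called when a frame completes with ct bytes in the current half:
     * flip to the other half and restart DMA before touching the data,
     * so reception can continue while the finished frame is processed. */
    static void rx_frame_done(struct rx_state *s, int ct)
    {
        unsigned char *done = s->buf[s->dma_num];

        s->dma_num ^= 1;
        restart_dma(s->buf[s->dma_num], MTU);

        /* The finished half is now stable and can be copied into an skb. */
        printf("frame of %d bytes at %p\n", ct, (void *)done);
    }

    int main(void)
    {
        struct rx_state s = { .dma_num = 0 };

        rx_frame_done(&s, 300);
        rx_frame_done(&s, 64);
        return 0;
    }

Restarting the DMA before the copy keeps reception open; if the next frame completes before the copy is finished, the driver logs the "DMA flip overrun!" warning seen above.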
1571 * @c: The channel to use
1582 netdev_tx_t z8530_queue_xmit(struct z8530_channel *c, struct sk_buff *skb) in z8530_queue_xmit() argument
1586 netif_stop_queue(c->netdevice); in z8530_queue_xmit()
1587 if (c->tx_next_skb) in z8530_queue_xmit()
1595 if (c->dma_tx && in z8530_queue_xmit()
1604 c->tx_next_ptr = c->tx_dma_buf[c->tx_dma_used]; in z8530_queue_xmit()
1605 c->tx_dma_used ^= 1; /* Flip temp buffer */ in z8530_queue_xmit()
1606 skb_copy_from_linear_data(skb, c->tx_next_ptr, skb->len); in z8530_queue_xmit()
1608 c->tx_next_ptr = skb->data; in z8530_queue_xmit()
1611 c->tx_next_skb = skb; in z8530_queue_xmit()
1614 spin_lock_irqsave(c->lock, flags); in z8530_queue_xmit()
1615 z8530_tx_begin(c); in z8530_queue_xmit()
1616 spin_unlock_irqrestore(c->lock, flags); in z8530_queue_xmit()