Lines matching the identifier "c" (cross-reference listing from the Z8530/Z85230 synchronous serial WAN driver)

4  *	(c) Copyright 1998 Alan Cox <alan@lxorguk.ukuu.org.uk>
5 * (c) Copyright 2000, 2001 Red Hat Inc
108 static void z8530_rx_done(struct z8530_channel *c);
109 static void z8530_tx_done(struct z8530_channel *c);
114 * @c: Z8530 channel to read from (2 per chip)
123 static inline u8 read_zsreg(struct z8530_channel *c, u8 reg) in read_zsreg() argument
126 z8530_write_port(c->ctrlio, reg); in read_zsreg()
127 return z8530_read_port(c->ctrlio); in read_zsreg()
132 * @c: The Z8530 channel to read the data port from
138 static inline u8 read_zsdata(struct z8530_channel *c) in read_zsdata() argument
141 r=z8530_read_port(c->dataio); in read_zsdata()
147 * @c: The Z8530 channel
155 * Assumes c->lock is held.
157 static inline void write_zsreg(struct z8530_channel *c, u8 reg, u8 val) in write_zsreg() argument
160 z8530_write_port(c->ctrlio, reg); in write_zsreg()
161 z8530_write_port(c->ctrlio, val); in write_zsreg()
167 * @c: The Z8530 channel
173 static inline void write_zsctrl(struct z8530_channel *c, u8 val) in write_zsctrl() argument
175 z8530_write_port(c->ctrlio, val); in write_zsctrl()
180 * @c: The Z8530 channel
187 static inline void write_zsdata(struct z8530_channel *c, u8 val) in write_zsdata() argument
189 z8530_write_port(c->dataio, val); in write_zsdata()
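The helpers above wrap the Z8530's two-step register access: the register index is written to the control port, then the value is read or written at that same port, so the pair must not be interleaved with another access and write_zsreg() assumes c->lock is held. A minimal sketch of a caller doing a locked read-modify-write on a shadowed register; example_set_rts() is hypothetical, not part of the driver:

static void example_set_rts(struct z8530_channel *c, int on)
{
	unsigned long flags;

	/* Hold the channel lock so the index/value port accesses below
	 * cannot be interleaved with another register operation.
	 */
	spin_lock_irqsave(c->lock, flags);
	if (on)
		c->regs[R5] |= RTS;
	else
		c->regs[R5] &= ~RTS;
	write_zsreg(c, R5, c->regs[R5]);	/* keep chip and shadow copy in step */
	spin_unlock_irqrestore(c->lock, flags);
}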
263 * @c: Channel to flush
273 static void z8530_flush_fifo(struct z8530_channel *c) in z8530_flush_fifo() argument
275 read_zsreg(c, R1); in z8530_flush_fifo()
276 read_zsreg(c, R1); in z8530_flush_fifo()
277 read_zsreg(c, R1); in z8530_flush_fifo()
278 read_zsreg(c, R1); in z8530_flush_fifo()
279 if(c->dev->type==Z85230) in z8530_flush_fifo()
281 read_zsreg(c, R1); in z8530_flush_fifo()
282 read_zsreg(c, R1); in z8530_flush_fifo()
283 read_zsreg(c, R1); in z8530_flush_fifo()
284 read_zsreg(c, R1); in z8530_flush_fifo()
290 * @c: The Z8530 channel to control
299 static void z8530_rtsdtr(struct z8530_channel *c, int set) in z8530_rtsdtr() argument
302 c->regs[5] |= (RTS | DTR); in z8530_rtsdtr()
304 c->regs[5] &= ~(RTS | DTR); in z8530_rtsdtr()
305 write_zsreg(c, R5, c->regs[5]); in z8530_rtsdtr()
310 * @c: Z8530 channel to process
332 static void z8530_rx(struct z8530_channel *c) in z8530_rx() argument
339 if(!(read_zsreg(c, R0)&1)) in z8530_rx()
341 ch=read_zsdata(c); in z8530_rx()
342 stat=read_zsreg(c, R1); in z8530_rx()
347 if(c->count < c->max) in z8530_rx()
349 *c->dptr++=ch; in z8530_rx()
350 c->count++; in z8530_rx()
362 if(c->skb) in z8530_rx()
363 c->dptr=c->skb->data; in z8530_rx()
364 c->count=0; in z8530_rx()
367 pr_warn("%s: overrun\n", c->dev->name); in z8530_rx()
368 c->rx_overrun++; in z8530_rx()
372 c->rx_crc_err++; in z8530_rx()
383 z8530_rx_done(c); in z8530_rx()
384 write_zsctrl(c, RES_Rx_CRC); in z8530_rx()
391 write_zsctrl(c, ERR_RES); in z8530_rx()
392 write_zsctrl(c, RES_H_IUS); in z8530_rx()
398 * @c: Z8530 channel to process
406 static void z8530_tx(struct z8530_channel *c) in z8530_tx() argument
408 while(c->txcount) { in z8530_tx()
410 if(!(read_zsreg(c, R0)&4)) in z8530_tx()
412 c->txcount--; in z8530_tx()
416 write_zsreg(c, R8, *c->tx_ptr++); in z8530_tx()
417 write_zsctrl(c, RES_H_IUS); in z8530_tx()
419 if(c->txcount==0) in z8530_tx()
421 write_zsctrl(c, RES_EOM_L); in z8530_tx()
422 write_zsreg(c, R10, c->regs[10]&~ABUNDER); in z8530_tx()
431 write_zsctrl(c, RES_Tx_P); in z8530_tx()
433 z8530_tx_done(c); in z8530_tx()
434 write_zsctrl(c, RES_H_IUS); in z8530_tx()
617 * @c: Z8530 channel to shut up
625 static void z8530_rx_clear(struct z8530_channel *c) in z8530_rx_clear() argument
632 read_zsdata(c); in z8530_rx_clear()
633 stat=read_zsreg(c, R1); in z8530_rx_clear()
636 write_zsctrl(c, RES_Rx_CRC); in z8530_rx_clear()
640 write_zsctrl(c, ERR_RES); in z8530_rx_clear()
641 write_zsctrl(c, RES_H_IUS); in z8530_rx_clear()
646 * @c: Z8530 channel to shut up
653 static void z8530_tx_clear(struct z8530_channel *c) in z8530_tx_clear() argument
655 write_zsctrl(c, RES_Tx_P); in z8530_tx_clear()
656 write_zsctrl(c, RES_H_IUS); in z8530_tx_clear()
699 * channel). c->lock for both channels points to dev->lock
779 * @c: The Z8530 channel to open in synchronous PIO mode
785 int z8530_sync_open(struct net_device *dev, struct z8530_channel *c) in z8530_sync_open() argument
789 spin_lock_irqsave(c->lock, flags); in z8530_sync_open()
791 c->sync = 1; in z8530_sync_open()
792 c->mtu = dev->mtu+64; in z8530_sync_open()
793 c->count = 0; in z8530_sync_open()
794 c->skb = NULL; in z8530_sync_open()
795 c->skb2 = NULL; in z8530_sync_open()
796 c->irqs = &z8530_sync; in z8530_sync_open()
799 z8530_rx_done(c); /* Load the frame ring */ in z8530_sync_open()
800 z8530_rx_done(c); /* Load the backup frame */ in z8530_sync_open()
801 z8530_rtsdtr(c,1); in z8530_sync_open()
802 c->dma_tx = 0; in z8530_sync_open()
803 c->regs[R1]|=TxINT_ENAB; in z8530_sync_open()
804 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_open()
805 write_zsreg(c, R3, c->regs[R3]|RxENABLE); in z8530_sync_open()
807 spin_unlock_irqrestore(c->lock, flags); in z8530_sync_open()
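z8530_sync_open() is the attach point for synchronous PIO operation: under the channel lock it primes the receive ring, raises RTS/DTR, enables transmit interrupts and the receiver. A card driver would normally call it from its net_device open routine and then start the queue; a minimal sketch, where card_priv() is an assumed accessor for the driver's private channel pointer:

static int example_card_open(struct net_device *dev)
{
	struct z8530_channel *c = card_priv(dev);	/* hypothetical accessor */
	int err;

	err = z8530_sync_open(dev, c);	/* program this channel for sync PIO */
	if (err)
		return err;

	netif_start_queue(dev);		/* transmit path is now usable */
	return 0;
}

The matching stop routine is the reverse: netif_stop_queue(dev) followed by z8530_sync_close(dev, c).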
817 * @c: Z8530 channel to disassociate and move to idle
823 int z8530_sync_close(struct net_device *dev, struct z8530_channel *c) in z8530_sync_close() argument
828 spin_lock_irqsave(c->lock, flags); in z8530_sync_close()
829 c->irqs = &z8530_nop; in z8530_sync_close()
830 c->max = 0; in z8530_sync_close()
831 c->sync = 0; in z8530_sync_close()
833 chk=read_zsreg(c,R0); in z8530_sync_close()
834 write_zsreg(c, R3, c->regs[R3]); in z8530_sync_close()
835 z8530_rtsdtr(c,0); in z8530_sync_close()
837 spin_unlock_irqrestore(c->lock, flags); in z8530_sync_close()
846 * @c: The Z8530 channel to configure in sync DMA mode.
853 int z8530_sync_dma_open(struct net_device *dev, struct z8530_channel *c) in z8530_sync_dma_open() argument
857 c->sync = 1; in z8530_sync_dma_open()
858 c->mtu = dev->mtu+64; in z8530_sync_dma_open()
859 c->count = 0; in z8530_sync_dma_open()
860 c->skb = NULL; in z8530_sync_dma_open()
861 c->skb2 = NULL; in z8530_sync_dma_open()
865 c->rxdma_on = 0; in z8530_sync_dma_open()
866 c->txdma_on = 0; in z8530_sync_dma_open()
874 if(c->mtu > PAGE_SIZE/2) in z8530_sync_dma_open()
877 c->rx_buf[0]=(void *)get_zeroed_page(GFP_KERNEL|GFP_DMA); in z8530_sync_dma_open()
878 if(c->rx_buf[0]==NULL) in z8530_sync_dma_open()
880 c->rx_buf[1]=c->rx_buf[0]+PAGE_SIZE/2; in z8530_sync_dma_open()
882 c->tx_dma_buf[0]=(void *)get_zeroed_page(GFP_KERNEL|GFP_DMA); in z8530_sync_dma_open()
883 if(c->tx_dma_buf[0]==NULL) in z8530_sync_dma_open()
885 free_page((unsigned long)c->rx_buf[0]); in z8530_sync_dma_open()
886 c->rx_buf[0]=NULL; in z8530_sync_dma_open()
889 c->tx_dma_buf[1]=c->tx_dma_buf[0]+PAGE_SIZE/2; in z8530_sync_dma_open()
891 c->tx_dma_used=0; in z8530_sync_dma_open()
892 c->dma_tx = 1; in z8530_sync_dma_open()
893 c->dma_num=0; in z8530_sync_dma_open()
894 c->dma_ready=1; in z8530_sync_dma_open()
900 spin_lock_irqsave(c->lock, cflags); in z8530_sync_dma_open()
906 c->regs[R14]|= DTRREQ; in z8530_sync_dma_open()
907 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_dma_open()
909 c->regs[R1]&= ~TxINT_ENAB; in z8530_sync_dma_open()
910 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_open()
916 c->regs[R1]|= WT_FN_RDYFN; in z8530_sync_dma_open()
917 c->regs[R1]|= WT_RDY_RT; in z8530_sync_dma_open()
918 c->regs[R1]|= INT_ERR_Rx; in z8530_sync_dma_open()
919 c->regs[R1]&= ~TxINT_ENAB; in z8530_sync_dma_open()
920 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_open()
921 c->regs[R1]|= WT_RDY_ENAB; in z8530_sync_dma_open()
922 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_open()
934 disable_dma(c->rxdma); in z8530_sync_dma_open()
935 clear_dma_ff(c->rxdma); in z8530_sync_dma_open()
936 set_dma_mode(c->rxdma, DMA_MODE_READ|0x10); in z8530_sync_dma_open()
937 set_dma_addr(c->rxdma, virt_to_bus(c->rx_buf[0])); in z8530_sync_dma_open()
938 set_dma_count(c->rxdma, c->mtu); in z8530_sync_dma_open()
939 enable_dma(c->rxdma); in z8530_sync_dma_open()
941 disable_dma(c->txdma); in z8530_sync_dma_open()
942 clear_dma_ff(c->txdma); in z8530_sync_dma_open()
943 set_dma_mode(c->txdma, DMA_MODE_WRITE); in z8530_sync_dma_open()
944 disable_dma(c->txdma); in z8530_sync_dma_open()
952 c->rxdma_on = 1; in z8530_sync_dma_open()
953 c->txdma_on = 1; in z8530_sync_dma_open()
954 c->tx_dma_used = 1; in z8530_sync_dma_open()
956 c->irqs = &z8530_dma_sync; in z8530_sync_dma_open()
957 z8530_rtsdtr(c,1); in z8530_sync_dma_open()
958 write_zsreg(c, R3, c->regs[R3]|RxENABLE); in z8530_sync_dma_open()
960 spin_unlock_irqrestore(c->lock, cflags); in z8530_sync_dma_open()
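z8530_sync_dma_open() allocates the DMA-safe bounce pages and programs both ISA DMA channels, but it expects c->rxdma and c->txdma to name channels the caller has already claimed. A minimal sketch of that calling sequence, assuming a hypothetical card driver whose DMA numbers come from card configuration; request_dma()/free_dma() are the standard ISA DMA claim and release calls:

static int example_card_dma_open(struct net_device *dev,
				 struct z8530_channel *c,
				 int rx_dma, int tx_dma)
{
	int err;

	/* Claim the ISA DMA channels before the library programs them. */
	if (request_dma(rx_dma, "z8530 rx"))
		return -EBUSY;
	if (request_dma(tx_dma, "z8530 tx")) {
		free_dma(rx_dma);
		return -EBUSY;
	}

	c->rxdma = rx_dma;
	c->txdma = tx_dma;

	err = z8530_sync_dma_open(dev, c);	/* buffers, interrupts, DMA setup */
	if (err) {
		free_dma(tx_dma);
		free_dma(rx_dma);
		return err;
	}

	netif_start_queue(dev);
	return 0;
}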
970 * @c: Z8530 channel to move into discard mode
976 int z8530_sync_dma_close(struct net_device *dev, struct z8530_channel *c) in z8530_sync_dma_close() argument
981 c->irqs = &z8530_nop; in z8530_sync_dma_close()
982 c->max = 0; in z8530_sync_dma_close()
983 c->sync = 0; in z8530_sync_dma_close()
990 disable_dma(c->rxdma); in z8530_sync_dma_close()
991 clear_dma_ff(c->rxdma); in z8530_sync_dma_close()
993 c->rxdma_on = 0; in z8530_sync_dma_close()
995 disable_dma(c->txdma); in z8530_sync_dma_close()
996 clear_dma_ff(c->txdma); in z8530_sync_dma_close()
999 c->txdma_on = 0; in z8530_sync_dma_close()
1000 c->tx_dma_used = 0; in z8530_sync_dma_close()
1002 spin_lock_irqsave(c->lock, flags); in z8530_sync_dma_close()
1008 c->regs[R1]&= ~WT_RDY_ENAB; in z8530_sync_dma_close()
1009 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_close()
1010 c->regs[R1]&= ~(WT_RDY_RT|WT_FN_RDYFN|INT_ERR_Rx); in z8530_sync_dma_close()
1011 c->regs[R1]|= INT_ALL_Rx; in z8530_sync_dma_close()
1012 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_dma_close()
1013 c->regs[R14]&= ~DTRREQ; in z8530_sync_dma_close()
1014 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_dma_close()
1016 if(c->rx_buf[0]) in z8530_sync_dma_close()
1018 free_page((unsigned long)c->rx_buf[0]); in z8530_sync_dma_close()
1019 c->rx_buf[0]=NULL; in z8530_sync_dma_close()
1021 if(c->tx_dma_buf[0]) in z8530_sync_dma_close()
1023 free_page((unsigned long)c->tx_dma_buf[0]); in z8530_sync_dma_close()
1024 c->tx_dma_buf[0]=NULL; in z8530_sync_dma_close()
1026 chk=read_zsreg(c,R0); in z8530_sync_dma_close()
1027 write_zsreg(c, R3, c->regs[R3]); in z8530_sync_dma_close()
1028 z8530_rtsdtr(c,0); in z8530_sync_dma_close()
1030 spin_unlock_irqrestore(c->lock, flags); in z8530_sync_dma_close()
1040 * @c: The Z8530 channel to configure in sync DMA mode.
1047 int z8530_sync_txdma_open(struct net_device *dev, struct z8530_channel *c) in z8530_sync_txdma_open() argument
1052 c->sync = 1; in z8530_sync_txdma_open()
1053 c->mtu = dev->mtu+64; in z8530_sync_txdma_open()
1054 c->count = 0; in z8530_sync_txdma_open()
1055 c->skb = NULL; in z8530_sync_txdma_open()
1056 c->skb2 = NULL; in z8530_sync_txdma_open()
1064 if(c->mtu > PAGE_SIZE/2) in z8530_sync_txdma_open()
1067 c->tx_dma_buf[0]=(void *)get_zeroed_page(GFP_KERNEL|GFP_DMA); in z8530_sync_txdma_open()
1068 if(c->tx_dma_buf[0]==NULL) in z8530_sync_txdma_open()
1071 c->tx_dma_buf[1] = c->tx_dma_buf[0] + PAGE_SIZE/2; in z8530_sync_txdma_open()
1074 spin_lock_irqsave(c->lock, cflags); in z8530_sync_txdma_open()
1080 z8530_rx_done(c); in z8530_sync_txdma_open()
1081 z8530_rx_done(c); in z8530_sync_txdma_open()
1087 c->rxdma_on = 0; in z8530_sync_txdma_open()
1088 c->txdma_on = 0; in z8530_sync_txdma_open()
1090 c->tx_dma_used=0; in z8530_sync_txdma_open()
1091 c->dma_num=0; in z8530_sync_txdma_open()
1092 c->dma_ready=1; in z8530_sync_txdma_open()
1093 c->dma_tx = 1; in z8530_sync_txdma_open()
1102 c->regs[R14]|= DTRREQ; in z8530_sync_txdma_open()
1103 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_txdma_open()
1105 c->regs[R1]&= ~TxINT_ENAB; in z8530_sync_txdma_open()
1106 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_txdma_open()
1114 disable_dma(c->txdma); in z8530_sync_txdma_open()
1115 clear_dma_ff(c->txdma); in z8530_sync_txdma_open()
1116 set_dma_mode(c->txdma, DMA_MODE_WRITE); in z8530_sync_txdma_open()
1117 disable_dma(c->txdma); in z8530_sync_txdma_open()
1125 c->rxdma_on = 0; in z8530_sync_txdma_open()
1126 c->txdma_on = 1; in z8530_sync_txdma_open()
1127 c->tx_dma_used = 1; in z8530_sync_txdma_open()
1129 c->irqs = &z8530_txdma_sync; in z8530_sync_txdma_open()
1130 z8530_rtsdtr(c,1); in z8530_sync_txdma_open()
1131 write_zsreg(c, R3, c->regs[R3]|RxENABLE); in z8530_sync_txdma_open()
1132 spin_unlock_irqrestore(c->lock, cflags); in z8530_sync_txdma_open()
1142 * @c: Z8530 channel to move into discard mode
1148 int z8530_sync_txdma_close(struct net_device *dev, struct z8530_channel *c) in z8530_sync_txdma_close() argument
1154 spin_lock_irqsave(c->lock, cflags); in z8530_sync_txdma_close()
1156 c->irqs = &z8530_nop; in z8530_sync_txdma_close()
1157 c->max = 0; in z8530_sync_txdma_close()
1158 c->sync = 0; in z8530_sync_txdma_close()
1166 disable_dma(c->txdma); in z8530_sync_txdma_close()
1167 clear_dma_ff(c->txdma); in z8530_sync_txdma_close()
1168 c->txdma_on = 0; in z8530_sync_txdma_close()
1169 c->tx_dma_used = 0; in z8530_sync_txdma_close()
1177 c->regs[R1]&= ~WT_RDY_ENAB; in z8530_sync_txdma_close()
1178 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_txdma_close()
1179 c->regs[R1]&= ~(WT_RDY_RT|WT_FN_RDYFN|INT_ERR_Rx); in z8530_sync_txdma_close()
1180 c->regs[R1]|= INT_ALL_Rx; in z8530_sync_txdma_close()
1181 write_zsreg(c, R1, c->regs[R1]); in z8530_sync_txdma_close()
1182 c->regs[R14]&= ~DTRREQ; in z8530_sync_txdma_close()
1183 write_zsreg(c, R14, c->regs[R14]); in z8530_sync_txdma_close()
1185 if(c->tx_dma_buf[0]) in z8530_sync_txdma_close()
1187 free_page((unsigned long)c->tx_dma_buf[0]); in z8530_sync_txdma_close()
1188 c->tx_dma_buf[0]=NULL; in z8530_sync_txdma_close()
1190 chk=read_zsreg(c,R0); in z8530_sync_txdma_close()
1191 write_zsreg(c, R3, c->regs[R3]); in z8530_sync_txdma_close()
1192 z8530_rtsdtr(c,0); in z8530_sync_txdma_close()
1194 spin_unlock_irqrestore(c->lock, cflags); in z8530_sync_txdma_close()
1369 * @c: Z8530 channel to configure
1378 int z8530_channel_load(struct z8530_channel *c, u8 *rtable) in z8530_channel_load() argument
1382 spin_lock_irqsave(c->lock, flags); in z8530_channel_load()
1388 write_zsreg(c, R15, c->regs[15]|1); in z8530_channel_load()
1389 write_zsreg(c, reg&0x0F, *rtable); in z8530_channel_load()
1391 write_zsreg(c, R15, c->regs[15]&~1); in z8530_channel_load()
1392 c->regs[reg]=*rtable++; in z8530_channel_load()
1394 c->rx_function=z8530_null_rx; in z8530_channel_load()
1395 c->skb=NULL; in z8530_channel_load()
1396 c->tx_skb=NULL; in z8530_channel_load()
1397 c->tx_next_skb=NULL; in z8530_channel_load()
1398 c->mtu=1500; in z8530_channel_load()
1399 c->max=0; in z8530_channel_load()
1400 c->count=0; in z8530_channel_load()
1401 c->status=read_zsreg(c, R0); in z8530_channel_load()
1402 c->sync=1; in z8530_channel_load()
1403 write_zsreg(c, R3, c->regs[R3]|RxENABLE); in z8530_channel_load()
1405 spin_unlock_irqrestore(c->lock, flags); in z8530_channel_load()
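z8530_channel_load() walks a flat table of register/value pairs, writing each register (taking the R15 detour for registers above 0x0F) and mirroring the value into c->regs[]; the tables shipped with the driver end in a 255 terminator. A minimal sketch of defining and loading such a table at init time; the 0x00 values are placeholders, not a usable line configuration:

/* Hypothetical (register, value) table, 255-terminated. */
static u8 example_regs[] = {
	R10, 0x00,	/* placeholder: encoding / underrun handling bits */
	R3,  0x00,	/* placeholder: receiver parameters */
	255		/* end of table */
};

static void example_load(struct z8530_channel *c)
{
	z8530_channel_load(c, example_regs);	/* chip and shadow copy now match */
}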
1414 * @c: The Z8530 channel to kick
1426 static void z8530_tx_begin(struct z8530_channel *c) in z8530_tx_begin() argument
1429 if(c->tx_skb) in z8530_tx_begin()
1432 c->tx_skb=c->tx_next_skb; in z8530_tx_begin()
1433 c->tx_next_skb=NULL; in z8530_tx_begin()
1434 c->tx_ptr=c->tx_next_ptr; in z8530_tx_begin()
1436 if(c->tx_skb==NULL) in z8530_tx_begin()
1439 if(c->dma_tx) in z8530_tx_begin()
1442 disable_dma(c->txdma); in z8530_tx_begin()
1446 if (get_dma_residue(c->txdma)) in z8530_tx_begin()
1448 c->netdevice->stats.tx_dropped++; in z8530_tx_begin()
1449 c->netdevice->stats.tx_fifo_errors++; in z8530_tx_begin()
1453 c->txcount=0; in z8530_tx_begin()
1457 c->txcount=c->tx_skb->len; in z8530_tx_begin()
1460 if(c->dma_tx) in z8530_tx_begin()
1470 disable_dma(c->txdma); in z8530_tx_begin()
1477 if(c->dev->type!=Z85230) in z8530_tx_begin()
1479 write_zsctrl(c, RES_Tx_CRC); in z8530_tx_begin()
1480 write_zsctrl(c, RES_EOM_L); in z8530_tx_begin()
1482 write_zsreg(c, R10, c->regs[10]&~ABUNDER); in z8530_tx_begin()
1483 clear_dma_ff(c->txdma); in z8530_tx_begin()
1484 set_dma_addr(c->txdma, virt_to_bus(c->tx_ptr)); in z8530_tx_begin()
1485 set_dma_count(c->txdma, c->txcount); in z8530_tx_begin()
1486 enable_dma(c->txdma); in z8530_tx_begin()
1488 write_zsctrl(c, RES_EOM_L); in z8530_tx_begin()
1489 write_zsreg(c, R5, c->regs[R5]|TxENAB); in z8530_tx_begin()
1495 write_zsreg(c, R10, c->regs[10]); in z8530_tx_begin()
1496 write_zsctrl(c, RES_Tx_CRC); in z8530_tx_begin()
1498 while(c->txcount && (read_zsreg(c,R0)&Tx_BUF_EMP)) in z8530_tx_begin()
1500 write_zsreg(c, R8, *c->tx_ptr++); in z8530_tx_begin()
1501 c->txcount--; in z8530_tx_begin()
1509 netif_wake_queue(c->netdevice); in z8530_tx_begin()
1514 * @c: The channel that completed a transmit.
1523 static void z8530_tx_done(struct z8530_channel *c) in z8530_tx_done() argument
1528 if (c->tx_skb == NULL) in z8530_tx_done()
1531 skb = c->tx_skb; in z8530_tx_done()
1532 c->tx_skb = NULL; in z8530_tx_done()
1533 z8530_tx_begin(c); in z8530_tx_done()
1534 c->netdevice->stats.tx_packets++; in z8530_tx_done()
1535 c->netdevice->stats.tx_bytes += skb->len; in z8530_tx_done()
1541 * @c: The channel the packet arrived on
1548 void z8530_null_rx(struct z8530_channel *c, struct sk_buff *skb) in z8530_null_rx() argument
1557 * @c: The channel that completed a receive
1568 static void z8530_rx_done(struct z8530_channel *c) in z8530_rx_done() argument
1577 if(c->rxdma_on) in z8530_rx_done()
1584 int ready=c->dma_ready; in z8530_rx_done()
1585 unsigned char *rxb=c->rx_buf[c->dma_num]; in z8530_rx_done()
1594 disable_dma(c->rxdma); in z8530_rx_done()
1595 clear_dma_ff(c->rxdma); in z8530_rx_done()
1596 c->rxdma_on=0; in z8530_rx_done()
1597 ct=c->mtu-get_dma_residue(c->rxdma); in z8530_rx_done()
1600 c->dma_ready=0; in z8530_rx_done()
1609 c->dma_num^=1; in z8530_rx_done()
1610 set_dma_mode(c->rxdma, DMA_MODE_READ|0x10); in z8530_rx_done()
1611 set_dma_addr(c->rxdma, virt_to_bus(c->rx_buf[c->dma_num])); in z8530_rx_done()
1612 set_dma_count(c->rxdma, c->mtu); in z8530_rx_done()
1613 c->rxdma_on = 1; in z8530_rx_done()
1614 enable_dma(c->rxdma); in z8530_rx_done()
1617 write_zsreg(c, R0, RES_Rx_CRC); in z8530_rx_done()
1622 netdev_warn(c->netdevice, "DMA flip overrun!\n"); in z8530_rx_done()
1636 c->netdevice->stats.rx_dropped++; in z8530_rx_done()
1637 netdev_warn(c->netdevice, "Memory squeeze\n"); in z8530_rx_done()
1641 c->netdevice->stats.rx_packets++; in z8530_rx_done()
1642 c->netdevice->stats.rx_bytes += ct; in z8530_rx_done()
1644 c->dma_ready = 1; in z8530_rx_done()
1647 skb = c->skb; in z8530_rx_done()
1661 ct=c->count; in z8530_rx_done()
1663 c->skb = c->skb2; in z8530_rx_done()
1664 c->count = 0; in z8530_rx_done()
1665 c->max = c->mtu; in z8530_rx_done()
1666 if (c->skb) { in z8530_rx_done()
1667 c->dptr = c->skb->data; in z8530_rx_done()
1668 c->max = c->mtu; in z8530_rx_done()
1670 c->count = 0; in z8530_rx_done()
1671 c->max = 0; in z8530_rx_done()
1675 c->skb2 = dev_alloc_skb(c->mtu); in z8530_rx_done()
1676 if (c->skb2 == NULL) in z8530_rx_done()
1677 netdev_warn(c->netdevice, "memory squeeze\n"); in z8530_rx_done()
1679 skb_put(c->skb2, c->mtu); in z8530_rx_done()
1680 c->netdevice->stats.rx_packets++; in z8530_rx_done()
1681 c->netdevice->stats.rx_bytes += ct; in z8530_rx_done()
1688 c->rx_function(c, skb); in z8530_rx_done()
1690 c->netdevice->stats.rx_dropped++; in z8530_rx_done()
1691 netdev_err(c->netdevice, "Lost a frame\n"); in z8530_rx_done()
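Completed frames are handed to whatever the card driver installed in c->rx_function (z8530_null_rx() above is the discard default). A minimal sketch of such a receive hook, assuming the net_device was registered through the generic HDLC layer and that the two trailing CRC bytes are still on the frame:

static void example_rx(struct z8530_channel *c, struct sk_buff *skb)
{
	/* Drop the trailing CRC bytes, then push the frame up the stack. */
	skb_trim(skb, skb->len - 2);
	skb->protocol = hdlc_type_trans(skb, c->netdevice);
	skb->dev = c->netdevice;
	netif_rx(skb);
}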
1714 * @c: The channel to use
1726 netdev_tx_t z8530_queue_xmit(struct z8530_channel *c, struct sk_buff *skb) in z8530_queue_xmit() argument
1730 netif_stop_queue(c->netdevice); in z8530_queue_xmit()
1731 if(c->tx_next_skb) in z8530_queue_xmit()
1742 …if(c->dma_tx && ((unsigned long)(virt_to_bus(skb->data+skb->len))>=16*1024*1024 || spans_boundary(… in z8530_queue_xmit()
1751 c->tx_next_ptr=c->tx_dma_buf[c->tx_dma_used]; in z8530_queue_xmit()
1752 c->tx_dma_used^=1; /* Flip temp buffer */ in z8530_queue_xmit()
1753 skb_copy_from_linear_data(skb, c->tx_next_ptr, skb->len); in z8530_queue_xmit()
1756 c->tx_next_ptr=skb->data; in z8530_queue_xmit()
1758 c->tx_next_skb=skb; in z8530_queue_xmit()
1761 spin_lock_irqsave(c->lock, flags); in z8530_queue_xmit()
1762 z8530_tx_begin(c); in z8530_queue_xmit()
1763 spin_unlock_irqrestore(c->lock, flags); in z8530_queue_xmit()
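z8530_queue_xmit() stops the queue itself, copies DMA-unsafe buffers into the driver's own bounce page, and hands the frame to z8530_tx_begin() under the channel lock, so a card driver's ndo_start_xmit handler can be a thin wrapper. A minimal sketch, with card_priv() again a hypothetical accessor for the channel behind the net_device:

static netdev_tx_t example_start_xmit(struct sk_buff *skb,
				      struct net_device *dev)
{
	struct z8530_channel *c = card_priv(dev);	/* hypothetical accessor */

	/* The library handles queue stop, the bounce-buffer copy and
	 * kicking the transmitter.
	 */
	return z8530_queue_xmit(c, skb);
}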