Searched refs:seg_count (Results 1 – 16 of 16) sorted by relevance

/Linux-v5.4/drivers/char/agp/
compat_ioctl.c
72 if ((unsigned) ureserve.seg_count >= ~0U/sizeof(struct agp_segment32)) in compat_agpioc_reserve_wrap()
76 kreserve.seg_count = ureserve.seg_count; in compat_agpioc_reserve_wrap()
80 if (kreserve.seg_count == 0) { in compat_agpioc_reserve_wrap()
98 if (ureserve.seg_count >= 16384) in compat_agpioc_reserve_wrap()
101 usegment = kmalloc_array(ureserve.seg_count, in compat_agpioc_reserve_wrap()
107 ksegment = kmalloc_array(kreserve.seg_count, in compat_agpioc_reserve_wrap()
116 sizeof(*usegment) * ureserve.seg_count)) { in compat_agpioc_reserve_wrap()
122 for (seg = 0; seg < ureserve.seg_count; seg++) { in compat_agpioc_reserve_wrap()
frontend.c
170 seg = kzalloc((sizeof(struct agp_segment_priv) * region->seg_count), GFP_KERNEL); in agp_create_segment()
178 for (i = 0; i < region->seg_count; i++) { in agp_create_segment()
192 agp_add_seg_to_client(client, ret_seg, region->seg_count); in agp_create_segment()
809 if ((unsigned) reserve.seg_count >= ~0U/sizeof(struct agp_segment)) in agpioc_reserve_wrap()
814 if (reserve.seg_count == 0) { in agpioc_reserve_wrap()
830 if (reserve.seg_count >= 16384) in agpioc_reserve_wrap()
833 segment = kmalloc((sizeof(struct agp_segment) * reserve.seg_count), in agpioc_reserve_wrap()
840 sizeof(struct agp_segment) * reserve.seg_count)) { in agpioc_reserve_wrap()
compat_ioctl.h
66 compat_size_t seg_count; /* number of segments */ member
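
The AGP hits above show the validate-then-allocate pattern: compat_agpioc_reserve_wrap() rejects a user-supplied seg_count that would overflow the allocation size (>= ~0U/sizeof(struct agp_segment32)), rejects anything at or above the 16384 cap, and only then calls kmalloc_array(). Below is a minimal userspace sketch of the same guard, assuming an illustrative segment layout and using calloc() in place of kmalloc_array().

#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>

struct agp_segment32 {                  /* layout is illustrative only */
	uint32_t pg_start;
	uint32_t pg_count;
	int32_t  prot;
};

/* Mirror the checks seen in compat_agpioc_reserve_wrap(): refuse counts
 * that exceed the sanity cap or would overflow count * element_size. */
static struct agp_segment32 *alloc_segments(size_t seg_count)
{
	if (seg_count == 0)
		return NULL;                    /* the real ioctl branches separately on 0 */
	if (seg_count >= 16384)
		return NULL;                    /* same hard cap as the ioctl */
	if (seg_count >= SIZE_MAX / sizeof(struct agp_segment32))
		return NULL;                    /* count * size would overflow */
	return calloc(seg_count, sizeof(struct agp_segment32));
}
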
/Linux-v5.4/drivers/gpu/drm/
drm_dma.c
95 if (dma->bufs[i].seg_count) { in drm_legacy_dma_takedown()
100 dma->bufs[i].seg_count); in drm_legacy_dma_takedown()
101 for (j = 0; j < dma->bufs[i].seg_count; j++) { in drm_legacy_dma_takedown()
drm_bufs.c
677 if (entry->seg_count) { in drm_cleanup_buf_error()
678 for (i = 0; i < entry->seg_count; i++) { in drm_cleanup_buf_error()
685 entry->seg_count = 0; in drm_cleanup_buf_error()
854 dma->seg_count += entry->seg_count; in drm_legacy_addbufs_agp()
985 entry->seg_count = count; in drm_legacy_addbufs_pci()
992 entry->seglist[entry->seg_count++] = dmah; in drm_legacy_addbufs_pci()
1022 entry->seg_count = count; in drm_legacy_addbufs_pci()
1062 dma->seg_count += entry->seg_count; in drm_legacy_addbufs_pci()
1063 dma->page_count += entry->seg_count << page_order; in drm_legacy_addbufs_pci()
1064 dma->byte_count += PAGE_SIZE * (entry->seg_count << page_order); in drm_legacy_addbufs_pci()
[all …]
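
In the legacy DRM buffer code, each buffer entry remembers how many DMA segments it received, and drm_legacy_addbufs_pci() folds those into the device-wide totals with page-order arithmetic (page_count += seg_count << page_order, byte_count += PAGE_SIZE times that). Below is a small sketch of that accounting with hypothetical struct names and an assumed 4 KiB page size.

#include <stddef.h>

#define SKETCH_PAGE_SIZE 4096UL         /* assumed page size */

struct buf_entry {                      /* hypothetical per-entry counters */
	int seg_count;                  /* DMA segments owned by this entry */
	int page_order;                 /* each segment covers 1 << page_order pages */
};

struct dma_totals {                     /* hypothetical device-wide counters */
	int seg_count;
	int page_count;
	unsigned long byte_count;
};

/* Fold one entry into the totals, as the addbufs hits above do. */
static void account_entry(struct dma_totals *dma, const struct buf_entry *e)
{
	dma->seg_count  += e->seg_count;
	dma->page_count += e->seg_count << e->page_order;
	dma->byte_count += SKETCH_PAGE_SIZE *
			   ((unsigned long)e->seg_count << e->page_order);
}
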
/Linux-v5.4/drivers/infiniband/core/
mad_rmpp.c
588 paylen = (mad_send_wr->send_buf.seg_count * in send_next_seg()
593 if (mad_send_wr->seg_num == mad_send_wr->send_buf.seg_count) { in send_next_seg()
619 if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) || in abort_send()
688 if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) && in process_rmpp_ack()
695 if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) || in process_rmpp_ack()
699 if (seg_num > mad_send_wr->send_buf.seg_count || in process_rmpp_ack()
715 if (mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) { in process_rmpp_ack()
737 mad_send_wr->seg_num < mad_send_wr->send_buf.seg_count) { in process_rmpp_ack()
930 if (mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) { in ib_process_rmpp_send_wc()
937 mad_send_wr->seg_num == mad_send_wr->send_buf.seg_count) in ib_process_rmpp_send_wc()
[all …]
mad.c
1024 seg->num = ++send_buf->seg_count; in alloc_send_rmpp_list()
1186 if (mad_send_wr->send_buf.seg_count) in ib_get_payload()
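
Here seg_count is the total number of RMPP segments for a send: the mad.c hit numbers segments one at a time as the list is built (seg->num = ++send_buf->seg_count), and the mad_rmpp.c paths compare seg_num and last_ack against seg_count to detect completion. A purely illustrative sketch of the ceiling division that relates payload size to segment count follows; the real code counts while building the list rather than using this helper.

#include <stddef.h>

/* Segments needed to carry payload_len bytes at seg_size bytes per
 * segment: a plain ceiling division (illustrative helper, not kernel API). */
static size_t segments_needed(size_t payload_len, size_t seg_size)
{
	if (seg_size == 0)
		return 0;
	return (payload_len + seg_size - 1) / seg_size;
}
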
/Linux-v5.4/include/drm/
drm_legacy.h
93 int seg_count; member
109 int seg_count; member
/Linux-v5.4/include/uapi/linux/
agpgart.h
88 __kernel_size_t seg_count; /* number of segments */ member
/Linux-v5.4/include/linux/
agpgart.h
67 size_t seg_count; /* number of segments */ member
/Linux-v5.4/drivers/net/ethernet/intel/ice/
ice_flex_pipe.c
276 for (i = 0; i < le32_to_cpu(pkg_hdr->seg_count); i++) { in ice_find_seg_in_pkg()
540 u32 seg_count; in ice_verify_pkg() local
553 seg_count = le32_to_cpu(pkg->seg_count); in ice_verify_pkg()
554 if (seg_count < 1) in ice_verify_pkg()
558 if (len < sizeof(*pkg) + ((seg_count - 1) * sizeof(pkg->seg_offset))) in ice_verify_pkg()
562 for (i = 0; i < seg_count; i++) { in ice_verify_pkg()
ice_flex_type.h
21 __le32 seg_count; member
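
In the ice driver, ice_verify_pkg() reads a little-endian seg_count from the package header, requires at least one segment, and checks that the buffer is long enough for the header plus one seg_offset slot per additional segment before ice_find_seg_in_pkg() walks them. Below is a cut-down sketch of that length check, using a hypothetical header layout (the real struct carries more fields and stores these values little-endian).

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Hypothetical, cut-down package header: a count followed by per-segment
 * offsets; seg_offset grows with seg_count. */
struct pkg_hdr {
	uint32_t seg_count;
	uint32_t seg_offset[1];
};

/* Mirror the ice_verify_pkg() check above: the buffer must cover the fixed
 * header plus (seg_count - 1) additional offset slots. */
static bool pkg_len_ok(size_t len, uint32_t seg_count)
{
	if (seg_count < 1)
		return false;
	return len >= sizeof(struct pkg_hdr) +
		      (size_t)(seg_count - 1) * sizeof(uint32_t);
}
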
/Linux-v5.4/drivers/memstick/core/
mspro_block.c
169 unsigned int seg_count; member
612 if (msb->current_seg == msb->seg_count) { in h_mspro_block_transfer_data()
702 msb->seg_count = blk_rq_map_sg(msb->block_req->q, in mspro_block_issue_req()
706 if (!msb->seg_count) { in mspro_block_issue_req()
988 msb->seg_count = 1; in mspro_block_read_attributes()
1089 msb->seg_count = 1; in mspro_block_read_attributes()
/Linux-v5.4/drivers/net/ethernet/qlogic/
qla3xxx.c
1939 if (tx_cb->seg_count == 0) { in ql_process_mac_tx_intr()
1951 tx_cb->seg_count--; in ql_process_mac_tx_intr()
1952 if (tx_cb->seg_count) { in ql_process_mac_tx_intr()
1953 for (i = 1; i < tx_cb->seg_count; i++) { in ql_process_mac_tx_intr()
2319 seg_cnt = tx_cb->seg_count; in ql_send_map()
2475 tx_cb->seg_count = ql_get_seg_count(qdev, in ql3xxx_send()
2477 if (tx_cb->seg_count == -1) { in ql3xxx_send()
3650 for (j = 1; j < tx_cb->seg_count; j++) { in ql_reset_work()
qla3xxx.h
1039 int seg_count; member
/Linux-v5.4/include/rdma/
ib_mad.h
497 int seg_count; member