Searched refs:seg_count (Results 1 – 16 of 16) sorted by relevance
/linux/drivers/char/agp/

  compat_ioctl.c
      72  if ((unsigned) ureserve.seg_count >= ~0U/sizeof(struct agp_segment32))  in compat_agpioc_reserve_wrap()
      76  kreserve.seg_count = ureserve.seg_count;  in compat_agpioc_reserve_wrap()
      80  if (kreserve.seg_count == 0) {  in compat_agpioc_reserve_wrap()
      98  if (ureserve.seg_count >= 16384)  in compat_agpioc_reserve_wrap()
     101  usegment = kmalloc_array(ureserve.seg_count,  in compat_agpioc_reserve_wrap()
     107  ksegment = kmalloc_array(kreserve.seg_count,  in compat_agpioc_reserve_wrap()
     116  sizeof(*usegment) * ureserve.seg_count)) {  in compat_agpioc_reserve_wrap()
     122  for (seg = 0; seg < ureserve.seg_count; seg++) {  in compat_agpioc_reserve_wrap()

  frontend.c
     168  seg = kzalloc((sizeof(struct agp_segment_priv) * region->seg_count), GFP_KERNEL);  in agp_create_segment()
     176  for (i = 0; i < region->seg_count; i++) {  in agp_create_segment()
     190  agp_add_seg_to_client(client, ret_seg, region->seg_count);  in agp_create_segment()
     807  if ((unsigned) reserve.seg_count >= ~0U/sizeof(struct agp_segment))  in agpioc_reserve_wrap()
     812  if (reserve.seg_count == 0) {  in agpioc_reserve_wrap()
     828  if (reserve.seg_count >= 16384)  in agpioc_reserve_wrap()
     831  segment = kmalloc((sizeof(struct agp_segment) * reserve.seg_count),  in agpioc_reserve_wrap()
     838  sizeof(struct agp_segment) * reserve.seg_count)) {  in agpioc_reserve_wrap()

  compat_ioctl.h
      66  compat_size_t seg_count;  /* number of segments */  (member)

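Note: the agpioc_reserve_wrap()/compat_agpioc_reserve_wrap() hits above share one pattern: a user-supplied seg_count is rejected if it would overflow the element-size multiplication (the ~0U/sizeof(...) test) or exceed a 16384 hard cap, and only then is the segment array allocated. Below is a minimal userspace sketch of that guard, not the kernel code itself; struct segment, MAX_SEGMENTS and alloc_segments() are hypothetical names, and SIZE_MAX stands in for the kernel's ~0U cast.

    #include <stdint.h>
    #include <stdlib.h>

    /* Stand-in for struct agp_segment; the real layout lives in
     * include/uapi/linux/agpgart.h. */
    struct segment {
        uint64_t pg_start;
        uint64_t pg_count;
        int prot;
    };

    #define MAX_SEGMENTS 16384  /* mirrors the hard cap seen in the hits above */

    /* Hypothetical helper: allocate a bounds-checked array of seg_count
     * segments, refusing counts that would wrap the size calculation. */
    static struct segment *alloc_segments(size_t seg_count)
    {
        if (seg_count == 0 || seg_count >= MAX_SEGMENTS)
            return NULL;
        /* Redundant with calloc()'s own overflow check, but spelled out to
         * mirror the kernel's "seg_count >= ~0U/sizeof(...)" test. */
        if (seg_count >= SIZE_MAX / sizeof(struct segment))
            return NULL;
        return calloc(seg_count, sizeof(struct segment));
    }

    int main(void)
    {
        struct segment *segs = alloc_segments(32);
        int ok = segs != NULL;
        free(segs);
        return ok ? 0 : 1;
    }
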
/linux/drivers/gpu/drm/

  drm_dma.c
      96  if (dma->bufs[i].seg_count) {  in drm_legacy_dma_takedown()
     101  dma->bufs[i].seg_count);  in drm_legacy_dma_takedown()
     102  for (j = 0; j < dma->bufs[i].seg_count; j++) {  in drm_legacy_dma_takedown()

  drm_bufs.c
     680  if (entry->seg_count) {  in drm_cleanup_buf_error()
     681  for (i = 0; i < entry->seg_count; i++) {  in drm_cleanup_buf_error()
     693  entry->seg_count = 0;  in drm_cleanup_buf_error()
     862  dma->seg_count += entry->seg_count;  in drm_legacy_addbufs_agp()
     991  entry->seg_count = count;  in drm_legacy_addbufs_pci()
    1009  entry->seg_count = count;  in drm_legacy_addbufs_pci()
    1016  entry->seglist[entry->seg_count++] = dmah;  in drm_legacy_addbufs_pci()
    1046  entry->seg_count = count;  in drm_legacy_addbufs_pci()
    1086  dma->seg_count += entry->seg_count;  in drm_legacy_addbufs_pci()
    1087  dma->page_count += entry->seg_count << page_order;  in drm_legacy_addbufs_pci()
    [all …]

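Note: in the drm_bufs.c hits, entry->seg_count records how many DMA segments were actually allocated, so drm_cleanup_buf_error() can free exactly that many and then zero the count, while drm_legacy_addbufs_pci() converts segments to pages with seg_count << page_order (each segment spans 2^page_order pages). A small illustrative sketch of the cleanup half; struct buf_entry and cleanup_entry() are hypothetical names, and freeing the list itself is this sketch's own bookkeeping.

    #include <stdlib.h>

    /* Reduced stand-in for the legacy buffer entry: seglist holds the
     * allocations, seg_count says how many of them are valid. */
    struct buf_entry {
        int seg_count;
        void **seglist;
    };

    /* Free exactly seg_count segments, then zero the count so a repeated
     * cleanup call becomes a no-op (the loop-and-zero pattern visible at
     * drm_bufs.c lines 680-693 above). */
    static void cleanup_entry(struct buf_entry *entry)
    {
        if (entry->seg_count) {
            for (int i = 0; i < entry->seg_count; i++)
                free(entry->seglist[i]);
            free(entry->seglist);
            entry->seglist = NULL;
            entry->seg_count = 0;
        }
    }

    int main(void)
    {
        struct buf_entry e = { .seg_count = 2 };
        e.seglist = malloc(2 * sizeof(void *));
        e.seglist[0] = malloc(16);
        e.seglist[1] = malloc(16);
        cleanup_entry(&e);
        return 0;
    }
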
/linux/drivers/infiniband/core/

  mad_rmpp.c
     579  paylen = (mad_send_wr->send_buf.seg_count *  in send_next_seg()
     584  if (mad_send_wr->seg_num == mad_send_wr->send_buf.seg_count) {  in send_next_seg()
     610  if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) ||  in abort_send()
     679  if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) &&  in process_rmpp_ack()
     686  if ((mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) ||  in process_rmpp_ack()
     690  if (seg_num > mad_send_wr->send_buf.seg_count ||  in process_rmpp_ack()
     706  if (mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) {  in process_rmpp_ack()
     728  mad_send_wr->seg_num < mad_send_wr->send_buf.seg_count) {  in process_rmpp_ack()
     921  if (mad_send_wr->last_ack == mad_send_wr->send_buf.seg_count) {  in ib_process_rmpp_send_wc()
     928  mad_send_wr->seg_num == mad_send_wr->send_buf.seg_count)  in ib_process_rmpp_send_wc()
    [all …]

  mad.c
     813  seg->num = ++send_buf->seg_count;  in alloc_send_rmpp_list()
     974  if (mad_send_wr->send_buf.seg_count)  in ib_get_payload()

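Note: in the mad_rmpp.c hits, send_buf.seg_count is the total number of RMPP segments, seg_num tracks the segment being sent, and last_ack the highest acknowledged one; the transfer is treated as complete once last_ack equals seg_count. A loosely modeled sketch of that ack bookkeeping; struct seg_send_state and process_ack() are hypothetical, and the stale-ack test is this sketch's own addition.

    #include <stdbool.h>

    /* seg_count: total segments, seg_num: next segment to send,
     * last_ack: highest segment acknowledged so far. */
    struct seg_send_state {
        int seg_count;
        int seg_num;
        int last_ack;
    };

    /* Record an acknowledgement and report whether every segment has now
     * been acked (the last_ack == seg_count test seen throughout the
     * process_rmpp_ack() hits above). */
    static bool process_ack(struct seg_send_state *s, int acked_seg)
    {
        if (acked_seg > s->seg_count || acked_seg < s->last_ack)
            return false;  /* out of range or stale */
        s->last_ack = acked_seg;
        return s->last_ack == s->seg_count;
    }

    int main(void)
    {
        struct seg_send_state s = { .seg_count = 3, .seg_num = 1, .last_ack = 0 };
        process_ack(&s, 1);
        process_ack(&s, 2);
        return process_ack(&s, 3) ? 0 : 1;  /* exits 0 once fully acked */
    }
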
/linux/include/drm/

  drm_legacy.h
      97  int seg_count;  (member)
     113  int seg_count;  (member)

/linux/include/linux/

  agpgart.h
      65  size_t seg_count;  /* number of segments */  (member)

/linux/include/uapi/linux/

  agpgart.h
      88  __kernel_size_t seg_count;  /* number of segments */  (member)

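Note: the same field is declared three times above: size_t seg_count in the kernel-internal include/linux/agpgart.h, __kernel_size_t in the uapi header, and compat_size_t (32-bit) in compat_ioctl.h, which is why compat_agpioc_reserve_wrap() copies ureserve.seg_count into kreserve.seg_count field by field. A purely illustrative sketch of that widening copy on a 64-bit kernel; both struct layouts here are hypothetical and reduced to the one member.

    #include <stdint.h>
    #include <stdio.h>

    /* Native layout: __kernel_size_t is 64 bits wide on a 64-bit kernel. */
    struct reserve_native {
        uint64_t seg_count;
    };

    /* Compat layout: compat_size_t pins the field to 32 bits so requests
     * from 32-bit userspace can be decoded. */
    struct reserve_compat {
        uint32_t seg_count;
    };

    int main(void)
    {
        struct reserve_compat ureserve = { .seg_count = 7 };
        struct reserve_native kreserve = {
            /* widening copy, as in compat_agpioc_reserve_wrap() */
            .seg_count = ureserve.seg_count,
        };
        printf("%llu\n", (unsigned long long)kreserve.seg_count);
        return 0;
    }
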
/linux/drivers/memstick/core/

  mspro_block.c
     169  unsigned int seg_count;  (member)
     612  if (msb->current_seg == msb->seg_count) {  in h_mspro_block_transfer_data()
     702  msb->seg_count = blk_rq_map_sg(msb->block_req->q,  in mspro_block_issue_req()
     706  if (!msb->seg_count) {  in mspro_block_issue_req()
     988  msb->seg_count = 1;  in mspro_block_read_attributes()
    1089  msb->seg_count = 1;  in mspro_block_read_attributes()

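Note: in mspro_block.c, seg_count is whatever blk_rq_map_sg() reports for the request (zero is treated as an error), and h_mspro_block_transfer_data() finishes when current_seg reaches seg_count. A trivial sketch of that completion test; struct xfer_state and transfer_one_segment() are hypothetical names.

    #include <stdbool.h>

    /* seg_count: segments the request mapped to; current_seg: segments
     * transferred so far. */
    struct xfer_state {
        unsigned int seg_count;
        unsigned int current_seg;
    };

    /* Handle one segment per call and report whether the request is done
     * (the current_seg == seg_count test at mspro_block.c line 612). */
    static bool transfer_one_segment(struct xfer_state *x)
    {
        if (x->current_seg == x->seg_count)
            return true;
        /* ... data for segment x->current_seg would move here ... */
        x->current_seg++;
        return x->current_seg == x->seg_count;
    }

    int main(void)
    {
        struct xfer_state x = { .seg_count = 4 };
        while (!transfer_one_segment(&x))
            ;
        return 0;
    }
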
/linux/drivers/net/ethernet/qlogic/

  qla3xxx.c
    1938  if (tx_cb->seg_count == 0) {  in ql_process_mac_tx_intr()
    1949  tx_cb->seg_count--;  in ql_process_mac_tx_intr()
    1950  if (tx_cb->seg_count) {  in ql_process_mac_tx_intr()
    1951  for (i = 1; i < tx_cb->seg_count; i++) {  in ql_process_mac_tx_intr()
    2314  seg_cnt = tx_cb->seg_count;  in ql_send_map()
    2470  tx_cb->seg_count = ql_get_seg_count(qdev,  in ql3xxx_send()
    2472  if (tx_cb->seg_count == -1) {  in ql3xxx_send()
    3633  for (j = 1; j < tx_cb->seg_count; j++) {  in ql_reset_work()

  qla3xxx.h
    1038  int seg_count;  (member)

/linux/drivers/net/ethernet/intel/ice/

  ice_flex_type.h
      26  __le32 seg_count;  (member)

  ice_flex_pipe.c
     874  for (i = 0; i < le32_to_cpu(pkg_hdr->seg_count); i++) {  in ice_find_seg_in_pkg()
    1181  u32 seg_count;  in ice_verify_pkg()  (local)
    1194  seg_count = le32_to_cpu(pkg->seg_count);  in ice_verify_pkg()
    1195  if (seg_count < 1)  in ice_verify_pkg()
    1199  if (len < struct_size(pkg, seg_offset, seg_count))  in ice_verify_pkg()
    1203  for (i = 0; i < seg_count; i++) {  in ice_verify_pkg()

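Note: the ice_verify_pkg() hits show the defensive order of operations for a length-prefixed header: read seg_count with le32_to_cpu(), require at least one segment, and confirm the buffer really holds the header plus seg_count offsets (struct_size() keeps that check overflow-safe) before looping over the segments. A userspace sketch of the same validation, with a hypothetical, reduced pkg_hdr layout.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Reduced, hypothetical header: a little-endian segment count followed
     * by one 32-bit offset per segment. */
    struct pkg_hdr {
        uint32_t seg_count;     /* stored little-endian */
        uint32_t seg_offset[];  /* seg_count entries */
    };

    /* Portable le32_to_cpu() equivalent. */
    static uint32_t get_le32(const void *p)
    {
        const uint8_t *b = p;
        return (uint32_t)b[0] | (uint32_t)b[1] << 8 |
               (uint32_t)b[2] << 16 | (uint32_t)b[3] << 24;
    }

    /* Validate before trusting seg_count: at least one segment, and the
     * buffer must be large enough for the header plus seg_count offsets.
     * Dividing instead of multiplying keeps the check overflow-safe, which
     * is what struct_size() provides in the kernel. */
    static bool verify_pkg(const void *buf, size_t len)
    {
        uint32_t seg_count;

        if (len < sizeof(struct pkg_hdr))
            return false;
        seg_count = get_le32(buf);
        if (seg_count < 1)
            return false;
        if (seg_count > (len - sizeof(struct pkg_hdr)) / sizeof(uint32_t))
            return false;
        return true;
    }

    int main(void)
    {
        /* one segment: 4-byte count + one 4-byte offset */
        const uint8_t buf[8] = { 0x01, 0x00, 0x00, 0x00 };
        return verify_pkg(buf, sizeof(buf)) ? 0 : 1;
    }
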
/linux/include/rdma/

  ib_mad.h
     470  int seg_count;  (member)

Completed in 63 milliseconds