/linux/arch/powerpc/platforms/ps3/

mm.c
   541  list_add(&c->link, &r->chunk_list.head);            in dma_sb_map_pages()
   575  if (list_empty(&r->chunk_list.head)) {              in dma_ioc0_map_pages()
   580  last = list_entry(r->chunk_list.head.next,          in dma_ioc0_map_pages()
   611  list_add(&c->link, &r->chunk_list.head);            in dma_ioc0_map_pages()
   661  INIT_LIST_HEAD(&r->chunk_list.head);                in dma_sb_region_create()
   662  spin_lock_init(&r->chunk_list.lock);                in dma_sb_region_create()
   683  INIT_LIST_HEAD(&r->chunk_list.head);                in dma_ioc0_region_create()
   684  spin_lock_init(&r->chunk_list.lock);                in dma_ioc0_region_create()
   802  spin_lock_irqsave(&r->chunk_list.lock, flags);      in dma_sb_map_area()
   847  spin_lock_irqsave(&r->chunk_list.lock, flags);      in dma_ioc0_map_area()
   [all …]
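The ps3 hits show the canonical shape of this pattern: the region embeds a list head and a spinlock side by side, both initialized together in the *_region_create() paths, and every list_add() in the map paths runs under spin_lock_irqsave(). A minimal sketch of that shape, assuming a kernel build context (demo_region, demo_chunk and demo_region_add_chunk are hypothetical names, not the ps3 ones):

#include <linux/errno.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

/* Hypothetical stand-ins for the ps3 region/chunk pair. */
struct demo_chunk {
        unsigned long bus_addr;
        unsigned long len;
        struct list_head link;
};

struct demo_region {
        struct {
                struct list_head head;
                spinlock_t lock;
        } chunk_list;
};

static void demo_region_init(struct demo_region *r)
{
        INIT_LIST_HEAD(&r->chunk_list.head);
        spin_lock_init(&r->chunk_list.lock);
}

/* Record a freshly mapped chunk; the map paths can run with
 * interrupts disabled, hence GFP_ATOMIC and the irqsave lock. */
static int demo_region_add_chunk(struct demo_region *r,
                                 unsigned long bus_addr, unsigned long len)
{
        struct demo_chunk *c;
        unsigned long flags;

        c = kzalloc(sizeof(*c), GFP_ATOMIC);
        if (!c)
                return -ENOMEM;
        c->bus_addr = bus_addr;
        c->len = len;

        spin_lock_irqsave(&r->chunk_list.lock, flags);
        list_add(&c->link, &r->chunk_list.head);
        spin_unlock_irqrestore(&r->chunk_list.lock, flags);
        return 0;
}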
/linux/drivers/infiniband/hw/usnic/

usnic_uiom.c
    66  static void usnic_uiom_put_pages(struct list_head *chunk_list, int dirty)   in usnic_uiom_put_pages() argument
    74  list_for_each_entry_safe(chunk, tmp, chunk_list, list) {                    in usnic_uiom_put_pages()
    88  struct list_head *chunk_list = &uiomr->chunk_list;                          in usnic_uiom_get_pages() local
   117  INIT_LIST_HEAD(chunk_list);                                                 in usnic_uiom_get_pages()
   177  list_add_tail(&chunk->list, chunk_list);                                    in usnic_uiom_get_pages()
   185  usnic_uiom_put_pages(chunk_list, 0);                                        in usnic_uiom_get_pages()
   240  usnic_uiom_put_pages(&uiomr->chunk_list, dirty & writable);                 in __usnic_uiom_reg_release()
   260  chunk = list_first_entry(&uiomr->chunk_list, struct usnic_uiom_chunk,       in usnic_uiom_map_sorted_intervals()
   415  usnic_uiom_put_pages(&uiomr->chunk_list, 0);                                in usnic_uiom_reg_get()
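usnic_uiom_put_pages() is the textbook teardown for such a list: list_for_each_entry_safe() caches the next node, so each chunk can be unlinked and freed mid-walk. A sketch of just that idiom (demo_chunk and demo_put_chunks are illustrative names; the real chunk also carries the array of pinned pages that put_pages releases):

#include <linux/list.h>
#include <linux/slab.h>

struct demo_chunk {
        struct list_head list;
};

/* Teardown in the style of usnic_uiom_put_pages(): the _safe
 * iterator keeps a cursor (tmp) to the next node, so list_del()
 * plus kfree() on the current entry cannot break the walk. */
static void demo_put_chunks(struct list_head *chunk_list)
{
        struct demo_chunk *chunk, *tmp;

        list_for_each_entry_safe(chunk, tmp, chunk_list, list) {
                list_del(&chunk->list);
                kfree(chunk);
        }
}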
usnic_uiom.h
    72  struct list_head chunk_list;   member
/linux/net/sctp/

output.c
   152  INIT_LIST_HEAD(&packet->chunk_list);                                in sctp_packet_init()
   166  list_for_each_entry_safe(chunk, tmp, &packet->chunk_list, list) {   in sctp_packet_free()
   232  list_add_tail(&pad->list, &pkt->chunk_list);                        in sctp_packet_bundle_pad()
   372  list_add_tail(&chunk->list, &packet->chunk_list);                   in __sctp_packet_append_chunk()
   458  list_for_each_entry_safe(chunk, tmp, &packet->chunk_list,           in sctp_packet_pack()
   479  list_for_each_entry_safe(chunk, tmp, &packet->chunk_list, list) {   in sctp_packet_pack()
   522  if (list_empty(&packet->chunk_list))                                in sctp_packet_pack()
   526  &packet->chunk_list);                                               in sctp_packet_pack()
   533  } while (!list_empty(&packet->chunk_list));                         in sctp_packet_pack()
   579  if (list_empty(&packet->chunk_list))                                in sctp_packet_transmit()
   [all …]
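sctp_packet_pack() drives the list the other way: chunks are appended with list_add_tail() in __sctp_packet_append_chunk(), then drained in a do/while loop that runs until list_empty() holds, because packing can push chunks back onto the list. A simplified sketch of that drain loop (demo_* names are hypothetical, and the real code copies each chunk into an skb rather than just freeing it):

#include <linux/list.h>
#include <linux/slab.h>

struct demo_chunk {
        struct list_head list;
};

/* Drain loop in the style of sctp_packet_pack(): process and
 * unlink every queued chunk, then repeat in case processing
 * re-queued anything onto the list. */
static void demo_drain(struct list_head *chunk_list)
{
        struct demo_chunk *chunk, *tmp;

        do {
                list_for_each_entry_safe(chunk, tmp, chunk_list, list) {
                        list_del_init(&chunk->list);
                        /* ... bundle the chunk into the packet here ... */
                        kfree(chunk);
                }
        } while (!list_empty(chunk_list));
}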
/linux/drivers/net/ethernet/mellanox/mlx4/

icm.h
    68  struct list_head chunk_list;   member
   100  iter->chunk = list_empty(&icm->chunk_list) ?                in mlx4_icm_first()
   101  NULL : list_entry(icm->chunk_list.next,                     in mlx4_icm_first()
   114  if (iter->chunk->list.next == &iter->icm->chunk_list) {     in mlx4_icm_next()
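mlx4_icm_first()/mlx4_icm_next() wrap the list in a cursor so callers can walk chunks without touching list internals; a NULL chunk signals the end of the walk. A sketch of the same cursor under hypothetical demo_* names:

#include <linux/list.h>

struct demo_chunk {
        struct list_head list;
};

struct demo_icm {
        struct list_head chunk_list;
};

struct demo_iter {
        struct demo_icm *icm;
        struct demo_chunk *chunk;
};

/* Cursor in the style of mlx4_icm_first()/mlx4_icm_next():
 * a NULL chunk means the iteration is finished. */
static void demo_first(struct demo_icm *icm, struct demo_iter *iter)
{
        iter->icm = icm;
        iter->chunk = list_empty(&icm->chunk_list) ?
                NULL : list_entry(icm->chunk_list.next,
                                  struct demo_chunk, list);
}

static void demo_next(struct demo_iter *iter)
{
        /* Reaching the head again means we wrapped: done. */
        if (iter->chunk->list.next == &iter->icm->chunk_list) {
                iter->chunk = NULL;
                return;
        }
        iter->chunk = list_entry(iter->chunk->list.next,
                                 struct demo_chunk, list);
}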
icm.c
    86  list_for_each_entry_safe(chunk, tmp, &icm->chunk_list, list) {   in mlx4_free_icm()
   155  INIT_LIST_HEAD(&icm->chunk_list);                                 in mlx4_alloc_icm()
   176  list_add_tail(&chunk->list, &icm->chunk_list);                    in mlx4_alloc_icm()
   335  list_for_each_entry(chunk, &icm->chunk_list, list) {              in mlx4_table_find()
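mlx4_table_find() resolves a byte offset inside the chunked allocation by walking the list and accumulating chunk sizes until the offset falls inside one. Roughly (demo_* types are illustrative; the real code walks scatterlist entries, not flat buffers):

#include <linux/list.h>

struct demo_chunk {
        struct list_head list;
        void *buf;
        unsigned long len;
};

struct demo_icm {
        struct list_head chunk_list;
};

/* Lookup in the style of mlx4_table_find(): accumulate chunk
 * lengths until the requested offset lands inside a chunk. */
static void *demo_find(struct demo_icm *icm, unsigned long offset)
{
        struct demo_chunk *chunk;
        unsigned long cur = 0;

        list_for_each_entry(chunk, &icm->chunk_list, list) {
                if (offset < cur + chunk->len)
                        return (char *)chunk->buf + (offset - cur);
                cur += chunk->len;
        }
        return NULL;
}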
/linux/drivers/infiniband/hw/efa/

efa_verbs.c
   118  struct pbl_chunk_list chunk_list;                                    member
  1253  struct pbl_chunk_list *chunk_list = &pbl->phys.indirect.chunk_list;  in pbl_chunk_list_create() local
  1267  chunk_list->size = chunk_list_size;                                  in pbl_chunk_list_create()
  1271  if (!chunk_list->chunks)                                             in pbl_chunk_list_create()
  1281  if (!chunk_list->chunks[i].buf)                                      in pbl_chunk_list_create()
  1309  chunk_list->chunks[i].buf,                                           in pbl_chunk_list_create()
  1345  kfree(chunk_list->chunks[i].buf);                                    in pbl_chunk_list_create()
  1347  kfree(chunk_list->chunks);                                           in pbl_chunk_list_create()
  1353  struct pbl_chunk_list *chunk_list = &pbl->phys.indirect.chunk_list;  in pbl_chunk_list_destroy() local
  1359  kfree(chunk_list->chunks[i].buf);                                    in pbl_chunk_list_destroy()
  [all …]
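efa's pbl chunk_list is the odd one out: it is a counted array of chunk buffers rather than a list_head, so pbl_chunk_list_create() unwinds its error path by index instead of by list walk. A sketch of that allocate-then-unwind shape (demo_* names are hypothetical):

#include <linux/errno.h>
#include <linux/slab.h>

struct demo_chunk_buf {
        void *buf;
};

struct demo_chunk_list {
        unsigned int size;
        struct demo_chunk_buf *chunks;
};

/* Allocation in the style of pbl_chunk_list_create(): the chunk
 * "list" is an array, and the error path frees exactly the
 * entries allocated so far, then the array itself. */
static int demo_chunk_list_create(struct demo_chunk_list *cl,
                                  unsigned int n, size_t chunk_size)
{
        unsigned int i;

        cl->size = n;
        cl->chunks = kcalloc(n, sizeof(*cl->chunks), GFP_KERNEL);
        if (!cl->chunks)
                return -ENOMEM;

        for (i = 0; i < n; i++) {
                cl->chunks[i].buf = kzalloc(chunk_size, GFP_KERNEL);
                if (!cl->chunks[i].buf)
                        goto err_unwind;
        }
        return 0;

err_unwind:
        while (i--)
                kfree(cl->chunks[i].buf);
        kfree(cl->chunks);
        return -ENOMEM;
}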
/linux/drivers/infiniband/hw/hns/

hns_roce_hem.h
    84  struct list_head chunk_list;                                 member
   141  iter->chunk = list_empty(&hem->chunk_list) ? NULL :          in hns_roce_hem_first()
   142  list_entry(hem->chunk_list.next,                             in hns_roce_hem_first()
   155  if (iter->chunk->list.next == &iter->hem->chunk_list) {      in hns_roce_hem_next()
hns_roce_hem.c
   270  INIT_LIST_HEAD(&hem->chunk_list);                                 in hns_roce_alloc_hem()
   285  list_add_tail(&chunk->list, &hem->chunk_list);                    in hns_roce_alloc_hem()
   324  list_for_each_entry_safe(chunk, tmp, &hem->chunk_list, list) {    in hns_roce_free_hem()
   762  list_for_each_entry(chunk, &hem->chunk_list, list) {              in hns_roce_table_find()
/linux/drivers/infiniband/hw/mthca/

mthca_memfree.h
    59  struct list_head chunk_list;                                 member
   103  iter->chunk = list_empty(&icm->chunk_list) ?                 in mthca_icm_first()
   104  NULL : list_entry(icm->chunk_list.next,                      in mthca_icm_first()
   117  if (iter->chunk->list.next == &iter->icm->chunk_list) {      in mthca_icm_next()
mthca_memfree.c
    95  list_for_each_entry_safe(chunk, tmp, &icm->chunk_list, list) {   in mthca_free_icm()
   153  INIT_LIST_HEAD(&icm->chunk_list);                                 in mthca_alloc_icm()
   167  list_add_tail(&chunk->list, &icm->chunk_list);                    in mthca_alloc_icm()
   297  list_for_each_entry(chunk, &icm->chunk_list, list) {              in mthca_table_find()
/linux/drivers/net/ethernet/mellanox/mlx5/core/steering/

dr_icm_pool.c
   184  list_del(&chunk->chunk_list);                                                in dr_icm_chunk_destroy()
   227  list_for_each_entry_safe(chunk, next, &buddy->hot_list, chunk_list)          in dr_icm_buddy_destroy()
   230  list_for_each_entry_safe(chunk, next, &buddy->used_list, chunk_list)         in dr_icm_buddy_destroy()
   274  INIT_LIST_HEAD(&chunk->chunk_list);                                          in dr_icm_chunk_create()
   277  list_add_tail(&chunk->chunk_list, &buddy_mem_pool->used_list);               in dr_icm_chunk_create()
   308  list_for_each_entry_safe(chunk, tmp_chunk, &buddy->hot_list, chunk_list) {   in dr_icm_pool_sync_all_buddy_pools()
   408  list_move_tail(&chunk->chunk_list, &buddy->hot_list);                        in mlx5dr_icm_free_chunk()
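In dr_icm_pool.c the member named chunk_list is the node, not the head: chunks hang off per-buddy used_list/hot_list heads. Freeing a chunk just parks it on hot_list with list_move_tail(); a later sync pass (dr_icm_pool_sync_all_buddy_pools) walks hot_list with the _safe iterator and destroys the entries. A sketch of the deferred-free move (demo_* names are hypothetical):

#include <linux/list.h>

struct demo_chunk {
        struct list_head chunk_list;    /* a list node, despite the name */
};

struct demo_buddy {
        struct list_head used_list;     /* live chunks */
        struct list_head hot_list;      /* freed, awaiting a sync pass */
};

/* Free in the style of mlx5dr_icm_free_chunk(): the chunk is not
 * destroyed immediately but moved to hot_list; list_move_tail()
 * unlinks it from used_list and appends it in one step. */
static void demo_free_chunk(struct demo_buddy *buddy,
                            struct demo_chunk *chunk)
{
        list_move_tail(&chunk->chunk_list, &buddy->hot_list);
}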
dr_types.h
  1013  struct list_head chunk_list;   member
/linux/drivers/gpu/drm/qxl/

qxl_image.c
    51  list_add_tail(&chunk->head, &image->chunk_list);                               in qxl_allocate_chunk()
    68  INIT_LIST_HEAD(&image->chunk_list);                                            in qxl_image_alloc_objects()
    90  list_for_each_entry_safe(chunk, tmp, &dimage->chunk_list, head) {              in qxl_image_free_objects()
   120  drv_chunk = list_first_entry(&dimage->chunk_list, struct qxl_drm_chunk, head); in qxl_image_init_helper()
qxl_drv.h
   168  struct list_head chunk_list;   member
/linux/drivers/s390/block/

dasd_int.h
   682  dasd_init_chunklist(struct list_head *chunk_list, void *mem,    in dasd_init_chunklist() argument
   687  INIT_LIST_HEAD(chunk_list);                                     in dasd_init_chunklist()
   690  list_add(&chunk->list, chunk_list);                             in dasd_init_chunklist()
   694  dasd_alloc_chunk(struct list_head *chunk_list, unsigned long size)   in dasd_alloc_chunk() argument
   699  list_for_each_entry(chunk, chunk_list, list) {                  in dasd_alloc_chunk()
   716  dasd_free_chunk(struct list_head *chunk_list, void *mem)        in dasd_free_chunk() argument
   724  left = chunk_list;                                              in dasd_free_chunk()
   725  list_for_each(p, chunk_list) {                                  in dasd_free_chunk()
   731  if (left->next != chunk_list) {                                 in dasd_free_chunk()
   739  if (left != chunk_list) {                                       in dasd_free_chunk()
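dasd_int.h implements a small first-fit sub-allocator on top of a chunk list: each free chunk begins with its own descriptor, dasd_init_chunklist() seeds the list with one chunk spanning the whole buffer, dasd_alloc_chunk() splits a large-enough chunk, and dasd_free_chunk() re-inserts and coalesces. A compressed sketch of init and first-fit alloc, assuming the same carve-from-the-tail splitting (demo_* names are hypothetical, and coalescing free is omitted):

#include <linux/list.h>

/* Each free chunk starts with its own descriptor, so the
 * allocator needs no external metadata. */
struct demo_mem_chunk {
        struct list_head list;
        unsigned long size;     /* usable bytes after this header */
};

static void demo_init_chunklist(struct list_head *chunk_list,
                                void *mem, unsigned long size)
{
        struct demo_mem_chunk *chunk = mem;

        INIT_LIST_HEAD(chunk_list);
        chunk->size = size - sizeof(*chunk);
        list_add(&chunk->list, chunk_list);
}

/* First fit, as in dasd_alloc_chunk(): take the first free chunk
 * that is big enough and carve the allocation off its tail, or
 * remove it outright when no useful remainder would be left. */
static void *demo_alloc_chunk(struct list_head *chunk_list,
                              unsigned long size)
{
        struct demo_mem_chunk *chunk, *tmp;

        size = (size + 7UL) & ~7UL;     /* keep 8-byte alignment */
        list_for_each_entry(chunk, chunk_list, list) {
                if (chunk->size < size)
                        continue;
                if (chunk->size > size + sizeof(*chunk)) {
                        char *end = (char *)(chunk + 1) + chunk->size;

                        tmp = (struct demo_mem_chunk *)(end - size) - 1;
                        tmp->size = size;
                        chunk->size -= size + sizeof(*chunk);
                        chunk = tmp;
                } else {
                        list_del(&chunk->list);
                }
                return chunk + 1;
        }
        return NULL;
}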
/linux/drivers/crypto/

n2_core.c
   685  struct list_head chunk_list;                                       member
   862  INIT_LIST_HEAD(&rctx->chunk_list);                                 in n2_compute_chunks()
   895  &rctx->chunk_list);                                                in n2_compute_chunks()
   922  list_add_tail(&chunk->entry, &rctx->chunk_list);                   in n2_compute_chunks()
   936  list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {       in n2_chunk_complete()
   963  list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {       in n2_do_ecb()
  1020  list_for_each_entry_safe(c, tmp, &rctx->chunk_list,                in n2_do_chaining()
  1033  list_for_each_entry_safe_reverse(c, tmp, &rctx->chunk_list,        in n2_do_chaining()
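n2_do_chaining() shows the rarer reverse iterator: the chunk list is walked forward to submit work, and on failure walked backward with list_for_each_entry_safe_reverse() to unwind what was already submitted. A sketch of that two-phase walk (demo_* names and the map/unmap helpers are hypothetical):

#include <linux/list.h>
#include <linux/types.h>

struct demo_chunk {
        struct list_head entry;
};

/* Hypothetical per-chunk submit/unwind helpers. */
static int demo_map_one(struct demo_chunk *c) { return 0; }
static void demo_unmap_one(struct demo_chunk *c) { }

/* Two-phase walk in the style of n2_do_chaining(): submit chunks
 * front to back; on failure, unwind the already-submitted ones
 * back to front with the _reverse iterator. */
static int demo_chain(struct list_head *chunk_list)
{
        struct demo_chunk *c, *tmp, *failed = NULL;
        bool past_failure = false;
        int err = 0;

        list_for_each_entry(c, chunk_list, entry) {
                err = demo_map_one(c);
                if (err) {
                        failed = c;
                        break;
                }
        }
        if (!err)
                return 0;

        list_for_each_entry_safe_reverse(c, tmp, chunk_list, entry) {
                if (!past_failure) {
                        /* Entries at or after the failure were never
                         * mapped; skip until we pass the failed one. */
                        if (c == failed)
                                past_failure = true;
                        continue;
                }
                demo_unmap_one(c);
        }
        return err;
}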
/linux/arch/powerpc/include/asm/

ps3.h
    96  } chunk_list;   member
/linux/include/net/sctp/

structs.h
   712  struct list_head chunk_list;   member