Searched refs:heap (Results 1 – 25 of 87) sorted by relevance

/linux/lib/
test_min_heap.c
33 struct min_heap *heap, in pop_verify_heap() argument
36 int *values = heap->data; in pop_verify_heap()
41 min_heap_pop(heap, funcs); in pop_verify_heap()
42 while (heap->nr > 0) { in pop_verify_heap()
57 min_heap_pop(heap, funcs); in pop_verify_heap()
66 struct min_heap heap = { in test_heapify_all() local
84 heap.nr = ARRAY_SIZE(values); in test_heapify_all()
99 struct min_heap heap = { in test_heap_push() local
118 while (heap.nr < heap.size) { in test_heap_push()
132 struct min_heap heap = { in test_heap_pop_push() local
[all …]
/linux/drivers/dma-buf/
dma-heap.c
66 dmabuf = heap->ops->allocate(heap, len, fd_flags, heap_flags); in dma_heap_buffer_alloc()
83 if (!heap) { in dma_heap_open()
202 return heap->priv; in dma_heap_get_drvdata()
214 return heap->name; in dma_heap_get_name()
246 heap = kzalloc(sizeof(*heap), GFP_KERNEL); in dma_heap_add()
247 if (!heap) in dma_heap_add()
267 ret = cdev_add(&heap->heap_cdev, heap->heap_devt, 1); in dma_heap_add()
276 heap->heap_devt, in dma_heap_add()
278 heap->name); in dma_heap_add()
289 return heap; in dma_heap_add()
[all …]
/linux/include/linux/
min_heap.h
39 void *data = heap->data; in min_heapify()
42 if (pos * 2 + 1 >= heap->nr) in min_heapify()
74 min_heapify(heap, i, func); in min_heapify_all()
82 void *data = heap->data; in min_heap_pop()
88 heap->nr--; in min_heap_pop()
90 min_heapify(heap, 0, func); in min_heap_pop()
104 min_heapify(heap, 0, func); in min_heap_pop_push()
112 void *data = heap->data; in min_heap_push()
116 if (WARN_ONCE(heap->nr >= heap->size, "Pushing on a full heap")) in min_heap_push()
120 pos = heap->nr; in min_heap_push()
[all …]
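
A quick sketch of how this interface is used, assuming the pre-6.11 lib/min_heap.h API matched above (a struct min_heap over a caller-owned array plus a struct min_heap_callbacks with elem_size/less/swp); the comparator, swap helper and min_heap_demo() below are illustrative, not code from the tree.

/*
 * Illustrative only: an int min-heap over a caller-owned array, using the
 * lib/min_heap.h interface shown in the results above.
 */
#include <linux/kernel.h>
#include <linux/min_heap.h>

static bool less_than(const void *lhs, const void *rhs)
{
        return *(const int *)lhs < *(const int *)rhs;
}

static void swap_ints(void *lhs, void *rhs)
{
        int tmp = *(int *)lhs;

        *(int *)lhs = *(int *)rhs;
        *(int *)rhs = tmp;
}

static void min_heap_demo(void)
{
        int values[] = { 5, 2, 7, 1, 9, 3 };
        struct min_heap heap = {
                .data = values,
                .nr = ARRAY_SIZE(values),
                .size = ARRAY_SIZE(values),
        };
        const struct min_heap_callbacks funcs = {
                .elem_size = sizeof(int),
                .less = less_than,
                .swp = swap_ints,
        };

        min_heapify_all(&heap, &funcs);         /* establish heap order over values[] */
        while (heap.nr > 0) {
                int smallest = values[0];       /* the root is the current minimum */

                min_heap_pop(&heap, &funcs);    /* drop the root, sift the rest back down */
                (void)smallest;
        }
}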
dma-heap.h
24 struct dma_buf *(*allocate)(struct dma_heap *heap,
51 void *dma_heap_get_drvdata(struct dma_heap *heap);
60 const char *dma_heap_get_name(struct dma_heap *heap);
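
dma-heap.h lists the whole exporter-facing API that dma-heap.c implements: a dma_heap_ops.allocate() hook plus dma_heap_add(), dma_heap_get_drvdata() and dma_heap_get_name(). A hedged sketch of registering a heap provider with that API follows; the heap name, my_heap_allocate() and my_heap_register() are hypothetical, and a real provider (see cma_heap.c further down) builds and exports an actual dma_buf in allocate().

/*
 * Sketch of a dma-buf heap provider registration. The allocate() body is a
 * stub; a real heap allocates backing memory and exports it as a dma_buf.
 */
#include <linux/dma-buf.h>
#include <linux/dma-heap.h>
#include <linux/err.h>

static struct dma_buf *my_heap_allocate(struct dma_heap *heap,
                                        unsigned long len,
                                        unsigned long fd_flags,
                                        unsigned long heap_flags)
{
        /* Allocate pages and export them via dma_buf_export() here. */
        return ERR_PTR(-ENOMEM);
}

static const struct dma_heap_ops my_heap_ops = {
        .allocate = my_heap_allocate,
};

static int my_heap_register(void *drvdata)
{
        struct dma_heap_export_info exp_info = {
                .name = "my-example-heap",      /* shows up as /dev/dma_heap/<name> */
                .ops = &my_heap_ops,
                .priv = drvdata,                /* retrieved later via dma_heap_get_drvdata() */
        };
        struct dma_heap *heap;

        heap = dma_heap_add(&exp_info);
        if (IS_ERR(heap))
                return PTR_ERR(heap);
        return 0;
}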
/linux/fs/ubifs/
lprops.c
68 heap->arr[hpos] = heap->arr[ppos]; in move_up_lpt_heap()
102 heap->arr[hpos] = heap->arr[ppos]; in adjust_lpt_heap()
121 if (cpos >= heap->cnt) in adjust_lpt_heap()
133 heap->arr[hpos] = heap->arr[cpos]; in adjust_lpt_heap()
141 if (cpos >= heap->cnt) in adjust_lpt_heap()
147 heap->arr[hpos] = heap->arr[cpos]; in adjust_lpt_heap()
171 if (heap->cnt >= heap->max_cnt) { in add_to_lpt_heap()
223 heap->cnt -= 1; in remove_from_lpt_heap()
225 heap->arr[hpos] = heap->arr[heap->cnt]; in remove_from_lpt_heap()
227 adjust_lpt_heap(c, heap, heap->arr[hpos], hpos, cat); in remove_from_lpt_heap()
[all …]
find.c
45 struct ubifs_lpt_heap *heap; in valuable() local
51 heap = &c->lpt_heap[cat - 1]; in valuable()
52 if (heap->cnt < heap->max_cnt) in valuable()
129 struct ubifs_lpt_heap *heap; in scan_for_dirty() local
136 lprops = heap->arr[i]; in scan_for_dirty()
292 if (heap->cnt) { in ubifs_find_dirty_leb()
293 lp = heap->arr[0]; in ubifs_find_dirty_leb()
400 struct ubifs_lpt_heap *heap; in do_find_free_space() local
422 lprops = heap->arr[i]; in do_find_free_space()
819 struct ubifs_lpt_heap *heap; in find_dirty_idx_leb() local
[all …]
lpt_commit.c
764 struct ubifs_lpt_heap *heap; in populate_lsave() local
792 for (i = 0; i < heap->cnt; i++) { in populate_lsave()
793 c->lsave[cnt++] = heap->arr[i]->lnum; in populate_lsave()
797 heap = &c->lpt_heap[LPROPS_DIRTY - 1]; in populate_lsave()
798 for (i = 0; i < heap->cnt; i++) { in populate_lsave()
803 heap = &c->lpt_heap[LPROPS_FREE - 1]; in populate_lsave()
804 for (i = 0; i < heap->cnt; i++) { in populate_lsave()
1968 struct ubifs_lpt_heap *heap; in dbg_populate_lsave() local
1987 for (i = 0; i < heap->cnt; i++) in dbg_populate_lsave()
1990 for (i = 0; i < heap->cnt; i++) in dbg_populate_lsave()
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
base.c
258 mmu->type[mmu->type_nr].type = type | mmu->heap[heap].type; in nvkm_mmu_type()
259 mmu->type[mmu->type_nr].heap = heap; in nvkm_mmu_type()
269 mmu->heap[mmu->heap_nr].type = type; in nvkm_mmu_heap()
270 mmu->heap[mmu->heap_nr].size = size; in nvkm_mmu_heap()
282 int heap; in nvkm_mmu_host() local
285 heap = nvkm_mmu_heap(mmu, NVKM_MEM_HOST, ~0ULL); in nvkm_mmu_host()
286 nvkm_mmu_type(mmu, heap, type); in nvkm_mmu_host()
298 nvkm_mmu_type(mmu, heap, type); in nvkm_mmu_host()
323 u8 heap = NVKM_MEM_VRAM; in nvkm_mmu_vram() local
329 heap |= NVKM_MEM_COMP; in nvkm_mmu_vram()
[all …]
/linux/drivers/gpu/drm/nouveau/include/nvkm/core/
mm.h
12 u8 heap; member
34 int nvkm_mm_init(struct nvkm_mm *, u8 heap, u32 offset, u32 length, u32 block);
36 int nvkm_mm_head(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
38 int nvkm_mm_tail(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
44 nvkm_mm_heap_size(struct nvkm_mm *mm, u8 heap) in nvkm_mm_heap_size() argument
49 if (node->heap == heap) in nvkm_mm_heap_size()
/linux/drivers/gpu/drm/nouveau/nvkm/core/
mm.c
99 b->heap = a->heap; in region_head()
111 nvkm_mm_head(struct nvkm_mm *mm, u8 heap, u8 type, u32 size_max, u32 size_min, in nvkm_mm_head() argument
122 if (unlikely(heap != NVKM_MM_HEAP_ANY)) { in nvkm_mm_head()
123 if (this->heap != heap) in nvkm_mm_head()
175 b->heap = a->heap; in region_tail()
186 nvkm_mm_tail(struct nvkm_mm *mm, u8 heap, u8 type, u32 size_max, u32 size_min, in nvkm_mm_tail() argument
198 if (unlikely(heap != NVKM_MM_HEAP_ANY)) { in nvkm_mm_tail()
199 if (this->heap != heap) in nvkm_mm_tail()
240 nvkm_mm_init(struct nvkm_mm *mm, u8 heap, u32 offset, u32 length, u32 block) in nvkm_mm_init() argument
277 node->heap = heap; in nvkm_mm_init()
gpuobj.c
180 ret = nvkm_mm_head(&parent->heap, 0, 1, size, size, in nvkm_gpuobj_ctor()
183 ret = nvkm_mm_tail(&parent->heap, 0, 1, size, size, in nvkm_gpuobj_ctor()
211 return nvkm_mm_init(&gpuobj->heap, 0, 0, gpuobj->size, 1); in nvkm_gpuobj_ctor()
220 nvkm_mm_free(&gpuobj->parent->heap, &gpuobj->node); in nvkm_gpuobj_del()
221 nvkm_mm_fini(&gpuobj->heap); in nvkm_gpuobj_del()
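
Read together, the mm.h, mm.c and gpuobj.c hits show the nvkm_mm suballocator lifecycle: nvkm_mm_init() seeds a range, nvkm_mm_head()/nvkm_mm_tail() carve nodes from either end, and nvkm_mm_free() plus nvkm_mm_fini() release them. The sketch below condenses that lifecycle into one function; the sizes, alignment, include path and demo_suballoc() name are assumptions, while the heap/type arguments mirror the gpuobj.c calls above.

/*
 * Illustrative nvkm_mm round trip: init a range, take an allocation from the
 * head, free it and tear the allocator down again.
 */
#include <core/mm.h>

static int demo_suballoc(void)
{
        struct nvkm_mm mm = {};
        struct nvkm_mm_node *node = NULL;
        int ret;

        /* One heap covering [0, 0x10000) with a minimum block size of 1 unit. */
        ret = nvkm_mm_init(&mm, 0, 0, 0x10000, 1);
        if (ret)
                return ret;

        /* 0x1000 units from the front, 0x100-aligned; heap 0 and type 1 as in gpuobj.c. */
        ret = nvkm_mm_head(&mm, 0, 1, 0x1000, 0x1000, 0x100, &node);
        if (ret == 0)
                nvkm_mm_free(&mm, &node);

        nvkm_mm_fini(&mm);
        return ret;
}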
/linux/drivers/gpu/drm/nouveau/nvif/
mmu.c
32 kfree(mmu->heap); in nvif_mmu_dtor()
50 mmu->heap = NULL; in nvif_mmu_ctor()
69 mmu->heap = kmalloc_array(mmu->heap_nr, sizeof(*mmu->heap), in nvif_mmu_ctor()
73 if (ret = -ENOMEM, !mmu->heap || !mmu->type) in nvif_mmu_ctor()
89 mmu->heap[i].size = args.size; in nvif_mmu_ctor()
109 mmu->type[i].heap = args.heap; in nvif_mmu_ctor()
/linux/Documentation/ABI/testing/
sysfs-kernel-mm-cma
6 heap name (also sometimes called CMA areas).
8 Each CMA heap subdirectory (that is, each
9 /sys/kernel/mm/cma/<cma-heap-name> directory) contains the
15 What: /sys/kernel/mm/cma/<cma-heap-name>/alloc_pages_success
21 What: /sys/kernel/mm/cma/<cma-heap-name>/alloc_pages_fail
/linux/drivers/md/bcache/
movinggc.c
194 return (b = heap_peek(&ca->heap)) ? GC_SECTORS_USED(b) : 0; in bucket_heap_top()
212 ca->heap.used = 0; in bch_moving_gc()
221 if (!heap_full(&ca->heap)) { in bch_moving_gc()
223 heap_add(&ca->heap, b, bucket_cmp); in bch_moving_gc()
224 } else if (bucket_cmp(b, heap_peek(&ca->heap))) { in bch_moving_gc()
228 ca->heap.data[0] = b; in bch_moving_gc()
229 heap_sift(&ca->heap, 0, bucket_cmp); in bch_moving_gc()
234 heap_pop(&ca->heap, b, bucket_cmp); in bch_moving_gc()
238 while (heap_pop(&ca->heap, b, bucket_cmp)) in bch_moving_gc()
util.h
40 #define init_heap(heap, _size, gfp) \ argument
43 (heap)->used = 0; \
44 (heap)->size = (_size); \
45 _bytes = (heap)->size * sizeof(*(heap)->data); \
46 (heap)->data = kvmalloc(_bytes, (gfp) & GFP_KERNEL); \
47 (heap)->data; \
50 #define free_heap(heap) \ argument
52 kvfree((heap)->data); \
53 (heap)->data = NULL; \
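
util.h implements bcache's generic array-backed heap as macros: init_heap()/free_heap() shown here, plus heap_add(), heap_pop(), heap_peek(), heap_full() and heap_sift() as used by movinggc.c and alloc.c over struct bucket pointers. The sketch below runs the same pattern over plain ints; the struct, comparator and demo function are invented for illustration, and the field layout simply mirrors what the macros dereference (size, used, data).

/*
 * Illustrative use of the bcache heap macros with an int element type.
 * The embedded struct provides the size/used/data fields the macros expect.
 */
#include <linux/gfp.h>
#include "util.h"

struct int_heap_demo {
        struct {
                size_t size, used;
                int *data;
        } heap;
};

static bool int_cmp(int a, int b)
{
        return a < b;
}

static int int_heap_demo_run(struct int_heap_demo *d)
{
        int v, i;

        if (!init_heap(&d->heap, 16, GFP_KERNEL))       /* kvmalloc()s the backing array */
                return -ENOMEM;

        for (i = 0; i < 8; i++)
                if (!heap_full(&d->heap))
                        heap_add(&d->heap, i, int_cmp); /* insert and sift into place */

        while (heap_pop(&d->heap, v, int_cmp))
                ;                                       /* each pop copies the current top into v */

        free_heap(&d->heap);
        return 0;
}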
alloc.c
184 ca->heap.used = 0; in invalidate_buckets_lru()
190 if (!heap_full(&ca->heap)) in invalidate_buckets_lru()
191 heap_add(&ca->heap, b, bucket_max_cmp); in invalidate_buckets_lru()
192 else if (bucket_max_cmp(b, heap_peek(&ca->heap))) { in invalidate_buckets_lru()
193 ca->heap.data[0] = b; in invalidate_buckets_lru()
194 heap_sift(&ca->heap, 0, bucket_max_cmp); in invalidate_buckets_lru()
198 for (i = ca->heap.used / 2 - 1; i >= 0; --i) in invalidate_buckets_lru()
199 heap_sift(&ca->heap, i, bucket_min_cmp); in invalidate_buckets_lru()
202 if (!heap_pop(&ca->heap, b, bucket_min_cmp)) { in invalidate_buckets_lru()
/linux/lib/zlib_deflate/
deftree.c
298 top = s->heap[SMALLEST]; \
299 s->heap[SMALLEST] = s->heap[s->heap_len--]; \
323 int v = s->heap[k]; in pqdownheap()
328 smaller(tree, s->heap[j+1], s->heap[j], s->depth)) { in pqdownheap()
335 s->heap[k] = s->heap[j]; k = j; in pqdownheap()
340 s->heap[k] = v; in pqdownheap()
379 n = s->heap[h]; in gen_bitlen()
420 m = s->heap[--h]; in gen_bitlen()
539 s->heap[--(s->heap_max)] = m; in build_tree()
552 s->heap[SMALLEST] = node++; in build_tree()
[all …]
/linux/drivers/dma-buf/heaps/
cma_heap.c
27 struct dma_heap *heap; member
32 struct cma_heap *heap; member
246 struct cma_heap *cma_heap = buffer->heap; in cma_heap_dma_buf_release()
274 static struct dma_buf *cma_heap_allocate(struct dma_heap *heap, in cma_heap_allocate() argument
279 struct cma_heap *cma_heap = dma_heap_get_drvdata(heap); in cma_heap_allocate()
338 buffer->heap = cma_heap; in cma_heap_allocate()
342 exp_info.exp_name = dma_heap_get_name(heap); in cma_heap_allocate()
382 cma_heap->heap = dma_heap_add(&exp_info); in __add_cma_heap()
383 if (IS_ERR(cma_heap->heap)) { in __add_cma_heap()
384 int ret = PTR_ERR(cma_heap->heap); in __add_cma_heap()
Kconfig
5 Choose this option to enable the system dmabuf heap. The system heap
12 Choose this option to enable dma-buf CMA heap. This heap is backed
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
nv04.c
31 struct nvkm_mm heap; member
103 nvkm_mm_free(&iobj->imem->heap, &iobj->node); in nv04_instobj_dtor()
136 ret = nvkm_mm_head(&imem->heap, 0, 1, size, size, align ? align : 1, &iobj->node); in nv04_instobj_new()
167 ret = nvkm_mm_init(&imem->heap, 0, 0, imem->base.reserved, 1); in nv04_instmem_oneinit()
205 nvkm_mm_fini(&imem->heap); in nv04_instmem_dtor()
nv40.c
32 struct nvkm_mm heap; member
103 nvkm_mm_free(&iobj->imem->heap, &iobj->node); in nv40_instobj_dtor()
136 ret = nvkm_mm_head(&imem->heap, 0, 1, size, size, align ? align : 1, &iobj->node); in nv40_instobj_new()
179 ret = nvkm_mm_init(&imem->heap, 0, 0, imem->base.reserved, 1); in nv40_instmem_oneinit()
221 nvkm_mm_fini(&imem->heap); in nv40_instmem_dtor()
/linux/drivers/gpu/drm/ttm/
ttm_resource.c
80 const struct ttm_place *heap = &places[i]; in ttm_resource_places_compat() local
82 if (res->start < heap->fpfn || (heap->lpfn && in ttm_resource_places_compat()
83 (res->start + res->num_pages) > heap->lpfn)) in ttm_resource_places_compat()
86 if ((res->mem_type == heap->mem_type) && in ttm_resource_places_compat()
87 (!(heap->flags & TTM_PL_FLAG_CONTIGUOUS) || in ttm_resource_places_compat()
/linux/arch/x86/boot/compressed/
misc.c
344 asmlinkage __visible void *extract_kernel(void *rmode, memptr heap, in extract_kernel() argument
384 free_mem_ptr = heap; /* Heap */ in extract_kernel()
385 free_mem_end_ptr = heap + BOOT_HEAP_SIZE; in extract_kernel()
427 if (heap > 0x3fffffffffffUL) in extract_kernel()
432 if (heap > ((-__PAGE_OFFSET-(128<<20)-1) & 0x7fffffff)) in extract_kernel()
/linux/tools/perf/util/
s390-cpumsf.c
170 struct auxtrace_heap heap; member
816 return auxtrace_heap__add(&sf->heap, queue_nr, ts); in s390_cpumsf_setup_queue()
852 if (!sf->heap.heap_cnt) in s390_cpumsf_process_queues()
855 if (sf->heap.heap_array[0].ordinal >= timestamp) in s390_cpumsf_process_queues()
858 queue_nr = sf->heap.heap_array[0].queue_nr; in s390_cpumsf_process_queues()
862 auxtrace_heap__pop(&sf->heap); in s390_cpumsf_process_queues()
863 if (sf->heap.heap_cnt) { in s390_cpumsf_process_queues()
864 ts = sf->heap.heap_array[0].ordinal + 1; in s390_cpumsf_process_queues()
873 auxtrace_heap__add(&sf->heap, queue_nr, ts); in s390_cpumsf_process_queues()
877 ret = auxtrace_heap__add(&sf->heap, queue_nr, ts); in s390_cpumsf_process_queues()
[all …]
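
s390-cpumsf.c uses the tools/perf auxtrace_heap as a merge queue: each AUX queue contributes one (queue_nr, ordinal) entry and the decoder always pops the queue with the oldest pending timestamp, re-adding it with its next timestamp afterwards, as s390_cpumsf_process_queues() does above. A hedged sketch of that pattern follows; the queue numbers, timestamps and auxtrace_heap_demo() function are made up.

/*
 * Illustrative auxtrace_heap usage: seed two queues, then drain them in
 * timestamp order.
 */
#include <linux/types.h>
#include "auxtrace.h"

static int auxtrace_heap_demo(struct auxtrace_heap *heap)
{
        int ret;

        ret = auxtrace_heap__add(heap, 0, 1000);        /* queue 0, oldest data at ts 1000 */
        if (!ret)
                ret = auxtrace_heap__add(heap, 1, 500); /* queue 1, oldest data at ts 500 */
        if (ret)
                return ret;

        while (heap->heap_cnt) {
                unsigned int queue_nr = heap->heap_array[0].queue_nr;
                u64 ts = heap->heap_array[0].ordinal;

                auxtrace_heap__pop(heap);
                /*
                 * Decode queue 'queue_nr' up to 'ts' here, then re-add it:
                 * auxtrace_heap__add(heap, queue_nr, next_ts);
                 */
                (void)queue_nr;
                (void)ts;
        }

        auxtrace_heap__free(heap);
        return 0;
}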
/linux/arch/mips/boot/compressed/
head.S
32 PTR_LA a0, (.heap) /* heap address */
51 .comm .heap,BOOT_HEAP_SIZE,4
