/linux/mm/

backing-dev.c
     53  struct bdi_writeback *wb = &bdi->wb;    in bdi_debug_stats_show() local
    293  memset(wb, 0, sizeof(*wb));    in wb_init()
    464  if (wb && wb->blkcg_css != blkcg_css) {    in cgwb_create()
    466  wb = NULL;    in cgwb_create()
    469  if (wb)    in cgwb_create()
    473  wb = kmalloc(sizeof(*wb), gfp);    in cgwb_create()
    474  if (!wb) {    in cgwb_create()
    535  kfree(wb);    in cgwb_create()
    574  if (wb) {    in wb_get_lookup()
    585  return wb;    in wb_get_lookup()
  [all …]

page-writeback.c
    128  struct bdi_writeback *wb;    member
    894  struct bdi_writeback *wb = dtc->wb;    in wb_position_ratio() local
   1179  struct bdi_writeback *wb = dtc->wb;    in wb_update_dirty_ratelimit() local
   1337  struct bdi_writeback *wb = gdtc->wb;    in __wb_update_bandwidth() local
   1394  wb->dirtied_stamp = wb_stat(wb, WB_DIRTIED);    in wb_bandwidth_estimate_start()
   1395  wb->written_stamp = wb_stat(wb, WB_WRITTEN);    in wb_bandwidth_estimate_start()
   1514  struct bdi_writeback *wb = dtc->wb;    in wb_dirty_limits() local
   1796  wb->dirty_sleep = now;    in balance_dirty_pages()
   1891  if (!wb)    in balance_dirty_pages_ratelimited()
   1892  wb = &bdi->wb;    in balance_dirty_pages_ratelimited()
  [all …]
/linux/fs/

fs-writeback.c
    100  if (wb_has_dirty_io(wb) && list_empty(&wb->b_dirty) &&    in wb_io_lists_depopulated()
    101  list_empty(&wb->b_io) && list_empty(&wb->b_more_io)) {    in wb_io_lists_depopulated()
    257  if (!wb)    in __inode_attach_wb()
    258  wb = &bdi->wb;    in __inode_attach_wb()
    284  if (wb != &wb->bdi->wb)    in inode_cgwb_move_to_attached()
    760  struct bdi_writeback *wb = wbc->wb;    in wbc_detach_inode() local
    766  if (!wb)    in wbc_detach_inode()
   1078  if (!wb) {    in cgroup_writeback_by_id()
   1181  return wb;    in locked_inode_to_wb_and_lock_list()
   1190  return wb;    in inode_to_wb_and_lock_list()
  [all …]
/linux/include/linux/

backing-dev.h
     75  wb_stat_mod(wb, item, 1);    in inc_wb_stat()
    203  return &bdi->wb;    in wb_find_current()
    211  if (likely(wb && wb->blkcg_css == task_css(current, io_cgrp_id)))    in wb_find_current()
    212  return wb;    in wb_find_current()
    232  if (wb && unlikely(!wb_tryget(wb)))    in wb_get_create_current()
    233  wb = NULL;    in wb_get_create_current()
    236  if (unlikely(!wb)) {    in wb_get_create_current()
    243  return wb;    in wb_get_create_current()
    285  return wbc->wb ? wbc->wb : &inode_to_bdi(inode)->wb;    in inode_to_wb_wbc()
    347  return &bdi->wb;    in wb_find_current()
  [all …]

backing-dev-defs.h
    231  if (wb != &wb->bdi->wb)    in wb_tryget()
    232  return percpu_ref_tryget(&wb->refcnt);    in wb_tryget()
    240  static inline void wb_get(struct bdi_writeback *wb)    in wb_get() argument
    242  if (wb != &wb->bdi->wb)    in wb_get()
    243  percpu_ref_get(&wb->refcnt);    in wb_get()
    253  if (WARN_ON_ONCE(!wb->bdi)) {    in wb_put_many()
    261  if (wb != &wb->bdi->wb)    in wb_put_many()
    262  percpu_ref_put_many(&wb->refcnt, nr);    in wb_put_many()
    269  static inline void wb_put(struct bdi_writeback *wb)    in wb_put()
    271  wb_put_many(wb, 1);    in wb_put()
  [all …]
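The backing-dev-defs.h hits above are the reference-counting helpers for cgroup writeback: the embedded root wb (&bdi->wb) is never refcounted, while per-cgroup wbs are pinned through a percpu_ref. A minimal usage sketch, using only the wb_tryget()/wb_put() pair shown in the listing; the caller and its structure are hypothetical:

    /* Pin a bdi_writeback before issuing IO against it; a failed
     * wb_tryget() means the cgwb is already being torn down. */
    static void example_issue_writeback(struct bdi_writeback *wb)
    {
            if (!wb_tryget(wb))
                    return;         /* cgwb on its way out, skip it */

            /* ... queue or submit writeback work against wb ... */

            wb_put(wb);             /* drop the reference taken above */
    }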
writeback.h
     83  struct bdi_writeback *wb;  /* wb this writeback is issued under */    member
    113  ((wbc)->wb ? (wbc)->wb->blkcg_css : blkcg_root_css)
    220  bool cleanup_offline_cgwb(struct bdi_writeback *wb);
    287  if (wbc->wb)    in wbc_init_bio()
    288  bio_associate_blkg_from_css(bio, wbc->wb->blkcg_css);    in wbc_init_bio()
    371  unsigned long wb_calc_thresh(struct bdi_writeback *wb, unsigned long thresh);
    373  void wb_update_bandwidth(struct bdi_writeback *wb);
    375  bool wb_over_bg_thresh(struct bdi_writeback *wb);
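wbc_init_bio() is how a filesystem's writeback path hands the cgroup ownership recorded in wbc->wb down to the block layer. A hedged sketch of a call site, assuming the wbc has already been attached to the inode; the helper name and surrounding code are illustrative, not taken from any particular filesystem:

    /* After building a bio for dirty pagecache, associate it with the
     * blk-cgroup recorded in the writeback_control before submitting,
     * so cgroup IO throttling charges the right group. */
    static void example_submit_wb_bio(struct writeback_control *wbc,
                                      struct bio *bio)
    {
            wbc_init_bio(wbc, bio);   /* ties bio to wbc->wb->blkcg_css, if set */
            submit_bio(bio);
    }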
/linux/drivers/gpu/drm/amd/display/dc/dml/dcn20/

dcn20_fpu.c
     81  pipes[pipe_cnt].dout.wb.wb_src_height = wb_info->dwb_params.cnv_params.crop_height;    in dcn20_populate_dml_writeback_from_context()
     82  pipes[pipe_cnt].dout.wb.wb_src_width = wb_info->dwb_params.cnv_params.crop_width;    in dcn20_populate_dml_writeback_from_context()
     83  pipes[pipe_cnt].dout.wb.wb_dst_width = wb_info->dwb_params.dest_width;    in dcn20_populate_dml_writeback_from_context()
     84  pipes[pipe_cnt].dout.wb.wb_dst_height = wb_info->dwb_params.dest_height;    in dcn20_populate_dml_writeback_from_context()
     85  pipes[pipe_cnt].dout.wb.wb_htaps_luma = 1;    in dcn20_populate_dml_writeback_from_context()
     86  pipes[pipe_cnt].dout.wb.wb_vtaps_luma = 1;    in dcn20_populate_dml_writeback_from_context()
     89  pipes[pipe_cnt].dout.wb.wb_hratio = 1.0;    in dcn20_populate_dml_writeback_from_context()
     90  pipes[pipe_cnt].dout.wb.wb_vratio = 1.0;    in dcn20_populate_dml_writeback_from_context()
     93  pipes[pipe_cnt].dout.wb.wb_pixel_format = dm_420_8;    in dcn20_populate_dml_writeback_from_context()
     95  pipes[pipe_cnt].dout.wb.wb_pixel_format = dm_420_10;    in dcn20_populate_dml_writeback_from_context()
  [all …]
/linux/include/trace/events/

writeback.h
    158  if (wbc->wb)    in __trace_wbc_assign_cgroup()
    241  TP_ARGS(folio, wb),
    257  __entry->bdi_id = wb->bdi->id;
    349  TP_ARGS(wb, work),
    388  TP_ARGS(wb, work))
    408  TP_PROTO(struct bdi_writeback *wb),
    409  TP_ARGS(wb),
    426  TP_ARGS(wb))
    499  TP_PROTO(struct bdi_writeback *wb,
    577  TP_PROTO(struct bdi_writeback *wb,
  [all …]
/linux/drivers/staging/media/atomisp/pci/isp/kernels/wb/wb_1.0/

ia_css_wb.host.c
     59  const struct sh_css_isp_wb_params *wb,    in ia_css_wb_dump() argument
     62  if (!wb) return;    in ia_css_wb_dump()
     65  "wb_gain_shift", wb->gain_shift);    in ia_css_wb_dump()
     67  "wb_gain_gr", wb->gain_gr);    in ia_css_wb_dump()
     69  "wb_gain_r", wb->gain_r);    in ia_css_wb_dump()
     71  "wb_gain_b", wb->gain_b);    in ia_css_wb_dump()
     73  "wb_gain_gb", wb->gain_gb);    in ia_css_wb_dump()
/linux/drivers/net/wireless/microchip/wilc1000/

spi.c
    313  .tx_buf = wb,    in wilc_spi_tx_rx()
    429  u8 wb[32], rb[32];    in wilc_spi_single_read() local
    436  memset(wb, 0x0, sizeof(wb));    in wilc_spi_single_read()
    438  c = (struct wilc_spi_cmd *)wb;    in wilc_spi_single_read()
    524  u8 wb[32], rb[32];    in wilc_spi_write_cmd() local
    529  memset(wb, 0x0, sizeof(wb));    in wilc_spi_write_cmd()
    531  c = (struct wilc_spi_cmd *)wb;    in wilc_spi_write_cmd()
    599  u8 wb[32], rb[32];    in wilc_spi_dma_rw() local
    606  memset(wb, 0x0, sizeof(wb));    in wilc_spi_dma_rw()
    728  u8 wb[32], rb[32];    in wilc_spi_special_cmd() local
  [all …]
/linux/drivers/gpu/drm/radeon/

r600_dma.c
     55  if (rdev->wb.enabled)    in r600_dma_get_rptr()
     56  rptr = rdev->wb.wb[ring->rptr_offs/4];    in r600_dma_get_rptr()
    143  upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF);    in r600_dma_resume()
    145  ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC));    in r600_dma_resume()
    147  if (rdev->wb.enabled)    in r600_dma_resume()
    243  gpu_addr = rdev->wb.gpu_addr + index;    in r600_dma_ring_test()
    246  rdev->wb.wb[index/4] = cpu_to_le32(tmp);    in r600_dma_ring_test()
    260  tmp = le32_to_cpu(rdev->wb.wb[index/4]);    in r600_dma_ring_test()
    350  gpu_addr = rdev->wb.gpu_addr + index;    in r600_dma_ib_test()
    381  tmp = le32_to_cpu(rdev->wb.wb[index/4]);    in r600_dma_ib_test()
  [all …]
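In radeon, rdev->wb is a GPU-writable scratch page used for ring read pointers and self-tests. A rough sketch of the seed-and-poll pattern that r600_dma_ring_test() follows, where the helper name, magic values, and timeout handling are illustrative rather than the driver's actual code:

    /* Hypothetical helper (not the driver's function): seed a writeback
     * slot from the CPU, then poll until the DMA engine overwrites it,
     * proving that GPU writes to rdev->wb.gpu_addr + index reach memory. */
    static bool example_wb_slot_test(struct radeon_device *rdev,
                                     unsigned index, unsigned timeout_usec)
    {
            unsigned i;

            rdev->wb.wb[index / 4] = cpu_to_le32(0xCAFEDEAD);   /* CPU seed */

            /* ... emit a DMA packet that writes 0xDEADBEEF to
             * rdev->wb.gpu_addr + index and kick the ring (omitted) ... */

            for (i = 0; i < timeout_usec; i++) {
                    if (le32_to_cpu(rdev->wb.wb[index / 4]) == 0xDEADBEEF)
                            return true;    /* engine wrote the slot back */
                    udelay(1);
            }
            return false;   /* writeback never arrived */
    }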
radeon_device.c
    423  rdev->wb.enabled = false;    in radeon_wb_disable()
    437  if (rdev->wb.wb_obj) {    in radeon_wb_fini()
    444  rdev->wb.wb = NULL;    in radeon_wb_fini()
    445  rdev->wb.wb_obj = NULL;    in radeon_wb_fini()
    465  &rdev->wb.wb_obj);    in radeon_wb_init()
    476  &rdev->wb.gpu_addr);    in radeon_wb_init()
    483  r = radeon_bo_kmap(rdev->wb.wb_obj, (void **)&rdev->wb.wb);    in radeon_wb_init()
    493  memset((char *)rdev->wb.wb, 0, RADEON_GPU_PAGE_SIZE);    in radeon_wb_init()
    498  rdev->wb.enabled = false;    in radeon_wb_init()
    507  rdev->wb.enabled = true;    in radeon_wb_init()
  [all …]
cik_sdma.c
     67  if (rdev->wb.enabled) {    in cik_sdma_get_rptr()
     68  rptr = rdev->wb.wb[ring->rptr_offs/4];    in cik_sdma_get_rptr()
    138  if (rdev->wb.enabled) {    in cik_sdma_ring_ib_execute()
    402  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));    in cik_sdma_gfx_resume()
    404  if (rdev->wb.enabled)    in cik_sdma_gfx_resume()
    658  gpu_addr = rdev->wb.gpu_addr + index;    in cik_sdma_ring_test()
    661  rdev->wb.wb[index/4] = cpu_to_le32(tmp);    in cik_sdma_ring_test()
    676  tmp = le32_to_cpu(rdev->wb.wb[index/4]);    in cik_sdma_ring_test()
    715  gpu_addr = rdev->wb.gpu_addr + index;    in cik_sdma_ib_test()
    718  rdev->wb.wb[index/4] = cpu_to_le32(tmp);    in cik_sdma_ib_test()
  [all …]
ni_dma.c
     57  if (rdev->wb.enabled) {    in cayman_dma_get_rptr()
     58  rptr = rdev->wb.wb[ring->rptr_offs/4];    in cayman_dma_get_rptr()
    127  if (rdev->wb.enabled) {    in cayman_dma_ring_ib_execute()
    222  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);    in cayman_dma_resume()
    224  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));    in cayman_dma_resume()
    226  if (rdev->wb.enabled)    in cayman_dma_resume()
/linux/Documentation/devicetree/bindings/interrupt-controller/

qca,ath79-cpu-intc.txt
      5  qca,ddr-wb-channels and qca,ddr-wb-channel-interrupts properties.
     20  - qca,ddr-wb-channel-interrupts: List of the interrupts needing a write
     22  - qca,ddr-wb-channels: List of phandles to the write buffer channels for
     23  each interrupt. If qca,ddr-wb-channel-interrupts is not present the interrupt
     34  qca,ddr-wb-channel-interrupts = <2>, <3>, <4>, <5>;
     35  qca,ddr-wb-channels = <&ddr_ctrl 3>, <&ddr_ctrl 2>,
     43  #qca,ddr-wb-channel-cells = <1>;
/linux/security/apparmor/

match.c
    673  #define inc_wb_pos(wb) \    argument
    675  wb->pos = (wb->pos + 1) & (WB_HISTORY_SIZE - 1); \
    676  wb->len = (wb->len + 1) & (WB_HISTORY_SIZE - 1); \
    683  unsigned int pos = wb->pos;    in is_loop()
    686  if (wb->history[pos] < state)    in is_loop()
    715  AA_BUG(!wb);    in leftmatch_fb()
    730  wb->history[wb->pos] = state;    in leftmatch_fb()
    741  inc_wb_pos(wb);    in leftmatch_fb()
    749  wb->history[wb->pos] = state;    in leftmatch_fb()
    760  inc_wb_pos(wb);    in leftmatch_fb()
  [all …]
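Here wb is a DFA walk-history ring: inc_wb_pos() advances the position with a power-of-two mask instead of a modulo. A small self-contained sketch of that wrap-around bookkeeping; the structure, constant name, and value are chosen for illustration only and may differ from the kernel's WB_HISTORY_SIZE:

    #define HISTORY_SIZE 16              /* illustrative; must be a power of two */

    struct wb_history {
            unsigned int pos;            /* next slot to write */
            unsigned int len;            /* valid entries, wraps with the same mask */
            unsigned int history[HISTORY_SIZE];
    };

    /* Record a state and advance, wrapping with a mask rather than '%'. */
    static void wb_record(struct wb_history *wb, unsigned int state)
    {
            wb->history[wb->pos] = state;
            wb->pos = (wb->pos + 1) & (HISTORY_SIZE - 1);
            wb->len = (wb->len + 1) & (HISTORY_SIZE - 1);
    }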
/linux/drivers/usb/class/

cdc-acm.c
    186  wb = &acm->wb[wbn];    in acm_wb_alloc()
    187  if (!wb->use) {    in acm_wb_alloc()
    189  wb->len = 0;    in acm_wb_alloc()
    234  wb->urb->transfer_buffer = wb->buf;    in acm_start_wb()
    235  wb->urb->transfer_dma = wb->dmah;    in acm_start_wb()
    236  wb->urb->transfer_buffer_length = wb->len;    in acm_start_wb()
    816  wb = &acm->wb[wbn];    in acm_tty_write()
   1111  for (wb = &acm->wb[0], i = 0; i < ACM_NW; i++, wb++)    in acm_write_buffers_free()
   1130  for (wb = &acm->wb[0], i = 0; i < ACM_NW; i++, wb++) {    in acm_write_buffers_alloc()
   1136  --wb;    in acm_write_buffers_alloc()
  [all …]
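cdc-acm keeps a small fixed pool (ACM_NW) of write buffers and acm_wb_alloc() claims the first unused one. A simplified, self-contained sketch of that free-slot scan; the struct layout and helper name are illustrative, not the driver's:

    struct write_buf {
            int use;             /* nonzero while a transfer owns this buffer */
            unsigned int len;    /* bytes queued in the buffer */
    };

    /* Return the index of a free write buffer, or -1 if all are busy. */
    static int example_wb_alloc(struct write_buf *wbs, int n)
    {
            int i;

            for (i = 0; i < n; i++) {
                    if (!wbs[i].use) {
                            wbs[i].use = 1;      /* claim it */
                            wbs[i].len = 0;
                            return i;
                    }
            }
            return -1;
    }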
/linux/scripts/

extract-cert.c
     72  static BIO *wb;    variable
     80  if (!wb) {    in write_cert()
     81  wb = BIO_new_file(cert_dst, "wb");    in write_cert()
     82  ERR(!wb, "%s", cert_dst);    in write_cert()
     85  ERR(!i2d_X509_bio(wb, x509), "%s", cert_dst);    in write_cert()
    146  if (wb && !x509) {    in main()
    159  BIO_free(wb);    in main()
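In extract-cert.c the name wb is simply an OpenSSL BIO opened with the stdio-style "wb" (write, binary) mode, into which certificates are DER-encoded. A standalone userspace sketch of the same pattern; the function name and error handling are illustrative:

    #include <openssl/bio.h>
    #include <openssl/x509.h>

    /* Write one X509 certificate to a file in DER form via a BIO. */
    static int write_cert_der(X509 *x509, const char *cert_dst)
    {
            BIO *wb = BIO_new_file(cert_dst, "wb");   /* binary write mode */
            int ok;

            if (!wb)
                    return -1;
            ok = i2d_X509_bio(wb, x509);              /* DER-encode into the BIO */
            BIO_free(wb);
            return ok ? 0 : -1;
    }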
kallsyms.c
    698  int wa, wb;    in compare_symbols() local
    708  wb = (sb->sym[0] == 'w') || (sb->sym[0] == 'W');    in compare_symbols()
    709  if (wa != wb)    in compare_symbols()
    710  return wa - wb;    in compare_symbols()
    714  wb = may_be_linker_script_provide_symbol(sb);    in compare_symbols()
    715  if (wa != wb)    in compare_symbols()
    716  return wa - wb;    in compare_symbols()
    720  wb = strspn(sym_name(sb), "_");    in compare_symbols()
    721  if (wa != wb)    in compare_symbols()
    722  return wa - wb;    in compare_symbols()
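In kallsyms, wa and wb are per-symbol weights compared pairwise in a sort comparator: weak symbols ('w'/'W') sort after strong ones, and ties fall through to the next criterion. A minimal sketch of one such tie-breaking step, with the struct and helper names being hypothetical:

    struct sym_entry_ex { char type; const char *name; };

    /* Negative/positive reorders the pair; zero keeps the relative order.
     * Strong symbols (weight 0) sort ahead of weak ones (weight 1). */
    static int compare_weak_first(const struct sym_entry_ex *sa,
                                  const struct sym_entry_ex *sb)
    {
            int wa = (sa->type == 'w') || (sa->type == 'W');
            int wb = (sb->type == 'w') || (sb->type == 'W');

            return wa - wb;
    }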
/linux/Documentation/devicetree/bindings/memory-controllers/

qca,ath79-ddr-controller.yaml
     29  "#qca,ddr-wb-channel-cells":
     41  - "#qca,ddr-wb-channel-cells"
     53  #qca,ddr-wb-channel-cells = <1>;
     58  qca,ddr-wb-channel-interrupts = <2>, <3>, <4>, <5>;
     59  qca,ddr-wb-channels = <&ddr_ctrl 3>, <&ddr_ctrl 2>,
/linux/drivers/gpu/drm/amd/amdgpu/

sdma_v3_0.c
    353  return ring->adev->wb.wb[ring->rptr_offs] >> 2;    in sdma_v3_0_ring_get_rptr()
    370  wptr = ring->adev->wb.wb[ring->wptr_offs] >> 2;    in sdma_v3_0_ring_get_wptr()
    390  u32 *wb = (u32 *)&adev->wb.wb[ring->wptr_offs];    in sdma_v3_0_ring_set_wptr() local
    392  WRITE_ONCE(*wb, (lower_32_bits(ring->wptr) << 2));    in sdma_v3_0_ring_set_wptr()
    395  u32 *wb = (u32 *)&adev->wb.wb[ring->wptr_offs];    in sdma_v3_0_ring_set_wptr() local
    834  gpu_addr = adev->wb.gpu_addr + (index * 4);    in sdma_v3_0_ring_test_ring()
    836  adev->wb.wb[index] = cpu_to_le32(tmp);    in sdma_v3_0_ring_test_ring()
    851  tmp = le32_to_cpu(adev->wb.wb[index]);    in sdma_v3_0_ring_test_ring()
    888  gpu_addr = adev->wb.gpu_addr + (index * 4);    in sdma_v3_0_ring_test_ib()
    890  adev->wb.wb[index] = cpu_to_le32(tmp);    in sdma_v3_0_ring_test_ib()
  [all …]

amdgpu_ih.c
     97  ih->wptr_addr = adev->wb.gpu_addr + wptr_offs * 4;    in amdgpu_ih_ring_init()
     98  ih->wptr_cpu = &adev->wb.wb[wptr_offs];    in amdgpu_ih_ring_init()
     99  ih->rptr_addr = adev->wb.gpu_addr + rptr_offs * 4;    in amdgpu_ih_ring_init()
    100  ih->rptr_cpu = &adev->wb.wb[rptr_offs];    in amdgpu_ih_ring_init()
si_dma.c
     43  return ring->adev->wb.wb[ring->rptr_offs>>2];    in si_dma_ring_get_rptr()
    157  rptr_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4);    in si_dma_start()
    216  gpu_addr = adev->wb.gpu_addr + (index * 4);    in si_dma_ring_test_ring()
    218  adev->wb.wb[index] = cpu_to_le32(tmp);    in si_dma_ring_test_ring()
    231  tmp = le32_to_cpu(adev->wb.wb[index]);    in si_dma_ring_test_ring()
    268  gpu_addr = adev->wb.gpu_addr + (index * 4);    in si_dma_ring_test_ib()
    270  adev->wb.wb[index] = cpu_to_le32(tmp);    in si_dma_ring_test_ib()
    293  tmp = le32_to_cpu(adev->wb.wb[index]);    in si_dma_ring_test_ib()

amdgpu_ring.c
    225  adev->wb.gpu_addr + (ring->trail_fence_offs * 4);    in amdgpu_ring_init()
    226  ring->trail_fence_cpu_addr = &adev->wb.wb[ring->trail_fence_offs];    in amdgpu_ring_init()
    233  ring->cond_exe_gpu_addr = adev->wb.gpu_addr + (ring->cond_exe_offs * 4);    in amdgpu_ring_init()
    234  ring->cond_exe_cpu_addr = &adev->wb.wb[ring->cond_exe_offs];    in amdgpu_ring_init()

mes_v10_1.c
     49  atomic64_set((atomic64_t *)&adev->wb.wb[ring->wptr_offs],    in mes_v10_1_ring_set_wptr()
     59  return ring->adev->wb.wb[ring->rptr_offs];    in mes_v10_1_ring_get_rptr()
     68  &ring->adev->wb.wb[ring->wptr_offs]);    in mes_v10_1_ring_get_wptr()
    576  adev->wb.gpu_addr + (adev->mes.sch_ctx_offs * 4);    in mes_v10_1_allocate_mem_slots()
    578  (uint64_t *)&adev->wb.wb[adev->mes.sch_ctx_offs];    in mes_v10_1_allocate_mem_slots()
    587  adev->wb.gpu_addr + (adev->mes.query_status_fence_offs * 4);    in mes_v10_1_allocate_mem_slots()
    589  (uint64_t *)&adev->wb.wb[adev->mes.query_status_fence_offs];    in mes_v10_1_allocate_mem_slots()
    676  wb_gpu_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4);    in mes_v10_1_mqd_init()
    682  wb_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);    in mes_v10_1_mqd_init()