/* SPDX-License-Identifier: GPL-2.0 or Linux-OpenIB */
/* Copyright (c) 2015 - 2019 Intel Corporation */
#ifndef IRDMA_PBLE_H
#define IRDMA_PBLE_H

/* Sizing/shift constants for PBLE (Physical Buffer List Entry) management. */
#define PBLE_SHIFT		6	/* log2 of the minimum PBLE chunk granularity */
#define PBLE_PER_PAGE		512	/* 4K page / 8-byte PBLE = 512 entries */
#define HMC_PAGED_BP_SHIFT	12	/* log2 of HMC paged backing-page size (4K) */
#define PBLE_512_SHIFT		9	/* log2 of PBLE_PER_PAGE */
#define PBLE_INVALID_IDX	0xffffffff	/* sentinel: no valid PBLE index */

/* Indirection depth of a PBLE allocation (0 = direct, 2 = two-level tree). */
enum irdma_pble_level {
	PBLE_LEVEL_0 = 0,
	PBLE_LEVEL_1 = 1,
	PBLE_LEVEL_2 = 2,
};

/*
 * How a chunk's backing memory was obtained from the HMC.
 * NOTE(review): CONTIGOUS is a long-standing misspelling of CONTIGUOUS;
 * keep as-is — renaming would break every user of this constant.
 */
enum irdma_alloc_type {
	PBLE_NO_ALLOC = 0,
	PBLE_SD_CONTIGOUS = 1,
	PBLE_SD_PAGED = 2,
};

struct irdma_chunk;

/* Location of an allocation within its backing chunk's bitmap. */
struct irdma_pble_chunkinfo {
	struct irdma_chunk *pchunk;	/* chunk the PBLEs were carved from */
	u64 bit_idx;			/* first bit claimed in the chunk bitmap */
	u64 bits_used;			/* number of bitmap bits this allocation holds */
};

/* One level-1 PBLE segment: a run of entries plus its chunk bookkeeping. */
struct irdma_pble_info {
	u64 *addr;	/* CPU-visible address of the PBLE entries */
	u32 idx;	/* index of the segment (units not visible here — see irdma_pble.c) */
	u32 cnt;	/* number of PBLEs in this segment */
	struct irdma_pble_chunkinfo chunkinfo;
};

/* Two-level PBLE allocation: a root table pointing at leaf segments. */
struct irdma_pble_level2 {
	struct irdma_pble_info root;	/* root table of pointers to leaves */
	struct irdma_pble_info *leaf;	/* array of leaf_cnt leaf descriptors */
	struct irdma_virt_mem leafmem;	/* virtual memory backing the leaf array */
	u32 leaf_cnt;
};

/*
 * A completed PBLE allocation handed back to callers of irdma_get_pble().
 * Exactly one union member is valid, selected by @level.
 */
struct irdma_pble_alloc {
	u32 total_cnt;			/* total PBLEs allocated */
	enum irdma_pble_level level;	/* discriminates the union below */
	union {
		struct irdma_pble_info level1;
		struct irdma_pble_level2 level2;
	};
};

/* HMC segment-descriptor / page-descriptor index triple. */
struct sd_pd_idx {
	u32 sd_idx;	/* segment descriptor index */
	u32 pd_idx;	/* page descriptor index (absolute) */
	u32 rel_pd_idx;	/* page descriptor index relative to its SD */
};

/* Scratch context used while adding backing pages for a new chunk. */
struct irdma_add_page_info {
	struct irdma_chunk *chunk;
	struct irdma_hmc_sd_entry *sd_entry;
	struct irdma_hmc_info *hmc_info;
	struct sd_pd_idx idx;	/* SD/PD position where pages are being added */
	u32 pages;		/* number of pages covered by this operation */
};

/*
 * A contiguous region of PBLE-backing memory tracked by the resource
 * manager. Allocation within the chunk is bitmap-based (see bitmapbuf).
 */
struct irdma_chunk {
	struct list_head list;		/* linkage on irdma_pble_prm.clist */
	struct irdma_dma_info dmainfo;
	void *bitmapbuf;		/* allocation bitmap; guarded by prm_lock */

	u32 sizeofbitmap;	/* number of bits in bitmapbuf */
	u64 size;		/* chunk size in bytes */
	void *vaddr;		/* CPU virtual address of the chunk memory */
	u64 fpm_addr;		/* function-private memory address of the chunk */
	u32 pg_cnt;		/* number of backing pages */
	enum irdma_alloc_type type;	/* how the backing memory was allocated */
	struct irdma_sc_dev *dev;
	struct irdma_virt_mem chunkmem;	/* memory holding this struct itself */
};

/* PBLE resource manager: the list of chunks and global accounting. */
struct irdma_pble_prm {
	struct list_head clist;		/* list of irdma_chunk */
	spinlock_t prm_lock;		/* protect prm bitmap */
	u64 total_pble_alloc;		/* lifetime total PBLEs allocated */
	u64 free_pble_cnt;		/* PBLEs currently free across all chunks */
	u8 pble_shift;			/* log2 of allocation granularity */
};

/* Per-device PBLE resource state, including FPM cursor and statistics. */
struct irdma_hmc_pble_rsrc {
	u32 unallocated_pble;		/* PBLEs not yet backed by chunks */
	struct mutex pble_mutex_lock;	/* protect PBLE resource */
	struct irdma_sc_dev *dev;
	u64 fpm_base_addr;		/* base of the PBLE FPM region */
	u64 next_fpm_addr;		/* next FPM address to carve a chunk from */
	struct irdma_pble_prm pinfo;	/* chunk list + bitmap manager */
	u64 allocdpbles;		/* stats: PBLEs handed out */
	u64 freedpbles;			/* stats: PBLEs returned */
	u32 stats_direct_sds;		/* stats: contiguous SD allocations */
	u32 stats_paged_sds;		/* stats: paged SD allocations */
	u64 stats_alloc_ok;
	u64 stats_alloc_fail;
	u64 stats_alloc_freed;
	u64 stats_lvl1;			/* stats: level-1 allocations served */
	u64 stats_lvl2;			/* stats: level-2 allocations served */
};

/* Tear down the PBLE resource manager and release all chunks. */
void irdma_destroy_pble_prm(struct irdma_hmc_pble_rsrc *pble_rsrc);
/* Initialize PBLE management for @dev; counterpart of the destroy above. */
enum irdma_status_code
irdma_hmc_init_pble(struct irdma_sc_dev *dev,
		    struct irdma_hmc_pble_rsrc *pble_rsrc);
/* Return a previous irdma_get_pble() allocation to the pool. */
void irdma_free_pble(struct irdma_hmc_pble_rsrc *pble_rsrc,
		     struct irdma_pble_alloc *palloc);
/*
 * Allocate @pble_cnt PBLEs into @palloc; @level1_only forbids falling
 * back to a two-level allocation.
 */
enum irdma_status_code irdma_get_pble(struct irdma_hmc_pble_rsrc *pble_rsrc,
				      struct irdma_pble_alloc *palloc,
				      u32 pble_cnt, bool level1_only);
/* Add @pchunk (and its bitmap) to the resource manager's chunk list. */
enum irdma_status_code irdma_prm_add_pble_mem(struct irdma_pble_prm *pprm,
					      struct irdma_chunk *pchunk);
/*
 * Carve @mem_size worth of PBLEs from the chunk list; on success fills
 * @chunkinfo and returns the CPU address via @vaddr and the FPM address
 * via @fpm_addr.
 */
enum irdma_status_code
irdma_prm_get_pbles(struct irdma_pble_prm *pprm,
		    struct irdma_pble_chunkinfo *chunkinfo, u64 mem_size,
		    u64 **vaddr, u64 *fpm_addr);
/* Give back PBLEs described by @chunkinfo (from irdma_prm_get_pbles). */
void irdma_prm_return_pbles(struct irdma_pble_prm *pprm,
			    struct irdma_pble_chunkinfo *chunkinfo);
/* Acquire/release the PRM spinlock; @flags holds the saved IRQ state. */
void irdma_pble_acquire_lock(struct irdma_hmc_pble_rsrc *pble_rsrc,
			     unsigned long *flags);
void irdma_pble_release_lock(struct irdma_hmc_pble_rsrc *pble_rsrc,
			     unsigned long *flags);
/* Free / allocate the paged backing memory behind a chunk. */
void irdma_pble_free_paged_mem(struct irdma_chunk *chunk);
enum irdma_status_code irdma_pble_get_paged_mem(struct irdma_chunk *chunk,
						u32 pg_cnt);
/* Release the bitmap memory owned by @chunk. */
void irdma_prm_rem_bitmapmem(struct irdma_hw *hw, struct irdma_chunk *chunk);
#endif /* IRDMA_PBLE_H */