Lines matching refs: eq (event queue handling in the hns_roce HW v1 driver)
3675 static void set_eq_cons_index_v1(struct hns_roce_eq *eq, u32 req_not) in set_eq_cons_index_v1() argument
3677 roce_raw_write((eq->cons_index & HNS_ROCE_V1_CONS_IDX_M) | in set_eq_cons_index_v1()
3678 (req_not << eq->log_entries), eq->db_reg); in set_eq_cons_index_v1()
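Note: the two matched lines above are one statement; the driver packs the EQ consumer index and a rearm/request flag into a single doorbell word before writing it to eq->db_reg. A minimal standalone model of that packing follows (the mask value is an assumption; the real HNS_ROCE_V1_CONS_IDX_M is defined in the driver headers):

#include <stdint.h>
#include <stdio.h>

/* Assumed 16-bit consumer-index field; the real HNS_ROCE_V1_CONS_IDX_M
 * may differ. */
#define CONS_IDX_MASK 0xffffu

/* Model of the doorbell word built in set_eq_cons_index_v1(): the low bits
 * carry cons_index, req_not is shifted above the log2(entries) bits. */
static uint32_t eq_doorbell_word(uint32_t cons_index, uint32_t req_not,
                                 uint32_t log_entries)
{
    return (cons_index & CONS_IDX_MASK) | (req_not << log_entries);
}

int main(void)
{
    /* e.g. a 256-entry EQ (log_entries = 8), consumer index 5, rearm set */
    printf("doorbell = 0x%x\n", eq_doorbell_word(5, 1, 8));
    return 0;
}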
3843 static struct hns_roce_aeqe *get_aeqe_v1(struct hns_roce_eq *eq, u32 entry) in get_aeqe_v1() argument
3845 unsigned long off = (entry & (eq->entries - 1)) * HNS_ROCE_AEQE_SIZE; in get_aeqe_v1()
3848 (eq->buf_list[off / HNS_ROCE_BA_SIZE].buf) + in get_aeqe_v1()
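Note: get_aeqe_v1() maps an entry index onto a list of HNS_ROCE_BA_SIZE DMA chunks: the byte offset selects a chunk with off / HNS_ROCE_BA_SIZE and a position inside it with off % HNS_ROCE_BA_SIZE. A standalone model of that lookup (chunk and entry sizes are illustrative stand-ins for the driver constants):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define BA_SIZE   (64 * 1024)   /* stand-in for HNS_ROCE_BA_SIZE */
#define AEQE_SIZE 16            /* stand-in for HNS_ROCE_AEQE_SIZE */

struct buf_chunk { void *buf; };

/* Mirror of the indexing in get_aeqe_v1(): wrap the entry index to the queue
 * depth, scale to a byte offset, then pick the chunk and the offset in it. */
static void *get_eqe(struct buf_chunk *buf_list, uint32_t entries, uint32_t entry)
{
    unsigned long off = (entry & (entries - 1)) * AEQE_SIZE;

    return (uint8_t *)buf_list[off / BA_SIZE].buf + off % BA_SIZE;
}

int main(void)
{
    struct buf_chunk chunks[2] = { { malloc(BA_SIZE) }, { malloc(BA_SIZE) } };

    /* entry 4096 * 16 bytes = 64 KiB, i.e. the first byte of chunk 1 */
    printf("%p vs %p\n", get_eqe(chunks, 8192, 4096), chunks[1].buf);
    free(chunks[0].buf);
    free(chunks[1].buf);
    return 0;
}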
3852 static struct hns_roce_aeqe *next_aeqe_sw_v1(struct hns_roce_eq *eq) in next_aeqe_sw_v1() argument
3854 struct hns_roce_aeqe *aeqe = get_aeqe_v1(eq, eq->cons_index); in next_aeqe_sw_v1()
3857 !!(eq->cons_index & eq->entries)) ? aeqe : NULL; in next_aeqe_sw_v1()
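Note: the ternary matched above is the tail of an owner/phase-bit test. The unmatched line at 3856 reads an owner bit from the AEQE, which is XORed with the software "pass" (eq->cons_index & eq->entries flips each time the index wraps the queue depth). A standalone sketch of that test, with a hypothetical owner field standing in for the hardware-written bit:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the owner bit the driver reads out of an AEQE. */
struct fake_aeqe { uint32_t owner; };

static bool aeqe_is_valid(const struct fake_aeqe *aeqe,
                          uint32_t cons_index, uint32_t entries)
{
    /* (cons_index & entries) is 0 on even passes over the ring and non-zero
     * on odd passes; XOR with the hardware-written owner bit tells whether
     * this entry belongs to the current pass, i.e. is a new event. */
    return (aeqe->owner ^ !!(cons_index & entries)) ? true : false;
}

int main(void)
{
    struct fake_aeqe e = { .owner = 1 };

    printf("first pass, owner=1 -> %d\n", aeqe_is_valid(&e, 3, 256));    /* new */
    printf("second pass, owner=1 -> %d\n", aeqe_is_valid(&e, 259, 256)); /* stale */
    return 0;
}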
3861 struct hns_roce_eq *eq) in hns_roce_v1_aeq_int() argument
3868 while ((aeqe = next_aeqe_sw_v1(eq))) { in hns_roce_v1_aeq_int()
3925 event_type, eq->eqn, eq->cons_index); in hns_roce_v1_aeq_int()
3929 eq->cons_index++; in hns_roce_v1_aeq_int()
3932 if (eq->cons_index > 2 * hr_dev->caps.aeqe_depth - 1) in hns_roce_v1_aeq_int()
3933 eq->cons_index = 0; in hns_roce_v1_aeq_int()
3936 set_eq_cons_index_v1(eq, 0); in hns_roce_v1_aeq_int()
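Note: the wraparound check at line 3932 keeps cons_index in [0, 2*depth): the index runs through two full passes of the ring so that (cons_index & entries) can serve as the ownership phase, and only resets after both passes. A small standalone model of that advance:

#include <stdint.h>
#include <stdio.h>

/* Model of the consumer-index update in hns_roce_v1_aeq_int(): count through
 * two passes of the ring (0 .. 2*depth-1), then wrap to 0. */
static uint32_t advance_cons_index(uint32_t cons_index, uint32_t depth)
{
    cons_index++;
    if (cons_index > 2 * depth - 1)
        cons_index = 0;
    return cons_index;
}

int main(void)
{
    uint32_t ci = 0, depth = 4;

    for (int i = 0; i < 10; i++) {
        printf("ci=%u phase=%u\n", ci, !!(ci & depth));
        ci = advance_cons_index(ci, depth);
    }
    return 0;
}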
3941 static struct hns_roce_ceqe *get_ceqe_v1(struct hns_roce_eq *eq, u32 entry) in get_ceqe_v1() argument
3943 unsigned long off = (entry & (eq->entries - 1)) * HNS_ROCE_CEQE_SIZE; in get_ceqe_v1()
3946 (eq->buf_list[off / HNS_ROCE_BA_SIZE].buf) + in get_ceqe_v1()
3950 static struct hns_roce_ceqe *next_ceqe_sw_v1(struct hns_roce_eq *eq) in next_ceqe_sw_v1() argument
3952 struct hns_roce_ceqe *ceqe = get_ceqe_v1(eq, eq->cons_index); in next_ceqe_sw_v1()
3956 (!!(eq->cons_index & eq->entries)) ? ceqe : NULL; in next_ceqe_sw_v1()
3960 struct hns_roce_eq *eq) in hns_roce_v1_ceq_int() argument
3966 while ((ceqe = next_ceqe_sw_v1(eq))) { in hns_roce_v1_ceq_int()
3977 ++eq->cons_index; in hns_roce_v1_ceq_int()
3980 if (eq->cons_index > in hns_roce_v1_ceq_int()
3982 eq->cons_index = 0; in hns_roce_v1_ceq_int()
3985 set_eq_cons_index_v1(eq, 0); in hns_roce_v1_ceq_int()
3992 struct hns_roce_eq *eq = eq_ptr; in hns_roce_v1_msix_interrupt_eq() local
3993 struct hns_roce_dev *hr_dev = eq->hr_dev; in hns_roce_v1_msix_interrupt_eq()
3996 if (eq->type_flag == HNS_ROCE_CEQ) in hns_roce_v1_msix_interrupt_eq()
3998 int_work = hns_roce_v1_ceq_int(hr_dev, eq); in hns_roce_v1_msix_interrupt_eq()
4001 int_work = hns_roce_v1_aeq_int(hr_dev, eq); in hns_roce_v1_msix_interrupt_eq()
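Note: hns_roce_v1_msix_interrupt_eq() is the shared MSI-X handler; each EQ registers it on its own vector, and it routes to the CEQ or AEQ service loop based on eq->type_flag. A minimal standalone model of that dispatch (the real handler's conversion of int_work into an irqreturn_t is omitted here):

#include <stdio.h>

enum eq_type { EQ_CEQ, EQ_AEQ };   /* stand-ins for HNS_ROCE_CEQ / HNS_ROCE_AEQ */

struct eq { enum eq_type type_flag; const char *name; };

static int ceq_int(struct eq *eq) { printf("%s: completion events\n", eq->name); return 1; }
static int aeq_int(struct eq *eq) { printf("%s: async events\n", eq->name); return 1; }

/* Model of the dispatch in hns_roce_v1_msix_interrupt_eq(): one handler per
 * EQ vector, routed to the CEQ or AEQ service routine by type_flag. */
static int msix_interrupt_eq(void *eq_ptr)
{
    struct eq *eq = eq_ptr;

    return (eq->type_flag == EQ_CEQ) ? ceq_int(eq) : aeq_int(eq);
}

int main(void)
{
    struct eq ceq = { EQ_CEQ, "eq0" }, aeq = { EQ_AEQ, "eq1" };

    msix_interrupt_eq(&ceq);
    msix_interrupt_eq(&aeq);
    return 0;
}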
4145 struct hns_roce_eq *eq) in hns_roce_v1_free_eq() argument
4147 int npages = (PAGE_ALIGN(eq->eqe_size * eq->entries) + in hns_roce_v1_free_eq()
4151 if (!eq->buf_list) in hns_roce_v1_free_eq()
4156 eq->buf_list[i].buf, eq->buf_list[i].map); in hns_roce_v1_free_eq()
4158 kfree(eq->buf_list); in hns_roce_v1_free_eq()
4187 struct hns_roce_eq *eq) in hns_roce_v1_create_eq() argument
4189 void __iomem *eqc = hr_dev->eq_table.eqc_base[eq->eqn]; in hns_roce_v1_create_eq()
4202 num_bas = (PAGE_ALIGN(eq->entries * eq->eqe_size) + in hns_roce_v1_create_eq()
4205 if ((eq->entries * eq->eqe_size) > HNS_ROCE_BA_SIZE) { in hns_roce_v1_create_eq()
4207 (eq->entries * eq->eqe_size), HNS_ROCE_BA_SIZE, in hns_roce_v1_create_eq()
4212 eq->buf_list = kcalloc(num_bas, sizeof(*eq->buf_list), GFP_KERNEL); in hns_roce_v1_create_eq()
4213 if (!eq->buf_list) in hns_roce_v1_create_eq()
4217 eq->buf_list[i].buf = dma_alloc_coherent(dev, HNS_ROCE_BA_SIZE, in hns_roce_v1_create_eq()
4220 if (!eq->buf_list[i].buf) { in hns_roce_v1_create_eq()
4225 eq->buf_list[i].map = tmp_dma_addr; in hns_roce_v1_create_eq()
4227 eq->cons_index = 0; in hns_roce_v1_create_eq()
4233 eq->log_entries); in hns_roce_v1_create_eq()
4238 writel((u32)(eq->buf_list[0].map >> 12), eqc + 4); in hns_roce_v1_create_eq()
4248 eq->buf_list[0].map >> 44); in hns_roce_v1_create_eq()
4264 dma_free_coherent(dev, HNS_ROCE_BA_SIZE, eq->buf_list[i].buf, in hns_roce_v1_create_eq()
4265 eq->buf_list[i].map); in hns_roce_v1_create_eq()
4267 kfree(eq->buf_list); in hns_roce_v1_create_eq()
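Note: hns_roce_v1_create_eq() sizes the queue as a set of HNS_ROCE_BA_SIZE DMA chunks (num_bas), allocates each with dma_alloc_coherent(), and programs the EQ context with the base address split across register fields (the >>12 and >>44 shifts in the listing). A standalone model of the chunk count and the address split (page and chunk sizes are illustrative):

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE_ 4096u
#define BA_SIZE    (64u * 1024u)   /* stand-in for HNS_ROCE_BA_SIZE */

/* Number of BA-sized chunks needed for 'entries' entries of 'eqe_size' bytes,
 * mirroring the num_bas computation in hns_roce_v1_create_eq(). */
static unsigned int eq_num_bas(unsigned int entries, unsigned int eqe_size)
{
    unsigned long bytes = (unsigned long)entries * eqe_size;
    unsigned long aligned = (bytes + PAGE_SIZE_ - 1) & ~(unsigned long)(PAGE_SIZE_ - 1);

    return (aligned + BA_SIZE - 1) / BA_SIZE;
}

int main(void)
{
    uint64_t map = 0x12345678000ull;   /* example DMA address of buf_list[0] */

    printf("num_bas = %u\n", eq_num_bas(8192, 16));
    /* The base address is written as two fields, matching the >>12 and >>44
     * shifts in the listing: bits [43:12] and the bits above bit 43. */
    printf("ba_low = 0x%x, ba_high = 0x%x\n",
           (uint32_t)(map >> 12), (uint32_t)(map >> 44));
    return 0;
}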
4275 struct hns_roce_eq *eq; in hns_roce_v1_init_eq_table() local
4284 eq_table->eq = kcalloc(eq_num, sizeof(*eq_table->eq), GFP_KERNEL); in hns_roce_v1_init_eq_table()
4285 if (!eq_table->eq) in hns_roce_v1_init_eq_table()
4296 eq = &eq_table->eq[i]; in hns_roce_v1_init_eq_table()
4297 eq->hr_dev = hr_dev; in hns_roce_v1_init_eq_table()
4298 eq->eqn = i; in hns_roce_v1_init_eq_table()
4299 eq->irq = hr_dev->irq[i]; in hns_roce_v1_init_eq_table()
4300 eq->log_page_size = PAGE_SHIFT; in hns_roce_v1_init_eq_table()
4307 eq->type_flag = HNS_ROCE_CEQ; in hns_roce_v1_init_eq_table()
4308 eq->db_reg = hr_dev->reg_base + in hns_roce_v1_init_eq_table()
4311 eq->entries = hr_dev->caps.ceqe_depth; in hns_roce_v1_init_eq_table()
4312 eq->log_entries = ilog2(eq->entries); in hns_roce_v1_init_eq_table()
4313 eq->eqe_size = HNS_ROCE_CEQE_SIZE; in hns_roce_v1_init_eq_table()
4318 eq->type_flag = HNS_ROCE_AEQ; in hns_roce_v1_init_eq_table()
4319 eq->db_reg = hr_dev->reg_base + in hns_roce_v1_init_eq_table()
4321 eq->entries = hr_dev->caps.aeqe_depth; in hns_roce_v1_init_eq_table()
4322 eq->log_entries = ilog2(eq->entries); in hns_roce_v1_init_eq_table()
4323 eq->eqe_size = HNS_ROCE_AEQE_SIZE; in hns_roce_v1_init_eq_table()
4339 ret = hns_roce_v1_create_eq(hr_dev, &eq_table->eq[i]); in hns_roce_v1_init_eq_table()
4351 &eq_table->eq[j]); in hns_roce_v1_init_eq_table()
4370 free_irq(hr_dev->irq[j], &eq_table->eq[j]); in hns_roce_v1_init_eq_table()
4374 hns_roce_v1_free_eq(hr_dev, &eq_table->eq[i]); in hns_roce_v1_init_eq_table()
4379 kfree(eq_table->eq); in hns_roce_v1_init_eq_table()
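Note: the init loop in hns_roce_v1_init_eq_table() configures the first queues as completion EQs (depth from caps.ceqe_depth, HNS_ROCE_CEQE_SIZE entries) and the remaining ones as asynchronous EQs (caps.aeqe_depth, HNS_ROCE_AEQE_SIZE), each with its own IRQ vector and doorbell register. A standalone sketch of that per-EQ setup (depths and entry sizes below are illustrative stand-ins):

#include <stdio.h>

enum eq_type { EQ_CEQ, EQ_AEQ };   /* stand-ins for HNS_ROCE_CEQ / HNS_ROCE_AEQ */

struct eq_cfg {
    enum eq_type type;
    unsigned int entries;
    unsigned int log_entries;
    unsigned int eqe_size;
};

static unsigned int ilog2_(unsigned int v)
{
    unsigned int l = 0;
    while (v >>= 1)
        l++;
    return l;
}

/* Mirror of the per-EQ branch in hns_roce_v1_init_eq_table(): index below
 * ceq_num means a completion EQ, otherwise an async EQ. */
static struct eq_cfg init_eq(unsigned int i, unsigned int ceq_num,
                             unsigned int ceqe_depth, unsigned int aeqe_depth)
{
    struct eq_cfg cfg;

    if (i < ceq_num) {
        cfg.type = EQ_CEQ;
        cfg.entries = ceqe_depth;
        cfg.eqe_size = 4;      /* stand-in for HNS_ROCE_CEQE_SIZE */
    } else {
        cfg.type = EQ_AEQ;
        cfg.entries = aeqe_depth;
        cfg.eqe_size = 16;     /* stand-in for HNS_ROCE_AEQE_SIZE */
    }
    cfg.log_entries = ilog2_(cfg.entries);
    return cfg;
}

int main(void)
{
    for (unsigned int i = 0; i < 3; i++) {
        struct eq_cfg c = init_eq(i, 2, 1024, 512);

        printf("eq %u: type=%s entries=%u log=%u eqe_size=%u\n", i,
               c.type == EQ_CEQ ? "CEQ" : "AEQ",
               c.entries, c.log_entries, c.eqe_size);
    }
    return 0;
}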
4397 free_irq(hr_dev->irq[i], &eq_table->eq[i]); in hns_roce_v1_cleanup_eq_table()
4399 hns_roce_v1_free_eq(hr_dev, &eq_table->eq[i]); in hns_roce_v1_cleanup_eq_table()
4405 kfree(eq_table->eq); in hns_roce_v1_cleanup_eq_table()