Lines matching refs: ppd (per-port SDMA code in the qib driver)

101 static void clear_sdma_activelist(struct qib_pportdata *ppd)  in clear_sdma_activelist()  argument
105 list_for_each_entry_safe(txp, txp_next, &ppd->sdma_activelist, list) { in clear_sdma_activelist()
112 unmap_desc(ppd, idx); in clear_sdma_activelist()
113 if (++idx == ppd->sdma_descq_cnt) in clear_sdma_activelist()
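The clear_sdma_activelist() matches above walk the active list with list_for_each_entry_safe(), the variant that caches the next node so the current entry can be unlinked mid-walk. A minimal sketch of that pattern; demo_txreq and drain_list are illustrative names, not the driver's:

#include <linux/list.h>
#include <linux/slab.h>

struct demo_txreq {
	struct list_head list;
	int id;
};

/* The _safe iterator remembers the next node up front, so removing
 * and freeing the current entry does not corrupt the traversal. */
static void drain_list(struct list_head *head)
{
	struct demo_txreq *txp, *txp_next;

	list_for_each_entry_safe(txp, txp_next, head, list) {
		list_del_init(&txp->list);
		kfree(txp);
	}
}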
124 struct qib_pportdata *ppd = from_tasklet(ppd, t, in sdma_sw_clean_up_task() local
128 spin_lock_irqsave(&ppd->sdma_lock, flags); in sdma_sw_clean_up_task()
140 qib_sdma_make_progress(ppd); in sdma_sw_clean_up_task()
142 clear_sdma_activelist(ppd); in sdma_sw_clean_up_task()
148 ppd->sdma_descq_removed = ppd->sdma_descq_added; in sdma_sw_clean_up_task()
155 ppd->sdma_descq_tail = 0; in sdma_sw_clean_up_task()
156 ppd->sdma_descq_head = 0; in sdma_sw_clean_up_task()
157 ppd->sdma_head_dma[0] = 0; in sdma_sw_clean_up_task()
158 ppd->sdma_generation = 0; in sdma_sw_clean_up_task()
160 __qib_sdma_process_event(ppd, qib_sdma_event_e40_sw_cleaned); in sdma_sw_clean_up_task()
162 spin_unlock_irqrestore(&ppd->sdma_lock, flags); in sdma_sw_clean_up_task()
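sdma_sw_clean_up_task() uses the modern tasklet shape: tasklet_setup() registers a callback taking struct tasklet_struct *, and from_tasklet() (a container_of() wrapper) recovers the enclosing per-port structure. A sketch under those assumptions; everything except the kernel APIs is an invented name:

#include <linux/interrupt.h>
#include <linux/spinlock.h>

struct demo_port {
	spinlock_t lock;
	struct tasklet_struct clean_up_task;
};

static void demo_clean_up(struct tasklet_struct *t)
{
	/* from_tasklet() maps the tasklet pointer back to demo_port */
	struct demo_port *port = from_tasklet(port, t, clean_up_task);
	unsigned long flags;

	spin_lock_irqsave(&port->lock, flags);
	/* reset ring indices, generation, etc., under the same lock
	 * the interrupt and submit paths take */
	spin_unlock_irqrestore(&port->lock, flags);
}

static void demo_port_init(struct demo_port *port)
{
	spin_lock_init(&port->lock);
	tasklet_setup(&port->clean_up_task, demo_clean_up);
}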
170 static void sdma_hw_start_up(struct qib_pportdata *ppd) in sdma_hw_start_up() argument
172 struct qib_sdma_state *ss = &ppd->sdma_state; in sdma_hw_start_up()
176 ppd->dd->f_sendctrl(ppd, QIB_SENDCTRL_DISARM_BUF(bufno)); in sdma_hw_start_up()
178 ppd->dd->f_sdma_hw_start_up(ppd); in sdma_hw_start_up()
181 static void sdma_sw_tear_down(struct qib_pportdata *ppd) in sdma_sw_tear_down() argument
183 struct qib_sdma_state *ss = &ppd->sdma_state; in sdma_sw_tear_down()
189 static void sdma_start_sw_clean_up(struct qib_pportdata *ppd) in sdma_start_sw_clean_up() argument
191 tasklet_hi_schedule(&ppd->sdma_sw_clean_up_task); in sdma_start_sw_clean_up()
194 static void sdma_set_state(struct qib_pportdata *ppd, in sdma_set_state() argument
197 struct qib_sdma_state *ss = &ppd->sdma_state; in sdma_set_state()
227 ppd->dd->f_sdma_sendctrl(ppd, ss->current_op); in sdma_set_state()
230 static void unmap_desc(struct qib_pportdata *ppd, unsigned head) in unmap_desc() argument
232 __le64 *descqp = &ppd->sdma_descq[head].qw[0]; in unmap_desc()
242 dma_unmap_single(&ppd->dd->pcidev->dev, addr, len, DMA_TO_DEVICE); in unmap_desc()
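unmap_desc() decodes the DMA address and length back out of the two descriptor qwords and undoes the earlier dma_map_single(). A sketch of that step; the field positions below are made up, not qib's hardware layout:

#include <linux/dma-mapping.h>
#include <linux/types.h>

static void demo_unmap_desc(struct device *dev, __le64 *descqp)
{
	u64 qw0 = le64_to_cpu(descqp[0]);
	u64 qw1 = le64_to_cpu(descqp[1]);
	dma_addr_t addr = (qw1 << 32) | (qw0 >> 32);	/* hypothetical split */
	size_t len = (qw0 >> 16) & 0x7ff;		/* hypothetical field */

	dma_unmap_single(dev, addr, len, DMA_TO_DEVICE);
}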
245 static int alloc_sdma(struct qib_pportdata *ppd) in alloc_sdma() argument
247 ppd->sdma_descq_cnt = sdma_descq_cnt; in alloc_sdma()
248 if (!ppd->sdma_descq_cnt) in alloc_sdma()
249 ppd->sdma_descq_cnt = 256; in alloc_sdma()
252 ppd->sdma_descq = dma_alloc_coherent(&ppd->dd->pcidev->dev, in alloc_sdma()
253 ppd->sdma_descq_cnt * sizeof(u64[2]), &ppd->sdma_descq_phys, in alloc_sdma()
256 if (!ppd->sdma_descq) { in alloc_sdma()
257 qib_dev_err(ppd->dd, in alloc_sdma()
263 ppd->sdma_head_dma = dma_alloc_coherent(&ppd->dd->pcidev->dev, in alloc_sdma()
264 PAGE_SIZE, &ppd->sdma_head_phys, GFP_KERNEL); in alloc_sdma()
265 if (!ppd->sdma_head_dma) { in alloc_sdma()
266 qib_dev_err(ppd->dd, in alloc_sdma()
270 ppd->sdma_head_dma[0] = 0; in alloc_sdma()
274 dma_free_coherent(&ppd->dd->pcidev->dev, in alloc_sdma()
275 ppd->sdma_descq_cnt * sizeof(u64[2]), (void *)ppd->sdma_descq, in alloc_sdma()
276 ppd->sdma_descq_phys); in alloc_sdma()
277 ppd->sdma_descq = NULL; in alloc_sdma()
278 ppd->sdma_descq_phys = 0; in alloc_sdma()
280 ppd->sdma_descq_cnt = 0; in alloc_sdma()
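alloc_sdma() allocates two coherent buffers, a descriptor ring of 16-byte entries and a one-page head buffer the hardware writes its head index into, and unwinds the first allocation if the second fails. A minimal sketch of that allocate-then-unwind shape, assuming a device pointer dev; demo_sdma and demo_alloc_sdma are illustrative:

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/types.h>

struct demo_sdma {
	unsigned descq_cnt;
	__le64 (*descq)[2];		/* 16-byte descriptors */
	dma_addr_t descq_phys;
	volatile __le64 *head_dma;	/* device writes the head index here */
	dma_addr_t head_phys;
};

static int demo_alloc_sdma(struct device *dev, struct demo_sdma *s)
{
	s->descq = dma_alloc_coherent(dev, s->descq_cnt * sizeof(u64[2]),
				      &s->descq_phys, GFP_KERNEL);
	if (!s->descq)
		return -ENOMEM;

	s->head_dma = dma_alloc_coherent(dev, PAGE_SIZE,
					 &s->head_phys, GFP_KERNEL);
	if (!s->head_dma)
		goto cleanup_descq;

	return 0;

cleanup_descq:	/* undo the first allocation before reporting failure */
	dma_free_coherent(dev, s->descq_cnt * sizeof(u64[2]),
			  (void *)s->descq, s->descq_phys);
	s->descq = NULL;
	s->descq_phys = 0;
	return -ENOMEM;
}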
284 static void free_sdma(struct qib_pportdata *ppd) in free_sdma() argument
286 struct qib_devdata *dd = ppd->dd; in free_sdma()
288 if (ppd->sdma_head_dma) { in free_sdma()
290 (void *)ppd->sdma_head_dma, in free_sdma()
291 ppd->sdma_head_phys); in free_sdma()
292 ppd->sdma_head_dma = NULL; in free_sdma()
293 ppd->sdma_head_phys = 0; in free_sdma()
296 if (ppd->sdma_descq) { in free_sdma()
298 ppd->sdma_descq_cnt * sizeof(u64[2]), in free_sdma()
299 ppd->sdma_descq, ppd->sdma_descq_phys); in free_sdma()
300 ppd->sdma_descq = NULL; in free_sdma()
301 ppd->sdma_descq_phys = 0; in free_sdma()
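Note that free_sdma() is idempotent: each buffer is freed only if still allocated, and the pointer and DMA handle are cleared afterwards so a repeated teardown is a no-op. A standalone sketch of the same shape with invented names:

#include <stdlib.h>
#include <stddef.h>

struct demo_ring {
	void *buf;
	size_t len;
};

/* Clear the pointer after freeing so calling this twice is harmless,
 * mirroring how free_sdma() NULLs sdma_descq and zeroes the handle. */
static void demo_ring_free(struct demo_ring *r)
{
	free(r->buf);
	r->buf = NULL;
	r->len = 0;
}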
305 static inline void make_sdma_desc(struct qib_pportdata *ppd, in make_sdma_desc() argument
316 sdmadesc[0] |= (ppd->sdma_generation & 3ULL) << in make_sdma_desc()
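make_sdma_desc() folds the low two bits of ppd->sdma_generation into the descriptor word. The point of a generation tag is to tell descriptors written on the current pass around the ring apart from stale ones left over from an earlier pass. A standalone sketch with a made-up bit position:

#include <stdint.h>

#define DEMO_GEN_SHIFT 30	/* hypothetical position, not qib's layout */

/* Fold a 2-bit generation counter into the first descriptor qword. */
static uint64_t demo_desc_qw0(uint64_t base_bits, unsigned generation)
{
	return base_bits | ((uint64_t)(generation & 3) << DEMO_GEN_SHIFT);
}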
325 int qib_sdma_make_progress(struct qib_pportdata *ppd) in qib_sdma_make_progress() argument
329 struct qib_devdata *dd = ppd->dd; in qib_sdma_make_progress()
334 hwhead = dd->f_sdma_gethead(ppd); in qib_sdma_make_progress()
342 if (!list_empty(&ppd->sdma_activelist)) { in qib_sdma_make_progress()
343 lp = ppd->sdma_activelist.next; in qib_sdma_make_progress()
348 while (ppd->sdma_descq_head != hwhead) { in qib_sdma_make_progress()
351 (idx == ppd->sdma_descq_head)) { in qib_sdma_make_progress()
352 unmap_desc(ppd, ppd->sdma_descq_head); in qib_sdma_make_progress()
353 if (++idx == ppd->sdma_descq_cnt) in qib_sdma_make_progress()
358 ppd->sdma_descq_removed++; in qib_sdma_make_progress()
361 if (++ppd->sdma_descq_head == ppd->sdma_descq_cnt) in qib_sdma_make_progress()
362 ppd->sdma_descq_head = 0; in qib_sdma_make_progress()
365 if (txp && txp->next_descq_idx == ppd->sdma_descq_head) { in qib_sdma_make_progress()
371 if (list_empty(&ppd->sdma_activelist)) in qib_sdma_make_progress()
374 lp = ppd->sdma_activelist.next; in qib_sdma_make_progress()
383 qib_verbs_sdma_desc_avail(ppd, qib_sdma_descq_freecnt(ppd)); in qib_sdma_make_progress()
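The core of qib_sdma_make_progress() is a catch-up loop: the software head is advanced, retiring descriptors as it goes, until it matches the head the hardware reports, wrapping at the ring size. A runnable standalone sketch of that loop; all names are illustrative:

#include <stdio.h>

#define RING_CNT 8

static unsigned advance_head(unsigned sw_head, unsigned hw_head,
			     void (*complete)(unsigned idx))
{
	while (sw_head != hw_head) {
		complete(sw_head);		/* descriptor sw_head is done */
		if (++sw_head == RING_CNT)	/* wrap, as the driver does */
			sw_head = 0;
	}
	return sw_head;
}

static void report(unsigned idx) { printf("retired descriptor %u\n", idx); }

int main(void)
{
	unsigned head = 6;

	head = advance_head(head, 2, report);	/* retires 6, 7, 0, 1 */
	return head == 2 ? 0 : 1;
}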
390 void qib_sdma_intr(struct qib_pportdata *ppd) in qib_sdma_intr() argument
394 spin_lock_irqsave(&ppd->sdma_lock, flags); in qib_sdma_intr()
396 __qib_sdma_intr(ppd); in qib_sdma_intr()
398 spin_unlock_irqrestore(&ppd->sdma_lock, flags); in qib_sdma_intr()
401 void __qib_sdma_intr(struct qib_pportdata *ppd) in __qib_sdma_intr() argument
403 if (__qib_sdma_running(ppd)) { in __qib_sdma_intr()
404 qib_sdma_make_progress(ppd); in __qib_sdma_intr()
405 if (!list_empty(&ppd->sdma_userpending)) in __qib_sdma_intr()
406 qib_user_sdma_send_desc(ppd, &ppd->sdma_userpending); in __qib_sdma_intr()
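The qib_sdma_intr()/__qib_sdma_intr() pair follows the usual kernel naming convention: the double-underscore variant assumes the caller already holds sdma_lock, while the plain name is a self-locking wrapper (qib_sdma_running() above repeats the same split). A sketch with invented names around the real spinlock API:

#include <linux/spinlock.h>

struct demo_irqport {
	spinlock_t lock;
	int work_pending;
};

/* __demo_intr: lockless body; caller must hold port->lock */
static void __demo_intr(struct demo_irqport *port)
{
	port->work_pending = 0;
}

/* demo_intr: IRQ-safe wrapper that takes the lock itself */
static void demo_intr(struct demo_irqport *port)
{
	unsigned long flags;

	spin_lock_irqsave(&port->lock, flags);
	__demo_intr(port);
	spin_unlock_irqrestore(&port->lock, flags);
}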
410 int qib_setup_sdma(struct qib_pportdata *ppd) in qib_setup_sdma() argument
412 struct qib_devdata *dd = ppd->dd; in qib_setup_sdma()
416 ret = alloc_sdma(ppd); in qib_setup_sdma()
421 ppd->dd->f_sdma_init_early(ppd); in qib_setup_sdma()
422 spin_lock_irqsave(&ppd->sdma_lock, flags); in qib_setup_sdma()
423 sdma_set_state(ppd, qib_sdma_state_s00_hw_down); in qib_setup_sdma()
424 spin_unlock_irqrestore(&ppd->sdma_lock, flags); in qib_setup_sdma()
427 kref_init(&ppd->sdma_state.kref); in qib_setup_sdma()
428 init_completion(&ppd->sdma_state.comp); in qib_setup_sdma()
430 ppd->sdma_generation = 0; in qib_setup_sdma()
431 ppd->sdma_descq_head = 0; in qib_setup_sdma()
432 ppd->sdma_descq_removed = 0; in qib_setup_sdma()
433 ppd->sdma_descq_added = 0; in qib_setup_sdma()
435 ppd->sdma_intrequest = 0; in qib_setup_sdma()
436 INIT_LIST_HEAD(&ppd->sdma_userpending); in qib_setup_sdma()
438 INIT_LIST_HEAD(&ppd->sdma_activelist); in qib_setup_sdma()
440 tasklet_setup(&ppd->sdma_sw_clean_up_task, sdma_sw_clean_up_task); in qib_setup_sdma()
442 ret = dd->f_init_sdma_regs(ppd); in qib_setup_sdma()
446 qib_sdma_process_event(ppd, qib_sdma_event_e10_go_hw_start); in qib_setup_sdma()
451 qib_teardown_sdma(ppd); in qib_setup_sdma()
456 void qib_teardown_sdma(struct qib_pportdata *ppd) in qib_teardown_sdma() argument
458 qib_sdma_process_event(ppd, qib_sdma_event_e00_go_hw_down); in qib_teardown_sdma()
465 sdma_finalput(&ppd->sdma_state); in qib_teardown_sdma()
467 free_sdma(ppd); in qib_teardown_sdma()
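qib_setup_sdma() pairs kref_init() with init_completion(), and qib_teardown_sdma() ends in sdma_finalput() before freeing memory. The usual shape of that handshake (the demo_ names are assumptions): the last kref_put() fires a completion the teardown path blocks on, so state cannot be freed while another path still holds a reference.

#include <linux/kref.h>
#include <linux/completion.h>

struct demo_state {
	struct kref kref;
	struct completion comp;
};

static void demo_init(struct demo_state *ss)
{
	kref_init(&ss->kref);		/* refcount starts at 1 */
	init_completion(&ss->comp);
}

static void demo_release(struct kref *kref)
{
	struct demo_state *ss = container_of(kref, struct demo_state, kref);

	complete(&ss->comp);		/* wake the waiter below */
}

static void demo_finalput(struct demo_state *ss)
{
	kref_put(&ss->kref, demo_release);	/* drop the initial ref */
	wait_for_completion(&ss->comp);		/* until all users are gone */
}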
470 int qib_sdma_running(struct qib_pportdata *ppd) in qib_sdma_running() argument
475 spin_lock_irqsave(&ppd->sdma_lock, flags); in qib_sdma_running()
476 ret = __qib_sdma_running(ppd); in qib_sdma_running()
477 spin_unlock_irqrestore(&ppd->sdma_lock, flags); in qib_sdma_running()
489 static void complete_sdma_err_req(struct qib_pportdata *ppd, in complete_sdma_err_req() argument
498 list_add_tail(&tx->txreq.list, &ppd->sdma_activelist); in complete_sdma_err_req()
499 clear_sdma_activelist(ppd); in complete_sdma_err_req()
511 int qib_sdma_verbs_send(struct qib_pportdata *ppd, in qib_sdma_verbs_send() argument
526 spin_lock_irqsave(&ppd->sdma_lock, flags); in qib_sdma_verbs_send()
529 if (unlikely(!__qib_sdma_running(ppd))) { in qib_sdma_verbs_send()
530 complete_sdma_err_req(ppd, tx); in qib_sdma_verbs_send()
534 if (tx->txreq.sg_count > qib_sdma_descq_freecnt(ppd)) { in qib_sdma_verbs_send()
535 if (qib_sdma_make_progress(ppd)) in qib_sdma_verbs_send()
537 if (ppd->dd->flags & QIB_HAS_SDMA_TIMEOUT) in qib_sdma_verbs_send()
538 ppd->dd->f_sdma_set_desc_cnt(ppd, in qib_sdma_verbs_send()
539 ppd->sdma_descq_cnt / 2); in qib_sdma_verbs_send()
544 make_sdma_desc(ppd, sdmadesc, (u64) tx->txreq.addr, dwoffset, 0); in qib_sdma_verbs_send()
551 tail = ppd->sdma_descq_tail; in qib_sdma_verbs_send()
552 descqp = &ppd->sdma_descq[tail].qw[0]; in qib_sdma_verbs_send()
557 if (++tail == ppd->sdma_descq_cnt) { in qib_sdma_verbs_send()
559 descqp = &ppd->sdma_descq[0].qw[0]; in qib_sdma_verbs_send()
560 ++ppd->sdma_generation; in qib_sdma_verbs_send()
571 addr = dma_map_single(&ppd->dd->pcidev->dev, sge->vaddr, in qib_sdma_verbs_send()
573 if (dma_mapping_error(&ppd->dd->pcidev->dev, addr)) { in qib_sdma_verbs_send()
578 make_sdma_desc(ppd, sdmadesc, (u64) addr, dw, dwoffset); in qib_sdma_verbs_send()
587 if (++tail == ppd->sdma_descq_cnt) { in qib_sdma_verbs_send()
589 descqp = &ppd->sdma_descq[0].qw[0]; in qib_sdma_verbs_send()
590 ++ppd->sdma_generation; in qib_sdma_verbs_send()
598 descqp = &ppd->sdma_descq[ppd->sdma_descq_cnt].qw[0]; in qib_sdma_verbs_send()
608 ppd->dd->f_sdma_update_tail(ppd, tail); in qib_sdma_verbs_send()
609 ppd->sdma_descq_added += tx->txreq.sg_count; in qib_sdma_verbs_send()
610 list_add_tail(&tx->txreq.list, &ppd->sdma_activelist); in qib_sdma_verbs_send()
616 tail = ppd->sdma_descq_cnt - 1; in qib_sdma_verbs_send()
619 if (tail == ppd->sdma_descq_tail) in qib_sdma_verbs_send()
621 unmap_desc(ppd, tail); in qib_sdma_verbs_send()
654 dev = &ppd->dd->verbs_dev; in qib_sdma_verbs_send()
659 ibp = &ppd->ibport_data; in qib_sdma_verbs_send()
673 spin_unlock_irqrestore(&ppd->sdma_lock, flags); in qib_sdma_verbs_send()
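Inside qib_sdma_verbs_send(), each scatter/gather element is mapped with dma_map_single() and the handle checked with dma_mapping_error() before being written into a descriptor; on failure the tail-unwind loop above (lines 616-621) unmaps the descriptors already posted. A sketch of just the mapping step, where dev and the demo_ names are assumptions:

#include <linux/dma-mapping.h>

static int demo_map_sge(struct device *dev, void *vaddr, size_t len,
			dma_addr_t *out)
{
	dma_addr_t addr = dma_map_single(dev, vaddr, len, DMA_TO_DEVICE);

	/* never hand an unchecked handle to hardware */
	if (dma_mapping_error(dev, addr))
		return -ENOMEM;	/* caller unwinds already-posted descriptors */
	*out = addr;
	return 0;
}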
680 void dump_sdma_state(struct qib_pportdata *ppd) in dump_sdma_state() argument
690 head = ppd->sdma_descq_head; in dump_sdma_state()
691 tail = ppd->sdma_descq_tail; in dump_sdma_state()
692 cnt = qib_sdma_descq_freecnt(ppd); in dump_sdma_state()
693 descq = ppd->sdma_descq; in dump_sdma_state()
695 qib_dev_porterr(ppd->dd, ppd->port, in dump_sdma_state()
697 qib_dev_porterr(ppd->dd, ppd->port, in dump_sdma_state()
699 qib_dev_porterr(ppd->dd, ppd->port, in dump_sdma_state()
718 qib_dev_porterr(ppd->dd, ppd->port, in dump_sdma_state()
721 if (++head == ppd->sdma_descq_cnt) in dump_sdma_state()
726 list_for_each_entry_safe(txp, txpnext, &ppd->sdma_activelist, in dump_sdma_state()
728 qib_dev_porterr(ppd->dd, ppd->port, in dump_sdma_state()
733 void qib_sdma_process_event(struct qib_pportdata *ppd, in qib_sdma_process_event() argument
738 spin_lock_irqsave(&ppd->sdma_lock, flags); in qib_sdma_process_event()
740 __qib_sdma_process_event(ppd, event); in qib_sdma_process_event()
742 if (ppd->sdma_state.current_state == qib_sdma_state_s99_running) in qib_sdma_process_event()
743 qib_verbs_sdma_desc_avail(ppd, qib_sdma_descq_freecnt(ppd)); in qib_sdma_process_event()
745 spin_unlock_irqrestore(&ppd->sdma_lock, flags); in qib_sdma_process_event()
748 void __qib_sdma_process_event(struct qib_pportdata *ppd, in __qib_sdma_process_event() argument
751 struct qib_sdma_state *ss = &ppd->sdma_state; in __qib_sdma_process_event()
769 sdma_get(&ppd->sdma_state); in __qib_sdma_process_event()
770 sdma_set_state(ppd, in __qib_sdma_process_event()
776 sdma_sw_tear_down(ppd); in __qib_sdma_process_event()
796 sdma_set_state(ppd, qib_sdma_state_s00_hw_down); in __qib_sdma_process_event()
797 sdma_sw_tear_down(ppd); in __qib_sdma_process_event()
802 sdma_set_state(ppd, ss->go_s99_running ? in __qib_sdma_process_event()
830 sdma_set_state(ppd, qib_sdma_state_s00_hw_down); in __qib_sdma_process_event()
831 sdma_sw_tear_down(ppd); in __qib_sdma_process_event()
838 sdma_set_state(ppd, qib_sdma_state_s99_running); in __qib_sdma_process_event()
861 sdma_set_state(ppd, qib_sdma_state_s00_hw_down); in __qib_sdma_process_event()
871 sdma_set_state(ppd, in __qib_sdma_process_event()
873 sdma_hw_start_up(ppd); in __qib_sdma_process_event()
894 sdma_set_state(ppd, qib_sdma_state_s00_hw_down); in __qib_sdma_process_event()
895 sdma_start_sw_clean_up(ppd); in __qib_sdma_process_event()
907 sdma_set_state(ppd, in __qib_sdma_process_event()
909 sdma_start_sw_clean_up(ppd); in __qib_sdma_process_event()
928 sdma_set_state(ppd, qib_sdma_state_s00_hw_down); in __qib_sdma_process_event()
929 sdma_start_sw_clean_up(ppd); in __qib_sdma_process_event()
943 sdma_set_state(ppd, in __qib_sdma_process_event()
945 ppd->dd->f_sdma_hw_clean_up(ppd); in __qib_sdma_process_event()
962 sdma_set_state(ppd, qib_sdma_state_s00_hw_down); in __qib_sdma_process_event()
963 sdma_start_sw_clean_up(ppd); in __qib_sdma_process_event()
976 sdma_set_state(ppd, in __qib_sdma_process_event()
978 sdma_start_sw_clean_up(ppd); in __qib_sdma_process_event()
981 sdma_set_state(ppd, qib_sdma_state_s50_hw_halt_wait); in __qib_sdma_process_event()
985 sdma_set_state(ppd, in __qib_sdma_process_event()
987 sdma_start_sw_clean_up(ppd); in __qib_sdma_process_event()
990 sdma_set_state(ppd, qib_sdma_state_s50_hw_halt_wait); in __qib_sdma_process_event()
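__qib_sdma_process_event() is a classic switch-on-state, switch-on-event machine: every transition funnels through sdma_set_state(), which also pushes the new operating flags to hardware, and halt paths schedule the clean-up tasklet rather than doing the work inline. A runnable standalone sketch of that shape; the states and events below are invented stand-ins, not the driver's full table:

#include <stdio.h>

enum state { S_HW_DOWN, S_RUNNING, S_HALT_WAIT };
enum event { E_GO_HW_START, E_GO_HW_DOWN, E_HW_HALTED };

static enum state step(enum state cur, enum event ev)
{
	switch (cur) {
	case S_HW_DOWN:
		if (ev == E_GO_HW_START)
			return S_RUNNING;	/* bring-up shortened here */
		break;
	case S_RUNNING:
		if (ev == E_GO_HW_DOWN)
			return S_HW_DOWN;
		if (ev == E_HW_HALTED)
			return S_HALT_WAIT;	/* then software clean-up */
		break;
	case S_HALT_WAIT:
		if (ev == E_GO_HW_DOWN)
			return S_HW_DOWN;
		break;
	}
	return cur;	/* unhandled events leave the state alone */
}

int main(void)
{
	enum state s = S_HW_DOWN;

	s = step(s, E_GO_HW_START);
	s = step(s, E_HW_HALTED);
	printf("final state: %d\n", s);
	return 0;
}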