Lines matching refs: txn (references to the txn transaction handle in the OMAP DMM tiler driver, omap_dmm_tiler.c)
216 static void *alloc_dma(struct dmm_txn *txn, size_t sz, dma_addr_t *pa) in alloc_dma() argument
219 struct refill_engine *engine = txn->engine_handle; in alloc_dma()
222 txn->current_pa = round_up(txn->current_pa, 16); in alloc_dma()
223 txn->current_va = (void *)round_up((long)txn->current_va, 16); in alloc_dma()
225 ptr = txn->current_va; in alloc_dma()
226 *pa = txn->current_pa; in alloc_dma()
228 txn->current_pa += sz; in alloc_dma()
229 txn->current_va += sz; in alloc_dma()
231 BUG_ON((txn->current_va - engine->refill_va) > REFILL_BUFFER_SIZE); in alloc_dma()
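
As the matches above show, alloc_dma() is a bump allocator over the engine's pre-allocated refill buffer: both the CPU and DMA cursors are rounded up to a 16-byte boundary, the current position is handed out, and both cursors advance by sz, with a hard check that the buffer is never overrun. A minimal user-space sketch of the same pattern follows; the struct, the buffer size, and all names here are illustrative, not taken from the driver.

#include <stdint.h>
#include <stddef.h>
#include <assert.h>

#define FAKE_REFILL_BUFFER_SIZE 4096    /* illustrative, not the driver's value */

struct bump_txn {
        uint8_t  *base_va;      /* start of the refill buffer, CPU view */
        uint64_t  base_pa;      /* start of the refill buffer, device view */
        uint8_t  *current_va;   /* next free CPU address */
        uint64_t  current_pa;   /* next free device address */
};

/* Carve sz bytes out of the buffer, 16-byte aligned, like alloc_dma(). */
static void *bump_alloc(struct bump_txn *txn, size_t sz, uint64_t *pa)
{
        void *ptr;

        /* DMM programming wants 16-byte aligned addresses. */
        txn->current_pa = (txn->current_pa + 15) & ~(uint64_t)15;
        txn->current_va = (uint8_t *)(((uintptr_t)txn->current_va + 15) & ~(uintptr_t)15);

        ptr = txn->current_va;
        *pa = txn->current_pa;

        txn->current_pa += sz;
        txn->current_va += sz;

        /* Overflowing the pre-allocated buffer is a hard bug, as in the driver's BUG_ON(). */
        assert((size_t)(txn->current_va - txn->base_va) <= FAKE_REFILL_BUFFER_SIZE);

        return ptr;
}
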
314 struct dmm_txn *txn = NULL; in dmm_txn_init() local
337 txn = &engine->txn; in dmm_txn_init()
339 txn->engine_handle = engine; in dmm_txn_init()
340 txn->last_pat = NULL; in dmm_txn_init()
341 txn->current_va = engine->refill_va; in dmm_txn_init()
342 txn->current_pa = engine->refill_pa; in dmm_txn_init()
344 return txn; in dmm_txn_init()
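
dmm_txn_init() (matches above) takes a refill engine and returns its embedded transaction with last_pat cleared and both cursors rewound to the start of the refill buffer. In terms of the hypothetical bump_txn above, the rewind is just the following; the descriptor chain itself is sketched after the dmm_txn_append() matches below.

/* Rewind the bump cursors to the head of the refill buffer, as
 * dmm_txn_init() does with engine->refill_va / engine->refill_pa. */
static void bump_txn_reset(struct bump_txn *txn)
{
        txn->current_va = txn->base_va;
        txn->current_pa = txn->base_pa;
}
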
351 static void dmm_txn_append(struct dmm_txn *txn, struct pat_area *area, in dmm_txn_append() argument
357 struct refill_engine *engine = txn->engine_handle; in dmm_txn_append()
362 pat = alloc_dma(txn, sizeof(*pat), &pat_pa); in dmm_txn_append()
364 if (txn->last_pat) in dmm_txn_append()
365 txn->last_pat->next_pa = (u32)pat_pa; in dmm_txn_append()
378 data = alloc_dma(txn, 4*i, &data_pa); in dmm_txn_append()
390 txn->last_pat = pat; in dmm_txn_append()
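
dmm_txn_append() carves a PAT descriptor plus a 4*i-byte payload array out of the refill buffer and links the new descriptor onto the previous one through a device-visible next_pa address, building a singly linked chain the engine can walk. A hedged sketch of that chaining pattern, reusing the bump_txn helpers above; the descriptor layout here is illustrative, not the hardware's struct pat.

/* Illustrative descriptor: a device-address link plus a payload address,
 * loosely modelled on the driver's struct pat (area/ctrl fields omitted). */
struct fake_pat {
        uint32_t next_pa;       /* device address of the next descriptor, 0 = end of chain */
        uint32_t data_pa;       /* device address of this descriptor's payload */
};

struct fake_txn {
        struct bump_txn  buf;           /* refill-buffer bump allocator */
        struct fake_pat *last_pat;      /* tail of the descriptor chain, NULL if empty */
};

/* Append one descriptor with nwords 32-bit payload words, as
 * dmm_txn_append() does with its array of page addresses. */
static void fake_append(struct fake_txn *txn, size_t nwords)
{
        uint64_t pat_pa, data_pa;
        struct fake_pat *pat;
        uint32_t *data;

        pat = bump_alloc(&txn->buf, sizeof(*pat), &pat_pa);

        /* Link the previous descriptor to this one by device address. */
        if (txn->last_pat)
                txn->last_pat->next_pa = (uint32_t)pat_pa;

        data = bump_alloc(&txn->buf, 4 * nwords, &data_pa);
        (void)data;                     /* the driver fills this with page addresses */

        pat->data_pa = (uint32_t)data_pa;
        pat->next_pa = 0;

        txn->last_pat = pat;
}
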
398 static int dmm_txn_commit(struct dmm_txn *txn, bool wait) in dmm_txn_commit() argument
401 struct refill_engine *engine = txn->engine_handle; in dmm_txn_commit()
404 if (!txn->last_pat) { in dmm_txn_commit()
410 txn->last_pat->next_pa = 0; in dmm_txn_commit()
421 readl(&txn->last_pat->next_pa); in dmm_txn_commit()
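
dmm_txn_commit() rejects an empty transaction (no last_pat), terminates the chain by zeroing the tail's next_pa, and the readl() in the matches above reads that field back so the store has reached memory before the engine is kicked off. Continuing the fake_txn sketch, the commit-side bookkeeping reduces to the following; programming the DMM registers and waiting for the refill interrupt are driver-specific and omitted.

#include <errno.h>

/* Terminate the descriptor chain and (in the driver) hand it to the engine.
 * Only the list bookkeeping is sketched here. */
static int fake_commit(struct fake_txn *txn)
{
        if (!txn->last_pat)
                return -EINVAL;         /* nothing was appended */

        txn->last_pat->next_pa = 0;     /* mark the end of the chain */

        /* The driver reads next_pa back (readl()) so the write is flushed
         * before the DMA engine starts walking the chain; plain C needs no
         * equivalent step. */
        return 0;
}
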
471 struct dmm_txn *txn; in fill() local
488 txn = dmm_txn_init(omap_dmm, area->tcm); in fill()
489 if (IS_ERR_OR_NULL(txn)) in fill()
498 dmm_txn_append(txn, &p_area, pages, npages, roll); in fill()
503 ret = dmm_txn_commit(txn, wait); in fill()
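
The fill() matches tie the pieces together: a transaction is taken with dmm_txn_init(), one dmm_txn_append() is queued per sub-area, and the chain is handed to the hardware with dmm_txn_commit(). Restated with the hypothetical helpers from the sketches above, the lifecycle is:

/* Typical lifecycle, mirroring fill(): reset the transaction, append one
 * descriptor per area, then commit the whole chain at once. */
static int fake_fill(struct fake_txn *txn, size_t nareas, size_t words_per_area)
{
        size_t i;

        bump_txn_reset(&txn->buf);
        txn->last_pat = NULL;

        for (i = 0; i < nareas; i++)
                fake_append(txn, words_per_area);

        return fake_commit(txn);
}
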