Lines matching refs:nq (cross-reference hits for the per-queue context pointer nq, a struct nullb_queue *, in the null_blk driver)
672 static void put_tag(struct nullb_queue *nq, unsigned int tag) in put_tag() argument
674 clear_bit_unlock(tag, nq->tag_map); in put_tag()
676 if (waitqueue_active(&nq->wait)) in put_tag()
677 wake_up(&nq->wait); in put_tag()
680 static unsigned int get_tag(struct nullb_queue *nq) in get_tag() argument
685 tag = find_first_zero_bit(nq->tag_map, nq->queue_depth); in get_tag()
686 if (tag >= nq->queue_depth) in get_tag()
688 } while (test_and_set_bit_lock(tag, nq->tag_map)); in get_tag()
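The two helpers above manage a per-queue tag bitmap: get_tag() looks for a clear bit with find_first_zero_bit() and claims it with test_and_set_bit_lock(), while put_tag() releases the bit with clear_bit_unlock() and wakes anyone sleeping on nq->wait. A minimal userspace model of the same idea, with a plain linear scan over C11 atomics standing in for the kernel bitops (all model_* names belong to this sketch, not the driver):

/*
 * Userspace model of the per-queue tag bitmap managed by get_tag()/put_tag().
 * A linear scan with atomic_fetch_or() stands in for the kernel's
 * find_first_zero_bit() + test_and_set_bit_lock() loop (illustrative only).
 */
#include <limits.h>
#include <stdatomic.h>

#define MODEL_QUEUE_DEPTH   64
#define MODEL_BITS_PER_LONG (sizeof(unsigned long) * CHAR_BIT)
#define MODEL_TAG_WORDS \
        ((MODEL_QUEUE_DEPTH + MODEL_BITS_PER_LONG - 1) / MODEL_BITS_PER_LONG)

static _Atomic unsigned long model_tag_map[MODEL_TAG_WORDS];

/* get_tag() analogue: claim the first clear bit, or return -1 if all are busy. */
int model_get_tag(void)
{
        for (unsigned int tag = 0; tag < MODEL_QUEUE_DEPTH; tag++) {
                unsigned long mask = 1UL << (tag % MODEL_BITS_PER_LONG);
                unsigned long old =
                        atomic_fetch_or(&model_tag_map[tag / MODEL_BITS_PER_LONG], mask);

                if (!(old & mask))
                        return (int)tag;        /* bit was clear, the tag is ours */
        }
        return -1;                              /* depth exhausted, caller must wait */
}

/* put_tag() analogue: release the bit; the driver additionally wakes nq->wait. */
void model_put_tag(unsigned int tag)
{
        atomic_fetch_and(&model_tag_map[tag / MODEL_BITS_PER_LONG],
                         ~(1UL << (tag % MODEL_BITS_PER_LONG)));
}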
695 put_tag(cmd->nq, cmd->tag); in free_cmd()
700 static struct nullb_cmd *__alloc_cmd(struct nullb_queue *nq) in __alloc_cmd() argument
705 tag = get_tag(nq); in __alloc_cmd()
707 cmd = &nq->cmds[tag]; in __alloc_cmd()
710 cmd->nq = nq; in __alloc_cmd()
711 if (nq->dev->irqmode == NULL_IRQ_TIMER) { in __alloc_cmd()
722 static struct nullb_cmd *alloc_cmd(struct nullb_queue *nq, int can_wait) in alloc_cmd() argument
727 cmd = __alloc_cmd(nq); in alloc_cmd()
732 prepare_to_wait(&nq->wait, &wait, TASK_UNINTERRUPTIBLE); in alloc_cmd()
733 cmd = __alloc_cmd(nq); in alloc_cmd()
740 finish_wait(&nq->wait, &wait); in alloc_cmd()
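free_cmd(), __alloc_cmd() and alloc_cmd() layer command allocation on top of that bitmap: a claimed tag indexes the pre-allocated slot nq->cmds[tag], the slot records its owning queue in cmd->nq, and when irqmode is NULL_IRQ_TIMER the command's completion timer is prepared; alloc_cmd() with can_wait set retries under prepare_to_wait()/finish_wait() on nq->wait until a tag frees up. A hedged sketch of the blocking retry, with a pthread condition variable standing in for the kernel waitqueue and the model_* helpers taken from the previous sketch:

/* Userspace model of alloc_cmd()'s "get a tag or sleep on nq->wait" retry.
 * A pthread condition variable stands in for the kernel waitqueue; the driver
 * returns &nq->cmds[tag] while this model simply returns the tag. */
#include <pthread.h>

int model_get_tag(void);                /* from the tag-bitmap sketch above */
void model_put_tag(unsigned int tag);

static pthread_mutex_t wait_lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t  wait_cond = PTHREAD_COND_INITIALIZER;

/* alloc_cmd(nq, 1) analogue: block until a tag becomes available. */
static int model_alloc_tag_blocking(void)
{
        int tag;

        pthread_mutex_lock(&wait_lock);
        while ((tag = model_get_tag()) < 0)
                pthread_cond_wait(&wait_cond, &wait_lock);  /* like the prepare_to_wait() loop */
        pthread_mutex_unlock(&wait_lock);
        return tag;
}

/* free_cmd() -> put_tag() analogue: release the tag and wake one waiter,
 * mirroring the waitqueue_active()/wake_up() pair in put_tag(). */
static void model_free_tag(unsigned int tag)
{
        model_put_tag(tag);
        pthread_mutex_lock(&wait_lock);
        pthread_cond_signal(&wait_cond);
        pthread_mutex_unlock(&wait_lock);
}

The bio-mode submit path (null_submit_bio(), line 1481 in this listing) uses exactly this blocking form, alloc_cmd(nq, 1).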
746 int queue_mode = cmd->nq->dev->queue_mode; in end_cmd()
770 ktime_t kt = cmd->nq->dev->completion_nsec; in null_cmd_end_timer()
1212 struct nullb *nullb = cmd->nq->dev->nullb; in null_handle_rq()
1239 struct nullb *nullb = cmd->nq->dev->nullb; in null_handle_bio()
1280 struct nullb_device *dev = cmd->nq->dev; in null_handle_throttled()
1303 struct badblocks *bb = &cmd->nq->dev->badblocks; in null_handle_badblocks()
1318 struct nullb_device *dev = cmd->nq->dev; in null_handle_memory_backed()
1334 struct nullb_device *dev = cmd->nq->dev; in nullb_zero_read_cmd_buffer()
1361 switch (cmd->nq->dev->irqmode) { in nullb_complete_cmd()
1363 switch (cmd->nq->dev->queue_mode) { in nullb_complete_cmd()
1389 struct nullb_device *dev = cmd->nq->dev; in null_process_cmd()
1407 struct nullb_device *dev = cmd->nq->dev; in null_handle_cmd()
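From end_cmd() (line 746) down to null_handle_cmd() (line 1407), nearly every hit has the shape cmd->nq->dev->...: per-request code reaches device configuration such as queue_mode, irqmode, completion_nsec, the badblocks list and the backing nullb by walking from the command to its queue and from the queue to its device. A reduced sketch of that ownership chain (field subset chosen for illustration, not the full driver structures):

/* Reduced view of the ownership chain the hits keep walking:
 * nullb_cmd -> nullb_queue -> nullb_device -> nullb (illustrative subset). */
struct model_nullb;                             /* the live device instance */

struct model_nullb_device {
        unsigned int irqmode;                   /* NULL_IRQ_NONE/_SOFTIRQ/_TIMER */
        unsigned int queue_mode;                /* NULL_Q_BIO, NULL_Q_MQ, ... */
        unsigned long completion_nsec;          /* assigned to a ktime_t in null_cmd_end_timer() */
        struct model_nullb *nullb;              /* cmd->nq->dev->nullb in the hits */
};

struct model_nullb_queue {
        struct model_nullb_device *dev;         /* set in null_init_queue() */
        unsigned int queue_depth;
};

struct model_nullb_cmd {
        struct model_nullb_queue *nq;           /* set in __alloc_cmd()/null_queue_rq() */
        unsigned int tag;
};

/* Typical access pattern from the listing, e.g. nullb_complete_cmd(): */
static unsigned int model_cmd_irqmode(const struct model_nullb_cmd *cmd)
{
        return cmd->nq->dev->irqmode;           /* command -> queue -> device config */
}

Keeping only the nq pointer in each command keeps the per-command state small while still giving every handler a path to both the configuration (dev) and the live device (dev->nullb).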
1478 struct nullb_queue *nq = nullb_to_queue(nullb); in null_submit_bio() local
1481 cmd = alloc_cmd(nq, 1); in null_submit_bio()
1560 struct nullb_queue *nq = hctx->driver_data; in null_poll() local
1564 spin_lock(&nq->poll_lock); in null_poll()
1565 list_splice_init(&nq->poll_list, &list); in null_poll()
1566 spin_unlock(&nq->poll_lock); in null_poll()
1592 struct nullb_queue *nq = hctx->driver_data; in null_timeout_rq() local
1594 spin_lock(&nq->poll_lock); in null_timeout_rq()
1596 spin_unlock(&nq->poll_lock); in null_timeout_rq()
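null_poll() drains the per-queue poll list by splicing it onto a local list under nq->poll_lock and then completing the requests with the lock dropped; null_timeout_rq() takes the same lock so the timeout path can touch that list without racing the poll loop. A self-contained model of the splice-under-lock pattern (a singly linked list and a pthread mutex stand in for list_head/list_splice_init() and the spinlock):

/* Userspace model of null_poll()'s "splice under lock, process off lock"
 * pattern (illustrative; the driver uses list_splice_init() on a list_head). */
#include <pthread.h>
#include <stddef.h>

struct model_req {
        struct model_req *next;
};

static pthread_mutex_t poll_lock = PTHREAD_MUTEX_INITIALIZER;   /* nq->poll_lock stand-in */
static struct model_req *poll_list;                             /* nq->poll_list stand-in */

/* null_poll() analogue: detach the whole list while holding the lock,
 * then complete each request with the lock dropped. Returns the count handled. */
static int model_poll(void (*complete)(struct model_req *))
{
        struct model_req *list, *req;
        int nr = 0;

        pthread_mutex_lock(&poll_lock);
        list = poll_list;               /* splice: steal the whole chain */
        poll_list = NULL;
        pthread_mutex_unlock(&poll_lock);

        while ((req = list) != NULL) {
                list = req->next;
                complete(req);          /* done without holding poll_lock */
                nr++;
        }
        return nr;
}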
1616 struct nullb_queue *nq = hctx->driver_data; in null_queue_rq() local
1623 if (!is_poll && nq->dev->irqmode == NULL_IRQ_TIMER) { in null_queue_rq()
1629 cmd->nq = nq; in null_queue_rq()
1639 nq->requeue_selection++; in null_queue_rq()
1640 if (nq->requeue_selection & 1) in null_queue_rq()
1649 spin_lock(&nq->poll_lock); in null_queue_rq()
1650 list_add_tail(&bd->rq->queuelist, &nq->poll_list); in null_queue_rq()
1651 spin_unlock(&nq->poll_lock); in null_queue_rq()
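In null_queue_rq() the hits show the per-queue context doing several jobs at once: it is fetched from hctx->driver_data, recorded in cmd->nq, consulted for timer-mode completions, and, for poll queues, used to park the request on nq->poll_list under nq->poll_lock. The least obvious hit is nq->requeue_selection: when the driver decides to requeue, the counter's low bit alternates between reporting a busy status to the block layer and doing a driver-driven requeue. A small sketch of that alternation (the status names and the callback are this sketch's placeholders):

/* Model of the requeue alternation in null_queue_rq(): a per-queue counter's
 * low bit picks between the two ways a request goes back to the block layer. */
enum model_sts { MODEL_STS_OK, MODEL_STS_RESOURCE };

struct model_mq_queue {
        unsigned int requeue_selection;         /* nq->requeue_selection analogue */
};

static enum model_sts model_requeue(struct model_mq_queue *q,
                                    void (*explicit_requeue)(void))
{
        q->requeue_selection++;
        if (q->requeue_selection & 1)
                return MODEL_STS_RESOURCE;      /* odd: report busy, let the core retry */
        explicit_requeue();                     /* even: driver-driven requeue */
        return MODEL_STS_OK;
}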
1660 static void cleanup_queue(struct nullb_queue *nq) in cleanup_queue() argument
1662 kfree(nq->tag_map); in cleanup_queue()
1663 kfree(nq->cmds); in cleanup_queue()
1678 struct nullb_queue *nq = hctx->driver_data; in null_exit_hctx() local
1679 struct nullb *nullb = nq->dev->nullb; in null_exit_hctx()
1684 static void null_init_queue(struct nullb *nullb, struct nullb_queue *nq) in null_init_queue() argument
1686 init_waitqueue_head(&nq->wait); in null_init_queue()
1687 nq->queue_depth = nullb->queue_depth; in null_init_queue()
1688 nq->dev = nullb->dev; in null_init_queue()
1689 INIT_LIST_HEAD(&nq->poll_list); in null_init_queue()
1690 spin_lock_init(&nq->poll_lock); in null_init_queue()
1697 struct nullb_queue *nq; in null_init_hctx() local
1704 nq = &nullb->queues[hctx_idx]; in null_init_hctx()
1705 hctx->driver_data = nq; in null_init_hctx()
1706 null_init_queue(nullb, nq); in null_init_hctx()
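null_init_hctx() picks the pre-allocated entry nullb->queues[hctx_idx], stores it in hctx->driver_data, and has null_init_queue() fill in the wait queue, queue depth, device back-pointer, poll list and poll lock; null_exit_hctx() later walks nq->dev->nullb to get back to the device. A userspace model of that wiring (types and fields are illustrative stand-ins):

/* Userspace model of the hctx wiring above: the hardware-context index selects
 * a pre-allocated per-queue structure, which is initialised once and then hung
 * off the context as driver data (illustrative only). */
#include <pthread.h>
#include <stddef.h>

struct model_dev;

struct model_hw_queue {
        pthread_mutex_t poll_lock;              /* nq->poll_lock stand-in */
        void *poll_list;                        /* nq->poll_list stand-in */
        unsigned int queue_depth;
        const struct model_dev *dev;            /* back-pointer, as nq->dev */
};

struct model_dev {
        unsigned int queue_depth;
        struct model_hw_queue *queues;          /* nullb->queues analogue */
};

struct model_hctx {
        unsigned int idx;                       /* hctx_idx analogue */
        void *driver_data;                      /* hctx->driver_data analogue */
};

/* null_init_queue() + null_init_hctx() analogue. */
static int model_init_hctx(struct model_dev *dev, struct model_hctx *hctx)
{
        struct model_hw_queue *q = &dev->queues[hctx->idx];

        pthread_mutex_init(&q->poll_lock, NULL);
        q->poll_list = NULL;
        q->queue_depth = dev->queue_depth;
        q->dev = dev;
        hctx->driver_data = q;                  /* later paths recover q from the hctx */
        return 0;
}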
1788 static int setup_commands(struct nullb_queue *nq) in setup_commands() argument
1793 nq->cmds = kcalloc(nq->queue_depth, sizeof(*cmd), GFP_KERNEL); in setup_commands()
1794 if (!nq->cmds) in setup_commands()
1797 tag_size = ALIGN(nq->queue_depth, BITS_PER_LONG) / BITS_PER_LONG; in setup_commands()
1798 nq->tag_map = kcalloc(tag_size, sizeof(unsigned long), GFP_KERNEL); in setup_commands()
1799 if (!nq->tag_map) { in setup_commands()
1800 kfree(nq->cmds); in setup_commands()
1804 for (i = 0; i < nq->queue_depth; i++) { in setup_commands()
1805 cmd = &nq->cmds[i]; in setup_commands()
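setup_commands() pre-allocates everything get_tag() and __alloc_cmd() later index into: an array of queue_depth commands plus a tag bitmap of ALIGN(queue_depth, BITS_PER_LONG) / BITS_PER_LONG unsigned longs, i.e. one bit per tag rounded up to a whole word. A self-contained sketch of the same allocation and sizing, with calloc() in place of kcalloc() and a placeholder command type (the per-slot initialisation the driver performs inside the loop is not shown in the listing):

/* setup_commands() analogue: one command slot plus one bitmap bit per tag,
 * with the bitmap rounded up to whole unsigned longs (illustrative only). */
#include <limits.h>
#include <stdlib.h>

#define MODEL_BPL (sizeof(unsigned long) * CHAR_BIT)

struct model_cmd {
        struct model_queue *nq;                 /* back-pointer filled in when the slot is used */
        unsigned int tag;
};

struct model_queue {
        struct model_cmd *cmds;                 /* nq->cmds analogue */
        unsigned long *tag_map;                 /* nq->tag_map analogue */
        unsigned int queue_depth;
};

static int model_setup_commands(struct model_queue *q)
{
        /* ALIGN(depth, BITS_PER_LONG) / BITS_PER_LONG == ceil(depth / BITS_PER_LONG)
         * for the power-of-two alignment the kernel macro requires. */
        size_t tag_words = (q->queue_depth + MODEL_BPL - 1) / MODEL_BPL;

        q->cmds = calloc(q->queue_depth, sizeof(*q->cmds));
        if (!q->cmds)
                return -1;

        q->tag_map = calloc(tag_words, sizeof(unsigned long));
        if (!q->tag_map) {
                free(q->cmds);                  /* same unwind as the driver's error path */
                return -1;
        }

        for (unsigned int i = 0; i < q->queue_depth; i++)
                q->cmds[i].tag = i;             /* per-slot init; what the driver stores here is elided in the listing */
        return 0;
}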
1830 struct nullb_queue *nq; in init_driver_queues() local
1834 nq = &nullb->queues[i]; in init_driver_queues()
1836 null_init_queue(nullb, nq); in init_driver_queues()
1838 ret = setup_commands(nq); in init_driver_queues()
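init_driver_queues() drives the same setup per queue for the bio-mode path: for each entry of nullb->queues it runs null_init_queue() and then setup_commands(), bailing out on the first failure; cleanup_queue() (lines 1662-1663) is the matching teardown for nq->tag_map and nq->cmds. Continuing the previous sketch with a cleanup analogue and the loop; the in-loop unwind on failure is this sketch's choice, the listing does not show where the driver performs it:

/* Continuing the setup_commands sketch above: cleanup_queue() analogue plus an
 * init_driver_queues()-style loop that runs the setup once per queue. */

/* cleanup_queue() analogue: free the tag bitmap and the command slots. */
static void model_cleanup_queue(struct model_queue *q)
{
        free(q->tag_map);
        free(q->cmds);
}

static int model_init_driver_queues(struct model_queue *queues, unsigned int nr)
{
        for (unsigned int i = 0; i < nr; i++) {
                if (model_setup_commands(&queues[i]) < 0) {
                        /* Unwind the queues already set up; the exact point at
                         * which the driver does this is not shown in the listing. */
                        while (i--)
                                model_cleanup_queue(&queues[i]);
                        return -1;
                }
        }
        return 0;
}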