Lines matching refs: nq (the struct nullb_queue pointer used throughout the null_blk driver)

614 static void put_tag(struct nullb_queue *nq, unsigned int tag)  in put_tag()  argument
616 clear_bit_unlock(tag, nq->tag_map); in put_tag()
618 if (waitqueue_active(&nq->wait)) in put_tag()
619 wake_up(&nq->wait); in put_tag()
622 static unsigned int get_tag(struct nullb_queue *nq) in get_tag() argument
627 tag = find_first_zero_bit(nq->tag_map, nq->queue_depth); in get_tag()
628 if (tag >= nq->queue_depth) in get_tag()
630 } while (test_and_set_bit_lock(tag, nq->tag_map)); in get_tag()
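
Taken together, the references above describe a small lock-free tag allocator over a per-queue bitmap. A minimal sketch of how put_tag() and get_tag() cooperate, reconstructed from the lines shown (the -1U "no tag free" sentinel is an assumption, not visible in the listing):

	/* Release a tag and wake anyone sleeping in alloc_cmd(). */
	static void put_tag(struct nullb_queue *nq, unsigned int tag)
	{
		clear_bit_unlock(tag, nq->tag_map);

		if (waitqueue_active(&nq->wait))
			wake_up(&nq->wait);
	}

	/* Claim a free tag, retrying if another CPU grabs it first. */
	static unsigned int get_tag(struct nullb_queue *nq)
	{
		unsigned int tag;

		do {
			tag = find_first_zero_bit(nq->tag_map, nq->queue_depth);
			if (tag >= nq->queue_depth)
				return -1U;	/* assumed sentinel for "queue full" */
		} while (test_and_set_bit_lock(tag, nq->tag_map));

		return tag;
	}

clear_bit_unlock() and test_and_set_bit_lock() give release/acquire ordering, so the bitmap needs no extra lock; the waitqueue_active() check merely avoids a needless wake_up() when nobody is blocked.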
637 put_tag(cmd->nq, cmd->tag); in free_cmd()
642 static struct nullb_cmd *__alloc_cmd(struct nullb_queue *nq) in __alloc_cmd() argument
647 tag = get_tag(nq); in __alloc_cmd()
649 cmd = &nq->cmds[tag]; in __alloc_cmd()
652 cmd->nq = nq; in __alloc_cmd()
653 if (nq->dev->irqmode == NULL_IRQ_TIMER) { in __alloc_cmd()
664 static struct nullb_cmd *alloc_cmd(struct nullb_queue *nq, int can_wait) in alloc_cmd() argument
669 cmd = __alloc_cmd(nq); in alloc_cmd()
674 prepare_to_wait(&nq->wait, &wait, TASK_UNINTERRUPTIBLE); in alloc_cmd()
675 cmd = __alloc_cmd(nq); in alloc_cmd()
682 finish_wait(&nq->wait, &wait); in alloc_cmd()
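
The command allocation path builds directly on that tag allocator: __alloc_cmd() converts a free tag into a struct nullb_cmd from the pre-allocated nq->cmds[] array, and alloc_cmd() optionally sleeps on nq->wait until put_tag() frees a tag. A sketch under the same -1U assumption, with the hrtimer setup for NULL_IRQ_TIMER mode abbreviated:

	static struct nullb_cmd *__alloc_cmd(struct nullb_queue *nq)
	{
		struct nullb_cmd *cmd;
		unsigned int tag;

		tag = get_tag(nq);
		if (tag != -1U) {
			cmd = &nq->cmds[tag];
			cmd->tag = tag;
			cmd->nq = nq;
			if (nq->dev->irqmode == NULL_IRQ_TIMER) {
				/* timer-driven completion: hrtimer setup elided */
			}
			return cmd;
		}

		return NULL;
	}

	static struct nullb_cmd *alloc_cmd(struct nullb_queue *nq, int can_wait)
	{
		struct nullb_cmd *cmd;
		DEFINE_WAIT(wait);

		cmd = __alloc_cmd(nq);
		if (cmd || !can_wait)
			return cmd;

		/* Classic prepare_to_wait()/finish_wait() sleep loop. */
		do {
			prepare_to_wait(&nq->wait, &wait, TASK_UNINTERRUPTIBLE);
			cmd = __alloc_cmd(nq);
			if (cmd)
				break;
			io_schedule();
		} while (1);

		finish_wait(&nq->wait, &wait);
		return cmd;
	}

The io_schedule() call is an assumption here (the listing only shows the prepare/finish pair); it is what actually yields the CPU between retries, with the wake_up() in put_tag() as the matching wakeup side.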
688 int queue_mode = cmd->nq->dev->queue_mode; in end_cmd()
712 ktime_t kt = cmd->nq->dev->completion_nsec; in null_cmd_end_timer()
1149 struct nullb *nullb = cmd->nq->dev->nullb; in null_handle_rq()
1183 struct nullb *nullb = cmd->nq->dev->nullb; in null_handle_bio()
1232 struct nullb_device *dev = cmd->nq->dev; in null_handle_throttled()
1255 struct badblocks *bb = &cmd->nq->dev->badblocks; in null_handle_badblocks()
1268 struct nullb_device *dev = cmd->nq->dev; in null_handle_memory_backed()
1281 struct nullb_device *dev = cmd->nq->dev; in nullb_zero_read_cmd_buffer()
1308 switch (cmd->nq->dev->irqmode) { in nullb_complete_cmd()
1310 switch (cmd->nq->dev->queue_mode) { in nullb_complete_cmd()
1336 struct nullb_device *dev = cmd->nq->dev; in null_process_cmd()
1354 struct nullb_device *dev = cmd->nq->dev; in null_handle_cmd()
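
Most of the remaining per-command references follow one pattern: a command carries an nq back-pointer and the queue carries a dev pointer, so every handler reaches the device configuration as cmd->nq->dev (queue_mode, irqmode, completion_nsec, badblocks, and so on). A trimmed-down sketch of that relationship, limited to the fields visible in this listing (the real structures carry more members):

	struct nullb_queue {
		struct nullb_device *dev;	/* per-device configuration */
		struct nullb_cmd *cmds;		/* pre-allocated command array */
		unsigned long *tag_map;		/* free/used bitmap over cmds[] */
		unsigned int queue_depth;
		wait_queue_head_t wait;		/* sleepers in alloc_cmd() */
		unsigned int requeue_selection;
	};

	struct nullb_cmd {
		struct nullb_queue *nq;		/* owning queue */
		unsigned int tag;		/* index into nq->cmds[] and tag_map */
	};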
1425 struct nullb_queue *nq = nullb_to_queue(nullb); in null_submit_bio() local
1428 cmd = alloc_cmd(nq, 1); in null_submit_bio()
1476 struct nullb_queue *nq = hctx->driver_data; in null_queue_rq() local
1482 if (nq->dev->irqmode == NULL_IRQ_TIMER) { in null_queue_rq()
1488 cmd->nq = nq; in null_queue_rq()
1498 nq->requeue_selection++; in null_queue_rq()
1499 if (nq->requeue_selection & 1) in null_queue_rq()
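
The requeue_selection counter in null_queue_rq() deliberately alternates between two ways of pushing a request back when requeue testing is enabled, so both the core resource-busy path and the driver-driven requeue get exercised. A hedged fragment showing only that branch (the trigger condition and the rest of the request setup are omitted; this sits inside null_queue_rq()):

	/* Alternate between the core BUSY path and a driver-driven requeue. */
	nq->requeue_selection++;
	if (nq->requeue_selection & 1)
		return BLK_STS_RESOURCE;	/* blk-mq will re-dispatch later */

	blk_mq_requeue_request(rq, true);	/* requeue it ourselves now */
	return BLK_STS_OK;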
1512 static void cleanup_queue(struct nullb_queue *nq) in cleanup_queue() argument
1514 kfree(nq->tag_map); in cleanup_queue()
1515 kfree(nq->cmds); in cleanup_queue()
1530 struct nullb_queue *nq = hctx->driver_data; in null_exit_hctx() local
1531 struct nullb *nullb = nq->dev->nullb; in null_exit_hctx()
1536 static void null_init_queue(struct nullb *nullb, struct nullb_queue *nq) in null_init_queue() argument
1538 init_waitqueue_head(&nq->wait); in null_init_queue()
1539 nq->queue_depth = nullb->queue_depth; in null_init_queue()
1540 nq->dev = nullb->dev; in null_init_queue()
1547 struct nullb_queue *nq; in null_init_hctx() local
1554 nq = &nullb->queues[hctx_idx]; in null_init_hctx()
1555 hctx->driver_data = nq; in null_init_hctx()
1556 null_init_queue(nullb, nq); in null_init_hctx()
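
Per-hctx setup on the blk-mq side is thin: null_init_hctx() picks the pre-allocated nullb_queue that matches the hardware context index, stashes it in hctx->driver_data, and null_init_queue() fills in the fields the allocator and handlers rely on. A sketch restricted to the references listed (how nullb is recovered from the hctx, and any extra bookkeeping in the real function, are assumptions/omissions):

	static void null_init_queue(struct nullb *nullb, struct nullb_queue *nq)
	{
		init_waitqueue_head(&nq->wait);
		nq->queue_depth = nullb->queue_depth;
		nq->dev = nullb->dev;
	}

	static int null_init_hctx(struct blk_mq_hw_ctx *hctx, void *driver_data,
				  unsigned int hctx_idx)
	{
		struct nullb *nullb = hctx->queue->queuedata;	/* assumed lookup */
		struct nullb_queue *nq;

		nq = &nullb->queues[hctx_idx];
		hctx->driver_data = nq;
		null_init_queue(nullb, nq);

		return 0;
	}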
1631 static int setup_commands(struct nullb_queue *nq) in setup_commands() argument
1636 nq->cmds = kcalloc(nq->queue_depth, sizeof(*cmd), GFP_KERNEL); in setup_commands()
1637 if (!nq->cmds) in setup_commands()
1640 tag_size = ALIGN(nq->queue_depth, BITS_PER_LONG) / BITS_PER_LONG; in setup_commands()
1641 nq->tag_map = kcalloc(tag_size, sizeof(unsigned long), GFP_KERNEL); in setup_commands()
1642 if (!nq->tag_map) { in setup_commands()
1643 kfree(nq->cmds); in setup_commands()
1647 for (i = 0; i < nq->queue_depth; i++) { in setup_commands()
1648 cmd = &nq->cmds[i]; in setup_commands()
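
setup_commands() allocates the per-queue command array and the matching tag bitmap, sizing the bitmap in whole unsigned longs so the bit helpers above can operate on it; cleanup_queue() in the lines above is its mirror image, freeing tag_map and cmds. A sketch reconstructed from the references (the per-command initial tag value is an assumption):

	static int setup_commands(struct nullb_queue *nq)
	{
		struct nullb_cmd *cmd;
		int i, tag_size;

		nq->cmds = kcalloc(nq->queue_depth, sizeof(*cmd), GFP_KERNEL);
		if (!nq->cmds)
			return -ENOMEM;

		/* One bit per command, rounded up to whole unsigned longs. */
		tag_size = ALIGN(nq->queue_depth, BITS_PER_LONG) / BITS_PER_LONG;
		nq->tag_map = kcalloc(tag_size, sizeof(unsigned long), GFP_KERNEL);
		if (!nq->tag_map) {
			kfree(nq->cmds);
			return -ENOMEM;
		}

		for (i = 0; i < nq->queue_depth; i++) {
			cmd = &nq->cmds[i];
			cmd->tag = -1U;	/* assumed "unallocated" marker */
		}

		return 0;
	}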
1669 struct nullb_queue *nq; in init_driver_queues() local
1673 nq = &nullb->queues[i]; in init_driver_queues()
1675 null_init_queue(nullb, nq); in init_driver_queues()
1677 ret = setup_commands(nq); in init_driver_queues()
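
init_driver_queues() (the bio-based setup path) simply runs those two steps for every queue up front. A sketch assuming the loop bound is the configured number of submit queues and that the caller handles teardown on failure:

	static int init_driver_queues(struct nullb *nullb)
	{
		struct nullb_queue *nq;
		int i, ret;

		for (i = 0; i < nullb->dev->submit_queues; i++) {	/* assumed bound */
			nq = &nullb->queues[i];

			null_init_queue(nullb, nq);

			ret = setup_commands(nq);
			if (ret)
				return ret;
		}
		return 0;
	}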