
Searched refs:queue_flags (Results 1 – 21 of 21) sorted by relevance

/OK3568_Linux_fs/kernel/include/linux/
blkdev.h
  441  unsigned long queue_flags; member
  653  #define blk_queue_stopped(q) test_bit(QUEUE_FLAG_STOPPED, &(q)->queue_flags)
  654  #define blk_queue_dying(q) test_bit(QUEUE_FLAG_DYING, &(q)->queue_flags)
  655  #define blk_queue_dead(q) test_bit(QUEUE_FLAG_DEAD, &(q)->queue_flags)
  656  #define blk_queue_init_done(q) test_bit(QUEUE_FLAG_INIT_DONE, &(q)->queue_flags)
  657  #define blk_queue_nomerges(q) test_bit(QUEUE_FLAG_NOMERGES, &(q)->queue_flags)
  659  test_bit(QUEUE_FLAG_NOXMERGES, &(q)->queue_flags)
  660  #define blk_queue_nonrot(q) test_bit(QUEUE_FLAG_NONROT, &(q)->queue_flags)
  662  test_bit(QUEUE_FLAG_STABLE_WRITES, &(q)->queue_flags)
  663  #define blk_queue_io_stat(q) test_bit(QUEUE_FLAG_IO_STAT, &(q)->queue_flags)
  [all …]
blk-mq.h
  541  test_bit(QUEUE_FLAG_FAIL_IO, &q->queue_flags)) in blk_should_fake_timeout()
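The blk_queue_*() predicates above are the intended way to read request_queue::queue_flags; callers rarely test the bitmap directly. A minimal sketch of a driver-side check built only on the macros listed above (report_queue_caps is a hypothetical helper, not part of this tree):

    #include <linux/blkdev.h>
    #include <linux/printk.h>

    /* Hypothetical helper: log a few queue properties using the
     * predicate macros from include/linux/blkdev.h listed above. */
    static void report_queue_caps(struct request_queue *q)
    {
            pr_info("non-rotational: %d\n", blk_queue_nonrot(q));
            pr_info("io accounting:  %d\n", blk_queue_io_stat(q));
            pr_info("merging off:    %d\n", blk_queue_nomerges(q));
    }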
/OK3568_Linux_fs/kernel/block/
blk-sysfs.c
  266  bit = test_bit(QUEUE_FLAG_##flag, &q->queue_flags); \
  347  bool set = test_bit(QUEUE_FLAG_SAME_COMP, &q->queue_flags); in queue_rq_affinity_show()
  348  bool force = test_bit(QUEUE_FLAG_SAME_FORCE, &q->queue_flags); in queue_rq_affinity_show()
  414  return queue_var_show(test_bit(QUEUE_FLAG_POLL, &q->queue_flags), page); in queue_poll_show()
  513  if (test_bit(QUEUE_FLAG_WC, &q->queue_flags)) in queue_wc_show()
  543  return sprintf(page, "%u\n", test_bit(QUEUE_FLAG_FUA, &q->queue_flags)); in queue_fua_show()
  787  if (test_bit(QUEUE_FLAG_POLL_STATS, &q->queue_flags)) in blk_release_queue()
blk-mq-tag.c
  30  if (!test_bit(QUEUE_FLAG_HCTX_ACTIVE, &q->queue_flags) && in __blk_mq_tag_busy()
  31  !test_and_set_bit(QUEUE_FLAG_HCTX_ACTIVE, &q->queue_flags)) in __blk_mq_tag_busy()
  64  &q->queue_flags)) in __blk_mq_tag_idle()
blk-timeout.c
  43  int set = test_bit(QUEUE_FLAG_FAIL_IO, &disk->queue->queue_flags); in part_timeout_show()
blk-core.c
  88  set_bit(flag, &q->queue_flags); in blk_queue_flag_set()
  99  clear_bit(flag, &q->queue_flags); in blk_queue_flag_clear()
  113  return test_and_set_bit(flag, &q->queue_flags); in blk_queue_flag_test_and_set()
  849  !test_bit(QUEUE_FLAG_WC, &q->queue_flags)) { in submit_bio_checks()
  857  if (!test_bit(QUEUE_FLAG_POLL, &q->queue_flags)) in submit_bio_checks()
blk-mq.h
  309  if (!test_bit(QUEUE_FLAG_HCTX_ACTIVE, &q->queue_flags)) in hctx_may_queue()
blk-flush.c
  388  unsigned long fflags = q->queue_flags; /* may change, cache */ in blk_insert_flush()
blk-mq.c
  655  !test_bit(QUEUE_FLAG_SAME_COMP, &rq->q->queue_flags)) in blk_mq_complete_need_ipi()
  660  (!test_bit(QUEUE_FLAG_SAME_FORCE, &rq->q->queue_flags) && in blk_mq_complete_need_ipi()
  742  if (test_bit(QUEUE_FLAG_STATS, &q->queue_flags)) { in blk_mq_start_request()
  3342  q->queue_flags |= QUEUE_FLAG_MQ_DEFAULT; in blk_mq_init_allocated_queue()
  3823  if (test_bit(QUEUE_FLAG_POLL_STATS, &q->queue_flags) || in blk_poll_stats_enable()
  3836  if (!test_bit(QUEUE_FLAG_POLL_STATS, &q->queue_flags) || in blk_mq_poll_stats_start()
  3982  !test_bit(QUEUE_FLAG_POLL, &q->queue_flags)) in blk_poll()
blk-settings.c
  793  wbt_set_write_cache(q, test_bit(QUEUE_FLAG_WC, &q->queue_flags)); in blk_queue_write_cache()
blk-wbt.c
  841  rwb->wc = test_bit(QUEUE_FLAG_WC, &q->queue_flags); in wbt_init()
blk-mq-debugfs.c
  141  blk_flags_show(m, q->queue_flags, blk_queue_flag_name, in queue_state_show()
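The blk-core.c hits at lines 88, 99, and 113 are the exported setters; drivers are expected to go through blk_queue_flag_set()/blk_queue_flag_clear() rather than open-coding set_bit()/clear_bit() on q->queue_flags. A short sketch of typical queue setup using only those helpers (my_driver_setup_queue is a hypothetical function):

    #include <linux/blkdev.h>

    /* Hypothetical init-time sketch: mark the queue non-rotational and
     * keep it out of the entropy pool, via the helpers defined in
     * block/blk-core.c rather than raw bit operations. */
    static void my_driver_setup_queue(struct request_queue *q)
    {
            blk_queue_flag_set(QUEUE_FLAG_NONROT, q);
            blk_queue_flag_clear(QUEUE_FLAG_ADD_RANDOM, q);
    }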
/OK3568_Linux_fs/kernel/drivers/target/
target_core_iblock.c
  702  if (test_bit(QUEUE_FLAG_FUA, &q->queue_flags)) { in iblock_execute_rw()
  705  else if (!test_bit(QUEUE_FLAG_WC, &q->queue_flags)) in iblock_execute_rw()
  858  return test_bit(QUEUE_FLAG_WC, &q->queue_flags); in iblock_get_write_cache()
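The iblock hits show the common write-cache/FUA decision: only request FUA when the queue advertises QUEUE_FLAG_FUA, and force it when there is no volatile write cache (QUEUE_FLAG_WC clear). A small sketch of that logic, with want_fua standing in for the caller's explicit FUA request (hypothetical helper, modelled on iblock_execute_rw()):

    #include <linux/blkdev.h>

    /* Hypothetical: pick bio op flags for a write based on the queue's
     * FUA/write-cache capabilities, mirroring iblock_execute_rw(). */
    static unsigned int pick_write_opf(struct request_queue *q, bool want_fua)
    {
            unsigned int opf = REQ_OP_WRITE;

            if (test_bit(QUEUE_FLAG_FUA, &q->queue_flags) &&
                (want_fua || !test_bit(QUEUE_FLAG_WC, &q->queue_flags)))
                    opf |= REQ_FUA;

            return opf;
    }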
/OK3568_Linux_fs/kernel/kernel/sched/
core.c
  5481  int queue_flags = DEQUEUE_SAVE | DEQUEUE_MOVE | DEQUEUE_NOCLOCK; in __sched_setscheduler() local
  5685  queue_flags &= ~DEQUEUE_MOVE; in __sched_setscheduler()
  5691  dequeue_task(rq, p, queue_flags); in __sched_setscheduler()
  5710  queue_flags |= ENQUEUE_HEAD; in __sched_setscheduler()
  5712  enqueue_task(rq, p, queue_flags); in __sched_setscheduler()
  7960  int queued, running, queue_flags = in sched_move_task() local
  7972  dequeue_task(rq, tsk, queue_flags); in sched_move_task()
  7979  enqueue_task(rq, tsk, queue_flags); in sched_move_task()
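Note that the kernel/sched/core.c matches above are an unrelated symbol: queue_flags there is a local int carrying DEQUEUE_*/ENQUEUE_* task-queueing flags passed to dequeue_task()/enqueue_task(), not the request_queue::queue_flags bitmap matched everywhere else in this list.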
/OK3568_Linux_fs/kernel/drivers/block/xen-blkback/
xenbus.c
  519  if (q && test_bit(QUEUE_FLAG_WC, &q->queue_flags)) in xen_vbd_create()
/OK3568_Linux_fs/kernel/drivers/md/
raid5-ppl.c
  1328  if (test_bit(QUEUE_FLAG_WC, &q->queue_flags)) in ppl_init_child_log()
dm-table.c
  1874  return q && (q->queue_flags & flush); in device_flush_capable()
raid5-cache.c
  3099  log->need_cache_flush = test_bit(QUEUE_FLAG_WC, &q->queue_flags) != 0; in r5l_init_log()
/OK3568_Linux_fs/kernel/drivers/block/
loop.c
  1219  if (test_bit(QUEUE_FLAG_WC, &lo->lo_queue->queue_flags)) in __loop_clr_fd()
/OK3568_Linux_fs/kernel/fs/btrfs/
disk-io.c
  3732  if (!test_bit(QUEUE_FLAG_WC, &q->queue_flags)) in write_dev_flush()
/OK3568_Linux_fs/kernel/drivers/nvme/host/
core.c
  921  WARN_ON_ONCE(!test_bit(QUEUE_FLAG_POLL, &q->queue_flags)); in nvme_execute_rq_polled()
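Both the blk-mq.c blk_poll() hit and this NVMe hit guard polled completion on QUEUE_FLAG_POLL. A trivial sketch of that check (hypothetical wrapper, not part of this tree):

    #include <linux/blkdev.h>

    /* Hypothetical wrapper: polled I/O is only valid when the queue has
     * poll queues enabled, i.e. QUEUE_FLAG_POLL is set. */
    static bool queue_supports_polling(struct request_queue *q)
    {
            return test_bit(QUEUE_FLAG_POLL, &q->queue_flags);
    }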