Lines Matching refs:bdev_get_queue
1021 static inline struct request_queue *bdev_get_queue(struct block_device *bdev) in bdev_get_queue() function
1291 return &bdev_get_queue(bdev)->limits; in bdev_limits()
1341 return queue_emulates_zone_append(bdev_get_queue(bdev)); in bdev_emulates_zone_append()
1352 return queue_max_segments(bdev_get_queue(bdev)); in bdev_max_segments()
1369 return queue_logical_block_size(bdev_get_queue(bdev)); in bdev_logical_block_size()
1379 return queue_physical_block_size(bdev_get_queue(bdev)); in bdev_physical_block_size()
1389 return queue_io_min(bdev_get_queue(bdev)); in bdev_io_min()
1399 return queue_io_opt(bdev_get_queue(bdev)); in bdev_io_opt()
1411 return queue_zone_write_granularity(bdev_get_queue(bdev)); in bdev_zone_write_granularity()
1446 return blk_queue_nonrot(bdev_get_queue(bdev)); in bdev_nonrot()
1456 struct request_queue *q = bdev_get_queue(bdev); in bdev_stable_writes()
1472 return blk_queue_write_cache(bdev_get_queue(bdev)); in bdev_write_cache()
1487 return blk_queue_is_zoned(bdev_get_queue(bdev)); in bdev_is_zoned()
1497 struct request_queue *q = bdev_get_queue(bdev); in bdev_zone_sectors()
1590 return queue_dma_alignment(bdev_get_queue(bdev)); in bdev_dma_alignment()
1862 return queue_atomic_write_unit_min_bytes(bdev_get_queue(bdev)); in bdev_atomic_write_unit_min_bytes()
1870 return queue_atomic_write_unit_max_bytes(bdev_get_queue(bdev)); in bdev_atomic_write_unit_max_bytes()
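
The listing shows one consistent pattern: bdev_get_queue() is the single accessor that maps a struct block_device to its struct request_queue, and nearly every bdev_*() helper is a one-line wrapper that resolves the queue and forwards to the matching queue_*() helper (or reads q->limits directly, as bdev_limits() does). Below is a minimal userspace sketch of that delegation pattern. The struct layouts are simplified stand-ins, not the real <linux/blkdev.h> definitions, and the bd_queue field used here reflects the assumption that recent kernels cache the queue pointer in the block_device itself (older kernels reached it via bdev->bd_disk->queue).

    /* Simplified model of the bdev_*() -> queue_*() delegation pattern.
     * Stand-in structs only; real definitions live in <linux/blkdev.h>. */
    #include <stdio.h>

    struct queue_limits {
            unsigned int logical_block_size;
            unsigned int io_min;
    };

    struct request_queue {
            struct queue_limits limits;
    };

    struct block_device {
            struct request_queue *bd_queue; /* cached queue pointer (assumed layout) */
    };

    /* Single accessor: every other helper funnels through this. */
    static inline struct request_queue *bdev_get_queue(struct block_device *bdev)
    {
            return bdev->bd_queue;
    }

    /* queue_*() helpers read one limit off the queue... */
    static inline unsigned int queue_logical_block_size(const struct request_queue *q)
    {
            return q->limits.logical_block_size;
    }

    /* ...and the bdev_*() helpers resolve the queue first, mirroring
     * bdev_logical_block_size() in the listing above. */
    static inline unsigned int bdev_logical_block_size(struct block_device *bdev)
    {
            return queue_logical_block_size(bdev_get_queue(bdev));
    }

    int main(void)
    {
            struct request_queue q = {
                    .limits = { .logical_block_size = 512, .io_min = 512 },
            };
            struct block_device bdev = { .bd_queue = &q };

            printf("logical block size: %u\n", bdev_logical_block_size(&bdev));
            return 0;
    }

The point of the indirection is that callers query limits per block device without caring how the queue pointer is stored; if the way a block_device reaches its request_queue changes, only bdev_get_queue() has to change, while every wrapper in the listing keeps the same signature.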