Lines matching refs: io_end

56 struct ext4_io_end_vec *ext4_alloc_io_end_vec(ext4_io_end_t *io_end)  in ext4_alloc_io_end_vec()  argument
64 list_add_tail(&io_end_vec->list, &io_end->list_vec); in ext4_alloc_io_end_vec()
68 static void ext4_free_io_end_vec(ext4_io_end_t *io_end) in ext4_free_io_end_vec() argument
72 if (list_empty(&io_end->list_vec)) in ext4_free_io_end_vec()
74 list_for_each_entry_safe(io_end_vec, tmp, &io_end->list_vec, list) { in ext4_free_io_end_vec()
80 struct ext4_io_end_vec *ext4_last_io_end_vec(ext4_io_end_t *io_end) in ext4_last_io_end_vec() argument
82 BUG_ON(list_empty(&io_end->list_vec)); in ext4_last_io_end_vec()
83 return list_last_entry(&io_end->list_vec, struct ext4_io_end_vec, list); in ext4_last_io_end_vec()
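The three helpers above, apparently excerpted from fs/ext4/page-io.c, manage the per-io_end list of unwritten-extent ranges: ext4_alloc_io_end_vec() appends a new vector to io_end->list_vec (line 64), ext4_free_io_end_vec() tears the list down with the _safe iterator because entries are freed while walking (line 74), and ext4_last_io_end_vec() returns the most recently added entry (line 83). A minimal caller sketch follows; record_unwritten_range() is hypothetical, and the offset/size fields and the ERR_PTR()-on-failure convention are assumed from mainline rather than shown above.

    /* Hypothetical helper: record one file range for later extent conversion. */
    static int record_unwritten_range(ext4_io_end_t *io_end, loff_t off, ssize_t len)
    {
        struct ext4_io_end_vec *io_end_vec = ext4_alloc_io_end_vec(io_end);

        if (IS_ERR(io_end_vec))          /* assumed: ERR_PTR(-ENOMEM) on allocation failure */
            return PTR_ERR(io_end_vec);
        io_end_vec->offset = off;        /* assumed field: start of the unwritten range */
        io_end_vec->size = len;          /* assumed field: length of the range in bytes */
        return 0;
    }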
153 static void ext4_release_io_end(ext4_io_end_t *io_end) in ext4_release_io_end() argument
157 BUG_ON(!list_empty(&io_end->list)); in ext4_release_io_end()
158 BUG_ON(io_end->flag & EXT4_IO_END_UNWRITTEN); in ext4_release_io_end()
159 WARN_ON(io_end->handle); in ext4_release_io_end()
161 for (bio = io_end->bio; bio; bio = next_bio) { in ext4_release_io_end()
166 ext4_free_io_end_vec(io_end); in ext4_release_io_end()
167 kmem_cache_free(io_end_cachep, io_end); in ext4_release_io_end()
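ext4_release_io_end() is the final teardown once the last reference is dropped: the io_end must no longer sit on a conversion list (line 157), must not still be flagged EXT4_IO_END_UNWRITTEN (line 158), and should not own a journal handle (line 159). The loop at line 161 then releases every completed bio. A sketch of that walk, assuming (as line 362 suggests) that completed bios are chained through bi_private with io_end->bio as the head:

    struct bio *bio, *next_bio;

    for (bio = io_end->bio; bio; bio = next_bio) {
        next_bio = bio->bi_private;     /* assumed link: saved before the bio is dropped */
        bio_put(bio);                   /* release the completed bio */
    }
    ext4_free_io_end_vec(io_end);               /* free any recorded ranges (line 166) */
    kmem_cache_free(io_end_cachep, io_end);     /* return the io_end to its slab (line 167) */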
178 static int ext4_end_io_end(ext4_io_end_t *io_end) in ext4_end_io_end() argument
180 struct inode *inode = io_end->inode; in ext4_end_io_end()
181 handle_t *handle = io_end->handle; in ext4_end_io_end()
186 io_end, inode->i_ino, io_end->list.next, io_end->list.prev); in ext4_end_io_end()
188 io_end->handle = NULL; /* Following call will use up the handle */ in ext4_end_io_end()
189 ret = ext4_convert_unwritten_io_end_vec(handle, io_end); in ext4_end_io_end()
196 ext4_clear_io_unwritten_flag(io_end); in ext4_end_io_end()
197 ext4_release_io_end(io_end); in ext4_end_io_end()
205 ext4_io_end_t *io_end, *io_end0, *io_end1; in dump_completed_IO() local
211 list_for_each_entry(io_end, head, list) { in dump_completed_IO()
212 cur = &io_end->list; in dump_completed_IO()
219 io_end, inode->i_ino, io_end0, io_end1); in dump_completed_IO()
225 static void ext4_add_complete_io(ext4_io_end_t *io_end) in ext4_add_complete_io() argument
227 struct ext4_inode_info *ei = EXT4_I(io_end->inode); in ext4_add_complete_io()
228 struct ext4_sb_info *sbi = EXT4_SB(io_end->inode->i_sb); in ext4_add_complete_io()
233 WARN_ON(!(io_end->flag & EXT4_IO_END_UNWRITTEN)); in ext4_add_complete_io()
234 WARN_ON(!io_end->handle && sbi->s_journal); in ext4_add_complete_io()
239 list_add_tail(&io_end->list, &ei->i_rsv_conversion_list); in ext4_add_complete_io()
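ext4_add_complete_io() is the producer side of deferred conversion: an io_end whose unwritten extents still need converting is parked on the inode's i_rsv_conversion_list (line 239) and a worker is kicked to drain it. The sketch below shows the usual "queue the work only when the list was empty" idiom; the i_completed_io_lock spinlock, the rsv_conversion_wq workqueue and the i_rsv_conversion_work item are assumed from mainline and do not appear in the lines above.

    unsigned long flags;

    spin_lock_irqsave(&ei->i_completed_io_lock, flags);     /* assumed lock name */
    if (list_empty(&ei->i_rsv_conversion_list))             /* first entry arms the worker */
        queue_work(sbi->rsv_conversion_wq,                  /* assumed workqueue name */
                   &ei->i_rsv_conversion_work);             /* assumed work item name */
    list_add_tail(&io_end->list, &ei->i_rsv_conversion_list);
    spin_unlock_irqrestore(&ei->i_completed_io_lock, flags);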
246 ext4_io_end_t *io_end; in ext4_do_flush_completed_IO() local
258 io_end = list_entry(unwritten.next, ext4_io_end_t, list); in ext4_do_flush_completed_IO()
259 BUG_ON(!(io_end->flag & EXT4_IO_END_UNWRITTEN)); in ext4_do_flush_completed_IO()
260 list_del_init(&io_end->list); in ext4_do_flush_completed_IO()
262 err = ext4_end_io_end(io_end); in ext4_do_flush_completed_IO()
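ext4_do_flush_completed_IO() is the consumer: it detaches the whole per-inode list under the lock and only then converts each io_end, so extent conversion (which may block on the journal) never runs with the spinlock held. A minimal sketch of that drain pattern; example_flush_conversion_list() is hypothetical and the i_completed_io_lock name is again assumed.

    static int example_flush_conversion_list(struct inode *inode)
    {
        struct ext4_inode_info *ei = EXT4_I(inode);
        ext4_io_end_t *io_end;
        unsigned long flags;
        LIST_HEAD(unwritten);
        int err, ret = 0;

        /* detach everything queued so far; new completions start a fresh list */
        spin_lock_irqsave(&ei->i_completed_io_lock, flags);
        list_replace_init(&ei->i_rsv_conversion_list, &unwritten);
        spin_unlock_irqrestore(&ei->i_completed_io_lock, flags);

        while (!list_empty(&unwritten)) {
            io_end = list_entry(unwritten.next, ext4_io_end_t, list);
            list_del_init(&io_end->list);
            err = ext4_end_io_end(io_end);      /* converts the extents, then releases */
            if (err && !ret)
                ret = err;                      /* report the first failure */
        }
        return ret;
    }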
281 ext4_io_end_t *io_end = kmem_cache_zalloc(io_end_cachep, flags); in ext4_init_io_end() local
283 if (io_end) { in ext4_init_io_end()
284 io_end->inode = inode; in ext4_init_io_end()
285 INIT_LIST_HEAD(&io_end->list); in ext4_init_io_end()
286 INIT_LIST_HEAD(&io_end->list_vec); in ext4_init_io_end()
287 atomic_set(&io_end->count, 1); in ext4_init_io_end()
289 return io_end; in ext4_init_io_end()
292 void ext4_put_io_end_defer(ext4_io_end_t *io_end) in ext4_put_io_end_defer() argument
294 if (atomic_dec_and_test(&io_end->count)) { in ext4_put_io_end_defer()
295 if (!(io_end->flag & EXT4_IO_END_UNWRITTEN) || in ext4_put_io_end_defer()
296 list_empty(&io_end->list_vec)) { in ext4_put_io_end_defer()
297 ext4_release_io_end(io_end); in ext4_put_io_end_defer()
300 ext4_add_complete_io(io_end); in ext4_put_io_end_defer()
304 int ext4_put_io_end(ext4_io_end_t *io_end) in ext4_put_io_end() argument
308 if (atomic_dec_and_test(&io_end->count)) { in ext4_put_io_end()
309 if (io_end->flag & EXT4_IO_END_UNWRITTEN) { in ext4_put_io_end()
310 err = ext4_convert_unwritten_io_end_vec(io_end->handle, in ext4_put_io_end()
311 io_end); in ext4_put_io_end()
312 io_end->handle = NULL; in ext4_put_io_end()
313 ext4_clear_io_unwritten_flag(io_end); in ext4_put_io_end()
315 ext4_release_io_end(io_end); in ext4_put_io_end()
320 ext4_io_end_t *ext4_get_io_end(ext4_io_end_t *io_end) in ext4_get_io_end() argument
322 atomic_inc(&io_end->count); in ext4_get_io_end()
323 return io_end; in ext4_get_io_end()
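Taken together, lines 281-323 implement a small reference-counting protocol: ext4_init_io_end() hands back an io_end with count == 1 for the submitter, ext4_get_io_end() adds a reference for every in-flight bio, ext4_put_io_end_defer() is the drop used from bio completion and punts unwritten-extent conversion to the workqueue, and ext4_put_io_end() converts synchronously using io_end->handle. A hypothetical submission-path sketch of that protocol (example_submit() is not ext4 code; the bi_end_io wiring and submit_bio() call are assumed):

    static int example_submit(struct inode *inode, struct bio *bio)
    {
        ext4_io_end_t *io_end = ext4_init_io_end(inode, GFP_NOFS);  /* count = 1 */

        if (!io_end)
            return -ENOMEM;
        bio->bi_private = ext4_get_io_end(io_end);  /* +1: the in-flight bio pins the io_end */
        bio->bi_end_io = ext4_end_bio;              /* completion drops that reference */
        submit_bio(bio);
        return ext4_put_io_end(io_end);             /* drop the submitter's initial reference */
    }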
329 ext4_io_end_t *io_end = bio->bi_private; in ext4_end_bio() local
333 if (WARN_ONCE(!io_end, "io_end is NULL: %s: sector %Lu len %u err %d\n", in ext4_end_bio()
345 struct inode *inode = io_end->inode; in ext4_end_bio()
356 if (io_end->flag & EXT4_IO_END_UNWRITTEN) { in ext4_end_bio()
362 bio->bi_private = xchg(&io_end->bio, bio); in ext4_end_bio()
363 ext4_put_io_end_defer(io_end); in ext4_end_bio()
369 ext4_put_io_end_defer(io_end); in ext4_end_bio()
382 io->io_bio->bi_write_hint = io->io_end->inode->i_write_hint; in ext4_io_submit()
394 io->io_end = NULL; in ext4_io_submit_init()
411 bio->bi_private = ext4_get_io_end(io->io_end); in io_submit_init_bio()
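Lines 411, 362 and 294-300 form the completion hand-off: io_submit_init_bio() makes each bio pin the io_end via ext4_get_io_end() and stashes the pointer in bi_private, and ext4_end_bio() later pushes the finished bio onto the io_end->bio chain (reusing bi_private as the link that line 161 unwinds) before dropping the bio's reference with ext4_put_io_end_defer(). A sketch of that completion side; example_end_bio() is hypothetical and simply mirrors the lines above.

    static void example_end_bio(struct bio *bio)
    {
        ext4_io_end_t *io_end = bio->bi_private;    /* set at submission time (line 411) */

        /* push this bio onto the io_end's chain; the old head becomes our link */
        bio->bi_private = xchg(&io_end->bio, bio);
        ext4_put_io_end_defer(io_end);              /* may queue the extent conversion work */
    }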