Lines matching full:upper
3041 INIT_LIST_HEAD(&node->upper); in btrfs_backref_alloc_node()
3111 ASSERT(list_empty(&node->upper)); in btrfs_backref_drop_node()
3123 * upper edges and any uncached nodes in the path.
3136 while (!list_empty(&node->upper)) { in btrfs_backref_cleanup_node()
3137 edge = list_entry(node->upper.next, struct btrfs_backref_edge, in btrfs_backref_cleanup_node()
3140 list_del(&edge->list[UPPER]); in btrfs_backref_cleanup_node()
3166 struct btrfs_backref_node *upper, in btrfs_backref_link_edge() argument
3169 ASSERT(upper && lower && upper->level == lower->level + 1); in btrfs_backref_link_edge()
3171 edge->node[UPPER] = upper; in btrfs_backref_link_edge()
3173 list_add_tail(&edge->list[LOWER], &lower->upper); in btrfs_backref_link_edge()
3175 list_add_tail(&edge->list[UPPER], &upper->lower); in btrfs_backref_link_edge()
3194 struct btrfs_backref_node *upper; in handle_direct_tree_backref() local
3227 upper = btrfs_backref_alloc_node(cache, ref_key->offset, in handle_direct_tree_backref()
3229 if (!upper) { in handle_direct_tree_backref()
3235 * Backrefs for the upper level block isn't cached, add the in handle_direct_tree_backref()
3238 list_add_tail(&edge->list[UPPER], &cache->pending_edge); in handle_direct_tree_backref()
3241 upper = rb_entry(rb_node, struct btrfs_backref_node, rb_node); in handle_direct_tree_backref()
3242 ASSERT(upper->checked); in handle_direct_tree_backref()
3243 INIT_LIST_HEAD(&edge->list[UPPER]); in handle_direct_tree_backref()
3245 btrfs_backref_link_edge(edge, cur, upper, LINK_LOWER); in handle_direct_tree_backref()
3270 struct btrfs_backref_node *upper; in handle_indirect_tree_backref() local
3365 upper = btrfs_backref_alloc_node(cache, eb->start, in handle_indirect_tree_backref()
3367 if (!upper) { in handle_indirect_tree_backref()
3373 upper->owner = btrfs_header_owner(eb); in handle_indirect_tree_backref()
3379 btrfs_backref_free_node(cache, upper); in handle_indirect_tree_backref()
3389 upper->checked = 0; in handle_indirect_tree_backref()
3391 upper->checked = 1; in handle_indirect_tree_backref()
3398 if (!upper->checked && need_check) { in handle_indirect_tree_backref()
3400 list_add_tail(&edge->list[UPPER], in handle_indirect_tree_backref()
3403 if (upper->checked) in handle_indirect_tree_backref()
3405 INIT_LIST_HEAD(&edge->list[UPPER]); in handle_indirect_tree_backref()
3408 upper = rb_entry(rb_node, struct btrfs_backref_node, in handle_indirect_tree_backref()
3410 ASSERT(upper->checked); in handle_indirect_tree_backref()
3411 INIT_LIST_HEAD(&edge->list[UPPER]); in handle_indirect_tree_backref()
3412 if (!upper->owner) in handle_indirect_tree_backref()
3413 upper->owner = btrfs_header_owner(eb); in handle_indirect_tree_backref()
3415 btrfs_backref_link_edge(edge, lower, upper, LINK_LOWER); in handle_indirect_tree_backref()
3421 lower = upper; in handle_indirect_tree_backref()
3422 upper = NULL; in handle_indirect_tree_backref()
3432 * NOTE: Even if the function returned 0, @cur is not yet cached as its upper
3470 if (!list_empty(&cur->upper)) { in btrfs_backref_add_tree_node()
3475 ASSERT(list_is_singular(&cur->upper)); in btrfs_backref_add_tree_node()
3476 edge = list_entry(cur->upper.next, struct btrfs_backref_edge, in btrfs_backref_add_tree_node()
3478 ASSERT(list_empty(&edge->list[UPPER])); in btrfs_backref_add_tree_node()
3479 exist = edge->node[UPPER]; in btrfs_backref_add_tree_node()
3481 * Add the upper level block to pending list if we need check in btrfs_backref_add_tree_node()
3485 list_add_tail(&edge->list[UPPER], &cache->pending_edge); in btrfs_backref_add_tree_node()
3582 list_for_each_entry(edge, &start->upper, list[LOWER]) in btrfs_backref_finish_upper_links()
3583 list_add_tail(&edge->list[UPPER], &pending_edge); in btrfs_backref_finish_upper_links()
3586 struct btrfs_backref_node *upper; in btrfs_backref_finish_upper_links() local
3590 struct btrfs_backref_edge, list[UPPER]); in btrfs_backref_finish_upper_links()
3591 list_del_init(&edge->list[UPPER]); in btrfs_backref_finish_upper_links()
3592 upper = edge->node[UPPER]; in btrfs_backref_finish_upper_links()
3596 if (upper->detached) { in btrfs_backref_finish_upper_links()
3601 if (list_empty(&lower->upper)) in btrfs_backref_finish_upper_links()
3609 * So if we have upper->rb_node populated, this means a cache in btrfs_backref_finish_upper_links()
3610 * hit. We only need to link the edge, as @upper and all its in btrfs_backref_finish_upper_links()
3613 if (!RB_EMPTY_NODE(&upper->rb_node)) { in btrfs_backref_finish_upper_links()
3614 list_add_tail(&edge->list[UPPER], &upper->lower); in btrfs_backref_finish_upper_links()
3619 if (!upper->checked) { in btrfs_backref_finish_upper_links()
3624 rb_node = rb_simple_insert(&cache->rb_root, upper->bytenr, in btrfs_backref_finish_upper_links()
3625 &upper->rb_node); in btrfs_backref_finish_upper_links()
3627 btrfs_backref_panic(cache->fs_info, upper->bytenr, -EEXIST); in btrfs_backref_finish_upper_links()
3631 list_add_tail(&edge->list[UPPER], &upper->lower); in btrfs_backref_finish_upper_links()
3635 * to finish the upper linkage in btrfs_backref_finish_upper_links()
3637 list_for_each_entry(edge, &upper->upper, list[LOWER]) in btrfs_backref_finish_upper_links()
3638 list_add_tail(&edge->list[UPPER], &pending_edge); in btrfs_backref_finish_upper_links()
3647 struct btrfs_backref_node *upper; in btrfs_backref_error_cleanup() local
3657 struct btrfs_backref_edge, list[UPPER]); in btrfs_backref_error_cleanup()
3658 list_del(&edge->list[UPPER]); in btrfs_backref_error_cleanup()
3661 upper = edge->node[UPPER]; in btrfs_backref_error_cleanup()
3665 * Lower is no longer linked to any upper backref nodes and in btrfs_backref_error_cleanup()
3668 if (list_empty(&lower->upper) && in btrfs_backref_error_cleanup()
3672 if (!RB_EMPTY_NODE(&upper->rb_node)) in btrfs_backref_error_cleanup()
3675 /* Add this guy's upper edges to the list to process */ in btrfs_backref_error_cleanup()
3676 list_for_each_entry(edge, &upper->upper, list[LOWER]) in btrfs_backref_error_cleanup()
3677 list_add_tail(&edge->list[UPPER], in btrfs_backref_error_cleanup()
3679 if (list_empty(&upper->upper)) in btrfs_backref_error_cleanup()
3680 list_add(&upper->list, &cache->useless_node); in btrfs_backref_error_cleanup()
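Taken together, these matches trace the life cycle of the backref cache's upper/lower linkage: btrfs_backref_alloc_node() initializes a node's upper list, btrfs_backref_link_edge() puts one edge on both the lower node's upper list and the upper node's lower list, and the cleanup and error paths drain those lists again. Below is a minimal userspace sketch of that double linkage, not the kernel code itself: the list helpers stand in for <linux/list.h>, and backref_node, backref_edge and link_edge are hypothetical, simplified stand-ins for the btrfs_backref_* structures, trimmed to the fields the matches above touch.

#include <assert.h>
#include <stdio.h>

/* Stand-in for the kernel's <linux/list.h> circular doubly linked list. */
struct list_head { struct list_head *prev, *next; };

static void INIT_LIST_HEAD(struct list_head *head)
{
	head->prev = head;
	head->next = head;
}

static void list_add_tail(struct list_head *entry, struct list_head *head)
{
	entry->prev = head->prev;
	entry->next = head;
	head->prev->next = entry;
	head->prev = entry;
}

static int list_empty(const struct list_head *head)
{
	return head->next == head;
}

#define LOWER 0
#define UPPER 1

/* Simplified stand-in for struct btrfs_backref_node. */
struct backref_node {
	unsigned long long bytenr;
	int level;
	struct list_head upper;	/* edges to blocks that reference this one */
	struct list_head lower;	/* edges to blocks this one references */
};

/*
 * Simplified stand-in for struct btrfs_backref_edge: one edge sits on two
 * lists at once, list[LOWER] on lower->upper and list[UPPER] on upper->lower.
 */
struct backref_edge {
	struct list_head list[2];
	struct backref_node *node[2];
};

/*
 * Same double-linkage invariant as btrfs_backref_link_edge() when both the
 * lower and the upper side are linked.
 */
static void link_edge(struct backref_edge *edge,
		      struct backref_node *lower,
		      struct backref_node *upper)
{
	assert(upper->level == lower->level + 1);
	edge->node[LOWER] = lower;
	edge->node[UPPER] = upper;
	list_add_tail(&edge->list[LOWER], &lower->upper);
	list_add_tail(&edge->list[UPPER], &upper->lower);
}

int main(void)
{
	struct backref_node child  = { .bytenr = 0x1000, .level = 0 };
	struct backref_node parent = { .bytenr = 0x2000, .level = 1 };
	struct backref_edge edge;

	INIT_LIST_HEAD(&child.upper);
	INIT_LIST_HEAD(&child.lower);
	INIT_LIST_HEAD(&parent.upper);
	INIT_LIST_HEAD(&parent.lower);

	link_edge(&edge, &child, &parent);

	/*
	 * 0 == non-empty: the child now has an upper edge and the parent a
	 * lower edge, the state the cleanup and error paths above unwind.
	 */
	printf("child.upper empty:  %d\n", list_empty(&child.upper));
	printf("parent.lower empty: %d\n", list_empty(&parent.lower));
	return 0;
}

Both printf lines report 0 (non-empty), which mirrors the conditions the matches above test: btrfs_backref_cleanup_node() pops edges while !list_empty(&node->upper) holds, and the error-cleanup comment notes that a lower node with no remaining upper edges is no longer linked to any upper backref nodes, so it is queued on the useless_node list.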