Lines Matching full:upper
2505 INIT_LIST_HEAD(&node->upper);
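For orientation: the hits below come from the btrfs backref cache code (fs/btrfs/backref.c in the kernel), where "upper" is both a list head on each cached node and an index into the paired arrays of each edge. A minimal sketch of the two structures involved, paraphrased from fs/btrfs/backref.h rather than quoted verbatim; field order and the omitted members are approximations:

#include <linux/list.h>
#include <linux/rbtree.h>
#include <linux/types.h>

#define LOWER	0
#define UPPER	1

struct btrfs_backref_node {
	struct rb_node rb_node;		/* keyed by bytenr in cache->rb_root */
	u64 bytenr;
	u64 owner;			/* objectid of the tree block owner */
	struct list_head list;		/* pending/changed/detached/useless membership */
	struct list_head upper;		/* edges to parents, via edge->list[LOWER] */
	struct list_head lower;		/* edges to children, via edge->list[UPPER];
					 * doubles as the cache->leaves link when lowest */
	unsigned int level:8;
	unsigned int cowonly:1;		/* block lives in a non-shareable tree */
	unsigned int lowest:1;		/* no child block is cached */
	unsigned int checked:1;		/* backrefs of this block have been checked */
	unsigned int detached:1;	/* not connected to any other cached node */
	/* ... further fields (root, eb, locked, processed, ...) omitted ... */
};

struct btrfs_backref_edge {
	/*
	 * list[LOWER] hangs on the lower node's ->upper list;
	 * list[UPPER] hangs on the upper node's ->lower list, or sits on
	 * cache->pending_edge while the upper side is still unresolved.
	 */
	struct list_head list[2];
	struct btrfs_backref_node *node[2];
};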
2528 * upper edges and any uncached nodes in the path.
2536 struct btrfs_backref_node *upper;
2543 while (!list_empty(&node->upper)) {
2544 edge = list_entry(node->upper.next, struct btrfs_backref_edge,
2546 upper = edge->node[UPPER];
2548 list_del(&edge->list[UPPER]);
2551 if (RB_EMPTY_NODE(&upper->rb_node)) {
2552 BUG_ON(!list_empty(&node->upper));
2554 node = upper;
2562 if (list_empty(&upper->lower)) {
2563 list_add_tail(&upper->lower, &cache->leaves);
2564 upper->lowest = 1;
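The 2543-2564 hits are the node-drop walk (btrfs_backref_cleanup_node(), going by the surrounding lines): each upper edge is unlinked from both of its list slots and freed; a parent that was never inserted into the rb tree can only have been reached through this node, so the walk climbs into it, while a cached parent that lost its last child edge goes back onto cache->leaves. A paraphrased sketch of that loop, not the verbatim kernel code:

while (!list_empty(&node->upper)) {
	edge = list_entry(node->upper.next, struct btrfs_backref_edge,
			  list[LOWER]);
	upper = edge->node[UPPER];
	list_del(&edge->list[LOWER]);		/* off node->upper */
	list_del(&edge->list[UPPER]);		/* off upper->lower */
	btrfs_backref_free_edge(cache, edge);

	if (RB_EMPTY_NODE(&upper->rb_node)) {
		/* Uncached parent: it had no other way into the cache. */
		BUG_ON(!list_empty(&node->upper));
		btrfs_backref_drop_node(cache, node);
		node = upper;			/* keep climbing the dangling path */
		node->lowest = 1;
		continue;
	}

	/* Cached parent lost its last cached child: make it a leaf again. */
	if (list_empty(&upper->lower)) {
		list_add_tail(&upper->lower, &cache->leaves);
		upper->lowest = 1;
	}
}
btrfs_backref_drop_node(cache, node);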
2621 struct btrfs_backref_node *upper;
2654 upper = btrfs_backref_alloc_node(cache, ref_key->offset,
2656 if (!upper) {
2662 * Backrefs for the upper level block isn't cached, add the
2665 list_add_tail(&edge->list[UPPER], &cache->pending_edge);
2668 upper = rb_entry(rb_node, struct btrfs_backref_node, rb_node);
2669 ASSERT(upper->checked);
2670 INIT_LIST_HEAD(&edge->list[UPPER]);
2672 btrfs_backref_link_edge(edge, cur, upper, LINK_LOWER);
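The LINK_LOWER call above only splices the new edge onto the child side; the upper side is either parked on cache->pending_edge (cache miss, line 2665) or left initialized for btrfs_backref_finish_upper_links() (cache hit, lines 2668-2670). A sketch of what the helper does, paraphrased from the btrfs_backref_link_edge() inline in fs/btrfs/backref.h:

static void link_edge_sketch(struct btrfs_backref_edge *edge,
			     struct btrfs_backref_node *lower,
			     struct btrfs_backref_node *upper,
			     int link_which)
{
	/* The edge always records both endpoints... */
	edge->node[LOWER] = lower;
	edge->node[UPPER] = upper;
	/* ...but only the requested side(s) are hung on a node list. */
	if (link_which & LINK_LOWER)
		list_add_tail(&edge->list[LOWER], &lower->upper);
	if (link_which & LINK_UPPER)
		list_add_tail(&edge->list[UPPER], &upper->lower);
}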
2695 struct btrfs_backref_node *upper;
2786 upper = btrfs_backref_alloc_node(cache, eb->start,
2788 if (!upper) {
2794 upper->owner = btrfs_header_owner(eb);
2796 upper->cowonly = 1;
2803 upper->checked = 0;
2805 upper->checked = 1;
2812 if (!upper->checked && need_check) {
2814 list_add_tail(&edge->list[UPPER],
2817 if (upper->checked)
2819 INIT_LIST_HEAD(&edge->list[UPPER]);
2822 upper = rb_entry(rb_node, struct btrfs_backref_node,
2824 ASSERT(upper->checked);
2825 INIT_LIST_HEAD(&edge->list[UPPER]);
2826 if (!upper->owner)
2827 upper->owner = btrfs_header_owner(eb);
2829 btrfs_backref_link_edge(edge, lower, upper, LINK_LOWER);
2835 lower = upper;
2836 upper = NULL;
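The 2786-2836 hits are one level of the climb up an owner tree (handle_indirect_tree_backref(), going by the line range): a cache miss allocates an unchecked node and may queue the edge on cache->pending_edge, a hit reuses the cached, already-checked node, and either way the freshly linked parent becomes the child for the next level. A simplified sketch of that per-level step; the real code also handles need_check, non-shareable (cowonly) roots and the can-this-block-be-shared test, which are omitted here:

static int link_one_level_sketch(struct btrfs_backref_cache *cache,
				 struct extent_buffer *eb,
				 struct btrfs_backref_node **lowerp,
				 struct btrfs_backref_edge *edge)
{
	struct btrfs_backref_node *lower = *lowerp;
	struct btrfs_backref_node *upper;
	struct rb_node *rb_node;

	rb_node = rb_simple_search(&cache->rb_root, eb->start);
	if (!rb_node) {
		/* Cache miss: allocate a node for the parent block. */
		upper = btrfs_backref_alloc_node(cache, eb->start,
						 lower->level + 1);
		if (!upper)
			return -ENOMEM;
		upper->owner = btrfs_header_owner(eb);
		/* The real code sets ->checked = 1 if the block can't be shared. */
		upper->checked = 0;
		/* Let a later pass resolve this block's own backrefs. */
		list_add_tail(&edge->list[UPPER], &cache->pending_edge);
	} else {
		/* Cache hit: the node and everything above it are done. */
		upper = rb_entry(rb_node, struct btrfs_backref_node, rb_node);
		ASSERT(upper->checked);
		INIT_LIST_HEAD(&edge->list[UPPER]);
		if (!upper->owner)
			upper->owner = btrfs_header_owner(eb);
	}
	btrfs_backref_link_edge(edge, lower, upper, LINK_LOWER);

	/* The node just reached is the child for the next level up. */
	*lowerp = upper;
	return 0;
}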
2846 * NOTE: Even if the function returned 0, @cur is not yet cached as its upper
2883 if (!list_empty(&cur->upper)) {
2888 ASSERT(list_is_singular(&cur->upper));
2889 edge = list_entry(cur->upper.next, struct btrfs_backref_edge,
2891 ASSERT(list_empty(&edge->list[UPPER]));
2892 exist = edge->node[UPPER];
2894 * Add the upper level block to pending list if we need check
2898 list_add_tail(&edge->list[UPPER], &cache->pending_edge);
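The NOTE at 2846 is about the caller's contract: btrfs_backref_add_tree_node() resolves one block and parks unresolved parents on cache->pending_edge through edge->list[UPPER], so the caller has to keep draining that queue and only then finish the linkage. A sketch of that driving loop, modeled on build_backref_tree() in fs/btrfs/relocation.c; the exact argument list of btrfs_backref_add_tree_node() varies by kernel version:

cur = node;
do {
	/* Resolve all backref items of @cur; may queue new pending edges. */
	ret = btrfs_backref_add_tree_node(trans, cache, path, iter,
					  node_key, cur);
	if (ret < 0)
		goto out;

	edge = list_first_entry_or_null(&cache->pending_edge,
					struct btrfs_backref_edge,
					list[UPPER]);
	if (edge) {
		/* Take the next unresolved parent off the queue. */
		list_del_init(&edge->list[UPPER]);
		cur = edge->node[UPPER];
	}
} while (edge);

/* Now link every new node/edge into the cache proper. */
ret = btrfs_backref_finish_upper_links(cache, node);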
3005 list_for_each_entry(edge, &start->upper, list[LOWER])
3006 list_add_tail(&edge->list[UPPER], &pending_edge);
3009 struct btrfs_backref_node *upper;
3013 struct btrfs_backref_edge, list[UPPER]);
3014 list_del_init(&edge->list[UPPER]);
3015 upper = edge->node[UPPER];
3019 if (upper->detached) {
3024 if (list_empty(&lower->upper))
3032 * So if we have upper->rb_node populated, this means a cache
3033 * hit. We only need to link the edge, as @upper and all its
3036 if (!RB_EMPTY_NODE(&upper->rb_node)) {
3037 if (upper->lowest) {
3038 list_del_init(&upper->lower);
3039 upper->lowest = 0;
3042 list_add_tail(&edge->list[UPPER], &upper->lower);
3047 if (!upper->checked) {
3053 if (start->cowonly != upper->cowonly) {
3059 if (!upper->cowonly) {
3060 rb_node = rb_simple_insert(&cache->rb_root, upper->bytenr,
3061 &upper->rb_node);
3064 upper->bytenr, -EEXIST);
3069 list_add_tail(&edge->list[UPPER], &upper->lower);
3073 * to finish the upper linkage
3075 list_for_each_entry(edge, &upper->upper, list[LOWER])
3076 list_add_tail(&edge->list[UPPER], &pending_edge);
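The 3005-3076 hits are the breadth-first pass (btrfs_backref_finish_upper_links()) that turns pending edges into real linkage: a parent already in the rb tree only needs the edge attached (and its lowest flag cleared), while a new parent is sanity-checked, inserted into the rb tree if it is shareable, linked, and then has its own parents queued. A paraphrased sketch of the commit step for one new parent:

static int commit_upper_node_sketch(struct btrfs_backref_cache *cache,
				    struct btrfs_backref_node *upper,
				    struct btrfs_backref_edge *edge,
				    struct list_head *pending_edge)
{
	struct btrfs_backref_edge *parent_edge;
	struct rb_node *rb_node;

	/* Only shareable (non-cowonly) blocks are indexed by bytenr. */
	if (!upper->cowonly) {
		rb_node = rb_simple_insert(&cache->rb_root, upper->bytenr,
					   &upper->rb_node);
		if (rb_node) {
			/* Duplicate bytenr means the cache is corrupted. */
			btrfs_backref_panic(cache->fs_info, upper->bytenr,
					    -EEXIST);
			return -EUCLEAN;
		}
	}

	/* Attach the now-resolved edge below this node... */
	list_add_tail(&edge->list[UPPER], &upper->lower);

	/* ...and queue its own parent edges for the same treatment. */
	list_for_each_entry(parent_edge, &upper->upper, list[LOWER])
		list_add_tail(&parent_edge->list[UPPER], pending_edge);
	return 0;
}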
3085 struct btrfs_backref_node *upper;
3095 struct btrfs_backref_edge, list[UPPER]);
3096 list_del(&edge->list[UPPER]);
3099 upper = edge->node[UPPER];
3103 * Lower is no longer linked to any upper backref nodes and
3106 if (list_empty(&lower->upper) &&
3110 if (!RB_EMPTY_NODE(&upper->rb_node))
3113 /* Add this guy's upper edges to the list to process */
3114 list_for_each_entry(edge, &upper->upper, list[LOWER])
3115 list_add_tail(&edge->list[UPPER],
3117 if (list_empty(&upper->upper))
3118 list_add(&upper->list, &cache->useless_node);
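The last block of hits (3085-3118) is the error path (btrfs_backref_error_cleanup(), going by the line range): every still-pending edge is torn down, and any endpoint left with no upper links and no rb-tree entry is queued on cache->useless_node for freeing. A paraphrased sketch of the per-edge step, assuming the edge has already been taken off cache->pending_edge:

static void cleanup_one_pending_edge_sketch(struct btrfs_backref_cache *cache,
					    struct btrfs_backref_edge *edge)
{
	struct btrfs_backref_node *lower = edge->node[LOWER];
	struct btrfs_backref_node *upper = edge->node[UPPER];
	struct btrfs_backref_edge *tmp;

	list_del(&edge->list[LOWER]);		/* off lower->upper */
	btrfs_backref_free_edge(cache, edge);

	/* Child end: orphaned and never inserted into the rb tree. */
	if (list_empty(&lower->upper) && RB_EMPTY_NODE(&lower->rb_node))
		list_add(&lower->list, &cache->useless_node);

	/* Parent end: nodes already in the rb tree are left alone. */
	if (!RB_EMPTY_NODE(&upper->rb_node))
		return;

	/* Propagate the teardown to this node's own upper edges... */
	list_for_each_entry(tmp, &upper->upper, list[LOWER])
		list_add_tail(&tmp->list[UPPER], &cache->pending_edge);
	/* ...or mark it useless if it has none. */
	if (list_empty(&upper->upper))
		list_add(&upper->list, &cache->useless_node);
}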