1ed477c4cSUpinder Malhi /* 2ed477c4cSUpinder Malhi * Copyright (c) 2014, Cisco Systems, Inc. All rights reserved. 3ed477c4cSUpinder Malhi * 43805eadeSJeff Squyres * This software is available to you under a choice of one of two 53805eadeSJeff Squyres * licenses. You may choose to be licensed under the terms of the GNU 63805eadeSJeff Squyres * General Public License (GPL) Version 2, available from the file 73805eadeSJeff Squyres * COPYING in the main directory of this source tree, or the 83805eadeSJeff Squyres * BSD license below: 93805eadeSJeff Squyres * 103805eadeSJeff Squyres * Redistribution and use in source and binary forms, with or 113805eadeSJeff Squyres * without modification, are permitted provided that the following 123805eadeSJeff Squyres * conditions are met: 133805eadeSJeff Squyres * 143805eadeSJeff Squyres * - Redistributions of source code must retain the above 153805eadeSJeff Squyres * copyright notice, this list of conditions and the following 163805eadeSJeff Squyres * disclaimer. 173805eadeSJeff Squyres * 183805eadeSJeff Squyres * - Redistributions in binary form must reproduce the above 193805eadeSJeff Squyres * copyright notice, this list of conditions and the following 203805eadeSJeff Squyres * disclaimer in the documentation and/or other materials 213805eadeSJeff Squyres * provided with the distribution. 22ed477c4cSUpinder Malhi * 23ed477c4cSUpinder Malhi * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 24ed477c4cSUpinder Malhi * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 25ed477c4cSUpinder Malhi * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 26ed477c4cSUpinder Malhi * NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS 27ed477c4cSUpinder Malhi * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN 28ed477c4cSUpinder Malhi * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 29ed477c4cSUpinder Malhi * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 30ed477c4cSUpinder Malhi * SOFTWARE. 31ed477c4cSUpinder Malhi * 32ed477c4cSUpinder Malhi */ 33ed477c4cSUpinder Malhi 34e3cf00d0SUpinder Malhi #include <linux/init.h> 35e3cf00d0SUpinder Malhi #include <linux/list.h> 36e3cf00d0SUpinder Malhi #include <linux/slab.h> 37e3cf00d0SUpinder Malhi #include <linux/list_sort.h> 38e3cf00d0SUpinder Malhi 39e3cf00d0SUpinder Malhi #include <linux/interval_tree_generic.h> 40e3cf00d0SUpinder Malhi #include "usnic_uiom_interval_tree.h" 41e3cf00d0SUpinder Malhi 42e3cf00d0SUpinder Malhi #define START(node) ((node)->start) 43e3cf00d0SUpinder Malhi #define LAST(node) ((node)->last) 44e3cf00d0SUpinder Malhi 45e3cf00d0SUpinder Malhi #define MAKE_NODE(node, start, end, ref_cnt, flags, err, err_out) \ 46e3cf00d0SUpinder Malhi do { \ 47e3cf00d0SUpinder Malhi node = usnic_uiom_interval_node_alloc(start, \ 48e3cf00d0SUpinder Malhi end, ref_cnt, flags); \ 49e3cf00d0SUpinder Malhi if (!node) { \ 50e3cf00d0SUpinder Malhi err = -ENOMEM; \ 51e3cf00d0SUpinder Malhi goto err_out; \ 52e3cf00d0SUpinder Malhi } \ 53e3cf00d0SUpinder Malhi } while (0) 54e3cf00d0SUpinder Malhi 55e3cf00d0SUpinder Malhi #define MARK_FOR_ADD(node, list) (list_add_tail(&node->link, list)) 56e3cf00d0SUpinder Malhi 57e3cf00d0SUpinder Malhi #define MAKE_NODE_AND_APPEND(node, start, end, ref_cnt, flags, err, \ 58e3cf00d0SUpinder Malhi err_out, list) \ 59e3cf00d0SUpinder Malhi do { \ 60e3cf00d0SUpinder Malhi MAKE_NODE(node, start, end, \ 61e3cf00d0SUpinder Malhi ref_cnt, flags, err, \ 62e3cf00d0SUpinder Malhi err_out); \ 63e3cf00d0SUpinder Malhi MARK_FOR_ADD(node, list); \ 64e3cf00d0SUpinder Malhi } while (0) 65e3cf00d0SUpinder Malhi 66e3cf00d0SUpinder Malhi 
/* True when flags1 and flags2 agree on every bit selected by mask. */
#define FLAGS_EQUAL(flags1, flags2, mask) \
	(((flags1) & (mask)) == ((flags2) & (mask)))

/*
 * Allocate and initialize a node describing the closed interval
 * [start, last] with the given ref count and flags.  Returns NULL on
 * allocation failure.  GFP_ATOMIC is used, presumably because callers
 * may hold a spinlock -- TODO confirm against the call sites.
 */
static struct usnic_uiom_interval_node*
usnic_uiom_interval_node_alloc(long int start, long int last, int ref_cnt,
				int flags)
{
	struct usnic_uiom_interval_node *interval = kzalloc(sizeof(*interval),
								GFP_ATOMIC);
	if (!interval)
		return NULL;

	interval->start = start;
	interval->last = last;
	interval->flags = flags;
	interval->ref_cnt = ref_cnt;

	return interval;
}

/*
 * list_sort() comparator: orders interval nodes by ascending start.
 * Returns -1/0/1 explicitly rather than subtracting, since the
 * difference of two longs may not fit in the int return type.
 */
static int interval_cmp(void *priv, struct list_head *a, struct list_head *b)
{
	struct usnic_uiom_interval_node *node_a, *node_b;

	node_a = list_entry(a, struct usnic_uiom_interval_node, link);
	node_b = list_entry(b, struct usnic_uiom_interval_node, link);

	/* long to int */
	if (node_a->start < node_b->start)
		return -1;
	else if (node_a->start > node_b->start)
		return 1;

	return 0;
}

/*
 * Collect every tree node overlapping [start, last] onto list, sorted
 * by ascending start.  The nodes remain in the tree; their link field
 * is borrowed to chain them onto the (re-initialized) list, so the
 * caller must not leave the list live across other users of link.
 */
static void
find_intervals_intersection_sorted(struct rb_root_cached *root,
				unsigned long start, unsigned long last,
				struct list_head *list)
{
	struct usnic_uiom_interval_node *node;

	INIT_LIST_HEAD(list);

	for (node = usnic_uiom_interval_tree_iter_first(root, start, last);
		node;
		node = usnic_uiom_interval_tree_iter_next(node, start, last))
		list_add_tail(&node->link, list);

	list_sort(NULL, list, interval_cmp);
}

/*
 * Compute the parts of [start, last] NOT already covered (with matching
 * flags under flag_mask) by intervals in root, returning them as newly
 * allocated nodes on diff_set.  A covered sub-range whose flags differ
 * is re-emitted into diff_set as well, since only FLAGS_EQUAL overlaps
 * advance the pivot past an existing interval.
 *
 * Returns 0 on success; on allocation failure frees everything already
 * placed on diff_set and returns -ENOMEM.  Caller owns the nodes on
 * diff_set (free via usnic_uiom_put_interval_set()).
 */
int usnic_uiom_get_intervals_diff(unsigned long start, unsigned long last,
					int flags, int flag_mask,
					struct rb_root_cached *root,
					struct list_head *diff_set)
{
	struct usnic_uiom_interval_node *interval, *tmp;
	int err = 0;
	long int pivot = start;
	LIST_HEAD(intersection_set);

	INIT_LIST_HEAD(diff_set);

	find_intervals_intersection_sorted(root, start, last,
						&intersection_set);

	list_for_each_entry(interval, &intersection_set, link) {
		/* Gap before this interval: emit it into diff_set */
		if (pivot < interval->start) {
			MAKE_NODE_AND_APPEND(tmp, pivot, interval->start - 1,
						1, flags, err, err_out,
						diff_set);
			pivot = interval->start;
		}

		/*
		 * Invariant: Set [start, pivot] is either in diff_set or root,
		 * but not in both.
		 */

		if (pivot > interval->last) {
			continue;
		} else if (pivot <= interval->last &&
				FLAGS_EQUAL(interval->flags, flags,
				flag_mask)) {
			/* Existing coverage with matching flags: skip it */
			pivot = interval->last + 1;
		}
	}

	/* Tail gap after the last overlapping interval */
	if (pivot <= last)
		MAKE_NODE_AND_APPEND(tmp, pivot, last, 1, flags, err, err_out,
					diff_set);

	return 0;

err_out:
	/* Undo partial work: free every node already queued on diff_set */
	list_for_each_entry_safe(interval, tmp, diff_set, link) {
		list_del(&interval->link);
		kfree(interval);
	}

	return err;
}

/*
 * Free every interval node on the list.  The list head itself is left
 * untouched (stale) -- callers discard it afterwards.
 */
void usnic_uiom_put_interval_set(struct list_head *intervals)
{
	struct usnic_uiom_interval_node *interval, *tmp;
	list_for_each_entry_safe(interval, tmp, intervals, link)
		kfree(interval);
}

/*
 * Insert [start, last] with flags into the tree, merging with existing
 * overlapping intervals: overlapped sub-ranges get ref_cnt + 1 and
 * OR-ed flags; non-overlapped pieces of old intervals are re-created
 * unchanged; uncovered pieces of the new range get ref_cnt 1.  All
 * replacement nodes are built on a side list first, then the old
 * overlapping nodes are removed/freed and the new ones inserted, so
 * the tree is only modified after every allocation has succeeded.
 *
 * Returns 0 on success or -ENOMEM (tree left unmodified on failure).
 */
int usnic_uiom_insert_interval(struct rb_root_cached *root, unsigned long start,
				unsigned long last, int flags)
{
	struct usnic_uiom_interval_node *interval, *tmp;
	unsigned long istart, ilast;
	int iref_cnt, iflags;
	unsigned long lpivot = start;
	int err = 0;
	LIST_HEAD(to_add);
	LIST_HEAD(intersection_set);

	find_intervals_intersection_sorted(root, start, last,
						&intersection_set);

	list_for_each_entry(interval, &intersection_set, link) {
		/*
		 * Invariant - lpivot is the left edge of next interval to be
		 * inserted
		 */
		istart = interval->start;
		ilast = interval->last;
		iref_cnt = interval->ref_cnt;
		iflags = interval->flags;

		if (istart < lpivot) {
			/* Old interval sticks out on the left: keep that
			 * piece with its original ref count and flags. */
			MAKE_NODE_AND_APPEND(tmp, istart, lpivot - 1, iref_cnt,
						iflags, err, err_out, &to_add);
		} else if (istart > lpivot) {
			/* Gap before the old interval: new coverage only */
			MAKE_NODE_AND_APPEND(tmp, lpivot, istart - 1, 1, flags,
						err, err_out, &to_add);
			lpivot = istart;
		} else {
			lpivot = istart;
		}

		if (ilast > last) {
			/* Old interval sticks out on the right: split at
			 * last -- overlapped part bumped, tail kept as-is. */
			MAKE_NODE_AND_APPEND(tmp, lpivot, last, iref_cnt + 1,
						iflags | flags, err, err_out,
						&to_add);
			MAKE_NODE_AND_APPEND(tmp, last + 1, ilast, iref_cnt,
						iflags, err, err_out, &to_add);
		} else {
			/* Fully inside the new range: bump ref count */
			MAKE_NODE_AND_APPEND(tmp, lpivot, ilast, iref_cnt + 1,
						iflags | flags, err, err_out,
						&to_add);
		}

		lpivot = ilast + 1;
	}

	/* Tail of the new range beyond the last overlap */
	if (lpivot <= last)
		MAKE_NODE_AND_APPEND(tmp, lpivot, last, 1, flags, err, err_out,
					&to_add);

	/* All allocations succeeded: swap old overlaps for new nodes */
	list_for_each_entry_safe(interval, tmp, &intersection_set, link) {
		usnic_uiom_interval_tree_remove(interval, root);
		kfree(interval);
	}

	list_for_each_entry(interval, &to_add, link)
		usnic_uiom_interval_tree_insert(interval, root);

	return 0;

err_out:
	/* Nothing was removed from the tree yet; just drop the new nodes */
	list_for_each_entry_safe(interval, tmp, &to_add, link)
		kfree(interval);

	return err;
}

/*
 * Drop one reference from every interval overlapping [start, last].
 * Intervals whose ref count reaches zero are unlinked from the tree
 * and collected on removed; the caller owns and frees them (e.g. via
 * usnic_uiom_put_interval_set()).  removed must be initialized by the
 * caller -- this function only appends to it.
 */
void usnic_uiom_remove_interval(struct rb_root_cached *root,
				unsigned long start, unsigned long last,
				struct list_head *removed)
{
	struct usnic_uiom_interval_node *interval;

	for (interval = usnic_uiom_interval_tree_iter_first(root, start, last);
		interval;
		interval = usnic_uiom_interval_tree_iter_next(interval,
								start,
								last)) {
		if (--interval->ref_cnt == 0)
			list_add_tail(&interval->link, removed);
	}

	/* Unlink after iteration: removing mid-iteration would break the
	 * iterator's next-node lookup. */
	list_for_each_entry(interval, removed, link)
		usnic_uiom_interval_tree_remove(interval, root);
}

/*
 * Instantiate the generic interval tree for usnic_uiom_interval_node:
 * generates usnic_uiom_interval_tree_{insert,remove,iter_first,iter_next}
 * used above, keyed on the closed range [start, last] via the START/LAST
 * macros, with rb-node and __subtree_last storage embedded in the node.
 * The empty argument omits a storage-class specifier, so the generated
 * functions have external linkage.
 */
INTERVAL_TREE_DEFINE(struct usnic_uiom_interval_node, rb,
			unsigned long, __subtree_last,
			START, LAST, , usnic_uiom_interval_tree)