/usr/include/ntirpc/misc/rbtree_x.h is in libntirpc-dev 1.3.1-1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
#ifndef _RBTREE_X_H
#define _RBTREE_X_H
#include <misc/portable.h>
#include <inttypes.h>
#include <misc/stdio.h>
#include <misc/rbtree.h>
/* One partition of a partitioned red-black tree (struct rbtree_x).
 * Each partition carries its own locks, its own tree, and an optional
 * direct-mapped cache of tree nodes.  The CACHE_PAD members pad the
 * struct so adjacent partitions land on distinct cache lines,
 * avoiding false sharing between threads working different partitions. */
struct rbtree_x_part {
	CACHE_PAD(0);
	pthread_rwlock_t lock;	/* NOTE(review): presumably guards t/cache -- usage is caller-defined, confirm at call sites */
	pthread_mutex_t mtx;	/* auxiliary mutex; semantics caller-defined */
	void *u1;		/* opaque user slot */
	void *u2;		/* opaque user slot */
	struct opr_rbtree t;	/* this partition's red-black tree */
	struct opr_rbtree_node **cache;	/* direct-mapped cache, xt->cachesz slots, indexed by hk % cachesz */
	CACHE_PAD(1);
};
/* A red-black tree split into npart independently-locked partitions;
 * a scalar hash key selects the partition (see rbtx_idx_of_scalar). */
struct rbtree_x {
	uint32_t npart;		/* number of partitions in tree[] */
	uint32_t flags;		/* RBT_X_FLAG_* bits */
	int32_t cachesz;	/* slots in each partition's cache; divisor for hk in the cached helpers below */
	struct rbtree_x_part *tree;	/* array of npart partitions */
};
#define RBT_X_FLAG_NONE 0x0000
#define RBT_X_FLAG_ALLOC 0x0001	/* NOTE(review): likely means rbtx_init allocates tree[] -- confirm in rbtree_x.c */

/* Cache strategies.
 *
 * In read-through strategy, entries are always inserted in the
 * tree, but lookups may be O(1) when an entry is shadowed in cache.
 *
 * In the write through strategy, t->cache and t->tree partition
 * t, and t->cache is always consulted first.
 */
#define RBT_X_FLAG_CACHE_RT 0x0002
#define RBT_X_FLAG_CACHE_WT 0x0004

/* Partition index for scalar key k (k and npart must be nonzero-safe:
 * npart == 0 would divide by zero). */
#define rbtx_idx_of_scalar(xt, k) ((k)%((xt)->npart))
/* Address of partition ix. */
#define rbtx_partition_of_ix(xt, ix) ((xt)->tree+(ix))
/* Address of the partition owning scalar key k. */
#define rbtx_partition_of_scalar(xt, k) \
	(rbtx_partition_of_ix((xt), rbtx_idx_of_scalar((xt), (k))))

/* Initialize xt: npart partitions sharing comparator cmpf; behavior
 * modifiers in flags (RBT_X_FLAG_*).  Returns 0 on success (error
 * convention defined in rbtree_x.c). */
extern int rbtx_init(struct rbtree_x *xt, opr_rbtree_cmpf_t cmpf,
		     uint32_t npart, uint32_t flags);
/* Look up nk in partition t, consulting the partition's direct-mapped
 * cache slot (hk % cachesz) before the tree.  When t is NULL the
 * partition is derived from hk.  Under RBT_X_FLAG_CACHE_RT a
 * successful tree lookup refreshes the cache slot.  Returns the
 * matching node, or NULL.  Note a cache hit returns immediately and
 * emits no trace message. */
static inline struct opr_rbtree_node *rbtree_x_cached_lookup(
	struct rbtree_x *xt,
	struct rbtree_x_part *t,
	struct opr_rbtree_node *nk, uint64_t hk)
{
	struct opr_rbtree_node *slot_node;
	struct opr_rbtree_node *node;
	uint32_t slot;

	if (!t)
		t = rbtx_partition_of_scalar(xt, hk);

	slot = hk % xt->cachesz;
	slot_node = t->cache[slot];

	/* fast path: the cached node matches */
	if (slot_node && (t->t.cmpf(slot_node, nk) == 0))
		return (slot_node);

	node = opr_rbtree_lookup(&t->t, nk);
	if (node && (xt->flags & RBT_X_FLAG_CACHE_RT))
		t->cache[slot] = node;

	__warnx(TIRPC_DEBUG_FLAG_RBTREE,
		"rbtree_x_cached_lookup: t %p nk %p nv %p" "(%s hk %" PRIx64
		" slot/offset %d)", t, nk, node, (slot_node) ? "CACHED" : "",
		hk, slot);

	return (node);
}
/* Insert nk into partition t (derived from hk when t is NULL),
 * maintaining the partition's cache slot (hk % cachesz) per the
 * configured strategy.  Read-through: nk always goes into the tree and
 * overwrites the cache slot; returns nk.  Write-through: an empty slot
 * takes nk directly (tree untouched), otherwise nk goes into the tree;
 * returns the colliding tree node if insertion collided, else nk. */
static inline struct opr_rbtree_node *rbtree_x_cached_insert(
	struct rbtree_x *xt,
	struct rbtree_x_part *t,
	struct opr_rbtree_node *nk, uint64_t hk)
{
	int cix = rbtx_idx_of_scalar(xt, hk);
	struct rbtree_x_part *ct = rbtx_partition_of_ix(xt, cix);
	struct opr_rbtree_node *slot_node;
	struct opr_rbtree_node *collide;
	uint32_t slot;

	if (!t)
		t = ct;

	slot = hk % xt->cachesz;
	slot_node = t->cache[slot];

	__warnx(TIRPC_DEBUG_FLAG_RBTREE,
		"rbtree_x_cached_insert: cix %d ct %p t %p inserting %p "
		"(%s hk %" PRIx64 " slot/offset %d) flags %d", cix, ct, t, nk,
		(slot_node) ? "rbt" : "cache", hk, slot, xt->flags);

	if (!(xt->flags & RBT_X_FLAG_CACHE_WT)) {
		/* RBT_X_FLAG_CACHE_RT: cache slot shadows the tree entry */
		t->cache[slot] = nk;
		(void)opr_rbtree_insert(&t->t, nk);
		return (nk);
	}

	/* write-through: cache and tree partition the set */
	if (!slot_node) {
		t->cache[slot] = nk;
		return (nk);
	}
	collide = opr_rbtree_insert(&t->t, nk);
	return (collide ? collide : nk);
}
/* Remove nk from partition t (derived from hk when t is NULL),
 * invalidating the matching cache slot (hk % cachesz) as required by
 * the configured strategy.  Write-through: a node equal to the cached
 * one lives only in the cache, so clearing the slot completes the
 * removal; otherwise the node is removed from the tree.  Read-through:
 * the node is always in the tree, so it is removed there and any
 * matching cache entry is additionally invalidated.
 *
 * Fix vs. original: the original wrote
 *     return (opr_rbtree_remove(&t->t, nk));
 * returning a void expression from a void function, which violates the
 * C99 6.8.6.4 constraint (accepted by GCC only as an extension and
 * rejected under -pedantic-errors).  The call and the return are now
 * separate statements; behavior is unchanged. */
static inline void rbtree_x_cached_remove(struct rbtree_x *xt,
					  struct rbtree_x_part *t,
					  struct opr_rbtree_node *nk,
					  uint64_t hk)
{
	struct opr_rbtree_node *v_cached;
	uint32_t offset;
	int cix = rbtx_idx_of_scalar(xt, hk);
	struct rbtree_x_part *ct = rbtx_partition_of_ix(xt, cix);

	if (!t)
		t = ct;

	offset = hk % xt->cachesz;
	v_cached = t->cache[offset];

	__warnx(TIRPC_DEBUG_FLAG_RBTREE,
		"rbtree_x_cached_remove: cix %d ct %p t %p removing %p "
		"(%s hk %" PRIx64 " slot/offset %d) flags %d", cix, ct, t, nk,
		(v_cached) ? "cache" : "rbt", hk, offset, xt->flags);

	if (xt->flags & RBT_X_FLAG_CACHE_WT) {
		if (v_cached && (t->t.cmpf(nk, v_cached) == 0)) {
			t->cache[offset] = NULL;
			return;
		}
		opr_rbtree_remove(&t->t, nk);
	} else {
		/* RBT_X_FLAG_CACHE_RT */
		if (v_cached && (t->t.cmpf(nk, v_cached) == 0))
			t->cache[offset] = NULL;
		opr_rbtree_remove(&t->t, nk);
	}
}
#endif /* _RBTREE_X_H */