/* ... Generic macros ...................................................... */
-#define INLINE_DECLARE(P) STATIC P
+#define INLINE_DECLARE(P) static P
#define INLINE_DEFINE
#ifndef BV_UNIT
# define BV_UNIT unsigned char
#endif
-#define BV_SIZE(I) ((((I) % CHAR_BIT) != 0) + ((I) / CHAR_BIT))
-
#define BITS(T) (CHAR_BIT * sizeof(T))
+#define BV_SIZE(I) (((((I) % BITS(BV_UNIT)) != 0) + ((I) / BITS(BV_UNIT))) * sizeof(BV_UNIT))
+
/* 0 <= I < CHAR_BIT * sizeof(T) */
#define BV_MASK_LOWER(T, I) (~((~((T) 0)) << (I)))
/* 0 < I <= CHAR_BIT * sizeof(T) */
#define BV_MASK_HIGHER(T, I) ((~((T) 0)) << (BITS(T) - (I)))
-#define BV_DO_ALIGNED(T, A) \
- mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
- if (fs + l <= BITS(T)) { \
- /* Branching is apparently useless, \
- * but since we can't portably shift \
- * CHAR_BITS from a char... \
- * Actually, we only copy up to this */ \
- if (fs + l < BITS(T)) \
- mask &= BV_MASK_LOWER(T, fs + l); \
- *t = (*t & ~mask) | (*f & mask); \
- } else { \
- size_t lo, lk; \
- *t = (*t & ~mask) | (*f & mask); \
- ++t; \
- ++f; \
- l -= (BITS(T) - ts); \
- lo = l % BITS(T); \
- lk = l / BITS(T); \
- BV_##A##_UNIT_ALIGNED(T, t, f, lk); \
- if (lo) { \
- mask = BV_MASK_LOWER(T, lo); \
- t[lk] = (t[lk] & ~mask) \
- | (f[lk] & mask); \
- } \
+#define BV_DO_ALIGNED_FORWARD(T, A) \
+ mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
+ if (fs + l <= BITS(T)) { \
+ /* Branching is apparently useless, \
+ * but since we can't portably shift \
+		 * CHAR_BIT bits from a char...			   \
+ * Actually, we only copy up to this */ \
+ if (fs + l < BITS(T)) \
+ mask &= BV_MASK_LOWER(T, fs + l); \
+ *t = (*t & ~mask) | (*f & mask); \
+ } else { \
+ size_t lo, lk; \
+ *t = (*t & ~mask) | (*f & mask); \
+ ++t; \
+ ++f; \
+ l -= (BITS(T) - ts); \
+ lo = l % BITS(T); \
+ lk = l / BITS(T); \
+ BV_##A##_UNIT_ALIGNED(T, t, f, lk); \
+ t += lk; \
+ f += lk; \
+ if (lo) { \
+ mask = BV_MASK_LOWER(T, lo); \
+ *t = (*t & ~mask) | (*f & mask); \
+ } \
+ }
+
+#define BV_DO_ALIGNED_BACKWARD(T, A) \
+ if (fs + l <= BITS(T)) { \
+ mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
+ /* Branching is apparently useless, \
+ * but since we can't portably shift \
+		 * CHAR_BIT bits from a char...			   \
+ * Actually, we only copy up to this */ \
+ if (fs + l < BITS(T)) \
+ mask &= BV_MASK_LOWER(T, fs + l); \
+ *t = (*t & ~mask) | (*f & mask); \
+ } else { \
+ size_t lo, lk; \
+ l -= (BITS(T) - ts); \
+ lo = l % BITS(T); \
+ lk = l / BITS(T); \
+ ++t; \
+ ++f; \
+ if (lo) { \
+ mask = BV_MASK_LOWER(T, lo); \
+ t[lk] = (t[lk] & ~mask) | (f[lk] & mask); \
+ } \
+ BV_##A##_UNIT_ALIGNED(T, t, f, lk); \
+ mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
+ t[-1] = (t[-1] & ~mask) | (f[-1] & mask); \
}
#define BV_DO_LEFT_FORWARD(T, A) \
*t = (*t & ~(mask << step)) | ((*f & mask) << step); \
} else { \
l -= (BITS(T) - fs); \
- ins = ((*f & mask) << step) | (*t & BV_MASK_HIGHER(T, ts)); \
+ ins = ((*f & mask) << step); \
+ if (ts) \
+ ins |= (*t & BV_MASK_HIGHER(T, ts)); \
--f; \
offset = l % BITS(T); \
- begin = f - l / BITS(T) - (offset > step); \
- while (f > begin) { \
+ begin = f - l / BITS(T) + (offset <= step); \
+ while (f >= begin) { \
BV_##A##_UNIT_RIGHT_BACKWARD(T, t, f, step); \
--t; --f; \
} \
T ins, mask, *t = (T *) t_;
const T *f = (const T *) f_, *end;
- if (!l)
- return;
-
t += ts / BITS(T);
ts %= BITS(T);
fs %= BITS(T);
if (ts == fs) {
- BV_DO_ALIGNED(T, COPY);
+ BV_DO_ALIGNED_FORWARD(T, COPY);
} else if (ts < fs) {
BV_DO_RIGHT_FORWARD(T, COPY);
} else { /* ts > fs */
T ins, tmp, mask, *bv = (T *) bv_, *t, *f;
const T *begin, *end;
- if (!l)
+ if (ts == fs)
return;
to = ts % BITS(T);
fo = fs % BITS(T);
- if (to == fo) {
+ if (ts < fs) {
t = bv + ts / BITS(T);
ts = to;
f = bv + fs / BITS(T);
fs = fo;
- BV_DO_ALIGNED(T, MOVE);
- } else if (ts < fs) {
- t = bv + ts / BITS(T);
- ts = to;
- f = bv + fs / BITS(T);
- fs = fo;
- if (ts < fs) {
+ if (ts == fs) {
+ BV_DO_ALIGNED_FORWARD(T, MOVE);
+ } else if (ts < fs) {
BV_DO_RIGHT_FORWARD(T, MOVE);
} else { /* ts > fs */
BV_DO_LEFT_FORWARD(T, MOVE);
}
- } else { /* ts > fs */
+ } else if (to == fo) {
+ t = bv + ts / BITS(T);
+ ts = to;
+ f = bv + fs / BITS(T);
+ fs = fo;
+ BV_DO_ALIGNED_BACKWARD(T, MOVE);
+ } else { /* ts > fs */
size_t z;
BV_MOVE_INIT_REVERSE(T, t, ts);
BV_MOVE_INIT_REVERSE(T, f, fs);
#endif /* INLINE_DEFINE */
#undef T
+/* ... Test if zero ........................................................ */
+
+#define T BV_UNIT
+INLINE_DECLARE(int bv_zero(const void *bv_, size_t s, size_t l))
+#ifdef INLINE_DEFINE
+{
+ size_t o;
+ T mask;
+ const T *bv = (const T *) bv_, *end;
+
+ bv += s / BITS(T);
+ o = s % BITS(T);
+
+ mask = BV_MASK_HIGHER(T, BITS(T) - o);
+ if (o + l <= BITS(T)) {
+ if (o + l < BITS(T))
+ mask &= BV_MASK_LOWER(T, o + l);
+ if (*bv & mask)
+ return 0;
+ } else {
+ if (*bv & mask)
+ return 0;
+ ++bv;
+ l -= (BITS(T) - o);
+ end = bv + l / BITS(T);
+ for (; bv < end; ++bv) {
+ if (*bv)
+ return 0;
+ }
+ o = l % BITS(T);
+ if (o) {
+ mask = BV_MASK_LOWER(T, o);
+ if (*bv & mask)
+ return 0;
+ }
+ }
+
+ return 1;
+}
+#endif /* INLINE_DEFINE */
+#undef T
+
/* ... Compare ............................................................. */
#define BV_EQ(T, B1, B2) \
size_t o, k;
T mask, *bv = (T *) bv_;
- if (!l)
- return;
-
if (f)
- f = ~0;
+ f = ~0u;
bv += s / BITS(T);
o = s % BITS(T);