4 /* === Bit vectors ========================================================= */
6 /* ... Generic macros ...................................................... */
/* Declaration wrapper: prefixes prototype P with the project-defined STATIC
 * macro so helpers can be emitted as static/inline or extern consistently. */
8 #define INLINE_DECLARE(P) STATIC P
/* Storage unit of a bit vector; defaults to a byte (the surrounding
 * #ifndef guard is not visible in this excerpt). */
12 # define BV_UNIT unsigned char
/* Number of value+padding bits in an object of type T. */
15 #define BITS(T) (CHAR_BIT * sizeof(T))
/* Bytes needed to hold I bits: ceil(I / BITS(BV_UNIT)) units, computed as
 * (I % B != 0) + I / B — presumably to avoid the overflow-prone
 * (I + B - 1) / B form; TODO confirm intent. */
17 #define BV_SIZE(I) (((((I) % BITS(BV_UNIT)) != 0) + ((I) / BITS(BV_UNIT))) * sizeof(BV_UNIT))
/* Mask of the I lowest bits of a T; the range constraint below keeps the
 * shift count strictly below the type width (shift >= width is UB). */
19 /* 0 <= I < CHAR_BIT * sizeof(T) */
20 #define BV_MASK_LOWER(T, I) (~((~((T) 0)) << (I)))
/* Mask of the I highest bits of a T; requires I > 0 so BITS(T) - I stays
 * below the type width. */
21 /* 0 < I <= CHAR_BIT * sizeof(T) */
22 #define BV_MASK_HIGHER(T, I) ((~((T) 0)) << (BITS(T) - (I)))
/* Forward transfer between bit ranges sharing the same in-unit offset
 * (ts == fs): masked head unit, bulk BV_##A##_UNIT_ALIGNED run of lk whole
 * units, masked tail of lo bits.  A selects the primitive family (COPY or
 * MOVE).  Expects mask, t, f, ts, fs, l, lk, lo in the caller's scope.
 * NOTE(review): several continuation lines of this macro are not visible
 * in this excerpt; comments describe only what is shown. */
24 #define BV_DO_ALIGNED_FORWARD(T, A) \
 /* Head: keep the fs low bits of *t, take the remaining bits from *f. */ \
25 mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
26 if (fs + l <= BITS(T)) { \
27 /* Branching is apparently useless, \
28 * but since we can't portably shift \
29 * CHAR_BIT bits from a char... \
30 * Actually, we only copy up to this */ \
31 if (fs + l < BITS(T)) \
32 mask &= BV_MASK_LOWER(T, fs + l); \
33 *t = (*t & ~mask) | (*f & mask); \
36 *t = (*t & ~mask) | (*f & mask); \
39 l -= (BITS(T) - ts); \
 /* Bulk: lk whole units via the unit-aligned primitive. */ \
42 BV_##A##_UNIT_ALIGNED(T, t, f, lk); \
 /* Tail: lo remaining bits of the final unit. */ \
46 mask = BV_MASK_LOWER(T, lo); \
47 *t = (*t & ~mask) | (*f & mask); \
/* Backward variant of BV_DO_ALIGNED_FORWARD for equally-aligned ranges —
 * the direction an overlapping move toward higher addresses needs: tail
 * unit first (t[lk]/f[lk]), then the aligned bulk, then the head (t[-1]).
 * NOTE(review): continuation lines are elided in this excerpt. */
51 #define BV_DO_ALIGNED_BACKWARD(T, A) \
52 if (fs + l <= BITS(T)) { \
 /* Whole range fits in one unit: single masked merge. */ \
53 mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
54 /* Branching is apparently useless, \
55 * but since we can't portably shift \
56 * CHAR_BIT bits from a char... \
57 * Actually, we only copy up to this */ \
58 if (fs + l < BITS(T)) \
59 mask &= BV_MASK_LOWER(T, fs + l); \
60 *t = (*t & ~mask) | (*f & mask); \
63 l -= (BITS(T) - ts); \
 /* Partial last unit (lo bits) first... */ \
69 mask = BV_MASK_LOWER(T, lo); \
70 t[lk] = (t[lk] & ~mask) | (f[lk] & mask); \
 /* ...then the whole-unit bulk, then the masked head unit. */ \
72 BV_##A##_UNIT_ALIGNED(T, t, f, lk); \
73 mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
74 t[-1] = (t[-1] & ~mask) | (f[-1] & mask); \
/* Forward transfer when the destination bit offset exceeds the source's
 * (ts > fs): each source unit is shifted left by `step` bits, the spilled
 * high bits carried across iterations in `ins` via
 * BV_##A##_UNIT_LEFT_FORWARD.
 * NOTE(review): continuation lines are elided in this excerpt. */
77 #define BV_DO_LEFT_FORWARD(T, A) \
79 mask = BV_MASK_HIGHER(T, BITS(T) - ts); \
80 if (ts + l <= BITS(T)) { \
81 if (ts + l < BITS(T)) \
82 mask &= BV_MASK_LOWER(T, ts + l); \
 /* Single-unit case: merge the shifted source under the mask. */ \
83 *t = (*t & ~mask) | ((*f & (mask >> step)) << step); \
 /* pets: complementary shift count, BITS(T) - step. */ \
85 size_t pets = BITS(T) - step; \
86 l -= (BITS(T) - ts); \
87 *t = (*t & ~mask) | ((*f & (mask >> step)) << step); \
90 mask = BV_MASK_LOWER(T, l); \
91 *t = (*t & ~mask) | ((*f & (mask << pets)) >> pets); \
 /* Seed the carry with the high `step` bits of the first source unit. */ \
93 ins = (*f & BV_MASK_HIGHER(T, step)) >> pets; \
95 offset = l % BITS(T); \
96 end = t + l / BITS(T); \
98 BV_##A##_UNIT_LEFT_FORWARD(T, t, f, step); \
 /* Tail: the last `offset` destination bits, from carry plus source. */ \
101 if (offset > step) { \
102 mask = BV_MASK_LOWER(T, offset - step); \
103 *t = (*t & (~mask << step)) \
104 | ((*f & mask) << step) \
106 } else if (offset) { \
107 mask = BV_MASK_LOWER(T, offset); \
108 *t = (*t & ~mask) | (ins & mask); \
/* Forward transfer when the source bit offset exceeds the destination's
 * (fs > ts): source units are shifted right by `step`, with the carry
 * staged in `ins` via BV_##A##_UNIT_RIGHT_FORWARD.
 * NOTE(review): continuation lines are elided in this excerpt. */
113 #define BV_DO_RIGHT_FORWARD(T, A) \
115 mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
116 if (fs + l <= BITS(T)) { \
117 if (fs + l < BITS(T)) \
118 mask &= BV_MASK_LOWER(T, fs + l); \
 /* Single-unit case: merge the right-shifted source bits. */ \
119 *t = (*t & ~(mask >> step)) | ((*f & mask) >> step); \
121 l -= (BITS(T) - fs); \
 /* Carry: shifted head of *f merged with the preserved low ts bits of *t. */ \
122 ins = ((*f & mask) >> step) | (*t & BV_MASK_LOWER(T, ts)); \
124 offset = l % BITS(T); \
 /* One extra source unit is consumed when the tail crosses a boundary. */ \
125 end = f + l / BITS(T) + (offset > step); \
127 BV_##A##_UNIT_RIGHT_FORWARD(T, t, f, step); \
 /* Tail merge of the final `offset` bits. */ \
132 if (offset > step) { \
133 mask = BV_MASK_LOWER(T, offset - step); \
134 *t = (*t & ~mask) | (ins & mask); \
136 mask = BV_MASK_LOWER(T, offset); \
137 *t = (*t & (~mask << (BITS(T) - step))) \
138 | ((*f & mask) << (BITS(T) - step)) \
/* Backward (high-to-low address) counterpart of BV_DO_LEFT_FORWARD; used
 * when ts > fs and the ranges may overlap.  Masks are mirrored (LOWER and
 * HIGHER swap roles) and the walk runs from `t` down to `begin`.
 * NOTE(review): continuation lines are elided in this excerpt. */
143 #define BV_DO_LEFT_BACKWARD(T, A) \
145 mask = BV_MASK_LOWER(T, BITS(T) - ts); \
146 if (ts + l <= BITS(T)) { \
147 if (ts + l < BITS(T)) \
148 mask &= BV_MASK_HIGHER(T, ts + l); \
 /* Single-unit case: merge the right-shifted source under the mask. */ \
149 *t = (*t & ~mask) | ((*f & (mask << step)) >> step); \
 /* pets: complementary shift count, BITS(T) - step. */ \
151 size_t pets = BITS(T) - step; \
152 l -= (BITS(T) - ts); \
153 *t = (*t & ~mask) | ((*f & (mask << step)) >> step); \
156 mask = BV_MASK_HIGHER(T, l); \
157 *t = (*t & ~mask) | ((*f & (mask >> pets)) << pets); \
 /* Seed the carry with the low `step` bits of the first source unit. */ \
159 ins = (*f & BV_MASK_LOWER(T, step)) << pets; \
161 offset = l % BITS(T); \
162 begin = t - l / BITS(T); \
163 while (t > begin) { \
164 BV_##A##_UNIT_LEFT_BACKWARD(T, t, f, step); \
 /* Tail: final `offset` bits at the low end of the walk. */ \
167 if (offset > step) { \
168 mask = BV_MASK_HIGHER(T, offset - step); \
169 *t = (*t & (~mask >> step)) \
170 | ((*f & mask) >> step) \
172 } else if (offset) { \
173 mask = BV_MASK_HIGHER(T, offset); \
174 *t = (*t & ~mask) | (ins & mask); \
/* Backward (high-to-low address) counterpart of BV_DO_RIGHT_FORWARD; used
 * for overlapping moves where the source offset exceeds the destination's.
 * NOTE(review): continuation lines are elided in this excerpt. */
179 #define BV_DO_RIGHT_BACKWARD(T, A) \
181 mask = BV_MASK_LOWER(T, BITS(T) - fs); \
182 if (fs + l <= BITS(T)) { \
183 if (fs + l < BITS(T)) \
184 mask &= BV_MASK_HIGHER(T, fs + l); \
 /* Single-unit case: merge the left-shifted source bits. */ \
185 *t = (*t & ~(mask << step)) | ((*f & mask) << step); \
187 l -= (BITS(T) - fs); \
 /* Carry: shifted source head, plus the preserved high ts bits of *t. */ \
188 ins = ((*f & mask) << step); \
190 ins |= (*t & BV_MASK_HIGHER(T, ts)); \
192 offset = l % BITS(T); \
 /* Walk from f down to begin; boundary adjusted when offset <= step. */ \
193 begin = f - l / BITS(T) + (offset <= step); \
194 while (f >= begin) { \
195 BV_##A##_UNIT_RIGHT_BACKWARD(T, t, f, step); \
 /* Tail merge of the final `offset` bits. */ \
200 if (offset > step) { \
201 mask = BV_MASK_HIGHER(T, offset - step); \
202 *t = (*t & ~mask) | (ins & mask); \
204 mask = BV_MASK_HIGHER(T, offset); \
205 *t = (*t & (~mask >> (BITS(T) - step))) \
206 | ((*f & mask) >> (BITS(T) - step)) \
211 /* ... Copy ................................................................. */
/* Unit-level COPY primitives plugged into the BV_DO_* drivers.  The
 * aligned case is a plain memcpy (so source and destination must not
 * overlap); the shifted cases expect a carry variable `ins` in scope. */
213 #define BV_COPY_UNIT_ALIGNED(X, T, F, L) memcpy((T), (F), (L) * sizeof(X))
215 /* Save the O - B higher bits, shift B bits left, add B bits from f at right */
216 #define BV_COPY_UNIT_LEFT_FORWARD(X, T, F, B) \
217 *(T) = (*(F) << (B)) | ins; \
218 ins = *(F) >> (BITS(X) - (B));
220 /* Save the B lower bits, shift B bits right, add B bits from F at left */
221 #define BV_COPY_UNIT_RIGHT_FORWARD(X, T, F, B) \
222 *(T) = (*(F) << (BITS(X) - (B))) | ins; \
/* bv_copy: copy l bits from bit offset fs of f_ into bit offset ts of t_.
 * The aligned path bottoms out in memcpy (see BV_COPY_UNIT_ALIGNED), so
 * the two vectors must not overlap — use bv_move for that.
 * NOTE(review): the offset computation, braces, and #ifdef structure of
 * the body are elided in this excerpt. */
226 INLINE_DECLARE(void bv_copy(void *t_, size_t ts, const void *f_, size_t fs, size_t l))
230 T ins, mask, *t = (T *) t_;
231 const T *f = (const T *) f_, *end;
 /* Dispatch on the relative in-unit offsets of destination vs. source. */
240 BV_DO_ALIGNED_FORWARD(T, COPY);
241 } else if (ts < fs) {
242 BV_DO_RIGHT_FORWARD(T, COPY);
243 } else { /* ts > fs */
244 BV_DO_LEFT_FORWARD(T, COPY);
249 #endif /* INLINE_DEFINE */
252 /* ... Move ................................................................ */
/* Unit-level MOVE primitives: overlap-safe counterparts of the COPY ones.
 * The aligned case uses memmove; the shifted cases stage the next carry in
 * `tmp` before writing *(T), so reading a unit that the write would
 * clobber still sees the original value.  Expects `ins` and `tmp` in the
 * caller's scope.  NOTE(review): some continuation lines are elided. */
254 #define BV_MOVE_UNIT_ALIGNED(X, T, F, L) memmove((T), (F), (L) * sizeof(X))
256 #define BV_MOVE_UNIT_LEFT_FORWARD(X, T, F, B) \
257 tmp = *(F) >> (BITS(X) - (B)); \
258 *(T) = (*(F) << (B)) | ins; \
261 #define BV_MOVE_UNIT_RIGHT_FORWARD(X, T, F, B) \
263 *(T) = (*(F) << (BITS(X) - (B))) | ins; \
266 #define BV_MOVE_UNIT_LEFT_BACKWARD(X, T, F, B) \
267 tmp = *(F) << (BITS(X) - (B)); \
268 *(T) = (*(F) >> (B)) | ins; \
271 #define BV_MOVE_UNIT_RIGHT_BACKWARD(X, T, F, B) \
273 *(T) = (*(F) >> (BITS(X) - (B))) | ins; \
/* Initialize pointer V and bit count VS for a backward walk from end
 * offset z (computed by the caller; bv is the vector base).  The two
 * visible assignments to V are the branches of an if/else that is elided
 * in this excerpt: one for z landing mid-unit, one for z on a unit
 * boundary. */
276 #define BV_MOVE_INIT_REVERSE(T, V, VS) \
278 (VS) = z % BITS(T); \
280 (V) = bv + (z / BITS(T)); \
281 (VS) = BITS(T) - (VS); \
283 /* z >= BITS(T) because l > 0 */ \
284 (V) = bv + (z / BITS(T)) - 1; \
/* bv_move: move l bits from offset fs to offset ts within the single
 * vector bv_ — the overlap-safe sibling of bv_copy, bottoming out in
 * memmove / tmp-staged unit primitives.  A forward or backward walk is
 * chosen (direction test elided in this excerpt) so overlapping ranges
 * are handled correctly in both directions.
 * NOTE(review): several lines and braces of the body are not visible. */
288 INLINE_DECLARE(void bv_move(void *bv_, size_t ts, size_t fs, size_t l))
291 size_t to, fo, offset, step;
292 T ins, tmp, mask, *bv = (T *) bv_, *t, *f;
293 const T *begin, *end;
 /* Forward walk: position unit pointers, dispatch on relative offsets. */
302 t = bv + ts / BITS(T);
304 f = bv + fs / BITS(T);
307 BV_DO_ALIGNED_FORWARD(T, MOVE);
308 } else if (ts < fs) {
309 BV_DO_RIGHT_FORWARD(T, MOVE);
310 } else { /* ts > fs */
311 BV_DO_LEFT_FORWARD(T, MOVE);
313 } else if (to == fo) {
314 t = bv + ts / BITS(T);
316 f = bv + fs / BITS(T);
318 BV_DO_ALIGNED_BACKWARD(T, MOVE);
319 } else { /* ts > fs */
 /* Backward walk: start both cursors from the end offsets. */
321 BV_MOVE_INIT_REVERSE(T, t, ts);
322 BV_MOVE_INIT_REVERSE(T, f, fs);
324 BV_DO_RIGHT_BACKWARD(T, MOVE);
325 } else { /* ts > fs */
326 BV_DO_LEFT_BACKWARD(T, MOVE);
332 #endif /* INLINE_DEFINE */
335 /* ... Test if zero ........................................................ */
/* bv_zero: test the l bits starting at bit offset s of bv_ — presumably
 * returns nonzero iff they are all clear; the return statements are elided
 * in this excerpt, so confirm against the full source.
 * Structure: masked head unit, loop over whole units, masked tail. */
338 INLINE_DECLARE(int bv_zero(const void *bv_, size_t s, size_t l))
 /* NOTE(review): this cast drops const from bv_; harmless if never
  * written through, but a `const T *` would be cleaner. */
342 T mask, *bv = (T *) bv_, *end;
 /* Head: bits [o, BITS(T)) of the first unit (o = s % BITS(T), computed
  * on a line not visible here). */
347 mask = BV_MASK_HIGHER(T, BITS(T) - o);
348 if (o + l <= BITS(T)) {
350 mask &= BV_MASK_LOWER(T, o + l);
 /* Bulk: every whole unit must be zero. */
358 end = bv + l / BITS(T);
359 for (; bv < end; ++bv) {
 /* Tail: the remaining o low bits of the final unit. */
365 mask = BV_MASK_LOWER(T, o);
373 #endif /* INLINE_DEFINE */
376 /* ... Compare ............................................................. */
/* Equality helpers for bv_eq.  BV_EQ returns 0 from the ENCLOSING function
 * on the first mismatching chunk — these macros may only be used inside a
 * function returning int. */
378 #define BV_EQ(T, B1, B2) \
379 if (((T) (B1)) != ((T) (B2))) return 0;
/* Same, comparing only the bits selected by mask M. */
381 #define BV_EQ_MASK(T, B1, B2, M) BV_EQ(T, (B1) & (M), (B2) & (M))
/* Compare when B2's bits must be shifted left by B to line up with B1;
 * `ins` carries the spill between units.  NOTE(review): some continuation
 * lines are elided in this excerpt. */
383 #define BV_EQ_LEFT(T, B1, B2, L, B) \
384 offset = (L) % BITS(T); \
385 end = (B1) + (L) / BITS(T); \
386 while ((B1) < end) { \
387 BV_EQ(T, *(B1), (*(B2) << (B)) | ins); \
388 ins = *(B2) >> (BITS(T) - (B)); \
 /* Tail: last `offset` bits. */ \
391 if (offset > (B)) { \
392 mask = BV_MASK_LOWER(T, offset - (B)); \
393 BV_EQ(T, *(B1) & ~(~mask << (B)), \
394 ((*(B2) & mask) << (B)) | ins); \
395 } else if (offset) { \
396 mask = BV_MASK_LOWER(T, offset); \
397 BV_EQ_MASK(T, *(B1), ins, mask); \
/* Mirror image: B2's bits are shifted right by B to line up with B1. */
400 #define BV_EQ_RIGHT(T, B1, B2, L, B) \
401 offset = (L) % BITS(T); \
402 end = (B2) + (L) / BITS(T) + (offset >= (B)); \
403 while ((B2) < end) { \
404 BV_EQ(T, *(B1), (*(B2) << (BITS(T) - (B))) | ins); \
405 ins = *(B2) >> (B); \
 /* Tail: last `offset` bits. */ \
410 if (offset >= (B)) { \
411 mask = BV_MASK_LOWER(T, offset - (B)); \
412 BV_EQ_MASK(T, *(B1), ins, mask); \
414 mask = BV_MASK_LOWER(T, offset); \
415 BV_EQ(T, *(B1) & ~(~mask << (BITS(T) - (B))), \
416 ((*(B2) & mask) << (BITS(T) - (B))) | ins); \
/* bv_eq: compare l bits at offset s1 of bv1_ with l bits at offset s2 of
 * bv2_.  Returns 0 on the first mismatch (via BV_EQ); the success return
 * and the normalization of s1/s2/step are elided in this excerpt.
 * Three paths: equal in-unit offsets (memcmp bulk), s1 < s2 (bv2 shifted
 * right), s1 > s2 (bv2 shifted left). */
420 INLINE_DECLARE(int bv_eq(const void *bv1_, size_t s1, const void *bv2_, size_t s2, size_t l))
425 const T *bv1 = (const T *) bv1_, *bv2 = (const T *) bv2_, *end;
 /* --- Equal alignment: masked head, memcmp bulk, masked tail. --- */
435 mask = BV_MASK_HIGHER(T, BITS(T) - s2);
436 if (s2 + l <= BITS(T)) {
437 if (s2 + l < BITS(T)) {
438 mask &= BV_MASK_LOWER(T, s2 + l);
440 BV_EQ_MASK(T, *bv1, *bv2, mask);
444 BV_EQ_MASK(T, *bv1, *bv2, mask);
450 if ((ret = memcmp(bv1, bv2, lk * sizeof(T))) != 0)
453 mask = BV_MASK_LOWER(T, lo);
454 BV_EQ_MASK(T, *bv1, *bv2, mask);
458 } else if (s1 < s2) {
 /* --- s1 < s2: walk bv2 right-shifted by step, carry in ins. --- */
461 mask = BV_MASK_HIGHER(T, BITS(T) - s2);
462 if (s2 + l <= BITS(T)) {
463 if (s2 + l < BITS(T))
464 mask &= BV_MASK_LOWER(T, s2 + l);
465 BV_EQ(T, *bv1 & (mask >> step), (*bv2 & mask) >> step);
468 ins = ((*bv2 & mask) >> step) | (*bv1 & BV_MASK_LOWER(T, s1));
470 offset = l % BITS(T);
471 end = bv2 + l / BITS(T) + (offset >= step);
473 BV_EQ(T, *bv1, (*bv2 << (BITS(T) - step)) | ins);
 /* Tail comparison of the final offset bits. */
479 if (offset >= step) {
480 mask = BV_MASK_LOWER(T, offset - step);
481 BV_EQ_MASK(T, *bv1, ins, mask);
483 mask = BV_MASK_LOWER(T, offset);
484 BV_EQ(T, *bv1 & ~(~mask << (BITS(T) - step)),
485 ((*bv2 & mask) << (BITS(T) - step)) | ins);
487 /* BV_EQ_RIGHT(T, bv1, bv2, l, step); */
490 } else { /* s1 > s2 */
 /* --- s1 > s2: walk bv2 left-shifted by step, carry in ins. --- */
493 mask = BV_MASK_HIGHER(T, BITS(T) - s1);
494 if (s1 + l <= BITS(T)) {
495 if (s1 + l < BITS(T))
496 mask &= BV_MASK_LOWER(T, s1 + l);
497 BV_EQ(T, *bv1 & mask, (*bv2 & (mask >> step)) << step);
499 size_t pets = BITS(T) - step;
501 BV_EQ(T, *bv1 & mask, (*bv2 & (mask >> step)) << step);
504 mask = BV_MASK_LOWER(T, l);
505 BV_EQ(T, *bv1 & mask, (*bv2 & (mask << pets)) >> pets);
507 ins = (*bv2 & BV_MASK_HIGHER(T, step)) >> pets;
509 offset = l % BITS(T);
510 end = bv1 + l / BITS(T);
512 BV_EQ(T, *bv1, (*bv2 << step) | ins);
513 ins = *bv2 >> (BITS(T) - step);
 /* Tail comparison of the final offset bits. */
517 mask = BV_MASK_LOWER(T, offset - step);
518 BV_EQ(T, *bv1 & ~(~mask << step),
519 ((*bv2 & mask) << step) | ins);
521 mask = BV_MASK_LOWER(T, offset);
522 BV_EQ_MASK(T, *bv1, ins, mask);
524 /* BV_EQ_LEFT(T, bv1, bv2, l, step); */
532 #endif /* INLINE_DEFINE */
535 /* ... Fill ................................................................ */
/* bv_fill works on bytes regardless of BV_UNIT. */
537 #define T unsigned char
/* bv_fill: write the fill pattern f into the l bits starting at bit
 * offset s of bv_.  Only masked bits are touched; f is presumably
 * expected to be 0 or an all-ones byte pattern (normalization elided in
 * this excerpt — TODO confirm).  Structure: masked head unit, whole-unit
 * bulk (elided), masked tail. */
538 INLINE_DECLARE(void bv_fill(void *bv_, size_t s, size_t l, unsigned int f))
542 T mask, *bv = (T *) bv_;
 /* Head: bits [o, BITS(T)) of the first unit (o = s % BITS(T), computed
  * on a line not visible here). */
550 mask = BV_MASK_HIGHER(T, BITS(T) - o);
551 if (o + l <= BITS(T)) {
553 mask &= BV_MASK_LOWER(T, o + l);
554 *bv = (*bv & ~mask) | (f & mask);
556 *bv = (*bv & ~mask) | (f & mask);
 /* Tail: remaining o low bits of the final unit. */
563 mask = BV_MASK_LOWER(T, o);
565 *bv = (*bv & ~mask) | (f & mask);
571 #endif /* INLINE_DEFINE */
574 /* ========================================================================= */
576 #endif /* BITVECT_H */