/* === Bit vectors ========================================================= */

/* ... Generic macros ...................................................... */
#define INLINE_DECLARE(P) STATIC P

#ifndef BV_UNIT
# define BV_UNIT unsigned char
#endif

#define BITS(T) (CHAR_BIT * sizeof(T))

#define BV_SIZE(I) (((((I) % BITS(BV_UNIT)) != 0) + ((I) / BITS(BV_UNIT))) * sizeof(BV_UNIT))
/* 0 <= I < CHAR_BIT * sizeof(T) */
#define BV_MASK_LOWER(T, I)  (~((~((T) 0)) << (I)))
/* 0 < I <= CHAR_BIT * sizeof(T) */
#define BV_MASK_HIGHER(T, I) ((~((T) 0)) << (BITS(T) - (I)))
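
/* A quick illustration of the mask macros (hypothetical, non-compiled
 * sketch; assumes CHAR_BIT == 8). Note that the shift in BV_MASK_HIGHER
 * happens after integer promotion, so the result should be assigned or
 * cast back to T before comparing. */
#if 0
#include <assert.h>
static void bv_mask_example(void) {
 assert(BV_SIZE(10) == 2);                        /* 10 bits round up to 2 bytes */
 assert(BV_MASK_LOWER(unsigned char, 3) == 0x07); /* 00000111 */
 assert((unsigned char) BV_MASK_HIGHER(unsigned char, 3) == 0xE0); /* 11100000 */
}
#endif
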
#define BV_DO_ALIGNED_FORWARD(T, A)       \
 mask = BV_MASK_HIGHER(T, BITS(T) - fs);  \
 if (fs + l <= BITS(T)) {                 \
  /* Branching is apparently useless,     \
   * but since we can't portably shift    \
   * CHAR_BIT bits from a char...         \
   * Actually, we only copy up to this */ \
  if (fs + l < BITS(T))                   \
   mask &= BV_MASK_LOWER(T, fs + l);      \
  *t = (*t & ~mask) | (*f & mask);        \
 } else {                                 \
  size_t lo, lk;                          \
  *t = (*t & ~mask) | (*f & mask);        \
  ++t;                                    \
  ++f;                                    \
  l -= (BITS(T) - ts);                    \
  lk = l / BITS(T);                       \
  lo = l % BITS(T);                       \
  BV_##A##_UNIT_ALIGNED(T, t, f, lk);     \
  t += lk;                                \
  f += lk;                                \
  if (lo) {                               \
   mask = BV_MASK_LOWER(T, lo);           \
   *t = (*t & ~mask) | (*f & mask);       \
  }                                       \
 }
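
/* The aligned case splits the range into a partial head unit, a run of
 * whole units handled by BV_##A##_UNIT_ALIGNED, and a partial tail unit.
 * A standalone sketch of the same decomposition (hypothetical helper,
 * assuming CHAR_BIT == 8), for illustration only: */
#if 0
#include <string.h>
static void aligned_copy_sketch(unsigned char *t, const unsigned char *f,
                                size_t off, size_t l)
{
 unsigned char mask = (unsigned char) (0xFFu << off); /* bits off..7 */
 if (off + l <= 8) {               /* everything fits in one unit */
  if (off + l < 8)
   mask &= (unsigned char) ~(0xFFu << (off + l));
  *t = (*t & ~mask) | (*f & mask);
 } else {
  size_t lk, lo;
  *t = (*t & ~mask) | (*f & mask); /* head: the top 8 - off bits */
  ++t; ++f;
  l -= 8 - off;
  lk = l / 8;                      /* whole middle units */
  lo = l % 8;                      /* bits left for the tail unit */
  memcpy(t, f, lk);
  if (lo) {
   mask = (unsigned char) ~(0xFFu << lo);
   t[lk] = (t[lk] & ~mask) | (f[lk] & mask);
  }
 }
}
#endif
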
#define BV_DO_ALIGNED_BACKWARD(T, A)         \
 if (fs + l <= BITS(T)) {                    \
  mask = BV_MASK_HIGHER(T, BITS(T) - fs);    \
  /* Branching is apparently useless,        \
   * but since we can't portably shift       \
   * CHAR_BIT bits from a char...            \
   * Actually, we only copy up to this */    \
  if (fs + l < BITS(T))                      \
   mask &= BV_MASK_LOWER(T, fs + l);         \
  *t = (*t & ~mask) | (*f & mask);           \
 } else {                                    \
  size_t lo, lk;                             \
  l -= (BITS(T) - ts);                       \
  lk = l / BITS(T);                          \
  lo = l % BITS(T);                          \
  ++t;                                       \
  ++f;                                       \
  if (lo) {                                  \
   mask = BV_MASK_LOWER(T, lo);              \
   t[lk] = (t[lk] & ~mask) | (f[lk] & mask); \
  }                                          \
  BV_##A##_UNIT_ALIGNED(T, t, f, lk);        \
  mask = BV_MASK_HIGHER(T, BITS(T) - fs);    \
  t[-1] = (t[-1] & ~mask) | (f[-1] & mask);  \
 }

#define BV_DO_LEFT_FORWARD(T, A)                        \
 step = ts - fs;                                        \
 mask = BV_MASK_HIGHER(T, BITS(T) - ts);                \
 if (ts + l <= BITS(T)) {                               \
  if (ts + l < BITS(T))                                 \
   mask &= BV_MASK_LOWER(T, ts + l);                    \
  *t = (*t & ~mask) | ((*f & (mask >> step)) << step);  \
 } else {                                               \
  size_t pets = BITS(T) - step;                         \
  l -= (BITS(T) - ts);                                  \
  *t = (*t & ~mask) | ((*f & (mask >> step)) << step);  \
  ++t;                                                  \
  if (l <= step) {                                      \
   mask = BV_MASK_LOWER(T, l);                          \
   *t = (*t & ~mask) | ((*f & (mask << pets)) >> pets); \
  } else {                                              \
   ins = (*f & BV_MASK_HIGHER(T, step)) >> pets;        \
   ++f;                                                 \
   offset = l % BITS(T);                                \
   end = t + l / BITS(T);                               \
   while (t < end) {                                    \
    BV_##A##_UNIT_LEFT_FORWARD(T, t, f, step);          \
    ++t;                                                \
    ++f;                                                \
   }                                                    \
   if (offset > step) {                                 \
    mask = BV_MASK_LOWER(T, offset - step);             \
    *t = (*t & (~mask << step))                         \
       | ((*f & mask) << step)                          \
       | ins;                                           \
   } else if (offset) {                                 \
    mask = BV_MASK_LOWER(T, offset);                    \
    *t = (*t & ~mask) | (ins & mask);                   \
   }                                                    \
  }                                                     \
 }

#define BV_DO_RIGHT_FORWARD(T, A)                            \
 step = fs - ts;                                             \
 mask = BV_MASK_HIGHER(T, BITS(T) - fs);                     \
 if (fs + l <= BITS(T)) {                                    \
  if (fs + l < BITS(T))                                      \
   mask &= BV_MASK_LOWER(T, fs + l);                         \
  *t = (*t & ~(mask >> step)) | ((*f & mask) >> step);       \
 } else {                                                    \
  l -= (BITS(T) - fs);                                       \
  ins = ((*f & mask) >> step) | (*t & BV_MASK_LOWER(T, ts)); \
  ++f;                                                       \
  offset = l % BITS(T);                                      \
  end = f + l / BITS(T) + (offset > step);                   \
  while (f < end) {                                          \
   BV_##A##_UNIT_RIGHT_FORWARD(T, t, f, step);               \
   ++t;                                                      \
   ++f;                                                      \
  }                                                          \
  if (offset > step) {                                       \
   mask = BV_MASK_LOWER(T, offset - step);                   \
   *t = (*t & ~mask) | (ins & mask);                         \
  } else if (offset) {                                       \
   mask = BV_MASK_LOWER(T, offset);                          \
   *t = (*t & (~mask << (BITS(T) - step)))                   \
      | ((*f & mask) << (BITS(T) - step))                    \
      | ins;                                                 \
  }                                                          \
 }

#define BV_DO_LEFT_BACKWARD(T, A)                       \
 step = ts - fs;                                        \
 mask = BV_MASK_LOWER(T, BITS(T) - ts);                 \
 if (ts + l <= BITS(T)) {                               \
  if (ts + l < BITS(T))                                 \
   mask &= BV_MASK_HIGHER(T, ts + l);                   \
  *t = (*t & ~mask) | ((*f & (mask << step)) >> step);  \
 } else {                                               \
  size_t pets = BITS(T) - step;                         \
  l -= (BITS(T) - ts);                                  \
  *t = (*t & ~mask) | ((*f & (mask << step)) >> step);  \
  --t;                                                  \
  if (l <= step) {                                      \
   mask = BV_MASK_HIGHER(T, l);                         \
   *t = (*t & ~mask) | ((*f & (mask >> pets)) << pets); \
  } else {                                              \
   ins = (*f & BV_MASK_LOWER(T, step)) << pets;         \
   --f;                                                 \
   offset = l % BITS(T);                                \
   begin = t - l / BITS(T);                             \
   while (t > begin) {                                  \
    BV_##A##_UNIT_LEFT_BACKWARD(T, t, f, step);         \
    --t;                                                \
    --f;                                                \
   }                                                    \
   if (offset > step) {                                 \
    mask = BV_MASK_HIGHER(T, offset - step);            \
    *t = (*t & (~mask >> step))                         \
       | ((*f & mask) >> step)                          \
       | ins;                                           \
   } else if (offset) {                                 \
    mask = BV_MASK_HIGHER(T, offset);                   \
    *t = (*t & ~mask) | (ins & mask);                   \
   }                                                    \
  }                                                     \
 }

#define BV_DO_RIGHT_BACKWARD(T, A)                     \
 step = fs - ts;                                       \
 mask = BV_MASK_LOWER(T, BITS(T) - fs);                \
 if (fs + l <= BITS(T)) {                              \
  if (fs + l < BITS(T))                                \
   mask &= BV_MASK_HIGHER(T, fs + l);                  \
  *t = (*t & ~(mask << step)) | ((*f & mask) << step); \
 } else {                                              \
  l -= (BITS(T) - fs);                                 \
  ins = ((*f & mask) << step);                         \
  if (ts)                                              \
   ins |= (*t & BV_MASK_HIGHER(T, ts));                \
  --f;                                                 \
  offset = l % BITS(T);                                \
  begin = f - l / BITS(T) + (offset <= step);          \
  while (f >= begin) {                                 \
   BV_##A##_UNIT_RIGHT_BACKWARD(T, t, f, step);        \
   --t;                                                \
   --f;                                                \
  }                                                    \
  if (offset > step) {                                 \
   mask = BV_MASK_HIGHER(T, offset - step);            \
   *t = (*t & ~mask) | (ins & mask);                   \
  } else if (offset) {                                 \
   mask = BV_MASK_HIGHER(T, offset);                   \
   *t = (*t & (~mask >> (BITS(T) - step)))             \
      | ((*f & mask) >> (BITS(T) - step))              \
      | ins;                                           \
  }                                                    \
 }
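
/* All of the cases above implement the same observable semantics as this
 * naive bit-at-a-time loop; the macros just move whole units at once,
 * carrying the spill-over bits in `ins`. A hypothetical reference
 * implementation, handy for differential testing: */
#if 0
static void bv_copy_naive(unsigned char *t, size_t ts,
                          const unsigned char *f, size_t fs, size_t l)
{
 size_t i;
 for (i = 0; i < l; ++i) {
  size_t tb = ts + i, fb = fs + i;
  if ((f[fb / CHAR_BIT] >> (fb % CHAR_BIT)) & 1u)
   t[tb / CHAR_BIT] |= (unsigned char) (1u << (tb % CHAR_BIT));
  else
   t[tb / CHAR_BIT] &= (unsigned char) ~(1u << (tb % CHAR_BIT));
 }
}
#endif
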
/* ... Copy ................................................................. */

#define BV_COPY_UNIT_ALIGNED(X, T, F, L) memcpy((T), (F), (L) * sizeof(X))

/* Save the O - B higher bits, shift B bits left, add B bits from F at right */
#define BV_COPY_UNIT_LEFT_FORWARD(X, T, F, B) \
 *(T) = (*(F) << (B)) | ins;                  \
 ins  = *(F) >> (BITS(X) - (B));

/* Save the B lower bits, shift B bits right, add B bits from F at left */
#define BV_COPY_UNIT_RIGHT_FORWARD(X, T, F, B) \
 *(T) = (*(F) << (BITS(X) - (B))) | ins;       \
 ins  = *(F) >> (B);
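
/* The carry chain in isolation: shifting a run of units left by b bits,
 * where each step emits one target unit and saves the b bits that spill
 * over into `ins` for the next step (hypothetical standalone loop,
 * 0 < b < CHAR_BIT): */
#if 0
static void shift_units_left(unsigned char *t, const unsigned char *f,
                             size_t n, unsigned b)
{
 unsigned char ins = 0; /* bits carried over from the previous unit */
 size_t i;
 for (i = 0; i < n; ++i) {
  t[i] = (unsigned char) ((f[i] << b) | ins);
  ins  = (unsigned char) (f[i] >> (CHAR_BIT - b));
 }
}
#endif
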
#define T BV_UNIT
INLINE_DECLARE(void bv_copy(void *t_, size_t ts, const void *f_, size_t fs, size_t l))
#ifdef INLINE_DEFINE
{
 size_t offset, step;
 T ins, mask, *t = (T *) t_;
 const T *f = (const T *) f_, *end;

 if (!l)
  return;

 t  += ts / BITS(T);
 ts %= BITS(T);
 f  += fs / BITS(T);
 fs %= BITS(T);
 if (ts == fs) {
  BV_DO_ALIGNED_FORWARD(T, COPY);
 } else if (ts < fs) {
  BV_DO_RIGHT_FORWARD(T, COPY);
 } else { /* ts > fs */
  BV_DO_LEFT_FORWARD(T, COPY);
 }
}
#endif /* INLINE_DEFINE */
#undef T
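
/* Usage sketch (assuming the includer defines STATIC and INLINE_DEFINE
 * before including this header): */
#if 0
unsigned char src[2] = { 0xF8, 0x00 }; /* bits 3..7 set */
unsigned char dst[2] = { 0x00, 0x00 };
bv_copy(dst, 9, src, 3, 5);            /* copy them to bits 9..13 */
/* dst[1] == 0x3E */
#endif
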
/* ... Move ................................................................ */

#define BV_MOVE_UNIT_ALIGNED(X, T, F, L) memmove((T), (F), (L) * sizeof(X))

/* The MOVE variants stage the carry in tmp so that overlapping reads and
 * writes through T and F stay safe. */
#define BV_MOVE_UNIT_LEFT_FORWARD(X, T, F, B) \
 tmp  = *(F) >> (BITS(X) - (B));              \
 *(T) = (*(F) << (B)) | ins;                  \
 ins  = tmp;

#define BV_MOVE_UNIT_RIGHT_FORWARD(X, T, F, B) \
 tmp  = *(F) >> (B);                           \
 *(T) = (*(F) << (BITS(X) - (B))) | ins;       \
 ins  = tmp;

#define BV_MOVE_UNIT_LEFT_BACKWARD(X, T, F, B) \
 tmp  = *(F) << (BITS(X) - (B));               \
 *(T) = (*(F) >> (B)) | ins;                   \
 ins  = tmp;

#define BV_MOVE_UNIT_RIGHT_BACKWARD(X, T, F, B) \
 tmp  = *(F) << (B);                            \
 *(T) = (*(F) >> (BITS(X) - (B))) | ins;        \
 ins  = tmp;

/* Point V at the unit holding the last bit of a field of l bits starting
 * at offset VS, and turn VS into the distance from that unit's high end. */
#define BV_MOVE_INIT_REVERSE(T, V, VS) \
 z = (VS) + l;                         \
 (VS) = z % BITS(T);                   \
 if (VS) {                             \
  (V)  = bv + (z / BITS(T));           \
  (VS) = BITS(T) - (VS);               \
 } else {                              \
  /* z >= BITS(T) because l > 0 */     \
  (V) = bv + (z / BITS(T)) - 1;        \
 }
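
/* Worked example, assuming 8-bit units: for a field of l = 5 bits starting
 * at bit 6, z = 11, so the field's last bit is bit 10, which lives in
 * unit 1; since z % 8 = 3, the reversed offset becomes 8 - 3 = 5, the
 * number of unused bits at the top of that unit. */
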
#define T BV_UNIT
INLINE_DECLARE(void bv_move(void *bv_, size_t ts, size_t fs, size_t l))
#ifdef INLINE_DEFINE
{
 size_t to, fo, offset, step;
 T ins, tmp, mask, *bv = (T *) bv_, *t, *f;
 const T *begin, *end;

 if (!l)
  return;

 to = ts % BITS(T);
 fo = fs % BITS(T);
 if (ts < fs) { /* the target starts before the source: move forward */
  t  = bv + ts / BITS(T);
  ts = to;
  f  = bv + fs / BITS(T);
  fs = fo;
  if (ts == fs) {
   BV_DO_ALIGNED_FORWARD(T, MOVE);
  } else if (ts < fs) {
   BV_DO_RIGHT_FORWARD(T, MOVE);
  } else { /* ts > fs */
   BV_DO_LEFT_FORWARD(T, MOVE);
  }
 } else if (to == fo) {
  t  = bv + ts / BITS(T);
  ts = to;
  f  = bv + fs / BITS(T);
  fs = fo;
  BV_DO_ALIGNED_BACKWARD(T, MOVE);
 } else { /* ts > fs */
  size_t z;
  BV_MOVE_INIT_REVERSE(T, t, ts);
  BV_MOVE_INIT_REVERSE(T, f, fs);
  if (ts < fs) {
   BV_DO_RIGHT_BACKWARD(T, MOVE);
  } else { /* ts > fs */
   BV_DO_LEFT_BACKWARD(T, MOVE);
  }
 }
}
#endif /* INLINE_DEFINE */
#undef T
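
/* Usage sketch for an overlapping move within one vector: */
#if 0
unsigned char v[2] = { 0x0F, 0x00 }; /* bits 0..3 set */
bv_move(v, 2, 0, 4);                 /* copy bits 0..3 onto bits 2..5 */
/* v[0] == 0x3F: bits 0..1 are untouched, bits 2..5 got the old 0..3 */
#endif
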
/* ... Test if zero ........................................................ */

#define T BV_UNIT
INLINE_DECLARE(int bv_zero(const void *bv_, size_t s, size_t l))
#ifdef INLINE_DEFINE
{
 size_t o;
 T mask;
 const T *bv = (const T *) bv_, *end;

 bv += s / BITS(T);
 o   = s % BITS(T);

 mask = BV_MASK_HIGHER(T, BITS(T) - o);
 if (o + l <= BITS(T)) {
  if (o + l < BITS(T))
   mask &= BV_MASK_LOWER(T, o + l);
  return !(*bv & mask);
 }
 if (*bv & mask)
  return 0;
 ++bv;
 l -= (BITS(T) - o);

 o   = l % BITS(T);
 end = bv + l / BITS(T);
 for (; bv < end; ++bv) {
  if (*bv)
   return 0;
 }
 if (o) {
  mask = BV_MASK_LOWER(T, o);
  if (*bv & mask)
   return 0;
 }

 return 1;
}
#endif /* INLINE_DEFINE */
#undef T
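
/* Usage sketch: */
#if 0
unsigned char v[2] = { 0x01, 0x80 }; /* bits 0 and 15 set */
/* bv_zero(v, 1, 14) != 0 : bits 1..14 are all clear */
/* bv_zero(v, 1, 15) == 0 : bit 15 is set            */
#endif
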
/* ... Compare ............................................................. */

#define BV_EQ(T, B1, B2) \
 if (((T) (B1)) != ((T) (B2))) return 0;

#define BV_EQ_MASK(T, B1, B2, M) BV_EQ(T, (B1) & (M), (B2) & (M))

#define BV_EQ_LEFT(T, B1, B2, L, B)      \
 offset = (L) % BITS(T);                 \
 end = (B1) + (L) / BITS(T);             \
 while ((B1) < end) {                    \
  BV_EQ(T, *(B1), (*(B2) << (B)) | ins); \
  ins = *(B2) >> (BITS(T) - (B));        \
  ++(B1);                                \
  ++(B2);                                \
 }                                       \
 if (offset > (B)) {                     \
  mask = BV_MASK_LOWER(T, offset - (B)); \
  BV_EQ(T, *(B1) & ~(~mask << (B)),      \
        ((*(B2) & mask) << (B)) | ins);  \
 } else if (offset) {                    \
  mask = BV_MASK_LOWER(T, offset);       \
  BV_EQ_MASK(T, *(B1), ins, mask);       \
 }

#define BV_EQ_RIGHT(T, B1, B2, L, B)                  \
 offset = (L) % BITS(T);                              \
 end = (B2) + (L) / BITS(T) + (offset >= (B));        \
 while ((B2) < end) {                                 \
  BV_EQ(T, *(B1), (*(B2) << (BITS(T) - (B))) | ins);  \
  ins = *(B2) >> (B);                                 \
  ++(B1);                                             \
  ++(B2);                                             \
 }                                                    \
 if (offset >= (B)) {                                 \
  mask = BV_MASK_LOWER(T, offset - (B));              \
  BV_EQ_MASK(T, *(B1), ins, mask);                    \
 } else if (offset) {                                 \
  mask = BV_MASK_LOWER(T, offset);                    \
  BV_EQ(T, *(B1) & ~(~mask << (BITS(T) - (B))),       \
        ((*(B2) & mask) << (BITS(T) - (B))) | ins);   \
 }

#define T BV_UNIT
INLINE_DECLARE(int bv_eq(const void *bv1_, size_t s1, const void *bv2_, size_t s2, size_t l))
#ifdef INLINE_DEFINE
{
 size_t offset, step;
 T ins, mask;
 const T *bv1 = (const T *) bv1_, *bv2 = (const T *) bv2_, *end;

 if (!l)
  return 1;

 bv1 += s1 / BITS(T);
 s1  %= BITS(T);
 bv2 += s2 / BITS(T);
 s2  %= BITS(T);
 if (s1 == s2) {

  mask = BV_MASK_HIGHER(T, BITS(T) - s2);
  if (s2 + l <= BITS(T)) {
   if (s2 + l < BITS(T)) {
    mask &= BV_MASK_LOWER(T, s2 + l);
   }
   BV_EQ_MASK(T, *bv1, *bv2, mask);
  } else {
   size_t lo, lk;
   int ret;
   BV_EQ_MASK(T, *bv1, *bv2, mask);
   ++bv1;
   ++bv2;
   l -= (BITS(T) - s2);
   lk = l / BITS(T);
   lo = l % BITS(T);
   if ((ret = memcmp(bv1, bv2, lk * sizeof(T))) != 0)
    return 0;
   if (lo) {
    mask = BV_MASK_LOWER(T, lo);
    BV_EQ_MASK(T, bv1[lk], bv2[lk], mask);
   }
  }

 } else if (s1 < s2) {
  step = s2 - s1;

  mask = BV_MASK_HIGHER(T, BITS(T) - s2);
  if (s2 + l <= BITS(T)) {
   if (s2 + l < BITS(T))
    mask &= BV_MASK_LOWER(T, s2 + l);
   BV_EQ(T, *bv1 & (mask >> step), (*bv2 & mask) >> step);
  } else {
   l -= (BITS(T) - s2);
   ins = ((*bv2 & mask) >> step) | (*bv1 & BV_MASK_LOWER(T, s1));
   ++bv2;
   offset = l % BITS(T);
   end = bv2 + l / BITS(T) + (offset >= step);
   while (bv2 < end) {
    BV_EQ(T, *bv1, (*bv2 << (BITS(T) - step)) | ins);
    ins = *bv2 >> step;
    ++bv1;
    ++bv2;
   }
   if (offset >= step) {
    mask = BV_MASK_LOWER(T, offset - step);
    BV_EQ_MASK(T, *bv1, ins, mask);
   } else if (offset) {
    mask = BV_MASK_LOWER(T, offset);
    BV_EQ(T, *bv1 & ~(~mask << (BITS(T) - step)),
          ((*bv2 & mask) << (BITS(T) - step)) | ins);
   }
   /* BV_EQ_RIGHT(T, bv1, bv2, l, step); */
  }

 } else { /* s1 > s2 */
  step = s1 - s2;

  mask = BV_MASK_HIGHER(T, BITS(T) - s1);
  if (s1 + l <= BITS(T)) {
   if (s1 + l < BITS(T))
    mask &= BV_MASK_LOWER(T, s1 + l);
   BV_EQ(T, *bv1 & mask, (*bv2 & (mask >> step)) << step);
  } else {
   size_t pets = BITS(T) - step;
   l -= (BITS(T) - s1);
   BV_EQ(T, *bv1 & mask, (*bv2 & (mask >> step)) << step);
   ++bv1;
   if (l <= step) {
    mask = BV_MASK_LOWER(T, l);
    BV_EQ(T, *bv1 & mask, (*bv2 & (mask << pets)) >> pets);
   } else {
    ins = (*bv2 & BV_MASK_HIGHER(T, step)) >> pets;
    ++bv2;
    offset = l % BITS(T);
    end = bv1 + l / BITS(T);
    while (bv1 < end) {
     BV_EQ(T, *bv1, (*bv2 << step) | ins);
     ins = *bv2 >> (BITS(T) - step);
     ++bv1;
     ++bv2;
    }
    if (offset > step) {
     mask = BV_MASK_LOWER(T, offset - step);
     BV_EQ(T, *bv1 & ~(~mask << step),
           ((*bv2 & mask) << step) | ins);
    } else if (offset) {
     mask = BV_MASK_LOWER(T, offset);
     BV_EQ_MASK(T, *bv1, ins, mask);
    }
    /* BV_EQ_LEFT(T, bv1, bv2, l, step); */
   }
  }
 }

 return 1;
}
#endif /* INLINE_DEFINE */
#undef T
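
/* Usage sketch: the two fields may start at different bit offsets: */
#if 0
unsigned char a[1] = { 0x5A }; /* 0101 1010 */
unsigned char b[1] = { 0xB4 }; /* 1011 0100 == a << 1 */
/* bv_eq(a, 1, b, 2, 6) != 0 : bits 1..6 of a match bits 2..7 of b */
#endif
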
/* ... Fill ................................................................ */

#define T unsigned char
INLINE_DECLARE(void bv_fill(void *bv_, size_t s, size_t l, unsigned int f))
#ifdef INLINE_DEFINE
{
 size_t o, k;
 T mask, *bv = (T *) bv_;

 if (f)
  f = ~0u; /* replicate the fill bit across a whole unit */

 bv += s / BITS(T);
 o   = s % BITS(T);

 mask = BV_MASK_HIGHER(T, BITS(T) - o);
 if (o + l <= BITS(T)) {
  if (o + l < BITS(T))
   mask &= BV_MASK_LOWER(T, o + l);
  *bv = (*bv & ~mask) | (f & mask);
 } else {
  *bv = (*bv & ~mask) | (f & mask);
  ++bv;
  l -= (BITS(T) - o);
  k  = l / BITS(T);
  o  = l % BITS(T);
  memset(bv, f, k);
  if (o) {
   mask = BV_MASK_LOWER(T, o);
   bv += k;
   *bv = (*bv & ~mask) | (f & mask);
  }
 }
}
#endif /* INLINE_DEFINE */
#undef T
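
/* Usage sketch: set a 7-bit run spanning a unit boundary: */
#if 0
unsigned char v[2] = { 0x00, 0x00 };
bv_fill(v, 3, 7, 1); /* set bits 3..9 */
/* v[0] == 0xF8, v[1] == 0x03 */
#endif
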
/* ========================================================================= */

#endif /* BITVECT_H */