/* === Bit vectors ========================================================= */

/* ... Generic macros ...................................................... */

#define INLINE_DECLARE(P) STATIC P
#define INLINE_DEFINE

#ifndef BV_UNIT
# define BV_UNIT unsigned char
#endif

#define BV_SIZE(I) ((((I) % CHAR_BIT) != 0) + ((I) / CHAR_BIT))

#define BITS(T) (CHAR_BIT * sizeof(T))

/* 0 <= I < CHAR_BIT * sizeof(T) */
#define BV_MASK_LOWER(T, I)  (~((~((T) 0)) << (I)))
/* 0 < I <= CHAR_BIT * sizeof(T) */
#define BV_MASK_HIGHER(T, I) ((~((T) 0)) << (BITS(T) - (I)))
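
/* A quick illustration of the helpers above (illustrative only, assuming
 * BV_UNIT is unsigned char and CHAR_BIT is 8): */
#if 0
unsigned char lo3  = BV_MASK_LOWER(unsigned char, 3);  /* 0x07 */
unsigned char hi3  = BV_MASK_HIGHER(unsigned char, 3); /* 0xE0 */
size_t        need = BV_SIZE(13);                      /* 2 chars hold 13 bits */
/* The splicing pattern used throughout this file: keep the destination
 * bits outside the mask, take the source bits inside it. */
unsigned char dst = 0xFF, src = 0x00;
dst = (dst & ~lo3) | (src & lo3);                      /* 0xF8 */
#endif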
#define BV_DO_ALIGNED_FORWARD(T, A)         \
 mask = BV_MASK_HIGHER(T, BITS(T) - fs);    \
 if (fs + l <= BITS(T)) {                   \
  /* The branch is apparently useless,      \
   * but we can't portably shift a unit     \
   * by its full width of BITS(T) bits:     \
   * that is undefined behavior, so only    \
   * narrow the mask when we must. */       \
  if (fs + l < BITS(T))                     \
   mask &= BV_MASK_LOWER(T, fs + l);        \
  *t = (*t & ~mask) | (*f & mask);          \
 } else {                                   \
  size_t lo, lk;                            \
  *t = (*t & ~mask) | (*f & mask);          \
  ++t;                                      \
  ++f;                                      \
  l -= (BITS(T) - ts);                      \
  lk = l / BITS(T);                         \
  lo = l % BITS(T);                         \
  BV_##A##_UNIT_ALIGNED(T, t, f, lk);       \
  t += lk;                                  \
  f += lk;                                  \
  if (lo) {                                 \
   mask = BV_MASK_LOWER(T, lo);             \
   *t = (*t & ~mask) | (*f & mask);         \
  }                                         \
 }
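
/* A worked example of the decomposition above (illustrative only, with
 * unsigned char units and CHAR_BIT == 8): copying l = 20 bits starting at
 * bit offset fs = ts = 3 splits into a masked head, a bulk body and a
 * masked tail. */
#if 0
size_t fs = 3, l = 20;
size_t head = 8 - fs;          /* 5 bits complete the first unit          */
size_t lk   = (l - head) / 8;  /* 1 whole unit, handled by the unit macro */
size_t lo   = (l - head) % 8;  /* 7 tail bits, masked read-modify-write   */
#endif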
#define BV_DO_ALIGNED_BACKWARD(T, A)         \
 if (fs + l <= BITS(T)) {                    \
  mask = BV_MASK_HIGHER(T, BITS(T) - fs);    \
  /* Same dodge as in the forward case:      \
   * shifting a unit by its full width of    \
   * BITS(T) bits is undefined behavior,     \
   * so only narrow the mask when needed. */ \
  if (fs + l < BITS(T))                      \
   mask &= BV_MASK_LOWER(T, fs + l);         \
  *t = (*t & ~mask) | (*f & mask);           \
 } else {                                    \
  size_t lo, lk;                             \
  l -= (BITS(T) - ts);                       \
  ++t;                                       \
  ++f;                                       \
  lk = l / BITS(T);                          \
  lo = l % BITS(T);                          \
  if (lo) {                                  \
   mask = BV_MASK_LOWER(T, lo);              \
   t[lk] = (t[lk] & ~mask) | (f[lk] & mask); \
  }                                          \
  BV_##A##_UNIT_ALIGNED(T, t, f, lk);        \
  mask = BV_MASK_HIGHER(T, BITS(T) - fs);    \
  t[-1] = (t[-1] & ~mask) | (f[-1] & mask);  \
 }
#define BV_DO_LEFT_FORWARD(T, A)                        \
 step = ts - fs;                                        \
 mask = BV_MASK_HIGHER(T, BITS(T) - ts);                \
 if (ts + l <= BITS(T)) {                               \
  if (ts + l < BITS(T))                                 \
   mask &= BV_MASK_LOWER(T, ts + l);                    \
  *t = (*t & ~mask) | ((*f & (mask >> step)) << step);  \
 } else {                                               \
  size_t pets = BITS(T) - step;                         \
  l -= (BITS(T) - ts);                                  \
  *t = (*t & ~mask) | ((*f & (mask >> step)) << step);  \
  ++t;                                                  \
  if (l < step) {                                       \
   mask = BV_MASK_LOWER(T, l);                          \
   *t = (*t & ~mask) | ((*f & (mask << pets)) >> pets); \
  } else {                                              \
   ins = (*f & BV_MASK_HIGHER(T, step)) >> pets;        \
   ++f;                                                 \
   offset = l % BITS(T);                                \
   end = t + l / BITS(T);                               \
   while (t < end) {                                    \
    BV_##A##_UNIT_LEFT_FORWARD(T, t, f, step);          \
    ++t; ++f;                                           \
   }                                                    \
   if (offset > step) {                                 \
    mask = BV_MASK_LOWER(T, offset - step);             \
    *t = (*t & (~mask << step))                         \
       | ((*f & mask) << step)                          \
       | ins;                                           \
   } else if (offset) {                                 \
    mask = BV_MASK_LOWER(T, offset);                    \
    *t = (*t & ~mask) | (ins & mask);                   \
   }                                                    \
  }                                                     \
 }
#define BV_DO_RIGHT_FORWARD(T, A)                            \
 step = fs - ts;                                             \
 mask = BV_MASK_HIGHER(T, BITS(T) - fs);                     \
 if (fs + l <= BITS(T)) {                                    \
  if (fs + l < BITS(T))                                      \
   mask &= BV_MASK_LOWER(T, fs + l);                         \
  *t = (*t & ~(mask >> step)) | ((*f & mask) >> step);       \
 } else {                                                    \
  l -= (BITS(T) - fs);                                       \
  ins = ((*f & mask) >> step) | (*t & BV_MASK_LOWER(T, ts)); \
  ++f;                                                       \
  offset = l % BITS(T);                                      \
  end = f + l / BITS(T) + (offset > step);                   \
  while (f < end) {                                          \
   BV_##A##_UNIT_RIGHT_FORWARD(T, t, f, step);               \
   ++t;                                                      \
   ++f;                                                      \
  }                                                          \
  if (offset > step) {                                       \
   mask = BV_MASK_LOWER(T, offset - step);                   \
   *t = (*t & ~mask) | (ins & mask);                         \
  } else if (offset) {                                       \
   mask = BV_MASK_LOWER(T, offset);                          \
   *t = (*t & (~mask << (BITS(T) - step)))                   \
      | ((*f & mask) << (BITS(T) - step))                    \
      | ins;                                                 \
  }                                                          \
 }
#define BV_DO_LEFT_BACKWARD(T, A)                       \
 step = ts - fs;                                        \
 mask = BV_MASK_LOWER(T, BITS(T) - ts);                 \
 if (ts + l <= BITS(T)) {                               \
  if (ts + l < BITS(T))                                 \
   mask &= BV_MASK_HIGHER(T, ts + l);                   \
  *t = (*t & ~mask) | ((*f & (mask << step)) >> step);  \
 } else {                                               \
  size_t pets = BITS(T) - step;                         \
  l -= (BITS(T) - ts);                                  \
  *t = (*t & ~mask) | ((*f & (mask << step)) >> step);  \
  --t;                                                  \
  if (l < step) {                                       \
   mask = BV_MASK_HIGHER(T, l);                         \
   *t = (*t & ~mask) | ((*f & (mask >> pets)) << pets); \
  } else {                                              \
   ins = (*f & BV_MASK_LOWER(T, step)) << pets;         \
   --f;                                                 \
   offset = l % BITS(T);                                \
   begin = t - l / BITS(T);                             \
   while (t > begin) {                                  \
    BV_##A##_UNIT_LEFT_BACKWARD(T, t, f, step);         \
    --t; --f;                                           \
   }                                                    \
   if (offset > step) {                                 \
    mask = BV_MASK_HIGHER(T, offset - step);            \
    *t = (*t & (~mask >> step))                         \
       | ((*f & mask) >> step)                          \
       | ins;                                           \
   } else if (offset) {                                 \
    mask = BV_MASK_HIGHER(T, offset);                   \
    *t = (*t & ~mask) | (ins & mask);                   \
   }                                                    \
  }                                                     \
 }
#define BV_DO_RIGHT_BACKWARD(T, A)                     \
 step = fs - ts;                                       \
 mask = BV_MASK_LOWER(T, BITS(T) - fs);                \
 if (fs + l <= BITS(T)) {                              \
  if (fs + l < BITS(T))                                \
   mask &= BV_MASK_HIGHER(T, fs + l);                  \
  *t = (*t & ~(mask << step)) | ((*f & mask) << step); \
 } else {                                              \
  l -= (BITS(T) - fs);                                 \
  ins = ((*f & mask) << step);                         \
  if (ts)                                              \
   ins |= (*t & BV_MASK_HIGHER(T, ts));                \
  --f;                                                 \
  offset = l % BITS(T);                                \
  begin = f - l / BITS(T) + (offset <= step);          \
  while (f >= begin) {                                 \
   BV_##A##_UNIT_RIGHT_BACKWARD(T, t, f, step);        \
   --t;                                                \
   --f;                                                \
  }                                                    \
  if (offset > step) {                                 \
   mask = BV_MASK_HIGHER(T, offset - step);            \
   *t = (*t & ~mask) | (ins & mask);                   \
  } else if (offset) {                                 \
   mask = BV_MASK_HIGHER(T, offset);                   \
   *t = (*t & (~mask >> (BITS(T) - step)))             \
      | ((*f & mask) >> (BITS(T) - step))              \
      | ins;                                           \
  }                                                    \
 }
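
/* The shifted variants come in forward/backward pairs for the same reason
 * memmove can walk in either direction: with overlapping source and
 * destination inside one vector, a move towards higher addresses must run
 * from the top down.  A minimal sketch of the hazard (illustrative only): */
#if 0
unsigned char v[3] = { 1, 2, 3 };
/* Shifting everything up one unit with a forward walk loses data: */
v[1] = v[0];
v[2] = v[1]; /* writes 1, the old v[1] is already gone */
/* The backward walk (v[2] = v[1]; v[1] = v[0];) is the safe order. */
#endif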
/* ... Copy ................................................................ */

#define BV_COPY_UNIT_ALIGNED(X, T, F, L) memcpy((T), (F), (L) * sizeof(X))

/* Shift B bits left: the previous carry fills the B low bits, and the B
 * high bits of F become the next carry */
#define BV_COPY_UNIT_LEFT_FORWARD(X, T, F, B) \
 *(T) = (*(F) << (B)) | ins;                  \
 ins  = *(F) >> (BITS(X) - (B));

/* Shift B bits right: the low B bits of F land at the top, the previous
 * carry fills the rest, and the high bits of F become the next carry */
#define BV_COPY_UNIT_RIGHT_FORWARD(X, T, F, B) \
 *(T) = (*(F) << (BITS(X) - (B))) | ins;       \
 ins  = *(F) >> (B);
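
/* One left-forward step traced by hand (illustrative only, unsigned char
 * units, B == 3): each source unit is shifted up, the carry `ins` fills
 * the low bits, and the bits shifted out become the next carry. */
#if 0
unsigned char ins = 0;
unsigned char t[2];
const unsigned char f[2] = { 0xAB, 0xCD };
t[0] = (unsigned char) ((f[0] << 3) | ins); /* 0x58 */
ins  = (unsigned char) (f[0] >> (8 - 3));   /* 0x05, carried high bits */
t[1] = (unsigned char) ((f[1] << 3) | ins); /* 0x6D */
#endif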
#define T BV_UNIT
INLINE_DECLARE(void bv_copy(void *t_, size_t ts, const void *f_, size_t fs, size_t l))
#ifdef INLINE_DEFINE
{
 size_t offset, step;
 T ins, mask, *t = (T *) t_;
 const T *f = (const T *) f_, *end;

 t += ts / BITS(T); ts %= BITS(T);
 f += fs / BITS(T); fs %= BITS(T);

 if (ts == fs) {
  BV_DO_ALIGNED_FORWARD(T, COPY);
 } else if (ts < fs) {
  BV_DO_RIGHT_FORWARD(T, COPY);
 } else { /* ts > fs */
  BV_DO_LEFT_FORWARD(T, COPY);
 }
}

#endif /* INLINE_DEFINE */
#undef T
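
/* A usage sketch, assuming the functions are compiled into this
 * translation unit (INLINE_DEFINE) with a suitable STATIC (illustrative
 * only): */
#if 0
unsigned char src[2] = { 0xF8, 0x0F }; /* bits 3..11 set */
unsigned char dst[3] = { 0, 0, 0 };
bv_copy(dst, 6, src, 3, 9);            /* 9 bits: src bit 3 -> dst bit 6 */
/* dst now has bits 6..14 set: dst[0] == 0xC0, dst[1] == 0x7F, dst[2] == 0 */
#endif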
/* ... Move ................................................................ */

#define BV_MOVE_UNIT_ALIGNED(X, T, F, L) memmove((T), (F), (L) * sizeof(X))

/* Unlike the copy variants, T and F may alias here, so the carry-out of
 * each unit is read into tmp before *T is written. */

#define BV_MOVE_UNIT_LEFT_FORWARD(X, T, F, B) \
 tmp  = *(F) >> (BITS(X) - (B));              \
 *(T) = (*(F) << (B)) | ins;                  \
 ins  = tmp;

#define BV_MOVE_UNIT_RIGHT_FORWARD(X, T, F, B) \
 tmp  = *(F) >> (B);                           \
 *(T) = (*(F) << (BITS(X) - (B))) | ins;       \
 ins  = tmp;

#define BV_MOVE_UNIT_LEFT_BACKWARD(X, T, F, B) \
 tmp  = *(F) << (BITS(X) - (B));               \
 *(T) = (*(F) >> (B)) | ins;                   \
 ins  = tmp;

#define BV_MOVE_UNIT_RIGHT_BACKWARD(X, T, F, B) \
 tmp  = *(F) << (B);                            \
 *(T) = (*(F) >> (BITS(X) - (B))) | ins;        \
 ins  = tmp;
#define BV_MOVE_INIT_REVERSE(T, V, VS)  \
 z    = (VS) + l;                       \
 (VS) = z % BITS(T);                    \
 if (VS) {                              \
  (V)  = bv + (z / BITS(T));            \
  (VS) = BITS(T) - (VS);                \
 } else {                               \
  /* z >= BITS(T) because l > 0 */      \
  (V) = bv + (z / BITS(T)) - 1;         \
 }
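
/* Worked example (illustrative only, unsigned char units): for a span of
 * l = 10 bits starting at bit offset VS = 3, the one-past-the-end bit is
 * z = 13, so the last touched unit is bv + 1 and VS becomes
 * 8 - (13 % 8) = 3, the number of untouched high bits in that unit.
 * When z lands exactly on a unit boundary (VS = 3, l = 13, z = 16), the
 * else branch points V at bv + 1 and VS stays 0. */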
#define T BV_UNIT
INLINE_DECLARE(void bv_move(void *bv_, size_t ts, size_t fs, size_t l))
#ifdef INLINE_DEFINE
{
 size_t to, fo, offset, step, z;
 T ins, tmp, mask, *bv = (T *) bv_, *t, *f;
 const T *begin, *end;

 if (ts == fs)
  return;

 to = ts % BITS(T);
 fo = fs % BITS(T);

 if (ts < fs) {
  t  = bv + ts / BITS(T);
  ts = to;
  f  = bv + fs / BITS(T);
  fs = fo;
  if (ts == fs) {
   BV_DO_ALIGNED_FORWARD(T, MOVE);
  } else if (ts < fs) {
   BV_DO_RIGHT_FORWARD(T, MOVE);
  } else { /* ts > fs */
   BV_DO_LEFT_FORWARD(T, MOVE);
  }
 } else if (to == fo) {
  t  = bv + ts / BITS(T);
  ts = to;
  f  = bv + fs / BITS(T);
  fs = fo;
  BV_DO_ALIGNED_BACKWARD(T, MOVE);
 } else { /* ts > fs */
  BV_MOVE_INIT_REVERSE(T, t, ts);
  BV_MOVE_INIT_REVERSE(T, f, fs);
  if (ts < fs) {
   BV_DO_RIGHT_BACKWARD(T, MOVE);
  } else { /* ts > fs */
   BV_DO_LEFT_BACKWARD(T, MOVE);
  }
 }
}

#endif /* INLINE_DEFINE */
#undef T
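
/* A usage sketch for the overlapping case bv_move exists for
 * (illustrative only): */
#if 0
unsigned char v[2] = { 0xFF, 0x00 };
bv_move(v, 2, 0, 8);  /* slide bits 0..7 up to bits 2..9, in place */
/* Bits 2..9 now hold the old bits 0..7: v[0] == 0xFF, v[1] == 0x03
 * (bits 0..1 keep their previous values, here still set). */
#endif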
/* ... Compare ............................................................. */

#define BV_EQ(T, B1, B2) \
 if (((T) (B1)) != ((T) (B2))) return 0;

#define BV_EQ_MASK(T, B1, B2, M) BV_EQ(T, (B1) & (M), (B2) & (M))

#define BV_EQ_LEFT(T, B1, B2, L, B)        \
 offset = (L) % BITS(T);                   \
 end    = (B1) + (L) / BITS(T);            \
 while ((B1) < end) {                      \
  BV_EQ(T, *(B1), (*(B2) << (B)) | ins);   \
  ins = *(B2) >> (BITS(T) - (B));          \
  ++(B1); ++(B2);                          \
 }                                         \
 if (offset > (B)) {                       \
  mask = BV_MASK_LOWER(T, offset - (B));   \
  BV_EQ(T, *(B1) & ~(~mask << (B)),        \
           ((*(B2) & mask) << (B)) | ins); \
 } else if (offset) {                      \
  mask = BV_MASK_LOWER(T, offset);         \
  BV_EQ_MASK(T, *(B1), ins, mask);         \
 }

#define BV_EQ_RIGHT(T, B1, B2, L, B)                     \
 offset = (L) % BITS(T);                                 \
 end    = (B2) + (L) / BITS(T) + (offset >= (B));        \
 while ((B2) < end) {                                    \
  BV_EQ(T, *(B1), (*(B2) << (BITS(T) - (B))) | ins);     \
  ins = *(B2) >> (B);                                    \
  ++(B1); ++(B2);                                        \
 }                                                       \
 if (offset >= (B)) {                                    \
  mask = BV_MASK_LOWER(T, offset - (B));                 \
  BV_EQ_MASK(T, *(B1), ins, mask);                       \
 } else if (offset) {                                    \
  mask = BV_MASK_LOWER(T, offset);                       \
  BV_EQ(T, *(B1) & ~(~mask << (BITS(T) - (B))),          \
           ((*(B2) & mask) << (BITS(T) - (B))) | ins);   \
 }
#define T BV_UNIT
INLINE_DECLARE(int bv_eq(const void *bv1_, size_t s1, const void *bv2_, size_t s2, size_t l))
#ifdef INLINE_DEFINE
{
 size_t offset, step;
 T ins, mask;
 const T *bv1 = (const T *) bv1_, *bv2 = (const T *) bv2_, *end;

 bv1 += s1 / BITS(T); s1 %= BITS(T);
 bv2 += s2 / BITS(T); s2 %= BITS(T);

 if (s1 == s2) {
  size_t lo, lk;
  mask = BV_MASK_HIGHER(T, BITS(T) - s2);
  if (s2 + l <= BITS(T)) {
   if (s2 + l < BITS(T)) {
    mask &= BV_MASK_LOWER(T, s2 + l);
   }
   BV_EQ_MASK(T, *bv1, *bv2, mask);
  } else {
   int ret;
   BV_EQ_MASK(T, *bv1, *bv2, mask);
   ++bv1; ++bv2;
   l -= (BITS(T) - s2);
   lk = l / BITS(T);
   lo = l % BITS(T);
   if ((ret = memcmp(bv1, bv2, lk * sizeof(T))) != 0)
    return 0;
   if (lo) {
    bv1 += lk; bv2 += lk;
    mask = BV_MASK_LOWER(T, lo);
    BV_EQ_MASK(T, *bv1, *bv2, mask);
   }
  }
 } else if (s1 < s2) {
  step = s2 - s1;
  mask = BV_MASK_HIGHER(T, BITS(T) - s2);
  if (s2 + l <= BITS(T)) {
   if (s2 + l < BITS(T))
    mask &= BV_MASK_LOWER(T, s2 + l);
   BV_EQ(T, *bv1 & (mask >> step), (*bv2 & mask) >> step);
  } else {
   l  -= (BITS(T) - s2);
   ins = ((*bv2 & mask) >> step) | (*bv1 & BV_MASK_LOWER(T, s1));
   ++bv2;
   offset = l % BITS(T);
   end    = bv2 + l / BITS(T) + (offset >= step);
   while (bv2 < end) {
    BV_EQ(T, *bv1, (*bv2 << (BITS(T) - step)) | ins);
    ins = *bv2 >> step;
    ++bv1; ++bv2;
   }
   if (offset >= step) {
    mask = BV_MASK_LOWER(T, offset - step);
    BV_EQ_MASK(T, *bv1, ins, mask);
   } else if (offset) {
    mask = BV_MASK_LOWER(T, offset);
    BV_EQ(T, *bv1 & ~(~mask << (BITS(T) - step)),
             ((*bv2 & mask) << (BITS(T) - step)) | ins);
   }
   /* BV_EQ_RIGHT(T, bv1, bv2, l, step); */
  }
 } else { /* s1 > s2 */
  step = s1 - s2;
  mask = BV_MASK_HIGHER(T, BITS(T) - s1);
  if (s1 + l <= BITS(T)) {
   if (s1 + l < BITS(T))
    mask &= BV_MASK_LOWER(T, s1 + l);
   BV_EQ(T, *bv1 & mask, (*bv2 & (mask >> step)) << step);
  } else {
   size_t pets = BITS(T) - step;
   l -= (BITS(T) - s1);
   BV_EQ(T, *bv1 & mask, (*bv2 & (mask >> step)) << step);
   ++bv1;
   if (l < step) {
    mask = BV_MASK_LOWER(T, l);
    BV_EQ(T, *bv1 & mask, (*bv2 & (mask << pets)) >> pets);
   } else {
    ins = (*bv2 & BV_MASK_HIGHER(T, step)) >> pets;
    ++bv2;
    offset = l % BITS(T);
    end    = bv1 + l / BITS(T);
    while (bv1 < end) {
     BV_EQ(T, *bv1, (*bv2 << step) | ins);
     ins = *bv2 >> (BITS(T) - step);
     ++bv1; ++bv2;
    }
    if (offset > step) {
     mask = BV_MASK_LOWER(T, offset - step);
     BV_EQ(T, *bv1 & ~(~mask << step),
              ((*bv2 & mask) << step) | ins);
    } else if (offset) {
     mask = BV_MASK_LOWER(T, offset);
     BV_EQ_MASK(T, *bv1, ins, mask);
    }
   }
   /* BV_EQ_LEFT(T, bv1, bv2, l, step); */
  }
 }

 return 1;
}

#endif /* INLINE_DEFINE */
#undef T
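
/* A usage sketch (illustrative only): comparing runs that live at
 * different offsets in different vectors. */
#if 0
unsigned char a[1] = { 0x3E }; /* bits 1..5 set */
unsigned char b[1] = { 0x7C }; /* bits 2..6 set */
int same = bv_eq(a, 1, b, 2, 5); /* 5 bits from a:1 vs b:2 -> 1 (equal) */
#endif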
/* ... Fill ................................................................ */

#define T unsigned char
INLINE_DECLARE(void bv_fill(void *bv_, size_t s, size_t l, unsigned int f))
#ifdef INLINE_DEFINE
{
 size_t o, k;
 T mask, *bv = (T *) bv_;

 if (f)
  f = ~0u;

 bv += s / BITS(T);
 o   = s % BITS(T);

 mask = BV_MASK_HIGHER(T, BITS(T) - o);
 if (o + l <= BITS(T)) {
  if (o + l < BITS(T))
   mask &= BV_MASK_LOWER(T, o + l);
  *bv = (*bv & ~mask) | (f & mask);
 } else {
  *bv = (*bv & ~mask) | (f & mask);
  ++bv;
  l -= (BITS(T) - o);
  k  = l / BITS(T);
  o  = l % BITS(T);
  memset(bv, f, k);
  if (o) {
   bv  += k;
   mask = BV_MASK_LOWER(T, o);
   *bv  = (*bv & ~mask) | (f & mask);
  }
 }
}

#endif /* INLINE_DEFINE */
#undef T
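
/* A usage sketch (illustrative only, assuming f is widened to all-ones as
 * above): any nonzero f sets the selected bits, zero clears them. */
#if 0
unsigned char v[2] = { 0, 0 };
bv_fill(v, 5, 10, 1);  /* set bits 5..14 */
/* v[0] == 0xE0 (bits 5..7), v[1] == 0x7F (bits 8..14) */
#endif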
/* ========================================================================= */

#endif /* BITVECT_H */