/* NOTE(review): every line of this copy carries a baked-in numeric prefix and
 * that numbering jumps (4, 6, 8, 12, ...) — this is a partial listing with
 * many original lines missing.  Comments below document only what is
 * visible; restore the complete file from upstream before compiling. */
4 /* === Bit vectors ========================================================= */
6 /* ... Generic macros ...................................................... */
/* Declaration wrapper; STATIC is presumably defined in a missing part of
 * this header (e.g. as "static" or empty) — TODO confirm. */
8 #define INLINE_DECLARE(P) STATIC P
/* Elementary storage unit of a bit vector (this arm of a hidden #if). */
12 # define BV_UNIT unsigned char
/* Number of char-sized units needed to hold I bits, rounding up. */
15 #define BV_SIZE(I) ((((I) % CHAR_BIT) != 0) + ((I) / CHAR_BIT))
/* Width of type T in bits. */
17 #define BITS(T) (CHAR_BIT * sizeof(T))
/* Mask selecting the I low-order bits of a T (all-zero when I == 0). */
19 /* 0 <= I < CHAR_BIT * sizeof(T) */
20 #define BV_MASK_LOWER(T, I) (~((~((T) 0)) << (I)))
/* Mask selecting the I high-order bits of a T (all-ones when I == BITS(T)). */
21 /* 0 < I <= CHAR_BIT * sizeof(T) */
22 #define BV_MASK_HIGHER(T, I) ((~((T) 0)) << (BITS(T) - (I)))
/* Transfer l bits when source and target share the same in-unit bit offset
 * (ts == fs): mask the partial head unit, bulk-transfer lk whole units via
 * BV_<A>_UNIT_ALIGNED (A is COPY or MOVE), then mask the lo-bit tail unit.
 * Uses mask/l/lk/lo/t/f from the expanding function's scope.
 * NOTE(review): original lines 34-35, 37-38, 40-41 and 43 are absent from
 * this listing — the else branch and the unit loop are incomplete here. */
24 #define BV_DO_ALIGNED(T, A) \
25 mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
26 if (fs + l <= BITS(T)) { \
27 /* Branching is apparently useless, \
28 * but since we can't portably shift \
29 * CHAR_BIT bits from a char... \
30 * Actually, we only copy up to this */ \
31 if (fs + l < BITS(T)) \
32 mask &= BV_MASK_LOWER(T, fs + l); \
33 *t = (*t & ~mask) | (*f & mask); \
36 *t = (*t & ~mask) | (*f & mask); \
39 l -= (BITS(T) - ts); \
42 BV_##A##_UNIT_ALIGNED(T, t, f, lk); \
44 mask = BV_MASK_LOWER(T, lo); \
45 t[lk] = (t[lk] & ~mask) \
/* Forward (low-to-high address) transfer when the target in-unit offset
 * exceeds the source offset — step is presumably ts - fs (set in missing
 * context; TODO confirm).  Source bits shift LEFT by step into each target
 * unit; `ins` carries the bits spilled into the next unit; pets is the
 * complementary shift BITS(T) - step.  A is COPY or MOVE.
 * NOTE(review): original lines 51, 57, 61-62, 65, 67, 70, 72-73, 78 and
 * 82-85 are absent — loop/brace structure is incomplete in this listing. */
50 #define BV_DO_LEFT_FORWARD(T, A) \
52 mask = BV_MASK_HIGHER(T, BITS(T) - ts); \
53 if (ts + l <= BITS(T)) { \
54 if (ts + l < BITS(T)) \
55 mask &= BV_MASK_LOWER(T, ts + l); \
56 *t = (*t & ~mask) | ((*f & (mask >> step)) << step); \
58 size_t pets = BITS(T) - step; \
59 l -= (BITS(T) - ts); \
60 *t = (*t & ~mask) | ((*f & (mask >> step)) << step); \
63 mask = BV_MASK_LOWER(T, l); \
64 *t = (*t & ~mask) | ((*f & (mask << pets)) >> pets); \
66 ins = (*f & BV_MASK_HIGHER(T, step)) >> pets; \
68 offset = l % BITS(T); \
69 end = t + l / BITS(T); \
71 BV_##A##_UNIT_LEFT_FORWARD(T, t, f, step); \
74 if (offset > step) { \
75 mask = BV_MASK_LOWER(T, offset - step); \
76 *t = (*t & (~mask << step)) \
77 | ((*f & mask) << step) \
79 } else if (offset) { \
80 mask = BV_MASK_LOWER(T, offset); \
81 *t = (*t & ~mask) | (ins & mask); \
/* Forward transfer when the target in-unit offset is smaller than the
 * source offset — step is presumably fs - ts (TODO confirm against missing
 * context).  Source bits shift RIGHT by step; `ins` accumulates the bits
 * destined for the current target unit across source-unit boundaries.
 * A is COPY or MOVE.
 * NOTE(review): original lines 87, 93, 96, 99, 101-104, 108 and 112-115
 * are absent — loop/brace structure is incomplete in this listing. */
86 #define BV_DO_RIGHT_FORWARD(T, A) \
88 mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
89 if (fs + l <= BITS(T)) { \
90 if (fs + l < BITS(T)) \
91 mask &= BV_MASK_LOWER(T, fs + l); \
92 *t = (*t & ~(mask >> step)) | ((*f & mask) >> step); \
94 l -= (BITS(T) - fs); \
95 ins = ((*f & mask) >> step) | (*t & BV_MASK_LOWER(T, ts)); \
97 offset = l % BITS(T); \
98 end = f + l / BITS(T) + (offset > step); \
100 BV_##A##_UNIT_RIGHT_FORWARD(T, t, f, step); \
105 if (offset > step) { \
106 mask = BV_MASK_LOWER(T, offset - step); \
107 *t = (*t & ~mask) | (ins & mask); \
109 mask = BV_MASK_LOWER(T, offset); \
110 *t = (*t & (~mask << (BITS(T) - step))) \
111 | ((*f & mask) << (BITS(T) - step)) \
/* Backward (high-to-low address) mirror of BV_DO_LEFT_FORWARD, used for
 * overlapping moves that must walk from the end: masks are mirrored
 * (LOWER/HIGHER swapped), pointers decrement (`begin` bounds the walk),
 * and bits shift RIGHT by step within each unit.  A is COPY or MOVE.
 * NOTE(review): original lines 117, 123, 127-128, 131, 133, 138-139, 144
 * and 148-151 are absent — structure is incomplete in this listing. */
116 #define BV_DO_LEFT_BACKWARD(T, A) \
118 mask = BV_MASK_LOWER(T, BITS(T) - ts); \
119 if (ts + l <= BITS(T)) { \
120 if (ts + l < BITS(T)) \
121 mask &= BV_MASK_HIGHER(T, ts + l); \
122 *t = (*t & ~mask) | ((*f & (mask << step)) >> step); \
124 size_t pets = BITS(T) - step; \
125 l -= (BITS(T) - ts); \
126 *t = (*t & ~mask) | ((*f & (mask << step)) >> step); \
129 mask = BV_MASK_HIGHER(T, l); \
130 *t = (*t & ~mask) | ((*f & (mask >> pets)) << pets); \
132 ins = (*f & BV_MASK_LOWER(T, step)) << pets; \
134 offset = l % BITS(T); \
135 begin = t - l / BITS(T); \
136 while (t > begin) { \
137 BV_##A##_UNIT_LEFT_BACKWARD(T, t, f, step); \
140 if (offset > step) { \
141 mask = BV_MASK_HIGHER(T, offset - step); \
142 *t = (*t & (~mask >> step)) \
143 | ((*f & mask) >> step) \
145 } else if (offset) { \
146 mask = BV_MASK_HIGHER(T, offset); \
147 *t = (*t & ~mask) | (ins & mask); \
/* Backward (high-to-low address) mirror of BV_DO_RIGHT_FORWARD: masks
 * mirrored, pointers decrement toward `begin`, bits shift LEFT by step
 * within each unit.  A is COPY or MOVE.
 * NOTE(review): original lines 153, 159, 162, 167-170, 174 and 178-181
 * are absent — loop/brace structure is incomplete in this listing. */
152 #define BV_DO_RIGHT_BACKWARD(T, A) \
154 mask = BV_MASK_LOWER(T, BITS(T) - fs); \
155 if (fs + l <= BITS(T)) { \
156 if (fs + l < BITS(T)) \
157 mask &= BV_MASK_HIGHER(T, fs + l); \
158 *t = (*t & ~(mask << step)) | ((*f & mask) << step); \
160 l -= (BITS(T) - fs); \
161 ins = ((*f & mask) << step) | (*t & BV_MASK_HIGHER(T, ts)); \
163 offset = l % BITS(T); \
164 begin = f - l / BITS(T) - (offset > step); \
165 while (f > begin) { \
166 BV_##A##_UNIT_RIGHT_BACKWARD(T, t, f, step); \
171 if (offset > step) { \
172 mask = BV_MASK_HIGHER(T, offset - step); \
173 *t = (*t & ~mask) | (ins & mask); \
175 mask = BV_MASK_HIGHER(T, offset); \
176 *t = (*t & (~mask >> (BITS(T) - step))) \
177 | ((*f & mask) >> (BITS(T) - step)) \
182 /* ... Copy ................................................................. */
/* Unit-level operations plugged into BV_DO_* when A == COPY.  Regions must
 * not overlap (memcpy).  `ins` carries the bits spilled from one source
 * unit into the next target unit.
 * NOTE(review): the second statement of BV_COPY_UNIT_RIGHT_FORWARD
 * (original line 194, presumably "ins = *(F) >> (B);") is missing from
 * this listing — the macro is truncated here. */
184 #define BV_COPY_UNIT_ALIGNED(X, T, F, L) memcpy((T), (F), (L) * sizeof(X))
186 /* Save the O - B higher bits, shift B bits left, add B bits from f at right */
187 #define BV_COPY_UNIT_LEFT_FORWARD(X, T, F, B) \
188 *(T) = (*(F) << (B)) | ins; \
189 ins = *(F) >> (BITS(X) - (B));
191 /* Save the B lower bits, shift B bits right, add B bits from F at left */
192 #define BV_COPY_UNIT_RIGHT_FORWARD(X, T, F, B) \
193 *(T) = (*(F) << (BITS(X) - (B))) | ins; \
/* bv_copy(): copy l bits from bit offset fs of f_ to bit offset ts of t_.
 * The two vectors must not overlap (uses the COPY unit ops / memcpy).
 * Dispatches on the relative in-unit offsets: aligned, shift-right, or
 * shift-left.
 * NOTE(review): the function prologue (original lines 198-213 — opening
 * brace, offset normalization, early-exit) and epilogue (219-222) are
 * missing from this listing. */
197 INLINE_DECLARE(void bv_copy(void *t_, size_t ts, const void *f_, size_t fs, size_t l))
201 T ins, mask, *t = (T *) t_;
202 const T *f = (const T *) f_, *end;
/* Same in-unit offset on both sides. */
214 BV_DO_ALIGNED(T, COPY);
215 } else if (ts < fs) {
/* Target offset smaller: bits move right within each unit. */
216 BV_DO_RIGHT_FORWARD(T, COPY);
217 } else { /* ts > fs */
/* Target offset larger: bits move left within each unit. */
218 BV_DO_LEFT_FORWARD(T, COPY);
223 #endif /* INLINE_DEFINE */
226 /* ... Move ................................................................ */
/* Unit-level operations plugged into BV_DO_* when A == MOVE: like the COPY
 * ones but safe for overlap — memmove for the aligned bulk, and `tmp`
 * snapshots the spill bits BEFORE the destination unit is overwritten.
 * BV_MOVE_INIT_REVERSE computes the starting unit pointer V and reversed
 * in-unit offset VS for a backward walk from bit index z (z presumably set
 * from ts/fs + l in missing context — TODO confirm).
 * NOTE(review): the trailing spill lines of several macros (original lines
 * 233-234, 236, 238-239, 243-244, 246, 248-249) and the if/else scaffolding
 * of BV_MOVE_INIT_REVERSE (251, 253, 256, 259-261) are missing here. */
228 #define BV_MOVE_UNIT_ALIGNED(X, T, F, L) memmove((T), (F), (L) * sizeof(X))
230 #define BV_MOVE_UNIT_LEFT_FORWARD(X, T, F, B) \
231 tmp = *(F) >> (BITS(X) - (B)); \
232 *(T) = (*(F) << (B)) | ins; \
235 #define BV_MOVE_UNIT_RIGHT_FORWARD(X, T, F, B) \
237 *(T) = (*(F) << (BITS(X) - (B))) | ins; \
240 #define BV_MOVE_UNIT_LEFT_BACKWARD(X, T, F, B) \
241 tmp = *(F) << (BITS(X) - (B)); \
242 *(T) = (*(F) >> (B)) | ins; \
245 #define BV_MOVE_UNIT_RIGHT_BACKWARD(X, T, F, B) \
247 *(T) = (*(F) >> (BITS(X) - (B))) | ins; \
250 #define BV_MOVE_INIT_REVERSE(T, V, VS) \
252 (VS) = z % BITS(T); \
254 (V) = bv + (z / BITS(T)); \
255 (VS) = BITS(T) - (VS); \
257 /* z >= BITS(T) because l > 0 */ \
258 (V) = bv + (z / BITS(T)) - 1; \
/* bv_move(): move l bits from offset fs to offset ts within the single bit
 * vector bv_; source and target ranges may overlap.  Mirrors memmove:
 * forward (low-to-high) walk in one overlap direction, backward walk via
 * BV_MOVE_INIT_REVERSE in the other.
 * NOTE(review): many interior lines (original 263-264, 268-275, 277, 279,
 * 283, 285-286, 290, 292, 295, 299-303) are missing from this listing —
 * the visible conditionals are unbalanced as shown. */
262 INLINE_DECLARE(void bv_move(void *bv_, size_t ts, size_t fs, size_t l))
265 size_t to, fo, offset, step;
266 T ins, tmp, mask, *bv = (T *) bv_, *t, *f;
267 const T *begin, *end;
/* Forward cases: unit pointers start at the low end of each range. */
276 t = bv + ts / BITS(T);
278 f = bv + fs / BITS(T);
280 BV_DO_ALIGNED(T, MOVE);
281 } else if (ts < fs) {
282 t = bv + ts / BITS(T);
284 f = bv + fs / BITS(T);
287 BV_DO_RIGHT_FORWARD(T, MOVE);
288 } else { /* ts > fs */
289 BV_DO_LEFT_FORWARD(T, MOVE);
291 } else { /* ts > fs */
/* Backward cases: start from the last unit of each range. */
293 BV_MOVE_INIT_REVERSE(T, t, ts);
294 BV_MOVE_INIT_REVERSE(T, f, fs);
296 BV_DO_RIGHT_BACKWARD(T, MOVE);
297 } else { /* ts > fs */
298 BV_DO_LEFT_BACKWARD(T, MOVE);
304 #endif /* INLINE_DEFINE */
307 /* ... Compare ............................................................. */
/* Range-equality helpers for bv_eq().  BV_EQ expands to an early
 * "return 0" from the enclosing function on the first mismatching unit;
 * BV_EQ_MASK compares only the bits selected by M.  BV_EQ_LEFT walks B1
 * comparing against B2's bits shifted left by B; BV_EQ_RIGHT walks B2
 * comparing against bits shifted right — `ins` carries spill bits in both.
 * NOTE(review): loop-closing and brace lines of BV_EQ_LEFT/BV_EQ_RIGHT
 * (original 320-321, 329-330, 337-340, 344, 348-349) are missing here. */
309 #define BV_EQ(T, B1, B2) \
310 if (((T) (B1)) != ((T) (B2))) return 0;
/* Masked variant: compare only the bits selected by M. */
312 #define BV_EQ_MASK(T, B1, B2, M) BV_EQ(T, (B1) & (M), (B2) & (M))
314 #define BV_EQ_LEFT(T, B1, B2, L, B) \
315 offset = (L) % BITS(T); \
316 end = (B1) + (L) / BITS(T); \
317 while ((B1) < end) { \
318 BV_EQ(T, *(B1), (*(B2) << (B)) | ins); \
319 ins = *(B2) >> (BITS(T) - (B)); \
322 if (offset > (B)) { \
323 mask = BV_MASK_LOWER(T, offset - (B)); \
324 BV_EQ(T, *(B1) & ~(~mask << (B)), \
325 ((*(B2) & mask) << (B)) | ins); \
326 } else if (offset) { \
327 mask = BV_MASK_LOWER(T, offset); \
328 BV_EQ_MASK(T, *(B1), ins, mask); \
331 #define BV_EQ_RIGHT(T, B1, B2, L, B) \
332 offset = (L) % BITS(T); \
333 end = (B2) + (L) / BITS(T) + (offset >= (B)); \
334 while ((B2) < end) { \
335 BV_EQ(T, *(B1), (*(B2) << (BITS(T) - (B))) | ins); \
336 ins = *(B2) >> (B); \
341 if (offset >= (B)) { \
342 mask = BV_MASK_LOWER(T, offset - (B)); \
343 BV_EQ_MASK(T, *(B1), ins, mask); \
345 mask = BV_MASK_LOWER(T, offset); \
346 BV_EQ(T, *(B1) & ~(~mask << (BITS(T) - (B))), \
347 ((*(B2) & mask) << (BITS(T) - (B))) | ins); \
/* bv_eq(): compare the l-bit range at offset s1 of bv1_ with the l-bit
 * range at offset s2 of bv2_; BV_EQ's "return 0" fires on any mismatch, so
 * the (missing) epilogue presumably returns non-zero on equality — TODO
 * confirm.  Same three-way dispatch as bv_copy: aligned (memcmp on whole
 * units), shift-right (s1 < s2), shift-left (s1 > s2); the commented-out
 * BV_EQ_RIGHT/BV_EQ_LEFT calls show the loops are open-coded equivalents.
 * NOTE(review): the prologue (original 352-365), loop scaffolding and
 * epilogue are missing from this listing. */
351 INLINE_DECLARE(int bv_eq(const void *bv1_, size_t s1, const void *bv2_, size_t s2, size_t l))
356 const T *bv1 = (const T *) bv1_, *bv2 = (const T *) bv2_, *end;
/* Aligned case: both ranges share the same in-unit offset. */
366 mask = BV_MASK_HIGHER(T, BITS(T) - s2);
367 if (s2 + l <= BITS(T)) {
368 if (s2 + l < BITS(T)) {
369 mask &= BV_MASK_LOWER(T, s2 + l);
371 BV_EQ_MASK(T, *bv1, *bv2, mask);
375 BV_EQ_MASK(T, *bv1, *bv2, mask);
/* Whole aligned units compared in bulk. */
381 if ((ret = memcmp(bv1, bv2, lk * sizeof(T))) != 0)
384 mask = BV_MASK_LOWER(T, lo);
385 BV_EQ_MASK(T, *bv1, *bv2, mask);
389 } else if (s1 < s2) {
/* bv2's bits sit higher in their unit: compare with a right shift. */
392 mask = BV_MASK_HIGHER(T, BITS(T) - s2);
393 if (s2 + l <= BITS(T)) {
394 if (s2 + l < BITS(T))
395 mask &= BV_MASK_LOWER(T, s2 + l);
396 BV_EQ(T, *bv1 & (mask >> step), (*bv2 & mask) >> step);
399 ins = ((*bv2 & mask) >> step) | (*bv1 & BV_MASK_LOWER(T, s1));
401 offset = l % BITS(T);
402 end = bv2 + l / BITS(T) + (offset >= step);
404 BV_EQ(T, *bv1, (*bv2 << (BITS(T) - step)) | ins);
410 if (offset >= step) {
411 mask = BV_MASK_LOWER(T, offset - step);
412 BV_EQ_MASK(T, *bv1, ins, mask);
414 mask = BV_MASK_LOWER(T, offset);
415 BV_EQ(T, *bv1 & ~(~mask << (BITS(T) - step)),
416 ((*bv2 & mask) << (BITS(T) - step)) | ins);
418 /* BV_EQ_RIGHT(T, bv1, bv2, l, step); */
421 } else { /* s1 > s2 */
/* bv1's bits sit higher in their unit: compare with a left shift. */
424 mask = BV_MASK_HIGHER(T, BITS(T) - s1);
425 if (s1 + l <= BITS(T)) {
426 if (s1 + l < BITS(T))
427 mask &= BV_MASK_LOWER(T, s1 + l);
428 BV_EQ(T, *bv1 & mask, (*bv2 & (mask >> step)) << step);
430 size_t pets = BITS(T) - step;
432 BV_EQ(T, *bv1 & mask, (*bv2 & (mask >> step)) << step);
435 mask = BV_MASK_LOWER(T, l);
436 BV_EQ(T, *bv1 & mask, (*bv2 & (mask << pets)) >> pets);
438 ins = (*bv2 & BV_MASK_HIGHER(T, step)) >> pets;
440 offset = l % BITS(T);
441 end = bv1 + l / BITS(T);
443 BV_EQ(T, *bv1, (*bv2 << step) | ins);
444 ins = *bv2 >> (BITS(T) - step);
448 mask = BV_MASK_LOWER(T, offset - step);
449 BV_EQ(T, *bv1 & ~(~mask << step),
450 ((*bv2 & mask) << step) | ins);
452 mask = BV_MASK_LOWER(T, offset);
453 BV_EQ_MASK(T, *bv1, ins, mask);
455 /* BV_EQ_LEFT(T, bv1, bv2, l, step); */
463 #endif /* INLINE_DEFINE */
466 /* ... Fill ................................................................ */
/* bv_fill(): set the l bits starting at bit offset s of bv_ according to
 * flag f — presumably normalized to all-ones/all-zeros in the missing
 * prologue so (f & mask) fills correctly; TODO confirm.  `o` is presumably
 * s % BITS(T), computed in the missing lines.  T is pinned to the unit
 * type via #define for this function.
 * NOTE(review): original lines 470-472, 474-483, 486, 489, 491-496, 498
 * and 500-504 are missing — braces and the whole-unit memset-style middle
 * section are absent from this listing. */
468 #define T unsigned char
469 INLINE_DECLARE(void bv_fill(void *bv_, size_t s, size_t l, unsigned int f))
473 T mask, *bv = (T *) bv_;
/* Partial first unit. */
484 mask = BV_MASK_HIGHER(T, BITS(T) - o);
485 if (o + l <= BITS(T)) {
487 mask &= BV_MASK_LOWER(T, o + l);
488 *bv = (*bv & ~mask) | (f & mask);
490 *bv = (*bv & ~mask) | (f & mask);
/* Partial last unit. */
497 mask = BV_MASK_LOWER(T, o);
499 *bv = (*bv & ~mask) | (f & mask);
505 #endif /* INLINE_DEFINE */
508 /* ========================================================================= */
510 #endif /* BITVECT_H */