4 /* === Bit vectors ========================================================= */
6 /* ... Generic macros ...................................................... */
/* Wraps a function prototype so the same text can serve as either a
 * declaration or a (static) definition, depending on how STATIC is
 * defined.  NOTE(review): STATIC and the INLINE_DEFINE machinery are
 * defined outside this extract — confirm their expansion there. */
8 #define INLINE_DECLARE(P) STATIC P
/* Storage unit of the bit vectors: one unsigned char, i.e. CHAR_BIT
 * bits per unit.  NOTE(review): the conditional (#if/#ifndef) guarding
 * this "# define" is not visible in this extract. */
12 # define BV_UNIT unsigned char
/* Number of storage units (bytes) needed to hold I bits: all the whole
 * units, plus one extra when I is not a multiple of CHAR_BIT.  Written
 * as division-plus-remainder-test rather than (I + CHAR_BIT - 1) /
 * CHAR_BIT so that values of I near SIZE_MAX cannot overflow. */
#define BV_SIZE(I) (((I) / CHAR_BIT) + (((I) % CHAR_BIT) ? 1 : 0))
/* Width of type T in bits. */
#define BITS(T) (sizeof(T) * CHAR_BIT)
/* Mask selecting the I lowest bits of a unit of type T.
 * Precondition: 0 <= I < CHAR_BIT * sizeof(T).
 * ((T) ~((T) 0)) is the all-ones value of T; when T is narrower than
 * int it promotes to a *positive* int, so the left shift is well
 * defined.  The previous form shifted ~((T) 0), which promotes to -1
 * for sub-int T, and left-shifting a negative value is undefined
 * behavior (C11 6.5.7p4).  The result is cast back to T, which is how
 * every visible use site consumes it (stored in T variables or ANDed
 * against T-width data). */
#define BV_MASK_LOWER(T, I) ((T) ~(((T) ~((T) 0)) << (I)))
/* Mask selecting the I highest bits of a unit of type T.
 * Precondition: 0 < I <= CHAR_BIT * sizeof(T).
 * As with BV_MASK_LOWER, the shifted operand is the all-ones value of
 * T cast back to T first: for sub-int T it promotes to a positive int,
 * avoiding the undefined left shift of a negative value (C11 6.5.7p4)
 * that the previous ~((T) 0) form produced.  The final (T) cast
 * discards promotion bits above the unit width; all visible use sites
 * store the mask in a T variable or AND it with T-width data, so the
 * truncated value is equivalent. */
#define BV_MASK_HIGHER(T, I) ((T) (((T) ~((T) 0)) << (BITS(T) - (I))))
/* Core of the aligned copy/move: source and destination start at the
 * same bit offset, so after a masked partial first unit the bulk is
 * handled unit-by-unit through BV_<A>_UNIT_ALIGNED and the tail with a
 * final mask.  Expects mask, t, f, fs, ts, l (and lk, lo for the
 * bulk/tail) to be in scope at the expansion site.
 * NOTE(review): several continuation lines of this macro are missing
 * from this extract (the embedded line numbers jump); do not treat the
 * visible text as the complete definition. */
24 #define BV_DO_ALIGNED(T, A) \
25 mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
26 if (fs + l <= BITS(T)) { \
27 if (fs + l < BITS(T)) { \
28 /* Branching is apparently useless, \
29 * but since we can't portably shift \
30 * CHAR_BIT from a char... \
31 * Actually, we only copy up to this */ \
32 mask &= BV_MASK_LOWER(T, fs + l); \
34 *t = (*t & ~mask) | (*f & mask); \
37 *t = (*t & ~mask) | (*f & mask); \
40 l -= (BITS(T) - ts); \
43 BV_##A##_UNIT_ALIGNED(T, t, f, lk); \
45 mask = BV_MASK_LOWER(T, lo); \
46 t[lk] = (t[lk] & ~mask) \
/* Forward copy/move where the destination bit offset exceeds the
 * source offset, so each source unit is shifted left by `step` with an
 * `ins` carry taken from the previous unit (via
 * BV_<A>_UNIT_LEFT_FORWARD).  Presumably step == ts - fs at the
 * expansion site — TODO confirm against the elided setup code.
 * NOTE(review): continuation lines are missing from this extract (the
 * embedded line numbers jump). */
51 #define BV_DO_LEFT_FORWARD(T, A) \
53 mask = BV_MASK_HIGHER(T, BITS(T) - ts); \
54 if (ts + l <= BITS(T)) { \
55 if (ts + l < BITS(T)) { \
56 mask &= BV_MASK_LOWER(T, ts + l); \
58 *t = (*t & ~mask) | ((*f & (mask >> step)) << step); \
60 size_t pets = BITS(T) - step; \
61 l -= (BITS(T) - ts); \
62 *t = (*t & ~mask) | ((*f & (mask >> step)) << step); \
65 mask = BV_MASK_LOWER(T, l); \
66 *t = (*t & ~mask) | ((*f & (mask << pets)) >> pets); \
68 ins = (*f & BV_MASK_HIGHER(T, step)) >> pets; \
70 offset = l % BITS(T); \
71 end = t + l / BITS(T); \
73 BV_##A##_UNIT_LEFT_FORWARD(T, t, f, step); \
76 if (offset > step) { \
77 mask = BV_MASK_LOWER(T, offset - step); \
78 *t = (*t & (~mask << step)) \
79 | ((*f & mask) << step) \
81 } else if (offset) { \
82 mask = BV_MASK_LOWER(T, offset); \
83 *t = (*t & ~mask) | (ins & mask); \
/* Forward copy/move where the source bit offset exceeds the
 * destination offset, shifting each unit right by `step` with an `ins`
 * carry (via BV_<A>_UNIT_RIGHT_FORWARD).  Presumably step == fs - ts —
 * TODO confirm against the elided setup code.
 * NOTE(review): continuation lines are missing from this extract (the
 * embedded line numbers jump). */
88 #define BV_DO_RIGHT_FORWARD(T, A) \
90 mask = BV_MASK_HIGHER(T, BITS(T) - fs); \
91 if (fs + l <= BITS(T)) { \
92 if (fs + l < BITS(T)) { \
93 mask &= BV_MASK_LOWER(T, fs + l); \
95 *t = (*t & ~(mask >> step)) | ((*f & mask) >> step); \
97 l -= (BITS(T) - fs); \
98 ins = ((*f & mask) >> step) | (*t & BV_MASK_LOWER(T, ts)); \
100 offset = l % BITS(T); \
101 end = f + l / BITS(T) + (offset > step); \
103 BV_##A##_UNIT_RIGHT_FORWARD(T, t, f, step); \
106 if (!offset) { offset += BITS(T); } \
107 if (offset > step) { \
108 mask = BV_MASK_LOWER(T, offset - step); \
109 *t = (*t & ~mask) | (ins & mask); \
111 mask = BV_MASK_LOWER(T, offset); \
112 *t = (*t & (~mask << (BITS(T) - step))) \
113 | ((*f & mask) << (BITS(T) - step)) \
/* Backward (high-to-low address) variant of the left-shifting
 * copy/move, used when regions may overlap and must be walked from the
 * end; mirrors BV_DO_LEFT_FORWARD with masks/shifts reversed and a
 * `begin` sentinel instead of `end`.
 * NOTE(review): continuation lines are missing from this extract (the
 * embedded line numbers jump). */
118 #define BV_DO_LEFT_BACKWARD(T, A) \
120 mask = BV_MASK_LOWER(T, BITS(T) - ts); \
121 if (ts + l <= BITS(T)) { \
122 if (ts + l < BITS(T)) { \
123 mask &= BV_MASK_HIGHER(T, ts + l); \
125 *t = (*t & ~mask) | ((*f & (mask << step)) >> step); \
127 size_t pets = BITS(T) - step; \
128 l -= (BITS(T) - ts); \
129 *t = (*t & ~mask) | ((*f & (mask << step)) >> step); \
132 mask = BV_MASK_HIGHER(T, l); \
133 *t = (*t & ~mask) | ((*f & (mask >> pets)) << pets); \
135 ins = (*f & BV_MASK_LOWER(T, step)) << pets; \
137 offset = l % BITS(T); \
138 begin = t - l / BITS(T); \
139 while (t > begin) { \
140 BV_##A##_UNIT_LEFT_BACKWARD(T, t, f, step); \
143 if (offset > step) { \
144 mask = BV_MASK_HIGHER(T, offset - step); \
145 *t = (*t & (~mask >> step)) \
146 | ((*f & mask) >> step) \
148 } else if (offset) { \
149 mask = BV_MASK_HIGHER(T, offset); \
150 *t = (*t & ~mask) | (ins & mask); \
/* Backward (high-to-low address) variant of the right-shifting
 * copy/move; mirrors BV_DO_RIGHT_FORWARD with masks/shifts reversed
 * and a `begin` sentinel instead of `end`.
 * NOTE(review): continuation lines are missing from this extract (the
 * embedded line numbers jump). */
155 #define BV_DO_RIGHT_BACKWARD(T, A) \
157 mask = BV_MASK_LOWER(T, BITS(T) - fs); \
158 if (fs + l <= BITS(T)) { \
159 if (fs + l < BITS(T)) { \
160 mask &= BV_MASK_HIGHER(T, fs + l); \
162 *t = (*t & ~(mask << step)) | ((*f & mask) << step); \
164 l -= (BITS(T) - fs); \
165 ins = ((*f & mask) << step) | (*t & BV_MASK_HIGHER(T, ts)); \
167 offset = l % BITS(T); \
168 begin = f - l / BITS(T) - (offset > step); \
169 while (f > begin) { \
170 BV_##A##_UNIT_RIGHT_BACKWARD(T, t, f, step); \
173 if (!offset) { offset += BITS(T); } \
174 if (offset > step) { \
175 mask = BV_MASK_HIGHER(T, offset - step); \
176 *t = (*t & ~mask) | (ins & mask); \
178 mask = BV_MASK_HIGHER(T, offset); \
179 *t = (*t & (~mask >> (BITS(T) - step))) \
180 | ((*f & mask) >> (BITS(T) - step)) \
185 /* ... Copy ................................................................. */
/* Bulk step for the aligned copy: L whole units from F to T; the
 * regions are assumed non-overlapping (copy, not move). */
#define BV_COPY_UNIT_ALIGNED(X, T, F, L) memcpy((T), (F), sizeof(X) * (L))
/* One unit of the left-shifting forward copy: emit the current source
 * unit shifted left by B with the previous carry ORed in, then stash
 * the B bits that overflowed into `ins` for the next unit.  Relies on
 * a variable named `ins` being in scope at the expansion site. */
189 /* Save the O - B higher bits, shift B bits left, add B bits from f at right */
190 #define BV_COPY_UNIT_LEFT_FORWARD(X, T, F, B) \
191 *(T) = (*(F) << (B)) | ins; \
192 ins = *(F) >> (BITS(X) - (B));
/* One unit of the right-shifting forward copy; mirror image of
 * BV_COPY_UNIT_LEFT_FORWARD.  NOTE(review): the carry-update
 * continuation line of this macro is missing from this extract. */
194 /* Save the B lower bits, shift B bits right, add B bits from F at left */
195 #define BV_COPY_UNIT_RIGHT_FORWARD(X, T, F, B) \
196 *(T) = (*(F) << (BITS(X) - (B))) | ins; \
/* Copy l bits from bit offset fs of f_ to bit offset ts of t_.
 * Regions are assumed not to overlap (the move variant handles
 * overlap).  Dispatches on the relative offsets: aligned (ts == fs),
 * right shift (ts < fs) or left shift (ts > fs).
 * NOTE(review): most of this function body (declarations, braces,
 * offset setup) is missing from this extract — the visible lines are a
 * sample only. */
200 INLINE_DECLARE(void bv_copy(void *t_, size_t ts, const void *f_, size_t fs, size_t l))
204 T ins, mask, *t = t_;
205 const T *f = f_, *end;
214 BV_DO_ALIGNED(T, COPY);
215 } else if (ts < fs) {
216 BV_DO_RIGHT_FORWARD(T, COPY);
217 } else { /* ts > fs */
218 BV_DO_LEFT_FORWARD(T, COPY);
222 #endif /* INLINE_DEFINE */
225 /* ... Move ................................................................ */
/* Bulk step for the aligned move: L whole units from F to T; memmove
 * because the regions may overlap. */
#define BV_MOVE_UNIT_ALIGNED(X, T, F, L) memmove((T), (F), sizeof(X) * (L))
/* Per-unit steps for the four shifted-move walks.  Unlike the COPY
 * variants they read the source into `tmp` before writing the
 * destination, since source and destination may overlap in a move.
 * Rely on `tmp` and `ins` being in scope at the expansion site.
 * NOTE(review): each of these macros is missing at least its trailing
 * carry-update continuation line in this extract (the embedded line
 * numbers jump); do not treat the visible text as complete. */
229 #define BV_MOVE_UNIT_LEFT_FORWARD(X, T, F, B) \
230 tmp = *(F) >> (BITS(X) - (B)); \
231 *(T) = (*(F) << (B)) | ins; \
234 #define BV_MOVE_UNIT_RIGHT_FORWARD(X, T, F, B) \
236 *(T) = (*(F) << (BITS(X) - (B))) | ins; \
239 #define BV_MOVE_UNIT_LEFT_BACKWARD(X, T, F, B) \
240 tmp = *(F) << (BITS(X) - (B)); \
241 *(T) = (*(F) >> (B)) | ins; \
244 #define BV_MOVE_UNIT_RIGHT_BACKWARD(X, T, F, B) \
246 *(T) = (*(F) >> (BITS(X) - (B))) | ins; \
/* Initialize a unit pointer V and bit offset VS for a backward walk:
 * position V on the unit holding the *last* bit of the range and
 * express VS as a distance from the top of that unit.  Presumably z is
 * the end-of-range bit index computed at the expansion site — TODO
 * confirm against the elided lines.
 * NOTE(review): continuation lines are missing from this extract. */
249 #define BV_MOVE_INIT_REVERSE(T, V, VS) \
251 (VS) = z % BITS(T); \
253 (V) = bv + (z / BITS(T)); \
254 (VS) = BITS(T) - (VS); \
256 /* z >= BITS(T) because l > 0 */ \
257 (V) = bv + (z / BITS(T)) - 1; \
/* Move l bits from bit offset fs to bit offset ts inside the same bit
 * vector bv_, handling overlap: forward walk when moving to lower
 * offsets, backward walk (via BV_MOVE_INIT_REVERSE) when moving to
 * higher offsets.  Within each direction it dispatches on aligned /
 * right-shift / left-shift, like bv_copy.
 * NOTE(review): most of the body (branch conditions, braces, offset
 * setup) is missing from this extract — the visible lines are a sample
 * only. */
261 INLINE_DECLARE(void bv_move(void *bv_, size_t ts, size_t fs, size_t l))
264 size_t to, fo, offset, step;
265 T ins, tmp, mask, *bv = bv_, *t, *f;
266 const T *begin, *end;
272 t = bv + ts / BITS(T);
274 f = bv + fs / BITS(T);
276 BV_DO_ALIGNED(T, MOVE);
277 } else if (ts < fs) {
278 t = bv + ts / BITS(T);
280 f = bv + fs / BITS(T);
283 BV_DO_RIGHT_FORWARD(T, MOVE);
284 } else { /* ts > fs */
285 BV_DO_LEFT_FORWARD(T, MOVE);
287 } else { /* ts > fs */
289 BV_MOVE_INIT_REVERSE(T, t, ts);
290 BV_MOVE_INIT_REVERSE(T, f, fs);
292 BV_DO_RIGHT_BACKWARD(T, MOVE);
293 } else { /* ts > fs */
294 BV_DO_LEFT_BACKWARD(T, MOVE);
299 #endif /* INLINE_DEFINE */
302 /* ... Compare ............................................................. */
/* Bail out of the enclosing comparison function with "not equal" (0)
 * as soon as two unit-sized chunks differ.  Wrapped in do { } while (0)
 * so the expansion is exactly one statement: the previous bare `if`
 * could silently capture a following `else` at an expansion site — the
 * classic multi-statement-macro hazard.  Only usable inside a function
 * returning int; every visible use site follows the macro with `;`. */
#define BV_EQ(T, B1, B2) \
 do { if (((T) (B1)) != ((T) (B2))) { return 0; } } while (0)

/* Same as BV_EQ, but compares only the bits selected by mask M. */
#define BV_EQ_MASK(T, B1, B2, M) BV_EQ(T, (B1) & (M), (B2) & (M))
/* Compare L bits where B2's data must be shifted left by B to line up
 * with B1: walk whole units carrying `ins`, then compare the masked
 * tail.  Expects offset, end, mask, ins in scope; expands BV_EQ, so
 * only usable inside a function returning int.
 * NOTE(review): continuation lines are missing from this extract (the
 * embedded line numbers jump). */
309 #define BV_EQ_LEFT(T, B1, B2, L, B) \
310 offset = (L) % BITS(T); \
311 end = (B1) + (L) / BITS(T); \
312 while ((B1) < end) { \
313 BV_EQ(T, *(B1), (*(B2) << (B)) | ins); \
314 ins = *(B2) >> (BITS(T) - (B)); \
317 if (offset > (B)) { \
318 mask = BV_MASK_LOWER(T, offset - (B)); \
319 BV_EQ(T, *(B1) & ~(~mask << (B)), \
320 ((*(B2) & mask) << (B)) | ins); \
321 } else if (offset) { \
322 mask = BV_MASK_LOWER(T, offset); \
323 BV_EQ_MASK(T, *(B1), ins, mask); \
/* Compare L bits where B2's data must be shifted right by B to line up
 * with B1; mirror image of BV_EQ_LEFT.  Expects offset, end, mask, ins
 * in scope; expands BV_EQ, so only usable inside a function returning
 * int.
 * NOTE(review): continuation lines are missing from this extract (the
 * embedded line numbers jump). */
326 #define BV_EQ_RIGHT(T, B1, B2, L, B) \
327 offset = (L) % BITS(T); \
328 end = (B2) + (L) / BITS(T) + (offset >= (B)); \
329 while ((B2) < end) { \
330 BV_EQ(T, *(B1), (*(B2) << (BITS(T) - (B))) | ins); \
331 ins = *(B2) >> (B); \
334 if (!offset) { offset += BITS(T); } \
335 if (offset >= (B)) { \
336 mask = BV_MASK_LOWER(T, offset - (B)); \
337 BV_EQ_MASK(T, *(B1), ins, mask); \
339 mask = BV_MASK_LOWER(T, offset); \
340 BV_EQ(T, *(B1) & ~(~mask << (BITS(T) - (B))), \
341 ((*(B2) & mask) << (BITS(T) - (B))) | ins); \
/* Compare l bits starting at bit offset s1 of bv1_ with l bits
 * starting at bit offset s2 of bv2_.  Returns 0 on the first
 * difference (via the BV_EQ macros); the success return (presumably 1)
 * is on an elided line — TODO confirm.  Dispatches on the relative
 * offsets like bv_copy: aligned (memcmp for the bulk), right shift
 * (s1 < s2) or left shift (s1 > s2).
 * NOTE(review): most of the body (declarations of mask/ins/offset/
 * step/ret, braces, loop headers) is missing from this extract — the
 * visible lines are a sample only. */
345 INLINE_DECLARE(int bv_eq(const void *bv1_, size_t s1, const void *bv2_, size_t s2, size_t l))
350 const T *bv1 = bv1_, *bv2 = bv2_, *end;
360 mask = BV_MASK_HIGHER(T, BITS(T) - s2);
361 if (s2 + l <= BITS(T)) {
362 if (s2 + l < BITS(T)) {
363 mask &= BV_MASK_LOWER(T, s2 + l);
365 BV_EQ_MASK(T, *bv1, *bv2, mask);
369 BV_EQ_MASK(T, *bv1, *bv2, mask);
375 if ((ret = memcmp(bv1, bv2, lk * sizeof(T))) != 0) { return 0; }
377 mask = BV_MASK_LOWER(T, lo);
378 BV_EQ_MASK(T, *bv1, *bv2, mask);
382 } else if (s1 < s2) {
385 mask = BV_MASK_HIGHER(T, BITS(T) - s2);
386 if (s2 + l <= BITS(T)) {
387 if (s2 + l < BITS(T)) {
388 mask &= BV_MASK_LOWER(T, s2 + l);
390 BV_EQ(T, *bv1 & (mask >> step), (*bv2 & mask) >> step);
393 ins = ((*bv2 & mask) >> step) | (*bv1 & BV_MASK_LOWER(T, s1));
395 offset = l % BITS(T);
396 end = bv2 + l / BITS(T) + (offset >= step);
398 BV_EQ(T, *bv1, (*bv2 << (BITS(T) - step)) | ins);
402 if (!offset) { offset += BITS(T); }
403 if (offset >= step) {
404 mask = BV_MASK_LOWER(T, offset - step);
405 BV_EQ_MASK(T, *bv1, ins, mask);
407 mask = BV_MASK_LOWER(T, offset);
408 BV_EQ(T, *bv1 & ~(~mask << (BITS(T) - step)),
409 ((*bv2 & mask) << (BITS(T) - step)) | ins);
413 } else { /* s1 > s2 */
416 mask = BV_MASK_HIGHER(T, BITS(T) - s1);
417 if (s1 + l <= BITS(T)) {
418 if (s1 + l < BITS(T)) {
419 mask &= BV_MASK_LOWER(T, s1 + l);
421 BV_EQ(T, *bv1 & mask, (*bv2 & (mask >> step)) << step);
423 size_t pets = BITS(T) - step;
425 BV_EQ(T, *bv1 & mask, (*bv2 & (mask >> step)) << step);
428 mask = BV_MASK_LOWER(T, l);
429 BV_EQ(T, *bv1 & mask, (*bv2 & (mask << pets)) >> pets);
431 ins = (*bv2 & BV_MASK_HIGHER(T, step)) >> pets;
433 offset = l % BITS(T);
434 end = bv1 + l / BITS(T);
436 BV_EQ(T, *bv1, (*bv2 << step) | ins);
437 ins = *bv2 >> (BITS(T) - step);
441 mask = BV_MASK_LOWER(T, offset - step);
442 BV_EQ(T, *bv1 & ~(~mask << step),
443 ((*bv2 & mask) << step) | ins);
445 mask = BV_MASK_LOWER(T, offset);
446 BV_EQ_MASK(T, *bv1, ins, mask);
455 #endif /* INLINE_DEFINE */
458 /* ... Fill ................................................................ */
/* Unit type used by the following function body.  NOTE(review): the
 * matching #undef T (and any wider-unit variants) is not visible in
 * this extract — confirm it exists after the function. */
460 #define T unsigned char
/* Fill l bits starting at bit offset s of bv_ with the fill pattern f:
 * masked write into the partial first unit, bulk fill (elided lines,
 * presumably memset), then a masked write into the partial last unit.
 * Presumably o is the in-unit offset derived from s, and f has been
 * expanded to a full unit pattern — TODO confirm against the elided
 * setup lines.
 * NOTE(review): most of the body (declarations, bulk fill, braces) is
 * missing from this extract — the visible lines are a sample only. */
461 INLINE_DECLARE(void bv_fill(void *bv_, size_t s, size_t l, unsigned int f))
472 mask = BV_MASK_HIGHER(T, BITS(T) - o);
473 if (o + l <= BITS(T)) {
474 if (o + l < BITS(T)) {
475 mask &= BV_MASK_LOWER(T, o + l);
477 *bv = (*bv & ~mask) | (f & mask);
479 *bv = (*bv & ~mask) | (f & mask);
486 mask = BV_MASK_LOWER(T, o);
488 *bv = (*bv & ~mask) | (f & mask);
493 #endif /* INLINE_DEFINE */
496 /* ========================================================================= */
498 #endif /* BITVECT_H */