1#ifndef BM__H__INCLUDED__
2#define BM__H__INCLUDED__
30# include <initializer_list>
37#pragma warning( push )
38#pragma warning( disable : 4311 4312 4127)
47# define BM_DECLARE_TEMP_BLOCK(x) bm::bit_block_t x;
155 position_(ref.position_)
157 bv_.set(position_, ref.bv_.get_bit(position_));
162 return bv_.get_bit(position_);
  167         bv_.set(position_, (bool)ref);
173 bv_.set(position_, value);
  179         return bool(*this) == bool(ref);
185 bv_.set_bit_and(position_, value);
192 if (value != bv_.get_bit(position_))
194 bv_.set_bit(position_);
202 bv_.set(position_, value != bv_.get_bit(position_));
209 return !bv_.get_bit(position_);
215 return !bv_.get_bit(position_);
  294         if (this->bv_ != ib.bv_)                 return false;
  295         if (this->position_ != ib.position_)     return false;
  296         if (this->block_ != ib.block_)           return false;
  297         if (this->block_type_ != ib.block_type_) return false;
  298         if (this->block_idx_ != ib.block_idx_)   return false;
  309             for (unsigned i = 0; i < bd.bit_.cnt; ++i)
  497         buf_ = bvect_->blockman_.get_allocator().alloc_bit_block();
514 buf_ = iit.buf_; iit.buf_ = 0;
535 buf_ = ii.buf_; ii.buf_ = 0;
  677         return this->valid();
  696     static bool decode_wave(block_descr_type* bdescr) BMNOEXCEPT;
  697     bool decode_bit_group(block_descr_type* bdescr) BMNOEXCEPT;
698 bool decode_bit_group(block_descr_type* bdescr,
  725         bit_count_ = this->valid();
733 this->bit_count_ = 1;
  740         this->bit_count_ += this->valid();
  748         this->bit_count_ += this->valid();
780 bv.set_allocator_pool(&pool);
785 bv_->set_allocator_pool(0);
792 if (!bv.get_allocator_pool())
796 bv_->set_allocator_pool(&pool);
826 : strat(s), glevel_len(glevels)
861 const Alloc& alloc = Alloc())
862 : blockman_(glevel_len, bv_size, alloc),
863 new_blocks_strat_(strat),
873 const Alloc& alloc = Alloc())
874 : blockman_(glevel_len, bv_size, alloc),
875 new_blocks_strat_(strat),
  883     : blockman_(bvect.blockman_),
  884       new_blocks_strat_(bvect.new_blocks_strat_),
  894     : blockman_(bvect.blockman_.glevel_len_, bvect.blockman_.max_bits_, bvect.blockman_.alloc_),
  895       new_blocks_strat_(bvect.new_blocks_strat_),
  898         if (!bvect.blockman_.is_init())
  902         copy_range_no_check(bvect, left, right);
919 blockman_.deinit_tree();
  920             blockman_.copy(bvect.blockman_);
  932             blockman_.move_from(bvect.blockman_);
  934             new_blocks_strat_ = bvect.new_blocks_strat_;
  942       new_blocks_strat_(BM_BIT),
946 std::initializer_list<size_type>::const_iterator it_start = il.begin();
947 std::initializer_list<size_type>::const_iterator it_end = il.end();
948 for (; it_start < it_end; ++it_start)
 1017     { return blockman_.get_allocator(); }
1023 { blockman_.get_allocator().set_pool(pool_ptr); }
 1028     { return blockman_.get_allocator().get_pool(); }
 1219     void clear(bool free_mem = false) { blockman_.set_all_zero(free_mem); }
 1541     { return (++prev == bm::id_max) ? 0 : check_or_next(prev); }
 1552         return (++prev == bm::id_max) ? 0 : check_or_next_extract(prev);
 1819     { return new_blocks_strat_; }
1835 statistics* stat = 0);
 1925     { return blockman_; }
 1933     { return blockman_; }
 1979     bool and_bit_no_check(size_type n, bool val);
 1981     bool set_bit_conditional_impl(size_type n, bool val, bool condition);
 1994     bool combine_operation_block_or(unsigned i,
 1998     bool combine_operation_block_xor(unsigned i,
 2002     bool combine_operation_block_and(unsigned i,
 2006     bool combine_operation_block_sub(unsigned i,
 2012     void combine_operation_block_or(unsigned i,
 2017     void combine_operation_block_xor(unsigned i,
 2022     void combine_operation_block_and(unsigned i,
 2027     void combine_operation_block_sub(unsigned i,
 2046     void clear_range_no_check(size_type left,
 2052     void keep_range_no_check(size_type left,
2061 unsigned nbit_right,
 2077template<class Alloc>
 2088template<class Alloc>
 2099template<class Alloc>
 2110template<class Alloc>
 2122template<typename Alloc>
2125 if (!blockman_.is_init())
2126 blockman_.init_tree();
 2131template<typename Alloc>
 2136     blockman_.move_from(bvect.blockman_);
 2137     size_ = bvect.size_;
 2138     new_blocks_strat_ = bvect.new_blocks_strat_;
 2144template<class Alloc>
2147 if (!blockman_.is_init())
2153 keep_range_no_check(left, right);
 2157template<typename Alloc>
2162 if (!blockman_.is_init())
2184 set_range_no_check(left, right);
2186 clear_range_no_check(left, right);
 2193template<typename Alloc>
2196 if (!blockman_.is_init())
2199 word_t*** blk_root = blockman_.top_blocks_root();
2203 unsigned top_blocks = blockman_.top_block_size();
 2204     for (unsigned i = 0; i < top_blocks; ++i)
2213 blk_blk = blk_root[i];
2227 cnt += blockman_.block_bitcount(blk_blk[j]);
2229 cnt += blockman_.block_bitcount(blk_blk[j+1]);
2231 cnt += blockman_.block_bitcount(blk_blk[j+2]);
2233 cnt += blockman_.block_bitcount(blk_blk[j+3]);
 2243template<typename Alloc>
2246 word_t*** blk_root = blockman_.top_blocks_root();
2249 typename blocks_manager_type::block_any_func func(blockman_);
 2255template<typename Alloc>
 2258     if (size_ == new_size) return;
2259 if (size_ < new_size)
2261 if (!blockman_.is_init())
2262 blockman_.init_tree();
2264 blockman_.reserve(new_size);
 2276template<typename Alloc>
2283 if (found && last >= size_)
 2289template<typename Alloc>
2304 if (!blockman_.is_init())
2317 unsigned real_top_blocks = blockman_.find_real_top_blocks();
2318 unsigned max_top_blocks = blockman_.find_max_top_blocks();
2321 rs_idx->set_total(nb + 1);
2322 rs_idx->resize(nb + 1);
2323 rs_idx->resize_effective_super_blocks(real_top_blocks);
2327 BM_ASSERT(max_top_blocks <= blockman_.top_block_size());
2328 bm::word_t*** blk_root = blockman_.top_blocks_root();
 2329     for (unsigned i = 0; i < max_top_blocks; ++i)
2334 rs_idx->set_null_super_block(i);
2339 rs_idx->set_full_super_block(i);
2344 bv_blocks->set_range_no_check(nb_from, nb_to);
2355 bcount[j] = sub_count[j] = 0;
2359 unsigned cnt = blockman_.block_bitcount(block);
2361 if (bv_blocks && cnt)
2367 unsigned local_first, local_second;
2389 BM_ASSERT(cnt >= local_first + local_second);
2390 sub_count[j] = local_first | (local_second << 16);
2394 rs_idx->register_super_block(i, &bcount[0], &sub_count[0]);
 2403template<typename Alloc>
2407 bm::word_t*** blk_root = blockman_.top_blocks_root();
2410 typename blocks_manager_type::block_count_arr_func func(blockman_, &(arr[0]));
2412 return func.last_block();
 2417template<typename Alloc>
2421 unsigned nbit_right,
2425 unsigned sub_range = rs_idx.find_sub_range(nbit_right);
2427 unsigned sub_cnt = rs_idx.sub_count(nb);
2428 unsigned first = sub_cnt & 0xFFFF;
2429 unsigned second = sub_cnt >> 16;
 2473             unsigned bc_second_range = first + second;
2477 c = bc_second_range;
2485 c = bc_second_range - c;
 2491             unsigned bc_second_range = first + second;
2499 c += bc_second_range;
2506 c = rs_idx.count(nb);
 2531template<typename Alloc>
2537 if (!blockman_.is_init())
2546 if (nblock_right >= rs_idx.get_total())
2548 cnt = rs_idx.count();
2551 cnt = nblock_right ? rs_idx.rcount(nblock_right-1) : 0;
2555 const bm::word_t* block = blockman_.get_block_ptr(i, j);
2571 cnt += nbit_right + 1;
2576 this->block_count_to(block, nblock_right, nbit_right, rs_idx);
 2585template<typename Alloc>
2591 if (!blockman_.is_init())
2599 const bm::word_t* block = blockman_.get_block_ptr(i, j);
2618 cnt = nbit_right + 1;
2623 unsigned w = block[nword];
2627 cnt = block_count_to(block, nblock_right, nbit_right, rs_idx);
2636 cnt += nblock_right ? rs_idx.rcount(nblock_right - 1) : 0;
 2642template<typename Alloc>
2648 if (!blockman_.is_init())
2654 size_type cnt = nblock_right ? rs_idx.rcount(nblock_right - 1) : 0;
2658 const bm::word_t* block = blockman_.get_block_ptr(i, j);
2675 cnt += block_count_to(block, nblock_right, nbit_right, rs_idx);
 2687template<typename Alloc>
2697 if (!blockman_.is_init())
2708 const bm::word_t* block = blockman_.get_block(i0, j0);
2718 typename blocks_manager_type::block_count_func func(blockman_);
2741 cnt += func.count();
2742 if (nblock_left == nblock_right)
2750 word_t*** blk_root = blockman_.top_blocks_root();
2754 nblock_left+1, nblock_right-1, func);
2755 cnt += func.count();
2759 block = blockman_.get_block(i0, j0);
 2780template<typename Alloc>
2784 if (!blockman_.is_init())
2801 const bm::word_t* block = blockman_.get_block(i0, j0);
2803 if (nblock_left == nblock_right)
2823 block = blockman_.get_block(i0, j0);
2833 if (nblock_left <= nblock_right)
2835 unsigned i_from, j_from, i_to, j_to;
2839 bm::word_t*** blk_root = blockman_.top_blocks_root();
 2841         for (unsigned i = i_from; i <= i_to; ++i)
2849 unsigned j = (i == i_from) ? j_from : 0;
 2856             } while (++j < j_limit);
 2864template<typename Alloc>
2869 if (!blockman_.is_init())
2884 const bm::word_t* block = blockman_.get_block(i0, j0);
2886 if (nblock_left == nblock_right)
2906 block = blockman_.get_block(i0, j0);
2916 if (nblock_left <= nblock_right)
2918 unsigned i_from, j_from, i_to, j_to;
2922 bm::word_t*** blk_root = blockman_.top_blocks_root();
2925 if (i_from >= top_size)
2927 if (i_to >= top_size)
2929 i_to = unsigned(top_size-1);
 2934         for (unsigned i = i_from; i <= i_to; ++i)
2942 unsigned j = (i == i_from) ? j_from : 0;
 2949             } while (++j < j_limit);
 2957template<typename Alloc>
 2971         return this->test(left);
 2975         cnt_l = this->count_to(left-1, rs_idx);
 2979     cnt_r = this->count_to(right, rs_idx);
2981 return cnt_r - cnt_l;
 2986template<typename Alloc>
2993 bm::word_t*** blk_root = blockman_.top_blocks_root();
 2994     for (unsigned i = 0; i < top_blocks; ++i)
3015 blockman_.set_block_ptr(i, j, 0);
 3036template<typename Alloc>
3046 const bm::word_t* block = blockman_.get_block_ptr(i, j);
3061 unsigned w = block[nword];
 3070template<typename Alloc>
3075 if (!blockman_.is_init())
3082 temp_block = blockman_.check_allocate_tempblock();
3092 blockman_.optimize_tree(temp_block, opt_mode, stat);
3097 if (!safe_inc) safe_inc = 256;
 3100         stat->memory_used += (unsigned)(sizeof(*this) - sizeof(blockman_));
3102 unsigned top_size = blockman_.top_block_size();
 3103         size_t blocks_mem = sizeof(blockman_);
 3104         blocks_mem += sizeof(bm::word_t**) * top_size;
3111 blockman_.free_temp_block();
 3116template<typename Alloc>
3120 if (!blockman_.is_init())
3130 ::memcpy(opt_glen, st.gap_levels, bm::gap_levels * sizeof(*opt_glen));
3133 st.gap_length + st.gap_blocks,
3142template<typename Alloc>
3145 if (blockman_.is_init())
3147 word_t*** blk_root = blockman_.top_blocks_root();
3149 blocks_manager_type::gap_level_func gl_func(blockman_, glevel_len);
3153 blockman_.set_glen(glevel_len);
 3158template<typename Alloc>
3162 unsigned top_blocks = blockman_.top_block_size();
3163 unsigned bvect_top_blocks = bv.blockman_.top_block_size();
3165 if (bvect_top_blocks > top_blocks) top_blocks = bvect_top_blocks;
 3167     for (unsigned i = 0; i < top_blocks; ++i)
 3169         const bm::word_t* const* blk_blk = blockman_.get_topblock(i);
 3170         const bm::word_t* const* arg_blk_blk = bv.blockman_.get_topblock(i);
3172 if (blk_blk == arg_blk_blk)
3182 arg_blk = arg_blk_blk ? arg_blk_blk[j] : 0;
3190 blk = blk_blk ? blk_blk[j] : 0;
 3194             if (blk == arg_blk) continue;
3199 if (!blk || !arg_blk)
 3276template<typename Alloc>
3281 unsigned top_blocks = blockman_.top_block_size();
3282 bm::word_t*** top_root = blockman_.top_blocks_root();
3284 if (!top_blocks || !top_root)
3289 unsigned i_to, j_to;
 3291     unsigned bvect_top_blocks = bvect.blockman_.top_block_size();
3292 if (!bvect_top_blocks || !arg_top_root)
 3294         bool f = this->find(pos);
3297 if (pos > search_to)
3303 if (bvect_top_blocks > top_blocks)
3304 top_blocks = bvect_top_blocks;
3308 if (i_to < top_blocks)
3309 top_blocks = i_to+1;
 3311     for (unsigned i = 0; i < top_blocks; ++i)
 3313         const bm::word_t* const* blk_blk = blockman_.get_topblock(i);
 3314         const bm::word_t* const* arg_blk_blk = bvect.blockman_.get_topblock(i);
3316 if (blk_blk == arg_blk_blk)
3360 if (pos > search_to)
 3380template<typename Alloc>
 3385     blockman_.swap(bvect.blockman_);
 3392template<typename Alloc>
3399 ::memcpy(st->gap_levels,
 3402     st->max_serialize_mem = unsigned(sizeof(bm::id_t) * 4);
3403 unsigned top_size = blockman_.top_block_size();
 3405     size_t blocks_mem = sizeof(blockman_);
 3408     blocks_mem += sizeof(bm::word_t**) * top_size;
3409 bm::word_t*** blk_root = blockman_.top_blocks_root();
 3413     for (unsigned i = 0; i < top_size; ++i)
 3415         const bm::word_t* const* blk_blk = blk_root[i];
3422 blk_blk = blk_root[i];
3429 st->ptr_sub_blocks++;
3440 st->add_gap_block(cap, len);
3443 st->add_bit_block();
3448 size_t full_null_size = blockman_.calc_serialization_null_full();
3449 st->max_serialize_mem += full_null_size;
3453 size_t safe_inc = st->max_serialize_mem / 10;
3454 if (!safe_inc) safe_inc = 256;
3455 st->max_serialize_mem += safe_inc;
 3458     st->memory_used += unsigned(sizeof(*this) - sizeof(blockman_));
3460 st->memory_used += blocks_mem;
 3467template<class Alloc>
3481 blockman_.check_allocate_block(nblock,
 3490         this->gap_block_set_no_ret(BMGAP_PTR(blk), val, nblock, nbit);
3496 blk[nword] |= (1u << nbit);
 3502template<class Alloc>
3506 if (!ids || !ids_size)
3508 if (!blockman_.is_init())
3509 blockman_.init_tree();
3511 import(ids, ids_size, so);
 3517template<class Alloc>
3521 if (!ids || !ids_size || !blockman_.is_init())
 3527     bv_tmp.import(ids, ids_size, so);
 3545template<class Alloc>
3549 if (!ids || !ids_size || !blockman_.is_init())
 3554     bv_tmp.import(ids, ids_size, so);
 3571template<class Alloc>
 3580template<class Alloc>
 3589template<class Alloc>
 3592     if (val == condition) return false;
3598 return set_bit_conditional_impl(n, val, condition);
 3603template<class Alloc>
3609 if (!blockman_.is_init())
3610 blockman_.init_tree();
3611 return and_bit_no_check(n, val);
 3616template<class Alloc>
3621 if (!blockman_.is_init())
3622 blockman_.init_tree();
 3633template<class Alloc>
3649 if (nblock == nblock_end)
 3679     } while (start < size_in);
 3684template<class Alloc>
3693 blockman_.check_allocate_block(nblock, 1, 0, &block_type,
3700 blk = blockman_.deoptimize_block(nblock);
 3715template<class Alloc>
3723 blockman_.check_allocate_block(nblock,
 3735         return gap_block_set(BMGAP_PTR(blk), val, nblock, nbit);
3746 val = ~(*word & mask);
3752 val = ~(*word & mask);
 3762template<class Alloc>
3767 unsigned is_set, new_len, old_len;
3770 if (old_len < new_len)
 3772         unsigned threshold = bm::gap_limit(gap_blk, blockman_.glen());
3773 if (new_len > threshold)
3774 blockman_.extend_gap_block(nblock, gap_blk);
 3781template<class Alloc>
 3782void bvector<Alloc>::gap_block_set_no_ret(bm::gap_word_t* gap_blk,
3785 unsigned new_len, old_len;
3788 if (old_len < new_len)
 3790         unsigned threshold = bm::gap_limit(gap_blk, blockman_.glen());
3791 if (new_len > threshold)
3792 blockman_.extend_gap_block(nblock, gap_blk);
 3799template<class Alloc>
3805 blockman_.check_allocate_block(nblock,
3816 this->gap_block_set(gap_blk, !is_set, nblock, nbit);
3825 is_set = ((*word) & mask);
3827 *word = (is_set) ? (*word & ~mask) : (*word | mask);
 3834template<class Alloc>
3843 blockman_.check_allocate_block(nblock,
3853 if (block_type == 1)
3858 if (old_val != condition)
3865 unsigned is_set = gap_block_set(gap_blk, val, nblock, nbit);
3877 bool is_set = ((*word) & mask) != 0;
3879 if (is_set != condition)
 3903template<class Alloc>
 3904bool bvector<Alloc>::and_bit_no_check(size_type n, bool val)
3911 blockman_.check_allocate_block(nblock,
3921 if (block_type == 1)
3926 bool new_val = val & old_val;
3927 if (new_val != old_val)
3929 unsigned is_set = gap_block_set(gap_blk, val, nblock, nbit);
3941 bool is_set = ((*word) & mask) != 0;
3943 bool new_val = is_set & val;
 3962template<class Alloc>
3971 pos = check_or_next(from);
 3977template<class Alloc>
3982 unsigned top_blocks = blockman_.top_block_size();
 3985     for (unsigned i = top_blocks-1; true; --i)
 3987         const bm::word_t* const* blk_blk = blockman_.get_topblock(i);
4016 pos = base_idx + block_pos;
 4034template<class Alloc>
4039 unsigned top_blocks = blockman_.top_block_size();
 4040     for (unsigned i = 0; i < top_blocks; ++i)
 4042         const bm::word_t* const* blk_blk = blockman_.get_topblock(i);
 4056                 found = true; block_pos = 0;
4068 pos = base_idx + block_pos;
 4080template<class Alloc>
 4084     bool found = find(in_first);
4093 in_first = in_last = 0;
 4100template<class Alloc>
4109 if (!rank_in || !blockman_.is_init())
4114 unsigned bit_pos = 0;
4119 const bm::word_t* block = blockman_.get_block(nb, &no_more_blocks);
4124 unsigned cnt = blockman_.block_bitcount(block);
 4153template<class Alloc>
4164 !blockman_.is_init() ||
4165 (rs_idx.count() < rank_in))
4173 nb = rs_idx.find(rank_in);
4174 BM_ASSERT(rs_idx.rcount(nb) >= rank_in);
4176 rank_in -= rs_idx.rcount(nb-1);
4180 unsigned bit_pos = 0;
4182 for (; nb < rs_idx.get_total(); ++nb)
4185 const bm::word_t* block = blockman_.get_block(nb, &no_more_blocks);
4190 unsigned block_bc = rs_idx.count(nb);
4191 if (rank_in <= block_bc)
4193 nbit = rs_idx.select_sub_range(nb, rank_in);
4199 rank_in -= block_bc;
 4224template<class Alloc>
4231 !blockman_.is_init() ||
4232 (rs_idx.count() < rank_in))
4238 bool found = rs_idx.find(&rank_in, &nb, &sub_range_from);
4244 const bm::word_t* block = blockman_.get_block_ptr(i, j);
4251 unsigned bit_pos = 0;
 4260template<class Alloc>
4264 if (!blockman_.is_init())
4271 const bm::word_t* block = blockman_.get_block_ptr(i, j);
4301 for (; i < top_blocks; ++i)
 4303         const bm::word_t* const* blk_blk = blockman_.get_topblock(i);
 4318                 found = true; block_pos = 0;
4330 prev = base_idx + block_pos;
 4344template<class Alloc>
 4346bvector<Alloc>::check_or_next_extract(size_type prev)
4348 if (!blockman_.is_init())
4351 size_type pos = this->check_or_next(prev);
 4359template<class Alloc>
 4367template<class Alloc>
 4370     bool b = this->test(0);
 4377template<class Alloc>
4384 if (!blockman_.is_init())
4403 bm::word_t* block = blockman_.get_block_ptr(i, j);
4405 if (!block && !value)
 4410             block = blockman_.check_allocate_block(nb, BM_BIT);
4412 block = blockman_.deoptimize_block(nb);
4425 unsigned top_blocks = blockman_.top_block_size();
4426 bm::word_t*** blk_root = blockman_.top_blocks_root();
4432 if (i >= top_blocks)
4439 blk_blk = blk_root[i];
 4450             blockman_.check_allocate_block(nblock, 0, 0, &block_type, false);
4451 block[0] |= carry_over;
4454 blk_root = blockman_.top_blocks_root();
4455 blk_blk = blk_root[i];
4456 top_blocks = blockman_.top_block_size();
4467 blk_blk = blockman_.check_alloc_top_subblock(i);
4481 carry_over = 0; block = 0;
4486 if (0 != (block = blk_blk[j]))
4501 block = blockman_.deoptimize_block(nblock);
4502 block[0] <<= (carry_over = 1);
4511 block = blockman_.deoptimize_block(nblock);
4515 unsigned new_block_len;
 4519                 unsigned threshold = bm::gap_limit(gap_blk, blockman_.glen());
4520 if (new_block_len > threshold)
4522 blockman_.extend_gap_block(nblock, gap_blk);
4538 blockman_.zero_block(nblock);
4542 blockman_.zero_block(nblock);
 4554template<class Alloc>
4559 if (!blockman_.is_init())
4571 bm::word_t* block = blockman_.get_block_ptr(i, j);
4572 bool carry_over = test_first_block_bit(nb+1);
 4577             block = blockman_.check_allocate_block(nb, BM_BIT);
4584 block = blockman_.deoptimize_block(nb);
4596 unsigned top_blocks = blockman_.top_block_size();
4597 bm::word_t*** blk_root = blockman_.top_blocks_root();
4603 if (i >= top_blocks)
4606 blk_blk = blk_root[i];
4618 bool carry_over = 0;
 4622                 carry_over = this->test(co_idx);
4626 blk_blk = blockman_.check_alloc_top_subblock(i);
4630 blk_blk = blockman_.check_alloc_top_subblock(i);
4638 bool carry_over = 0;
4643 bool no_blocks = (j == 0);
4646 if (0 != (block = blk_blk[j]))
4656 blockman_.zero_block(i, j-1);
4664 carry_over = test_first_block_bit(nblock+1);
4669 block = blockman_.deoptimize_block(nblock);
4677 carry_over = test_first_block_bit(nblock+1);
4678 unsigned new_block_len;
 4682                     unsigned threshold = bm::gap_limit(gap_blk, blockman_.glen());
4683 if (new_block_len > threshold)
4684 blockman_.extend_gap_block(nblock, gap_blk);
4688 blockman_.zero_block(i, j);
4696 blockman_.zero_block(i, j);
4699 if (carry_over && nblock)
 4712template<class Alloc>
 4723template<class Alloc>
4726 if (!bv.blockman_.is_init())
4732 unsigned top_blocks = blockman_.top_block_size();
4733 if (size_ < bv.size_)
4737 unsigned arg_top_blocks = bv.blockman_.top_block_size();
4738 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
4741 bm::word_t*** blk_root = blockman_.top_blocks_root();
4742 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
 4744     for (unsigned i = 0; i < top_blocks; ++i)
4747 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
4748 if (blk_blk == blk_blk_arg || !blk_blk_arg)
4750 if (!blk_blk && blk_blk_arg)
4754 blk_root[i] = blk_root_arg[i];
4755 blk_root_arg[i] = 0;
4762 blockman_.deallocate_top_subblock(i);
4772 blk = blk_blk[j]; arg_blk = blk_blk_arg[j];
4775 if (!blk && arg_blk)
4777 blockman_.set_block_ptr(i, j, arg_blk);
4778 bv.blockman_.set_block_ptr(i, j, 0);
4782 combine_operation_block_or(i, j, blk, arg_blk);
 4791template<class Alloc>
4799 bm::word_t* blk = blockman_.get_block_ptr(i0, j0);
 4807template<class Alloc>
4813 if (blockman_.is_init())
4814 blockman_.deinit_tree();
4835 unsigned top_blocks1 = bman1.top_block_size();
4836 unsigned top_blocks2 = bman2.top_block_size();
4837 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
4838 top_blocks = blockman_.reserve_top_blocks(top_blocks);
4841 if (size_ < bv2.size_)
4844 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
4850 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
 4857     for (unsigned i = 0; i < top_blocks; ++i)
4859 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
4860 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
4862 if (blk_blk_arg1 == blk_blk_arg2)
4865 bm::word_t*** blk_root = blockman_.top_blocks_root();
4866 blk_root[i] = blk_blk_arg1;
4872 bm::word_t*** blk_root = blockman_.top_blocks_root();
4876 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
 4877         bool any_blocks = false;
4881 const bm::word_t* arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
4882 const bm::word_t* arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
4883 if (arg_blk1 == arg_blk2 && !arg_blk1)
4885 bool need_opt = combine_operation_block_or(i, j, arg_blk1, arg_blk2);
4887 blockman_.optimize_bit_block(i, j);
4888 any_blocks |= bool(blk_blk[j]);
4892 blockman_.free_top_subblock(i);
4897 blockman_.free_temp_block();
 4904template<class Alloc>
4910 if (blockman_.is_init())
4911 blockman_.deinit_tree();
4928 if (!bman1.is_init())
4934 if (!bman2.is_init())
4940 unsigned top_blocks1 = bman1.top_block_size();
4941 unsigned top_blocks2 = bman2.top_block_size();
4942 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
4943 top_blocks = blockman_.reserve_top_blocks(top_blocks);
4946 if (size_ < bv2.size_)
4949 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
4950 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
 4952     for (unsigned i = 0; i < top_blocks; ++i)
4954 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
4955 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
4957 if (blk_blk_arg1 == blk_blk_arg2)
4962 blockman_.deallocate_top_subblock(i);
4970 bm::word_t*** blk_root= blockman_.top_blocks_root();
4983 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
 4984         bool any_blocks = false;
4989 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
4990 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
4992 if ((arg_blk1 == arg_blk2) &&
4996 bool need_opt = combine_operation_block_xor(i, j, arg_blk1, arg_blk2);
4998 blockman_.optimize_bit_block(i, j);
4999 any_blocks |= bool(blk_blk[j]);
5003 blockman_.free_top_subblock(i);
5008 blockman_.free_temp_block();
 5015template<class Alloc>
5036 if (blockman_.is_init())
5037 blockman_.deinit_tree();
5041 if (!bman1.is_init() || !bman2.is_init())
5044 unsigned top_blocks1 = bman1.top_block_size();
5045 unsigned top_blocks2 = bman2.top_block_size();
5046 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
5047 top_blocks = blockman_.reserve_top_blocks(top_blocks);
5050 if (size_ < bv2.size_)
5053 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
5054 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
 5056     for (unsigned i = 0; i < top_blocks; ++i)
5058 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
5059 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
5061 if (blk_blk_arg1 == blk_blk_arg2)
5067 bm::word_t*** blk_root = blockman_.top_blocks_root();
5077 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
 5078         bool any_blocks = false;
5083 arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
5084 arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
5086 if ((arg_blk1 == arg_blk2) && !arg_blk1)
5089 bool need_opt = combine_operation_block_and(i, j, arg_blk1, arg_blk2);
5091 blockman_.optimize_bit_block(i, j);
5092 any_blocks |= bool(blk_blk[j]);
5096 blockman_.free_top_subblock(i);
5101 blockman_.free_temp_block();
 5109template<class Alloc>
5115 if (blockman_.is_init())
5116 blockman_.deinit_tree();
5134 if (!bman1.is_init())
5138 if (!bman2.is_init())
5144 unsigned top_blocks1 = bman1.top_block_size();
5145 unsigned top_blocks2 = bman2.top_block_size();
5146 unsigned top_blocks = (top_blocks2 > top_blocks1) ? top_blocks2 : top_blocks1;
5147 top_blocks = blockman_.reserve_top_blocks(top_blocks);
5150 if (size_ < bv2.size_)
5153 bm::word_t*** blk_root_arg1 = bv1.blockman_.top_blocks_root();
5154 bm::word_t*** blk_root_arg2 = bv2.blockman_.top_blocks_root();
 5156     for (unsigned i = 0; i < top_blocks; ++i)
5158 bm::word_t** blk_blk_arg1 = (i < top_blocks1) ? blk_root_arg1[i] : 0;
5159 bm::word_t** blk_blk_arg2 = (i < top_blocks2) ? blk_root_arg2[i] : 0;
5161 if (blk_blk_arg1 == blk_blk_arg2)
5168 bm::word_t** blk_blk = blockman_.alloc_top_subblock(i);
 5169         bool any_blocks = false;
5173 const bm::word_t* arg_blk1 = blk_blk_arg1 ? blk_blk_arg1[j] : 0;
5174 const bm::word_t* arg_blk2 = blk_blk_arg2 ? blk_blk_arg2[j] : 0;
5175 if ((arg_blk1 == arg_blk2) && !arg_blk1)
5178 bool need_opt = combine_operation_block_sub(i, j, arg_blk1, arg_blk2);
5180 blockman_.optimize_bit_block(i, j);
5181 any_blocks |= bool(blk_blk[j]);
5185 blockman_.free_top_subblock(i);
5190 blockman_.free_temp_block();
5198#define BM_OR_OP(x) \
5200 blk = blk_blk[j+x]; arg_blk = blk_blk_arg[j+x]; \
5201 if (blk != arg_blk) \
5202 combine_operation_block_or(i, j+x, blk, arg_blk); \
 5205template<class Alloc>
5208 if (!bv.blockman_.is_init())
5211 unsigned top_blocks = blockman_.top_block_size();
5212 if (size_ < bv.size_)
5215 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5216 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5218 bm::word_t*** blk_root = blockman_.top_blocks_root();
5219 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
 5221     for (unsigned i = 0; i < top_blocks; ++i)
5224 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5225 if (blk_blk == blk_blk_arg || !blk_blk_arg)
5231 blockman_.deallocate_top_subblock(i);
5236 blk_blk = blockman_.alloc_top_subblock(i);
5243 #if defined(BM64_AVX2) || defined(BM64_AVX512)
5244 if (!avx2_test_all_eq_wave2(blk_blk + j, blk_blk_arg + j))
5252 #elif defined(BM64_SSE4)
5271#define BM_XOR_OP(x) \
5273 blk = blk_blk[j+x]; arg_blk = blk_blk_arg[j+x]; \
5274 combine_operation_block_xor(i, j+x, blk, arg_blk); \
 5277template<class Alloc>
5280 if (!bv.blockman_.is_init())
5282 if (!blockman_.is_init())
5288 unsigned top_blocks = blockman_.top_block_size();
5289 if (size_ < bv.size_)
5293 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5294 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5296 bm::word_t*** blk_root = blockman_.top_blocks_root();
5297 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
 5299     for (unsigned i = 0; i < top_blocks; ++i)
5301 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5305 if (blk_blk == blk_blk_arg)
5315 blk_blk = blockman_.check_alloc_top_subblock(i);
5328 blk_blk = blockman_.alloc_top_subblock(i);
5335 #if defined(BM64_AVX2) || defined(BM64_AVX512)
5336 if (!avx2_test_all_zero_wave2(blk_blk + j, blk_blk_arg + j))
5344 #elif defined(BM64_SSE4)
5364#define BM_AND_OP(x) if (0 != (blk = blk_blk[j+x])) \
5366 if (0 != (arg_blk = blk_blk_arg[j+x])) \
5368 combine_operation_block_and(i, j+x, blk, arg_blk); \
5369 if (opt_mode == opt_compress) \
5370 blockman_.optimize_bit_block(i, j+x); \
5373 blockman_.zero_block(i, j+x); \
 5376template<class Alloc>
5380 if (!blockman_.is_init())
5382 if (!bv.blockman_.is_init())
5388 unsigned top_blocks = blockman_.top_block_size();
5389 if (size_ < bv.size_)
5392 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5393 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5396 bm::word_t*** blk_root = blockman_.top_blocks_root();
5397 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
 5399     for (unsigned i = 0; i < top_blocks; ++i)
5404 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5414 blockman_.zero_block(i, j);
5415 blockman_.deallocate_top_subblock(i);
5423 blk_blk = blockman_.check_alloc_top_subblock(i);
5430 #if defined(BM64_AVX2) || defined(BM64_AVX512)
5431 if (!avx2_test_all_zero_wave(blk_blk + j))
5439 #elif defined(BM64_SSE4)
5459#define BM_SUB_OP(x) \
5460 if ((0 != (blk = blk_blk[j+x])) && (0 != (arg_blk = blk_blk_arg[j+x]))) \
5461 combine_operation_block_sub(i, j+x, blk, arg_blk);
 5464template<class Alloc>
5467 if (!blockman_.is_init() || !bv.blockman_.is_init())
5470 unsigned top_blocks = blockman_.top_block_size();
5471 if (size_ < bv.size_)
5474 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5475 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5477 bm::word_t*** blk_root = blockman_.top_blocks_root();
5478 bm::word_t*** blk_root_arg = bv.blockman_.top_blocks_root();
 5480     for (unsigned i = 0; i < top_blocks; ++i)
5483 bm::word_t** blk_blk_arg = (i < arg_top_blocks) ? blk_root_arg[i] : 0;
5484 if (!blk_blk || !blk_blk_arg)
5489 blockman_.deallocate_top_subblock(i);
5493 blk_blk = blockman_.check_alloc_top_subblock(i);
5500 #if defined(BM64_AVX2) || defined(BM64_AVX512)
5501 if (!avx2_test_all_zero_wave(blk_blk + j))
5509 #elif defined(BM64_SSE4)
 5528template<class Alloc>
5533 if (!blockman_.is_init())
5537 blockman_.init_tree();
5540 unsigned top_blocks = blockman_.top_block_size();
5541 unsigned arg_top_blocks = bv.blockman_.top_block_size();
5543 if (arg_top_blocks > top_blocks)
5544 top_blocks = blockman_.reserve_top_blocks(arg_top_blocks);
5546 if (size_ < bv.size_)
5550 blockman_.reserve_top_blocks(arg_top_blocks);
5551 top_blocks = blockman_.top_block_size();
5554 if (size_ > bv.size_)
5559 if (arg_top_blocks < top_blocks)
5562 top_blocks = arg_top_blocks;
5567 bm::word_t*** blk_root = blockman_.top_blocks_root();
5568 unsigned block_idx = 0;
5572 top_blocks = blockman_.top_block_size();
5573 if (top_blocks < bv.blockman_.top_block_size())
5577 top_blocks = bv.blockman_.top_block_size();
5581 for (i = 0; i < top_blocks; ++i)
 5591         const bm::word_t* const* bvbb = bv.blockman_.get_topblock(i);
5601 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
5619 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
5626 blockman_.zero_block(i, j);
5637 const bm::word_t* arg_blk = bv.blockman_.get_block(i, j);
 5650template<class Alloc>
5659 blockman_.clone_assign_block(i, j, arg_blk2);
5664 blockman_.clone_assign_block(i, j, arg_blk1);
5676 if (is_gap1 | is_gap2)
5678 if (is_gap1 & is_gap2)
5684 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5692 arg_block = arg_blk2;
5697 arg_block = arg_blk1;
5700 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
5708 bm::word_t* block = blockman_.borrow_tempblock();
5709 blockman_.set_block_ptr(i, j, block);
5715 blockman_.return_tempblock(block);
 5723template<class Alloc>
 5724bool bvector<Alloc>::combine_operation_block_xor(unsigned i,
5733 blockman_.clone_assign_block(i, j, arg_blk2);
5738 blockman_.clone_assign_block(i, j, arg_blk1);
 5744             blockman_.clone_assign_block(i, j, arg_blk2, true);
 5750             blockman_.clone_assign_block(i, j, arg_blk1, true);
5757 if (is_gap1 | is_gap2)
5759 if (is_gap1 & is_gap2)
5765 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5773 arg_block = arg_blk2;
5778 arg_block = arg_blk1;
5781 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
5789 bm::word_t* block = blockman_.borrow_tempblock();
5790 blockman_.set_block_ptr(i, j, block);
5795 blockman_.set_block_ptr(i, j, 0);
5796 blockman_.return_tempblock(block);
 5805template<class Alloc>
 5806bool bvector<Alloc>::combine_operation_block_and(unsigned i,
5813 if (!arg_blk1 || !arg_blk2)
5824 blockman_.clone_assign_block(i, j, arg_blk2);
5829 blockman_.clone_assign_block(i, j, arg_blk1);
5836 if (is_gap1 | is_gap2)
5838 if (is_gap1 & is_gap2)
5844 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5852 arg_block = arg_blk2;
5857 arg_block = arg_blk1;
5860 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_block);
5868 bm::word_t* block = blockman_.borrow_tempblock();
5869 blockman_.set_block_ptr(i, j, block);
5874 blockman_.set_block_ptr(i, j, 0);
5875 blockman_.return_tempblock(block);
 5884template<class Alloc>
 5885bool bvector<Alloc>::combine_operation_block_sub(unsigned i,
5896 blockman_.clone_assign_block(i, j, arg_blk1);
5907 if (is_gap1 | is_gap2)
5909 if (is_gap1 & is_gap2)
5915 blockman_.clone_gap_block(i, j, tmp_buf, res_len);
5921 bm::word_t* block = blockman_.borrow_tempblock();
5922 blockman_.set_block_ptr(i, j, block);
5927 blockman_.set_block_ptr(i, j, 0);
5928 blockman_.return_tempblock(block);
5934 bm::word_t* block = blockman_.clone_assign_block(i, j, arg_blk1);
5942 bm::word_t* block = blockman_.borrow_tempblock();
5943 blockman_.set_block_ptr(i, j, block);
5948 blockman_.set_block_ptr(i, j, 0);
5949 blockman_.return_tempblock(block);
 5958template<class Alloc>
 5959void bvector<Alloc>::combine_operation_block_or(
 5960                 unsigned i, unsigned j,
5970 blockman_.zero_block(i, j);
5984 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
5989 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
5993 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
5994 blockman_.set_block_ptr(i, j, new_blk);
6006 blk = blockman_.clone_gap_block(arg_gap, gap);
6007 blockman_.set_block(i, j, blk, gap);
6019 blk = blockman_.alloc_.alloc_bit_block();
6021 blockman_.set_block_ptr(i, j, blk);
6029 blockman_.get_allocator().free_bit_block(blk);
 6037template<class Alloc>
 6038void bvector<Alloc>::combine_operation_block_xor(
 6039                 unsigned i, unsigned j,
6055 blockman_.set_block_ptr(i, j, 0);
6071 blk = blockman_.get_allocator().alloc_bit_block();
6073 blockman_.set_block_ptr(i, j, blk);
6089 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
6094 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
6098 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
6099 blockman_.set_block_ptr(i, j, new_blk);
6111 blk = blockman_.clone_gap_block(arg_gap, gap);
6112 blockman_.set_block(i, j, blk, gap);
6123 blk = blockman_.alloc_.alloc_bit_block();
6125 blockman_.set_block_ptr(i, j, blk);
6132 blockman_.get_allocator().free_bit_block(blk);
6133 blockman_.set_block_ptr(i, j, 0);
 6141template<class Alloc>
6142void bvector<Alloc>::combine_operation_block_and(
6164 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
6169 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
6179 blockman_.get_allocator().free_bit_block(new_blk);
6186 blockman_.get_allocator().free_gap_block(gap_blk, blockman_.glen());
6187 blockman_.set_block_ptr(i, j, new_blk);
6198 blockman_.zero_block(i, j);
6206 bm::word_t* new_blk = blockman_.clone_gap_block(arg_gap, is_new_gap);
6210 blockman_.set_block_ptr(i, j, new_blk);
6218 blockman_.zero_block(i, j);
6228 bm::word_t* new_blk = blockman_.get_allocator().alloc_bit_block();
6230 blockman_.set_block_ptr(i, j, new_blk);
6237 blockman_.get_allocator().free_bit_block(blk);
6238 blockman_.set_block_ptr(i, j, 0);
 6244template<class Alloc>
6245void bvector<Alloc>::combine_operation_block_sub(
6252 blockman_.zero_block(i, j);
6271 BM_ASSERT(!(res == tmp_buf && res_len == 0));
6273 blockman_.assign_gap_check(i, j, res, ++res_len, blk, tmp_buf);
6277 blk = blockman_.convert_gap2bitset(i, j, gap_blk);
6289 blockman_.zero_block(i, j);
6304 if (!dst || !arg_blk)
6308 if (ret && ret == arg_blk)
6310 ret = blockman_.get_allocator().alloc_bit_block();
6316 blockman_.set_block_ptr(i, j, ret);
6318 blockman_.get_allocator().free_bit_block(dst);
 6324template<class Alloc>
6339 if (!blk && arg_gap)
 6341             blk = blockman_.clone_gap_block(BMGAP_PTR(arg_blk), gap);
6342 blockman_.set_block(nb, blk, gap);
6361 BM_ASSERT(!(res == tmp_buf && res_len == 0));
6366 blockman_.zero_block(nb);
6368 blockman_.assign_gap(nb, res, ++res_len, blk, tmp_buf);
6381 blockman_.zero_block(nb);
6390 blk = blockman_.convert_gap2bitset(nb, gap_blk);
6427 if (dst == 0 && arg_blk == 0)
6439 ret = blockman_.get_allocator().alloc_bit_block();
 6461             } while (wrd_ptr < wrd_end);
6471 ret = blockman_.get_allocator().alloc_bit_block();
6478 if (ret && ret == arg_blk)
6480 ret = blockman_.get_allocator().alloc_bit_block();
6491 blockman_.set_block(nb, ret);
6494 blockman_.get_allocator().free_bit_block(dst);
 6501template<class Alloc>
 6502void bvector<Alloc>::set_range_no_check(size_type left,
6528 gap_init_range_block<gap_word_t>(tmp_gap_blk,
6533 block = blockman_.get_block_ptr(i, j);
6540 if (nblock_left == nblock_right)
6552 blockman_.set_all_set(nb, nb_to-1);
6555 if (nb_to > nblock_right)
6559 block = blockman_.get_block_ptr(i, j);
6561 gap_init_range_block<gap_word_t>(tmp_gap_blk,
 6575template<class Alloc>
 6576void bvector<Alloc>::clear_range_no_check(size_type left,
6603 bm::gap_init_range_block<gap_word_t>(tmp_gap_blk,
6608 block = blockman_.get_block_ptr(i, j);
6616 if (nblock_left == nblock_right)
6618 nb = nblock_left + 1;
6628 blockman_.set_all_zero(nb, nb_to - 1u);
6631 if (nb_to > nblock_right)
6635 block = blockman_.get_block_ptr(i, j);
6636 gap_init_range_block<gap_word_t>(tmp_gap_blk,
 6652template<class Alloc>
 6657     if (!bvect.blockman_.is_init())
6663 if (blockman_.is_init())
6665 blockman_.deinit_tree();
 6670     copy_range_no_check(bvect, left, right);
 6675template<class Alloc>
6683 clear_range_no_check(0, left - 1);
6689 if (found && (last > right))
6690 clear_range_no_check(right + 1, last);
 6697template<class Alloc>
 6698void bvector<Alloc>::copy_range_no_check(const bvector<Alloc>& bvect,
 6709     blockman_.copy(bvect.blockman_, nblock_left, nblock_right);
6715 clear_range_no_check(from, left-1);
6721 if (found && (last > right))
6722 clear_range_no_check(right+1, last);
 6729template<class Alloc>
6733 throw std::bad_alloc();
6735 BM_THROW(BM_ERR_BADALLOC);
 6743template<class Alloc>
 6749     if (this->block_type_)
 6759             unsigned val = *(++(bdescr->gap_.ptr));
6760 this->position_ += val - prev;
 6765             val = *(++(bdescr->gap_.ptr));
 6773         unsigned short idx = ++(bdescr->bit_.idx);
6774 if (idx < bdescr->bit_.cnt)
6782 if (decode_bit_group(bdescr))
6786 if (search_in_blocks())
 6796template<class Alloc>
6802 return this->valid();
6807 switch (this->block_type_)
 6812         unsigned short idx = ++(bdescr->bit_.idx);
6813 if (idx < bdescr->bit_.cnt)
 6822             if (!decode_bit_group(bdescr, rank))
 6837             unsigned int val = *(++(bdescr->gap_.ptr));
6839 this->position_ += val - prev;
 6844                 val = *(++(bdescr->gap_.ptr));
6855 if (!search_in_blocks())
6862 return this->valid();
 6869template<class Alloc>
6875 return this->valid();
6878 size_type new_pos = this->bv_->check_or_next(pos);
6888 this->position_ = pos;
6891 this->bv_->get_blocks_manager();
6894 this->block_ = bman.get_block(i0, j0);
 6898     this->block_type_ = (bool)BM_IS_GAP(this->block_);
6903 if (this->block_type_)
6906 search_in_gapblock();
6908 if (this->position_ == pos)
6909 return this->valid();
6910 this->position_ = pos;
 6917         bdescr->gap_.ptr = gptr + gpos;
6933 search_in_bitblock();
6934 return this->valid();
 6941     bdescr->bit_.ptr = this->block_ + (nword - parity);
6947 nbit += 32 * parity;
 6948     for (unsigned i = 0; i < bdescr->bit_.cnt; ++i)
6951 return this->valid();
6956 return this->valid();
 6961template<class Alloc>
6967 if (!bman->is_init())
6973 bm::word_t*** blk_root = bman->top_blocks_root();
6974 this->block_idx_ = this->position_= 0;
6977 for (i = 0; i < bman->top_block_size(); ++i)
6992 this->block_ = blk_blk[j];
6993 if (this->block_ == 0)
7000 this->block_type_ = 1;
7001 if (search_in_gapblock())
7008 this->block_type_ = 0;
7009 if (search_in_bitblock())
 7020template<class Alloc>
7025 if (bdescr->bit_.cnt)
7027 bdescr->bit_.idx = 0;
 7035template<class Alloc>
 7037bvector<Alloc>::enumerator::decode_bit_group(block_descr_type* bdescr) BMNOEXCEPT
7040 for (; bdescr->bit_.ptr < block_end;)
7042 if (decode_wave(bdescr))
7044 bdescr->bit_.pos = this->position_;
7045 this->position_ += bdescr->bit_.bits[0];
 7056template<class Alloc>
7058bvector<Alloc>::enumerator::decode_bit_group(block_descr_type* bdescr,
7062 for (; bdescr->bit_.ptr < block_end;)
7075 if (decode_wave(bdescr))
7077 bdescr->bit_.pos = this->position_;
7078 this->position_ += bdescr->bit_.bits[0];
 7090template<class Alloc>
 7091bool bvector<Alloc>::enumerator::search_in_bitblock() BMNOEXCEPT
7095 block_descr_type* bdescr = &(this->bdescr_);
7096 bdescr->bit_.ptr = this->block_;
7097 return decode_bit_group(bdescr);
 7102template<class Alloc>
 7103bool bvector<Alloc>::enumerator::search_in_gapblock() BMNOEXCEPT
 7107     block_descr_type* bdescr = &(this->bdescr_);
 7108     bdescr->gap_.ptr = BMGAP_PTR(this->block_);
7109 unsigned bitval = *(bdescr->gap_.ptr) & 1;
7111 ++(bdescr->gap_.ptr);
7115 unsigned val = *(bdescr->gap_.ptr);
 7119         if (bdescr->gap_.ptr == first)
 7121             bdescr->gap_.gap_len = (gap_word_t)(val + 1);
7125 bdescr->gap_.gap_len =
7130 this->position_ += val + 1;
7134 ++(bdescr->gap_.ptr);
 7141template<class Alloc>
 7142bool bvector<Alloc>::enumerator::search_in_blocks() BMNOEXCEPT
7144 ++(this->block_idx_);
7148 bm::word_t*** blk_root = bman.top_blocks_root();
7149 for (; i < top_block_size; ++i)
7157 for (++i; i < top_block_size; ++i)
7164 this->block_idx_ = bn;
7165 this->position_ = pos;
7166 if ((i < top_block_size) && blk_root[i])
7177 this->block_ = blk_blk[j];
7178 if (this->block_ == 0)
 7183         this->block_type_ = BM_IS_GAP(this->block_);
7184 if (this->block_type_)
7186 if (search_in_gapblock())
7193 if (search_in_bitblock())
7209#pragma warning( pop )
#define BM_DECLARE_TEMP_BLOCK(x)
Default SIMD friendly allocator.
Constants, tables and typedefs.
#define IS_FULL_BLOCK(addr)
#define IS_VALID_ADDR(addr)
#define BMSET_PTRGAP(ptr)
#define BLOCK_ADDR_SAN(addr)
#define BM_ASSERT_THROW(x, xerrcode)
#define FULL_BLOCK_FAKE_ADDR
#define FULL_SUB_BLOCK_REAL_ADDR
#define FULL_BLOCK_REAL_ADDR
Bit manipulation primitives (internal)
SIMD target version definitions.
#define VECT_XOR_ARR_2_MASK(dst, src, src_end, mask)
pre-processor un-defines to avoid global space pollution (internal)
Algorithms for fast aggregation of a group of bit-vectors.
Output iterator designed to set "ON" bits based on an input sequence of integers.
bulk_insert_iterator(const insert_iterator &iit)
bulk_insert_iterator & operator=(bulk_insert_iterator &&ii) BMNOEXCEPT
size_type * buf_
bulk insert buffer
bvector_type * get_bvector() const BMNOEXCEPT
bm::sort_order sorted_
sort order hint
bulk_insert_iterator(const bulk_insert_iterator &iit)
bulk_insert_iterator(bulk_insert_iterator &&iit) BMNOEXCEPT
static size_type buf_size_max() BMNOEXCEPT
bulk_insert_iterator & operator++(int)
bvector_type::size_type size_type
size_type buf_size_
current buffer size
std::output_iterator_tag iterator_category
bvector_type * bvect_
target bvector
bulk_insert_iterator & operator++()
bulk_insert_iterator(bvector< Alloc > &bvect, bm::sort_order so=BM_UNKNOWN) BMNOEXCEPT
bulk_insert_iterator & operator=(const bulk_insert_iterator &ii)
bvector_type::size_type value_type
bulk_insert_iterator() BMNOEXCEPT
bm::bvector< Alloc > bvector_type
bulk_insert_iterator & operator=(size_type n)
bulk_insert_iterator & operator*()
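A minimal usage sketch for bulk_insert_iterator, based only on the members listed above (construction from a bvector with a bm::sort_order hint, operator=(size_type) to buffer a bit index). That the destructor flushes any remaining buffered indices is an assumption of this sketch, not something stated in this list.

    #include "bm.h"   // assumed include path

    int main()
    {
        bm::bvector<> bv;
        {
            // buffers indices and sets them in bulk; BM_SORTED hints that input is ascending
            bm::bvector<>::bulk_insert_iterator iit(bv, bm::BM_SORTED);
            for (bm::bvector<>::size_type i = 0; i < 100000; i += 2)
                iit = i;            // operator=(size_type n) buffers the bit index
        }                           // assumption: leaving scope flushes the buffer
        return (bv.count() == 50000) ? 0 : 1;
    }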
Constant iterator designed to enumerate "ON" bits. counted_enumerator keeps bitcount,...
counted_enumerator & operator=(const enumerator &en) BMNOEXCEPT
counted_enumerator(const enumerator &en) BMNOEXCEPT
size_type count() const BMNOEXCEPT
Number of bits ON counted from the start of the enumeration.
counted_enumerator() BMNOEXCEPT
counted_enumerator operator++(int)
std::input_iterator_tag iterator_category
counted_enumerator & operator++() BMNOEXCEPT
Constant iterator designed to enumerate "ON" bits.
std::input_iterator_tag iterator_category
void go_first() BMNOEXCEPT
Position enumerator to the first available bit.
size_type value() const BMNOEXCEPT
Get current position (value)
size_type operator*() const BMNOEXCEPT
Get current position (value)
enumerator operator++(int) BMNOEXCEPT
Advance enumerator forward to the next available bit. Possibly do NOT use this operator; it is slower ...
enumerator & operator++() BMNOEXCEPT
Advance enumerator forward to the next available bit.
bool go_to(size_type pos) BMNOEXCEPT
go to a specific position in the bit-vector (or next)
enumerator(const bvector< Alloc > *bv) BMNOEXCEPT
Construct enumerator associated with a vector. This construction creates unpositioned iterator with s...
bool skip(size_type rank) BMNOEXCEPT
Skip specified number of bits from enumeration.
bool go_up() BMNOEXCEPT
Advance enumerator to the next available bit.
bool skip_to_rank(size_type rank) BMNOEXCEPT
Skip to specified relative rank.
bool advance() BMNOEXCEPT
enumerator(const bvector< Alloc > *bv, size_type pos) BMNOEXCEPT
Construct enumerator for bit vector.
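A short sketch of enumerating ON bits with the enumerator members listed above (first(), valid(), operator*, operator++); only the include path is assumed.

    #include <iostream>
    #include "bm.h"

    int main()
    {
        bm::bvector<> bv { 1, 10, 100, 1000 };          // brace constructor
        for (bm::bvector<>::enumerator en = bv.first(); en.valid(); ++en)
            std::cout << *en << " ";                    // *en yields the bit position
        std::cout << std::endl;
        return 0;
    }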
Output iterator designed to set "ON" bits based on an input sequence of integers (bit indices).
insert_iterator & operator*()
insert_iterator & operator++(int)
bvector_type * get_bvector() const
insert_iterator(const insert_iterator &iit)
insert_iterator(bvector< Alloc > &bvect) BMNOEXCEPT
insert_iterator() BMNOEXCEPT
insert_iterator & operator++()
bm::bvector< Alloc > bvector_type
insert_iterator & operator=(const insert_iterator &ii)
insert_iterator & operator=(size_type n)
std::output_iterator_tag iterator_category
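A sketch showing insert_iterator used as a standard output iterator: inserter() is taken from the member list above, std::copy comes from <algorithm>; nothing else is assumed.

    #include <algorithm>
    #include <vector>
    #include "bm.h"

    int main()
    {
        std::vector<bm::bvector<>::size_type> idx { 5, 15, 25, 35 };
        bm::bvector<> bv;
        std::copy(idx.begin(), idx.end(), bv.inserter());  // each index becomes an ON bit
        return (bv.count() == 4) ? 0 : 1;
    }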
Base class for all iterators.
size_type position_
Bit position (bit idx)
iterator_base() BMNOEXCEPT
bool valid() const BMNOEXCEPT
Checks if iterator is still valid.
bool operator!=(const iterator_base &it) const BMNOEXCEPT
unsigned block_type_
Type of block. 0-Bit, 1-GAP.
bool compare_state(const iterator_base &ib) const BMNOEXCEPT
Compare FSMs for testing purposes.
union bm::bvector::iterator_base::block_descr bdescr_
void invalidate() BMNOEXCEPT
Turns iterator into an invalid state.
bool operator<(const iterator_base &it) const BMNOEXCEPT
bm::bvector< Alloc > * bv_
Pointer on parent bitvector.
bool operator==(const iterator_base &it) const BMNOEXCEPT
const bm::word_t * block_
Block pointer.(NULL-invalid)
bool operator>=(const iterator_base &it) const BMNOEXCEPT
bool operator>(const iterator_base &it) const BMNOEXCEPT
bool operator<=(const iterator_base &it) const BMNOEXCEPT
block_idx_type block_idx_
Block index.
void assign_if_not_set(allocator_pool_type &pool, bvector< Alloc > &bv) BMNOEXCEPT
check if vector has no assigned allocator and set one
mem_pool_guard(allocator_pool_type &pool, bvector< Alloc > &bv) BMNOEXCEPT
mem_pool_guard() BMNOEXCEPT
Class reference implements an object for bit assignment.
bool operator==(const reference &ref) const BMNOEXCEPT
bool operator~() const BMNOEXCEPT
const reference & operator|=(bool value) const
const reference & operator=(const reference &ref) const
const reference & operator^=(bool value) const
const reference & operator=(bool value) const BMNOEXCEPT
const reference & operator&=(bool value) const
reference(const reference &ref) BMNOEXCEPT
bool operator!() const BMNOEXCEPT
reference(bvector< Alloc > &bv, size_type position) BMNOEXCEPT
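A sketch of the reference proxy returned by the non-const operator[], using only the proxy operators listed above (operator=, operator&=, implicit bool conversion).

    #include "bm.h"

    int main()
    {
        bm::bvector<> bv;
        bv[10] = true;          // non-const operator[] returns a reference proxy
        bv[20] = true;
        bv[10] &= false;        // AND-assign through the proxy clears bit 10
        bool b20 = bv[20];      // const operator[] reads the bit
        return (!bv.test(10) && b20) ? 0 : 1;
    }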
Bit-vector container with runtime compression of bits.
bvector< Alloc > & reset()
Clears every bit in the bitvector.
void import_block(const size_type *ids, block_idx_type nblock, size_type start, size_type stop)
optmode
Optimization mode. Every next level means additional checks (better compression vs. time).
@ opt_compress
compress blocks when possible (GAP/prefix sum)
@ opt_free_01
Free unused 0 and 1 blocks.
@ opt_free_0
Free unused 0 blocks.
@ opt_none
no optimization
bool find(size_type &pos) const BMNOEXCEPT
Finds index of first 1 bit.
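A sketch of the search members listed here (find, find_reverse, get_next); the include path is assumed.

    #include "bm.h"

    int main()
    {
        bm::bvector<> bv { 100, 200, 300 };
        bm::bvector<>::size_type first_pos, last_pos;
        bool f1 = bv.find(first_pos);                       // first ON bit -> 100
        bool f2 = bv.find_reverse(last_pos);                // last ON bit  -> 300
        bm::bvector<>::size_type next = bv.get_next(100);   // next ON bit after 100 -> 200
        return (f1 && f2 && first_pos == 100 && last_pos == 300 && next == 200) ? 0 : 1;
    }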
bvector & operator=(bvector< Alloc > &&bvect) BMNOEXCEPT
Move assignment operator.
bm::bvector< Alloc > & bit_and(const bm::bvector< Alloc > &bv, optmode opt_mode=opt_none)
2 operand logical AND
void operator&=(const bvector< Alloc > &bv)
void set_gap_levels(const gap_word_t *glevel_len)
Sets new GAP lengths table. All GAP blocks will be reallocated to match the new scheme.
bvector(strategy strat=BM_BIT, const gap_word_t *glevel_len=bm::gap_len_table< true >::_len, size_type bv_size=bm::id_max, const Alloc &alloc=Alloc())
Constructs bvector class.
bool test(size_type n) const BMNOEXCEPT
Returns true if bit n is set and false if bit n is 0.
allocator_pool_type * get_allocator_pool() BMNOEXCEPT
Get current allocator pool (if set).
bvector(bvector< Alloc > &&bvect) BMNOEXCEPT
Move constructor.
void merge(bm::bvector< Alloc > &bvect)
Merge/move content from another vector.
bool equal(const bvector< Alloc > &bvect) const BMNOEXCEPT
Equal comparison with an argument bit-vector.
bvector< Alloc > & invert()
Invert/NEG all bits. Note that invert is affected by size(): if size is set, it only inverts...
bool set_bit_and(size_type n, bool val=true)
Sets bit n using bit AND with the provided value.
allocator_type::allocator_pool_type allocator_pool_type
size_type size() const BMNOEXCEPT
Returns bvector's capacity (number of bits it can store)
size_type count() const BMNOEXCEPT
population count (count of ON bits)
void combine_operation_xor(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation XOR
bool clear_bit(size_type n)
Clears bit n.
bool operator[](size_type n) const BMNOEXCEPT
bool is_init() const BMNOEXCEPT
Return true if bvector is initialized at all.
static void throw_bad_alloc()
size_type get_next(size_type prev) const BMNOEXCEPT
Finds the position of the next ON bit.
bool insert(size_type n, bool value)
Insert bit into specified position All the vector content after insert position is shifted right.
bm::bvector< Alloc > & bit_or(const bm::bvector< Alloc > &bv)
2 operand logical OR
bvector< Alloc > operator~() const
bm::bvector< Alloc > & bit_and(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode)
3-operand AND : this := bv1 AND bv2
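A sketch of the 2- and 3-operand set algebra listed above; opt_compress is the optmode enumerator documented earlier in this list.

    #include "bm.h"

    int main()
    {
        bm::bvector<> bv1 { 1, 2, 3, 100 };
        bm::bvector<> bv2 { 2, 3, 200 };

        bm::bvector<> bv_and;
        bv_and.bit_and(bv1, bv2, bm::bvector<>::opt_compress);  // bv_and := bv1 AND bv2

        bv1.bit_or(bv2);                                        // 2-operand: bv1 |= bv2
        return (bv_and.count() == 2 && bv1.count() == 5) ? 0 : 1;
    }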
void sync_size()
Synchronize size if it got extended due to bulk import.
void clear(bool free_mem=false)
Clears every bit in the bitvector.
bool find_range(size_type &first, size_type &last) const BMNOEXCEPT
Finds dynamic range of bit-vector [first, last].
bvector< Alloc > & set(size_type n, bool val=true)
Sets bit n if val is true, clears bit n if val is false.
bool any_range(size_type left, size_type right) const BMNOEXCEPT
Returns true if any bits in the range are 1s (non-empty interval). Function uses closed interval [left...
void resize(size_type new_size)
Change size of the bvector.
reference operator[](size_type n)
void set_allocator_pool(allocator_pool_type *pool_ptr) BMNOEXCEPT
Set allocator pool for local (non-threaded) memory-cyclic (lots of alloc-free ops) operations.
bvector(const bvector< Alloc > &bvect)
Copy constructor.
strategy get_new_blocks_strat() const BMNOEXCEPT
Returns blocks allocation strategy.
void optimize(bm::word_t *temp_block=0, optmode opt_mode=opt_compress, statistics *stat=0)
Optimize bitvector's memory allocation.
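A sketch of optimize() with a temporary block and statistics. BM_DECLARE_TEMP_BLOCK and the statistics fields memory_used and max_serialize_mem appear elsewhere in this header; the exact values reported are whatever the library computes.

    #include <iostream>
    #include "bm.h"

    int main()
    {
        bm::bvector<> bv;
        bv.set_range(0, 100000);                   // a long run compresses well into GAP form

        BM_DECLARE_TEMP_BLOCK(tb)                  // scratch block reused by optimize()
        bm::bvector<>::statistics st;
        bv.optimize(tb, bm::bvector<>::opt_compress, &st);

        std::cout << "memory used: " << st.memory_used
                  << ", max serialize mem: " << st.max_serialize_mem << std::endl;
        return 0;
    }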
void forget_count() BMNOEXCEPT
bm::bvector< Alloc > & bit_or(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode)
3-operand OR : this := bv1 OR bv2
size_type count_to_test(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
popcount in [0..right] range if test(right) == true
void set_new_blocks_strat(strategy strat)
Sets new blocks allocation strategy.
size_type rank(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
Returns rank of specified bit position (same as count_to())
bvector(std::initializer_list< size_type > il)
Brace constructor.
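The brace constructor in one line; every listed index becomes an ON bit.

    #include "bm.h"

    int main()
    {
        bm::bvector<> bv { 0, 7, 63, 1000000 };   // initializer-list construction
        return (bv.count() == 4 && bv.test(63)) ? 0 : 1;
    }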
bvector< Alloc > & flip()
Flips all bits.
bool any() const BMNOEXCEPT
Returns true if any bits in this bitset are set, and otherwise returns false.
bool set_bit(size_type n, bool val=true)
Sets bit n.
insert_iterator inserter()
blocks_manager< Alloc > blocks_manager_type
void keep_range(size_type left, size_type right)
Sets all bits to zero outside of the closed interval [left,right]. Expected result: 00000....
const blocks_manager_type & get_blocks_manager() const BMNOEXCEPT
get access to memory manager (internal) Use only if you are BitMagic library
void operator-=(const bvector< Alloc > &bv)
bool is_all_one_range(size_type left, size_type right) const BMNOEXCEPT
Returns true if all bits in the range are 1s (saturated interval). Function uses closed interval [left...
bool select(size_type rank, size_type &pos, const rs_index_type &rs_idx) const BMNOEXCEPT
select bit-vector position for the specified rank(bitcount)
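A sketch of rank/select with the rank-select index; rs_index_type, build_rs_index(), rank() and select() are all taken from this member list.

    #include "bm.h"

    int main()
    {
        bm::bvector<> bv { 10, 20, 30, 40 };

        bm::bvector<>::rs_index_type rs_idx;
        bv.build_rs_index(&rs_idx);               // required before rank()/select()

        bm::bvector<>::size_type pos;
        bool found = bv.select(3, pos, rs_idx);             // position of the 3rd ON bit -> 30
        bm::bvector<>::size_type r = bv.rank(30, rs_idx);   // popcount in [0..30] -> 3
        return (found && pos == 30 && r == 3) ? 0 : 1;
    }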
Alloc get_allocator() const
void clear_range(size_type left, size_type right)
Sets all bits to zero in the specified closed interval [left,right]. Interval must be inside the bvect...
void combine_operation_and(const bm::bvector< Alloc > &bvect, optmode opt_mode)
perform a set-algebra operation AND
bool find_first_mismatch(const bvector< Alloc > &bvect, size_type &pos, size_type search_to=bm::id_max) const BMNOEXCEPT
Find index of first bit different between this and the argument vector.
enumerator first() const
Returns enumerator pointing on the first non-zero bit.
enumerator end() const
Returns enumerator pointing on the next bit after the last.
bvector< Alloc > & set()
Sets every bit in this bitset to 1.
bool shift_left()
Shift left by 1 bit, fill with zero, return carry out.
void swap(bvector< Alloc > &bvect) BMNOEXCEPT
Exchanges content of bv and this bvector.
size_type count_range(size_type left, size_type right, const rs_index_type &rs_idx) const BMNOEXCEPT
Returns count of 1 bits in the given range [left..right] Uses rank-select index to accelerate the sea...
size_type get_first() const BMNOEXCEPT
find first 1 bit in vector. Function may return 0 and this requires an extra check if bit 0 is actual...
bool find_rank(size_type rank, size_type from, size_type &pos) const BMNOEXCEPT
Find bit-vector position for the specified rank(bitcount)
bm::bvector< Alloc > & bit_sub(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode)
3-operand SUB : this := bv1 MINUS bv2. Subtraction is also known as AND NOT.
void optimize_gap_size()
Optimize sizes of GAP blocks.
void init()
Explicit post-construction initialization.
bool shift_right()
Shift right by 1 bit, fill with zero, return carry out.
bvector< Alloc > & set_range(size_type left, size_type right, bool value=true)
Sets all bits in the specified closed interval [left,right]. Interval must be inside the bvector's siz...
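A sketch combining the closed-interval operations listed here (set_range, clear_range, keep_range).

    #include "bm.h"

    int main()
    {
        bm::bvector<> bv;
        bv.set_range(10, 20);       // [10..20] ON            -> 11 bits
        bv.clear_range(12, 14);     // clear [12..14]         -> 8 bits
        bv.keep_range(10, 13);      // zero outside [10..13]  -> bits 10 and 11 remain
        return (bv.count() == 2) ? 0 : 1;
    }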
void erase(size_type n)
Erase bit in the specified position All the vector content after erase position is shifted left.
size_type extract_next(size_type prev)
Finds the position of the next ON bit and sets it to 0.
void copy_range(const bvector< Alloc > &bvect, size_type left, size_type right)
Copy all bits in the specified closed interval [left,right].
bm::bvector< Alloc > & bit_xor(const bm::bvector< Alloc > &bv)
2 operand logical XOR
enumerator get_enumerator(size_type pos) const
Returns enumerator pointing on specified or the next available bit.
bm::bvector< Alloc > & bit_sub(const bm::bvector< Alloc > &bv)
2 operand logical SUB(AND NOT). Also known as MINUS.
size_type rank_corrected(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
Returns rank corrected by the requested border value (as -1).
void import(const size_type *ids, size_type ids_size, bm::sort_order sorted_idx)
Import integers (set bits).
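A sketch of the batch-index import listed above, followed by sync_size(); that import() is publicly callable is assumed here from its presence in this member list.

    #include "bm.h"

    int main()
    {
        bm::bvector<>::size_type ids[] = { 1, 2, 3, 10, 100, 1000 };
        bm::bvector<> bv;
        bv.import(ids, sizeof(ids) / sizeof(ids[0]), bm::BM_SORTED);  // indices pre-sorted
        bv.sync_size();     // extend size() if the bulk import went past it
        return (bv.count() == 6) ? 0 : 1;
    }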
bvector & operator=(const bvector< Alloc > &bvect)
Copy assignment operator.
void combine_operation_or(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation OR
blocks_manager_type::block_idx_type block_idx_type
void operator^=(const bvector< Alloc > &bv)
void operator|=(const bvector< Alloc > &bv)
bvector< Alloc > & flip(size_type n)
Flips bit n.
void clear(const size_type *ids, size_type ids_size, bm::sort_order so=bm::BM_UNKNOWN)
clear list of bits in this bitset
block_idx_type count_blocks(unsigned *arr) const BMNOEXCEPT
Computes bitcount values for all bvector blocks.
rs_index< allocator_type > rs_index_type
bvector(const bvector< Alloc > &bvect, size_type left, size_type right)
Copy constructor for range copy [left..right].
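Range-copy construction sketch (bv stands for an existing source vector):

    bm::bvector<> bv_slice(bv, 100, 200);   // holds only the source bits from [100..200]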
void combine_operation_sub(const bm::bvector< Alloc > &bvect)
perform a set-algebra operation MINUS (AND NOT)
int compare(const bvector< Alloc > &bvect) const BMNOEXCEPT
Lexicographical comparison with a bitvector.
blocks_manager_type & get_blocks_manager() BMNOEXCEPT
get access to the memory manager (internal). Intended for BitMagic library internals only.
size_type count_to(size_type n, const rs_index_type &rs_idx) const BMNOEXCEPT
Returns count of 1 bits (population) in [0..right] range.
rs_index< allocator_type > blocks_count
bool none() const BMNOEXCEPT
Returns true if no bits are set, otherwise returns false.
bool set_bit_conditional(size_type n, bool val, bool condition)
Sets bit n only if current value equals the condition.
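Sketch: set bit 42 to 1 only if it is currently 0; the return value is assumed here to report whether the bit actually changed:

    bool changed = bv.set_bit_conditional(42, true, false);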
bool find_reverse(size_type &pos) const BMNOEXCEPT
Finds last index of 1 bit.
void build_rs_index(rs_index_type *rs_idx, bvector< Alloc > *bv_blocks=0) const
compute running total of all blocks in bit vector (rank-select index)
void combine_operation_with_block(block_idx_type nb, const bm::word_t *arg_blk, bool arg_gap, bm::operation opcode)
void clear_bit_no_check(size_type n)
Clears bit n without precondition checks.
bm::bvector< Alloc > & bit_xor(const bm::bvector< Alloc > &bv1, const bm::bvector< Alloc > &bv2, typename bm::bvector< Alloc >::optmode opt_mode)
3-operand XOR : this := bv1 XOR bv2
bvector(size_type bv_size, strategy strat=BM_BIT, const gap_word_t *glevel_len=bm::gap_len_table< true >::_len, const Alloc &alloc=Alloc())
Constructs bvector class.
void combine_operation(const bm::bvector< Alloc > &bvect, bm::operation opcode)
perform a set-algebra operation by operation code
void keep(const size_type *ids, size_type ids_size, bm::sort_order so=bm::BM_UNKNOWN)
Keep list of bits in this bitset, others are cleared.
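Sketch of list-driven keep/clear (the ids array is hypothetical; unsorted input can pass bm::BM_UNKNOWN):

    bm::bvector<>::size_type ids[] = {10, 30};
    bv.keep(ids, 2, bm::BM_SORTED);    // keep only bits 10 and 30, clear all others
    bv.clear(ids, 2, bm::BM_SORTED);   // now clear those two bits as well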
bool get_bit(size_type n) const BMNOEXCEPT
returns true if bit n is set and false if bit n is 0.
void calc_stat(struct bm::bvector< Alloc >::statistics *st) const BMNOEXCEPT
Calculates bitvector statistics.
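Statistics/optimization sketch (the statistics members such as memory_used and max_serialize_mem are listed near the end of this section):

    bm::bvector<>::statistics st;
    bv.calc_stat(&st);
    // inspect st.memory_used, st.max_serialize_mem, ...
    bv.optimize();   // recompress blocks after heavy editing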
size_type recalc_count() BMNOEXCEPT
void move_from(bvector< Alloc > &bvect) BMNOEXCEPT
Move bvector content from another bvector.
bool inc(size_type n)
Increment the specified element.
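inc() behaves like adding 1 to a single bit; in this sketch the return value is read as the carry-out, i.e. the previous bit value (an assumption based on the increment semantics):

    bm::bvector<> bv;
    bool carry1 = bv.inc(7);   // bit 7: 0 -> 1, carry1 == false
    bool carry2 = bv.inc(7);   // bit 7: 1 -> 0, carry2 == true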
void set_bit_no_check(size_type n)
Set bit without checking preconditions (size, etc)
Deserializer, performs logical operations between bit-vector and serialized bit-vector.
Encoding utilities for serialization (internal)
BMFORCEINLINE bool sse42_test_all_zero_wave2(const void *ptr0, const void *ptr1)
check if 2 waves of pointers are all NULL
BMFORCEINLINE bool sse42_test_all_zero_wave(const void *ptr)
check if wave of pointers is all NULL
BMFORCEINLINE bool sse42_test_all_eq_wave2(const void *ptr0, const void *ptr1)
check if wave of 2 pointers are the same (null or FULL)
bm::id_t bit_block_calc_count_range(const bm::word_t *block, bm::word_t left, bm::word_t right) BMNOEXCEPT
unsigned bit_block_find(const bm::word_t *BMRESTRICT block, unsigned nbit, unsigned *BMRESTRICT pos) BMNOEXCEPT
Searches for the next 1 bit in the BIT block.
bool bit_block_or_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2) BMNOEXCEPT
2 way (target := source1 | source2) bitblock OR operation.
void bit_block_copy(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Bitblock copy operation.
bm::id64_t bit_block_sub_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2, bm::id64_t digest) BMNOEXCEPT
digest based bitblock SUB (AND NOT) operation (3 operand)
bm::id64_t bit_block_and(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock AND operation. Function does not analyse availability of source and destination blocks.
bm::word_t * bit_operation_xor(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock XOR operation.
bool bit_block_shift_r1_unr(bm::word_t *BMRESTRICT block, bm::word_t *BMRESTRICT empty_acc, bm::word_t co_flag) BMNOEXCEPT
Right bit-shift of bit-block by 1 bit (loop unrolled)
BMFORCEINLINE unsigned word_bitcount64(bm::id64_t x) BMNOEXCEPT
bm::id_t bit_block_calc_count_to(const bm::word_t *block, bm::word_t right) BMNOEXCEPT
void bit_block_erase(bm::word_t *block, unsigned bitpos, bool carry_over) BMNOEXCEPT
erase bit from position and shift the rest right with carryover
bm::word_t * bit_operation_or(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Block OR operation. Makes analysis if block is 0 or FULL.
bool bit_block_shift_l1_unr(bm::word_t *block, bm::word_t *empty_acc, bm::word_t co_flag) BMNOEXCEPT
Left bit-shift of bit-block by 1 bit (loop unrolled)
bm::id64_t calc_block_digest0(const bm::word_t *const block) BMNOEXCEPT
Compute digest for 64 non-zero areas.
bm::id64_t bit_block_and_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2, bm::id64_t digest) BMNOEXCEPT
digest based bit-block AND
bm::word_t * bit_operation_sub(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock SUB operation.
unsigned short bitscan_wave(const bm::word_t *BMRESTRICT w_ptr, unsigned char *BMRESTRICT bits) BMNOEXCEPT
Unpacks word wave (Nx 32-bit words)
bool bit_find_first(const bm::word_t *BMRESTRICT block, unsigned *BMRESTRICT pos) BMNOEXCEPT
BIT block find the first set bit.
void bit_invert(T *start) BMNOEXCEPT
int bitcmp(const T *buf1, const T *buf2, unsigned len) BMNOEXCEPT
Lexicographical comparison of BIT buffers.
bm::id64_t bit_block_xor(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock XOR operation. Function does not analyse availability of source and destination blocks.
bm::word_t * bit_operation_and(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock AND operation.
void bit_block_set(bm::word_t *BMRESTRICT dst, bm::word_t value) BMNOEXCEPT
Bitblock memset operation.
bool bit_block_or(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock OR operation. Function does not analyse availability of source and destination blocks.
bool bit_is_all_zero(const bm::word_t *BMRESTRICT start) BMNOEXCEPT
Returns "true" if all bits in the block are 0.
unsigned bit_find_last(const bm::word_t *BMRESTRICT block, unsigned *BMRESTRICT last) BMNOEXCEPT
BIT block find the last set bit (backward search)
bm::id64_t update_block_digest0(const bm::word_t *const block, bm::id64_t digest) BMNOEXCEPT
Compute digest for 64 non-zero areas based on existing digest (function revalidates zero areas)
void bit_andnot_arr_ffmask(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
bitblock AND NOT with constant ~0 mask operation.
bm::word_t bit_block_insert(bm::word_t *BMRESTRICT block, unsigned bitpos, bool value) BMNOEXCEPT
insert bit into position and shift the rest right with carryover
bm::id64_t bit_block_sub(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src) BMNOEXCEPT
Plain bitblock SUB (AND NOT) operation. Function does not analyse availability of source and destination blocks.
bm::id64_t bit_block_xor_2way(bm::word_t *BMRESTRICT dst, const bm::word_t *BMRESTRICT src1, const bm::word_t *BMRESTRICT src2) BMNOEXCEPT
2 way (target := source1 ^ source2) bitblock XOR operation.
bool is_bits_one(const bm::wordop_t *start) BMNOEXCEPT
Returns "true" if all bits in the block are 1.
sort_order
Sort order declaration.
int(* bit_visitor_callback_type)(void *handle_ptr, bm::id_t bit_idx)
Callback type to visit (callback style) bits in bit-vector(s)
strategy
Block allocation strategies.
@ BM_SORTED
input set is sorted (ascending order)
@ BM_UNKNOWN
sort order unknown
@ BM_BIT
No GAP compression strategy. All new blocks are bit blocks.
gap_word_t * gap_operation_or(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP OR operation.
unsigned gap_test_unr(const T *BMRESTRICT buf, const unsigned pos) BMNOEXCEPT
Tests if bit = pos is true. Analog of bm::gap_test with SIMD unrolling.
unsigned gap_find_last(const T *BMRESTRICT buf, unsigned *BMRESTRICT last) BMNOEXCEPT
GAP block find the last set bit.
void gap_and_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr) BMNOEXCEPT
ANDs GAP block to bitblock.
gap_word_t * gap_operation_xor(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP XOR operation.
unsigned gap_set_value(unsigned val, T *BMRESTRICT buf, unsigned pos, unsigned *BMRESTRICT is_set) BMNOEXCEPT
Sets or clears bit in the GAP buffer.
void gap_xor_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr) BMNOEXCEPT
XOR GAP block to bitblock.
bool gap_shift_r1(T *BMRESTRICT buf, unsigned co_flag, unsigned *BMRESTRICT new_len) BMNOEXCEPT
Right shift GAP block by 1 bit.
int gapcmp(const T *buf1, const T *buf2) BMNOEXCEPT
Lexicographical comparison of GAP buffers.
unsigned gap_bit_count_to(const T *const buf, T right, bool is_corrected=false) BMNOEXCEPT
Counts 1 bits in GAP buffer in the closed [0, right] range.
unsigned gap_find_first(const T *BMRESTRICT buf, unsigned *BMRESTRICT first) BMNOEXCEPT
GAP block find the first set bit.
void gap_invert(T *buf) BMNOEXCEPT
Inverts all bits in the GAP buffer.
unsigned * gap_convert_to_bitset_smart(unsigned *BMRESTRICT dest, const T *BMRESTRICT buf, id_t set_max) BMNOEXCEPT
Smart GAP block to bitblock conversion.
void gap_sub_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr) BMNOEXCEPT
SUB (AND NOT) GAP block to bitblock.
unsigned gap_block_find(const T *BMRESTRICT buf, unsigned nbit, bm::id_t *BMRESTRICT prev) BMNOEXCEPT
Searches for the next 1 bit in the GAP block.
BMFORCEINLINE bool gap_is_all_zero(const bm::gap_word_t *BMRESTRICT buf) BMNOEXCEPT
Checks if GAP block is all-zero.
bool gap_shift_l1(T *BMRESTRICT buf, unsigned co_flag, unsigned *BMRESTRICT new_len) BMNOEXCEPT
Left shift GAP block by 1 bit.
void gap_add_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT pcurr, unsigned len) BMNOEXCEPT
Adds(OR) GAP block to bitblock.
BMFORCEINLINE gap_word_t * gap_operation_and(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP AND operation.
void gap_convert_to_bitset(unsigned *BMRESTRICT dest, const T *BMRESTRICT buf) BMNOEXCEPT
GAP block to bitblock conversion.
unsigned gap_bit_count_range(const T *const buf, unsigned left, unsigned right) BMNOEXCEPT
Counts 1 bits in GAP buffer in the closed [left, right] range.
gap_word_t * gap_operation_sub(const gap_word_t *BMRESTRICT vect1, const gap_word_t *BMRESTRICT vect2, gap_word_t *BMRESTRICT tmp_buf, unsigned &dsize) BMNOEXCEPT
GAP SUB (AND NOT) operation.
bool improve_gap_levels(const T *length, const T *length_end, T *glevel_len) BMNOEXCEPT
Finds optimal gap blocks lengths.
unsigned gap_capacity(const T *BMRESTRICT buf, const T *BMRESTRICT glevel_len) BMNOEXCEPT
Returns GAP block capacity.
unsigned gap_limit(const T *BMRESTRICT buf, const T *BMRESTRICT glevel_len) BMNOEXCEPT
Returns GAP block capacity limit.
BMFORCEINLINE bm::gap_word_t gap_length(const bm::gap_word_t *BMRESTRICT buf) BMNOEXCEPT
Returns GAP block length.
const unsigned set_array_mask
bool block_any(const bm::word_t *const BMRESTRICT block) BMNOEXCEPT
Returns "true" if one bit is set in the block Function check for block varieties.
bvector< Alloc > operator|(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
const id64_t all_bits_mask
void for_each_nzblock(T ***root, unsigned size1, F &f)
void xor_swap(W &x, W &y) BMNOEXCEPT
XOR swap two scalar variables.
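xor_swap() is the classic XOR trick for two scalars; sketch (the two variables must not alias):

    unsigned a = 5, b = 9;
    bm::xor_swap(a, b);   // a == 9, b == 5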
bool block_is_all_one_range(const bm::word_t *const BMRESTRICT block, unsigned left, unsigned right) BMNOEXCEPT
Returns "true" if all bits are 1 in the block [left, right] Function check for block varieties.
const unsigned set_block_mask
bvector< Alloc > operator-(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
bool check_block_one(const bm::word_t *blk, bool deep_scan) BMNOEXCEPT
Checks if block has only 1 bits.
unsigned gap_bfind(const T *BMRESTRICT buf, unsigned pos, unsigned *BMRESTRICT is_set) BMNOEXCEPT
const unsigned set_sub_array_size
const unsigned bits_in_array
const unsigned set_total_blocks
bvector< Alloc > operator^(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
gap_word_t *(* gap_operation_func_type)(const gap_word_t *BMRESTRICT, const gap_word_t *BMRESTRICT, gap_word_t *BMRESTRICT, unsigned &)
const unsigned set_block_size_op
bool block_find_first_diff(const bm::word_t *BMRESTRICT blk, const bm::word_t *BMRESTRICT arg_blk, unsigned *BMRESTRICT pos) BMNOEXCEPT
Find first bit which is different between two blocks (GAP or bit)
const unsigned rs3_half_span
const unsigned gap_levels
bool find_not_null_ptr(bm::word_t ***arr, N start, N size, N *pos) BMNOEXCEPT
const unsigned set_word_shift
bm::id64_t idx_arr_block_lookup_u64(const bm::id64_t *idx, bm::id64_t size, bm::id64_t nb, bm::id64_t start) BMNOEXCEPT
block boundaries look ahead U64
const unsigned set_block_size
unsigned long long int id64_t
bool block_any_range(const bm::word_t *const BMRESTRICT block, unsigned left, unsigned right) BMNOEXCEPT
Returns "true" if one bit is set in the block [left, right] Function check for block varieties.
const unsigned gap_equiv_len
const unsigned rs3_border1
void set_block_bits_u32(bm::word_t *BMRESTRICT block, const unsigned *BMRESTRICT idx, unsigned start, unsigned stop) BMNOEXCEPT
set bits in a bit-block using global index
unsigned idx_arr_block_lookup_u32(const unsigned *idx, unsigned size, unsigned nb, unsigned start) BMNOEXCEPT
block boundaries look ahead U32
const unsigned short set_bitscan_wave_size
Size of bit decode wave in words.
const unsigned set_array_shift
bvector< Alloc > operator&(const bvector< Alloc > &bv1, const bvector< Alloc > &bv2)
unsigned short gap_word_t
void(* gap_operation_to_bitset_func_type)(unsigned *, const gap_word_t *)
const unsigned rs3_border0
const unsigned gap_max_bits
void get_block_coord(BI_TYPE nb, unsigned &i, unsigned &j) BMNOEXCEPT
Recalc linear bvector block index into 2D matrix coordinates.
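Sketch: split a linear block index into top-level (i) and sub-block (j) coordinates (nb is a hypothetical block number):

    bm::bvector<>::block_idx_type nb = 70000;
    unsigned i, j;
    bm::get_block_coord(nb, i, j);   // i: index into the top array, j: index inside the sub-array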
const unsigned set_top_array_size
const unsigned set_block_shift
void for_each_nzblock_range(T ***root, N top_size, N nb_from, N nb_to, F &f) BMNOEXCEPT
const unsigned set_word_mask
const unsigned bits_in_block
void set_block_bits_u64(bm::word_t *BMRESTRICT block, const bm::id64_t *BMRESTRICT idx, bm::id64_t start, bm::id64_t stop) BMNOEXCEPT
set bits in a bit-block using global index
bool for_each_nzblock_if(T ***root, BI size1, F &f) BMNOEXCEPT
SIZE_TYPE block_find_rank(const bm::word_t *const block, SIZE_TYPE rank, unsigned nbit_from, unsigned &nbit_pos) BMNOEXCEPT
Find rank in block (GAP or BIT)
Structure with statistical information about memory allocation footprint, serialization projection,...
size_t ptr_sub_blocks
Number of sub-blocks.
size_t bv_count
Number of bit-vectors.
gap_word_t gap_levels[bm::gap_levels]
GAP block lengths in the bvect.
size_t max_serialize_mem
estimated maximum memory for serialization
void reset() BMNOEXCEPT
Reset statistics.
size_t memory_used
memory usage for all blocks and service tables
allocation_policy(bm::strategy s=BM_BIT, const gap_word_t *glevels=bm::gap_len_table< true >::_len) BMNOEXCEPT
const gap_word_t * glevel_len
unsigned short idx
Current position in the bit list.
unsigned char bits[set_bitscan_wave_size *32]
bit list
size_type pos
Last bit position decoded before.
unsigned short cnt
Number of ON bits.
const bm::word_t * ptr
Word pointer.
Information about current DGAP block.
const gap_word_t * ptr
Word pointer.
gap_word_t gap_len
Current dgap length.
Statistical information about bitset's memory allocation details.
Default GAP lengths table.
static gap_operation_to_bitset_func_type gap_op_to_bit(unsigned i)
static gap_operation_func_type gap_operation(unsigned i)
dgap_descr gap_
DGAP block related info.
bitblock_descr bit_
BitBlock related info.