Bitset
A bitset is a special container class that is designed to store bits (elements with only two possible values: 0 or 1, true or false, ...).
The class is very similar to a regular array, but optimized for space allocation: each element occupies only one bit (one eighth the size of the smallest elemental type in C++: char).
Bitsets have a fixed size determined at compile time. For a similar container that also optimizes for space allocation but allows dynamic resizing, see vector<bool> (the bool specialization of vector).
#include <iostream>
#include <string>
#include <bitset>
using namespace std;
// Construction demo: the three usual ways to build a bitset —
// default (all zeros), from an unsigned long, and from a substring
// of a std::string made of '0'/'1' characters.
void construct()
{
    bitset<10> zeroed;                              // default: every bit is 0
    bitset<10> from_ulong(120ul);                   // low bits copied from the value 120
    // take 3 characters of "01011" starting at index 1 -> "101"
    bitset<10> from_string(string("01011"), 1/*position*/, 3/*no of chars*/);
    // bitset<10> bad(string("0012"));              // would throw invalid_argument: '2' is not '0'/'1'
}
// Demonstrates bitset bitwise operators: compound assignment, shifts,
// negation, equality comparison, and the binary AND/OR/XOR operators.
// The trailing comment on each line shows what is printed.
// FIX: the original declaration was `void operation` with no parameter
// list, which is a syntax error in C++; added the missing `()`.
void operation()
{
bitset<4> first (string("1001"));
bitset<4> second (string("0011"));
cout << (first^=second) << endl; // 1010 (XOR,assign)
cout << (first&=second) << endl; // 0010 (AND,assign)
cout << (first|=second) << endl; // 0011 (OR,assign)
cout << (first<<=2) << endl; // 1100 (SHL,assign)
cout << (first>>=1) << endl; // 0110 (SHR,assign)
cout << (~second) << endl; // 1100 (NOT)
cout << (second<<1) << endl; // 0110 (SHL)
cout << (second>>1) << endl; // 0001 (SHR)
cout << (first==second) << endl; // false (0110==0011)
cout << (first!=second) << endl; // true (0110!=0011)
cout << (first&second) << endl; // 0010
cout << (first|second) << endl; // 0111
cout << (first^second) << endl; // 0101
}
// Demonstrates bitset query members (any/none/test/size/count) and
// mutators (flip/reset/set, operator[]), plus string/ulong conversion.
// The trailing comment on each mutation traces the value of mybits.
// FIX: the state comments on set(2) and set(2,0) were wrong — after
// set() the bitset is 1111, so set(2) leaves it 1111 (not 0100) and
// set(2,0) clears bit 2 giving 1011 (not 0000); to_ulong() prints 11.
void misc()
{
bitset<4> mybits (string("0001"));
cout << mybits.any() << endl; // true  - at least one bit is set
cout << mybits.none() << endl; // false - some bit is set
cout << mybits.test(0) << endl; // true  - bit 0 is 1
cout << mybits.size() << endl; // 4
cout << (int)mybits.count() << endl; // 1 bit set
cout << mybits.flip(2) << endl; // 0101 (bit 2 toggled; flip returns *this)
cout << mybits.flip() << endl; // 1010 (every bit toggled)
mybits[1]=0; // 1000
mybits[2]=mybits[3]; // 1100
mybits.reset(2); // 1000
mybits.reset(); // 0000
mybits.set(); // 1111
mybits.set(2); // 1111 (bit 2 was already set)
mybits.set(2,0); // 1011 (set(pos,false) clears bit 2)
string mystring;
// C++11: to_string's template arguments are defaulted, so the old
// to_string<char,char_traits<char>,allocator<char> >() spelling is unneeded.
mystring=mybits.to_string(); // "1011"
cout << mybits.to_ulong() << endl; // 11
}