I wrote this code snippet to convert a two-state string ("+++--+-" or "yynnny") into a bitset:
#include <bitset>
#include <cstddef>
#include <string>
#include <iostream>
/// @brief Convert a two-state string into a bitset, MSB-first.
///
/// Each recognized character shifts the accumulated bits left by one;
/// the `one` character additionally sets the new low bit. Characters that
/// match neither `one` nor `zero` are ignored (no shift), preserving the
/// original function's behavior for unknown input.
///
/// @param twoState  input string, e.g. "-+--+++--+--+" or "yynnny"
/// @param one       character representing a 1 bit (default '+')
/// @param zero      character representing a 0 bit (default '-')
/// @return bitset whose low bits mirror the string, first character most
///         significant
std::bitset<70> convertTwoStateString(std::string twoState, char one = '+', char zero = '-')
{
    std::bitset<70> bits; // value-initialized to all zeros
    for (const char c : twoState)
    {
        if (c == one)
        {
            bits <<= 1;
            bits.set(0); // append a 1 bit
        }
        else if (c == zero)
        {
            bits <<= 1; // append a 0 bit
        }
        // any other character: skipped, as in the original implementation
    }
    return bits;
}
int main()
{
    // Demonstrate the conversion on a sample pattern and print all 70 bits.
    const std::string pattern{"-+--+++--+--+"};
    const auto bits = convertTwoStateString(pattern);
    std::cout << bits << '\n';
    //0000000000000000000000000000000000000000000000000000000000100111001001
    return 0;
}
I wonder if there is a more algorithmic/elegant way to do such a conversion.
>Solution:
The std::bitset constructor has an overload that lets you specify which characters represent 0 and 1, so you can write:
std::string s{"-+--+++--+--+"};
std::bitset<70> set(s, 0, s.size(), '-', '+');