Question

I have a string variable containing 32 bits of binary. What would be the best way to convert the 4 characters it represents (8 bits per character) back into their ASCII characters?

For example, a variable contains "01110100011001010111001101110100", which should be converted back into the string "test" (the first 8 bits, 01110100, are 0x74, the ASCII code for 't').


Solution

An alternative if you're using C++11:

#include <iostream>
#include <string>
#include <sstream>
#include <bitset>

int main()
{
    std::string data = "01110100011001010111001101110100";
    std::stringstream sstream(data);
    std::string output;
    std::bitset<8> bits;
    // Loop only while extraction succeeds; testing sstream.good() before
    // reading would append a stray '\0' after the last full byte.
    while (sstream >> bits)
    {
        char c = char(bits.to_ulong());
        output += c;
    }

    std::cout << output;

    return 0;
}

Note that std::bitset itself predates C++11; it has been in the standard library (header <bitset>) since C++98.

Also note that if data is not well formed (for example, a length that is not a multiple of 8, or characters other than '0' and '1'), the result will be silently wrong or truncated once extraction fails.
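
If silent truncation is a concern, the input can be validated up front. A minimal sketch, with a helper name of my own choosing (not from the answer above):

#include <algorithm>
#include <string>

// Returns true if 'data' decodes cleanly: only '0'/'1' characters and a
// length that is a whole number of 8-bit groups.
bool IsWellFormedBinary(const std::string& data)
{
    return data.size() % 8 == 0 &&
           std::all_of(data.begin(), data.end(),
                       [](char c) { return c == '0' || c == '1'; });
}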

OTHER TIPS

Try using this method. Example:

#include <iostream>
#include <bitset>
#include <sstream>
using namespace std;

string BinaryStringToText(string binaryString) {
    string text = "";
    stringstream sstream(binaryString);
    bitset<8> bits;
    // As above, loop on successful extraction to avoid a stray trailing '\0'.
    while (sstream >> bits)
    {
        text += char(bits.to_ulong());
    }
    return text;
}

int main()
{
    string binaryString = "0100100001100101011011000110110001101111001000000101011101101111011100100110110001100100";
    cout << "Binary string: " << binaryString << "!\n";
    cout << "Result binary string to text: " << BinaryStringToText(binaryString) << "!\n";

    return 0;
}

Output:

Binary string: 0100100001100101011011000110110001101111001000000101011101101111011100100110110001100100!
Result binary string to text: Hello World!
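
An equivalent, shorter way to decode each 8-character chunk is std::stoi with base 2. This is only a sketch of the same idea, not part of the answer above:

#include <iostream>
#include <string>

// Decode MSB-first 8-bit groups with std::stoi, base 2.
std::string BinaryStringToTextStoi(const std::string& binary)
{
    std::string text;
    for (std::string::size_type i = 0; i + 8 <= binary.size(); i += 8)
    {
        // substr picks one byte's worth of digits; stoi parses them as base 2.
        text += char(std::stoi(binary.substr(i, 8), nullptr, 2));
    }
    return text;
}

int main()
{
    std::cout << BinaryStringToTextStoi("01110100011001010111001101110100") << '\n'; // prints "test"
    return 0;
}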
This answer shows the full round trip with std::bitset: first string to binary, then binary back to a string.

#include <iostream>
#include <string>
#include <vector>
#include <bitset>

using namespace std;  

int main()
try
{

   string myString = "Hello World"; // string object

   vector<bitset<8>> us;           // one 8-bit pattern per character

   for (size_t i = 0; i < myString.size(); ++i)
   {
        // Convert each character to its 8-bit pattern and append it
        us.push_back(bitset<8>(myString[i]));
   }

   string c;  // string rebuilt from the binary patterns

   for (size_t i = 0; i < us.size(); ++i)
   {
        // to_ulong returns the integer value with the same
        // bit representation as the bitset object.
        c += char(us[i].to_ulong());
   }

    cout << c;

}
catch (exception& e)
{
    cerr << "the error is : " << e.what() << '\n';
}

Output: Hello World
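
If you also want to see the intermediate binary form, the same vector can be printed with to_string(). A small sketch, assuming the us vector from the code above:

// Prints "0100100001100101..." for "Hello World".
for (const auto& bits : us)
{
    cout << bits.to_string();
}
cout << '\n';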

Fastest way to Convert String to Binary?

To convert a string to binary, I referred to the answer at the link above.

Convert a string of binary into an ASCII string (C++)

To convert binary back to a string, I referred to Dale Wilson's answer at the link above.

Here is my attempt:

#include <cstddef>
#include <cstdint>
#include <string>
#include <string_view>

std::string str2bits(std::string_view str, bool big_endian = false)
{
    std::string ret;
    ret.reserve(str.size() * 8);
    for (std::size_t i = 0; i < str.length(); ++i)
    {
        const std::uint8_t ord = std::uint8_t(str[i]);
        // The default (big_endian == false) emits the most significant bit
        // first, matching the usual "01110100" == 't' convention.
        for (int k = 0; k < 8; ++k)
        {
            const int bitnum = big_endian ? k : 7 - k;
            ret += (ord & (1 << bitnum)) ? '1' : '0';
        }
    }
    return ret;
}

str2bits("test") ==> 01110100011001010111001101110100

Another method could be this, which is more flexible:

#include <iostream>
#include <string>

std::string BinaryTextToASCIIText(const std::string& binaryText, const unsigned int blockSize = 8, const unsigned int separatorLength = 1)
{
    std::string text(""), block;
    unsigned int separatorBlockStartIndex = blockSize;

    for (unsigned int i = 0; i < binaryText.length(); i++)
    {
        if (i == separatorBlockStartIndex)
        {
            i += separatorLength;
            separatorBlockStartIndex += blockSize + separatorLength;
        }
        
        block += binaryText[i];

        if (block.length() == 8)
        {
            char binaryTextToASCIIChar = 0;
            for (unsigned int j = 0; j < block.length(); j++)
            {
                if (block[j] == '1')
                {
                    // 1 << n gives the integer power of two, avoiding floating-point pow()
                    binaryTextToASCIIChar += char(1 << (block.length() - j - 1));
                }
            }

            block.clear();
            text += binaryTextToASCIIChar;
        }
    }

    return text;
}

int main()
{
    // A single 32-bit block, so the separator is never reached:
    std::cout << BinaryTextToASCIIText("01110100011001010111001101110100", 32, 1) << std::endl;
    // Blocks of 32 bits separated by "0101111011010" (13 characters long);
    // the separator can be any ASCII sequence of any length:
    std::cout << BinaryTextToASCIIText("010101000110100001101001011100110101111011010001000000110100101110011001000000101111011010011000010010000001110100011001010101111011010011100110111010000100001", 32, 13);

    return 0;
}

The method can of course be improved, but it works pretty well.
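
For instance, assuming the BinaryTextToASCIIText function above, it also handles the common space-separated byte layout. A usage sketch, not from the original answer:

// Blocks of 8 bits separated by a single space: prints "test".
std::cout << BinaryTextToASCIIText("01110100 01100101 01110011 01110100", 8, 1) << std::endl;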

Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow