I am facing the error "cannot convert parameter from 'System::String ^' to 'std::string'". How can I convert a System::String^ into a standard C++ std::string in the Visual C++ environment?
From this link at Microsoft:
#include <iostream>
#include <string>
using namespace System;
using namespace std;

void MarshalString(String^ s, string& os) {
    using namespace Runtime::InteropServices;
    // Copy the managed string into a temporary unmanaged ANSI buffer,
    // then copy that into the std::string.
    const char* chars =
        (const char*)(Marshal::StringToHGlobalAnsi(s)).ToPointer();
    os = chars;
    Marshal::FreeHGlobal(IntPtr((void*)chars));   // release the temporary buffer
}

int main() {
    string a = "test";
    String^ c = gcnew String("abcd");
    cout << a << endl;
    MarshalString(c, a);
    cout << a << endl;
}
//*** convert String^ to standard C++ string
String^ SyStr = "abc";   // system string
string StStr = "";       // standard string
for each (char c in SyStr)
{
    StStr.push_back(c);  // note: narrows each UTF-16 character to a single char
}
//*** End convert String^ to standard C++ string

//*** convert standard C++ string to String^
String^ SyStr = "";      // system string
string StStr = "xyz";    // standard string
SyStr = gcnew String(StStr.c_str());
//*** End convert standard C++ string to String^
I am trying to convert a QString to its hex representation. Everything works fine until I put in special characters like '€':
QString l_str = "How are you";
qDebug() << "hex: " << l_str.toUtf8().toHex();
qDebug() << "readable:" << l_str.toUtf8();
prints out:
hex: "486f772061726520796f753f"
readable: "How are you?"
And so it is easy to convert the hex values back to ASCII: since the hex representation of an ASCII char is just two digits (48 = H, etc.), it is enough to iterate and convert every two characters.
If I set l_str = "H€w ar€ you?", the '€' sign in UTF-8 is "e282ac", which is six hex digits, and the result is the following:
hex: "48e282ac77206172e282ac20796f753f"
but how can I get it back to a readable string?
It would be better to have a conversion that results in a UTF-16 string:
hex: "0048006F20AC00200061007220AC00200079006F0075003F"
Consider that the "Ho€ ar€ you" string is created at runtime (so QStringLiteral is not available), and I cannot use the auto keyword.
You can convert the string to UTF-8 or UTF-16 and then convert the buffer that holds it into a hexadecimal representation. The same steps can be followed in reverse order to go from hex back to UTF-8/UTF-16 and then to a string.
The toUtf16Hex function prepends a byte order mark so that both little- and big-endian hosts can correctly decode the UTF-16 representation.
// https://github.com/KubaO/stackoverflown/tree/master/questions/str-to-utf-38831190
#include <QtCore>

QByteArray toUtf8Hex(const QString & str) {
    return str.toUtf8().toHex();
}

QString fromUtf8Hex(const QByteArray & hex) {
    return QString::fromUtf8(QByteArray::fromHex(hex));
}

QByteArray toUtf16Hex(QString str) {
    str.prepend(QChar::ByteOrderMark);
    // It is OK to use `fromRawData` since toHex copies it.
    return QByteArray::fromRawData(
        reinterpret_cast<const char*>(str.constData()), (str.size()+1)*2).toHex();
}

QString fromUtf16Hex(const QByteArray & hex) {
    const QByteArray utf16 = QByteArray::fromHex(hex);
    return QString::fromUtf16(reinterpret_cast<const quint16*>(utf16.data()));
}

int main() {
    const QString str = QStringLiteral("H€w ar€ you?");
    // To UTF-8 and back
    const QByteArray hex8 = toUtf8Hex(str);
    Q_ASSERT(fromUtf8Hex(hex8) == str);
    // To UTF-16 and back
    const QByteArray hex16 = toUtf16Hex(str);
    Q_ASSERT(fromUtf16Hex(hex16) == str);
}
I found a hackish but working solution:
QString l_str = "Ho€ Ar€ you?";
qDebug() << "hex: " << l_str.toUtf8().toHex();
qDebug() << "readable:" << l_str.toUtf8();

QTextCodec* l_codec = QTextCodec::codecForName("UTF-16");
QByteArray l_string = l_codec->fromUnicode(l_str).toHex();
qDebug() << "utf16 encoded: " << l_string;

// Swap the byte order of each UTF-16 code unit (the codec emitted little-endian data).
QByteArray l_reversed;
for (int i = 0; i < l_string.length(); i = i + 4)
{
    QString l_hex_chars_1 = l_string.mid(i, 2);
    QString l_hex_chars_2 = l_string.mid(i + 2, 2);
    l_reversed.append(l_hex_chars_2);
    l_reversed.append(l_hex_chars_1);
}

// Strip the byte order mark, if present.
QByteArray l_bom("feff");
if (l_reversed.startsWith(l_bom))
{
    l_reversed.remove(0, l_bom.length());
}
qDebug() << "byte-reversed: " << l_reversed;

// Rebuild the string, one 4-hex-digit code unit at a time.
QString l_res;
for (int i = 0; i < l_reversed.length(); i = i + 4)
{
    QString l_hex_chars_1 = l_reversed.mid(i, 2);
    QString l_hex_chars_2 = l_reversed.mid(i + 2, 2);
    int l_val = l_hex_chars_1.toInt(0, 16) * 256 + l_hex_chars_2.toInt(0, 16);
    QChar l_char(l_val);
    l_res.append(l_char);
}
qDebug() << "back to string: " << l_res;
This prints out:
hex: "48e282ac77206172e282ac20796f753f"
readable: "H€w ar€ you?"
utf16 encoded: "fffe4800ac207700200061007200ac20200079006f0075003f00"
byte-reversed: "004820ac007700200061007220ac00200079006f0075003f"
back to string: "H€w ar€ you?"
How do I convert a string from char* to Platform::String^ and vice versa?
I'm developing a DLL for the Universal Windows Platform, using version 10.0.10586.0 of the SDK and Visual Studio 2015 Update 1.
This is not the most elegant approach, but it is the only solution that worked for me to get a const char * from a Platform::String^:
const char* StringToChar(String^ s) {
    const wchar_t* W = s->Data();
    int Size = wcslen(W);
    char* CString = new char[Size + 1];
    CString[Size] = 0;
    for (int y = 0; y < Size; y++)
    {
        CString[y] = (char)W[y];   // narrows each UTF-16 code unit to a char
    }
    return (const char*)CString;   // note: the caller must delete[] this buffer
}
and it's a lot easier to convert it back:
String^ CharToString(const char* char_array) {
    std::string s_str = std::string(char_array);
    std::wstring wid_str = std::wstring(s_str.begin(), s_str.end());
    const wchar_t* w_char = wid_str.c_str();
    return ref new String(w_char);
}
// char* to String^ (Platform::String stores UTF-16, so widen the narrow text first)
const char* text = "new string";
std::string narrow(text);
std::wstring wide(narrow.begin(), narrow.end());
Platform::String^ str = ref new Platform::String(wide.c_str());
// String^ to char16*
const char16* newText = str->Data();
More detailed answer: https://stackoverflow.com/a/11746252/5477130
Here is my declaration in question; I even use include guards:
Edit: I'm including the entire header if this will help answer any additional
questions one might have.
#ifndef STRING_H
#define STRING_H

#include <iostream>

class String
{
public:
    String(const char * s = "");
    String(const String & s);
    String operator = (const String & s);
    char & operator [] (int index);
    int size();
    String reverse();
    int indexOf(char c);
    int indexOf(String pattern);
    bool operator == (String s);
    bool operator != (String s);
    bool operator > (String s);
    bool operator < (String s);
    bool operator >= (String s);
    bool operator <= (String s);
    String operator + (String s);
    String operator += (String s);
    void print(std::ostream & out);
    void read(std::istream & in);
    static int strLen(const String &s);
    static String strCpy(const String &s, int length);
    static String strDup(const String &s);
    static bool strCmp(const String &s, const String &t);
    ~String();
private:
    bool inBounds(int i)
    {
        return i >= 0 && i < len;
    }
    char * buf;
    int len;
};

#endif
And here is my definition (starting from line 183 of the file):
String String::operator = (const String & s)
{
    String t(s);
    return t;
}
And I keep getting this error:
>c:\users\omive_000\documents\visual studio 2013\projects\string\string\string.h(183): error C2084: function 'String String::operator =(const String &)' already has a body
1> c:\users\omive_000\documents\visual studio 2013\projects\string\string\string.h(11) : see previous definition of '='
Can anyone offer me an explanation as to why this error occurs?
Definitions normally don't belong in header files. C2084 means the compiler has already seen a body for String::operator= (the "previous definition" it points to at string.h(11)), so the second definition at line 183 collides with it. You have two options (see the sketch after this answer):
You can declare and define the function inline, inside your include guards.
Or you can keep only the declaration in the header and put the definition in a .cpp file.
That said, your code looks fishy: it does not do what it seems to do. There is no assignment to this or its members happening; the operator just copies its argument into a local and returns that. But that's a bug, not the cause of the compiler error.
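A minimal sketch of the .cpp-file option (hypothetical code, not the poster's; it assumes buf always holds a null-terminated string of length len, and it uses the conventional String& return type instead of the by-value String declared above, so the declaration in the header would change accordingly):

// String.h (inside the include guards): String& operator = (const String & s);

// String.cpp
#include "String.h"
#include <cstring>

String& String::operator = (const String & s)
{
    if (this != &s)                   // guard against self-assignment
    {
        char* copy = new char[s.len + 1];
        std::strcpy(copy, s.buf);     // deep-copy the other string's buffer
        delete[] buf;                 // release the old buffer
        buf = copy;
        len = s.len;
    }
    return *this;                     // the assignment now actually modifies *this
}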
I would like to convert a CLSID to a char* in C++ so I can display it in a text box. I am new to C++, so please make this as simple as possible.
Thanks
C'ish solution:
/* 128 bit GUID to human-readable string */
char * guid_to_str(const GUID * id, char * out) {
    int i;
    char * ret = out;
    out += sprintf(out, "%.8lX-%.4hX-%.4hX-", id->Data1, id->Data2, id->Data3);
    for (i = 0; i < sizeof(id->Data4); ++i) {
        out += sprintf(out, "%.2hhX", id->Data4[i]);
        if (i == 1) *(out++) = '-';
    }
    return ret;
}
This assumes the output buffer has been already allocated, and should be of a size of 37 bytes (including the null terminating character).
The output is of the form "75B22630-668E-11CF-A6D9-00AA0062CE6C"
Usage example:
GUID g;
char buffer[37];
std::cout << guid_to_str(&g, buffer);
Note:
This code exists because I had to implement GUID parsing under Linux, otherwise I would have used the Windows API function StringFromCLSID mentioned by #krowe.
Here is a great example of converting a GUID to a string and vice versa that I am using in my projects:
#include <array>
#include <cstdio>
#include <stdexcept>
#include <string>

std::string guidToString(GUID guid) {
    std::array<char, 40> output;
    snprintf(output.data(), output.size(),
             "{%08lX-%04hX-%04hX-%02hhX%02hhX-%02hhX%02hhX%02hhX%02hhX%02hhX%02hhX}",
             guid.Data1, guid.Data2, guid.Data3,
             guid.Data4[0], guid.Data4[1], guid.Data4[2], guid.Data4[3],
             guid.Data4[4], guid.Data4[5], guid.Data4[6], guid.Data4[7]);
    return std::string(output.data());
}

GUID stringToGUID(const std::string& guid) {
    GUID output;
    // %hhX is required for the single-byte Data4 fields; %hX would overrun them.
    const auto ret = sscanf(guid.c_str(),
                            "{%8lX-%4hX-%4hX-%2hhX%2hhX-%2hhX%2hhX%2hhX%2hhX%2hhX%2hhX}",
                            &output.Data1, &output.Data2, &output.Data3,
                            &output.Data4[0], &output.Data4[1], &output.Data4[2], &output.Data4[3],
                            &output.Data4[4], &output.Data4[5], &output.Data4[6], &output.Data4[7]);
    if (ret != 11)
        throw std::logic_error("Invalid GUID, format should be {00000000-0000-0000-0000-000000000000}");
    return output;
}
In this example, a char* buffer is used first and then converted to a std::string, so this is exactly what you are looking for, done efficiently.
The Windows API has a function for this:
CLSID clsid;
HRESULT hr = CLSIDFromProgID(OLESTR("Adobe.SVGCtl.3"), &clsid);

// Get the class id as a string
LPOLESTR className;
hr = StringFromCLSID(clsid, &className);

// convert to CString
CString c = (char *)(_bstr_t)className;

// then release the memory used by the class name
CoTaskMemFree(className);

// Now c is ready to use
A CLSID is the same as a UUID, so you can use the UuidToString() function
http://msdn.microsoft.com/en-us/library/windows/desktop/aa379352(v=vs.85).aspx
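For completeness, a minimal sketch of that approach (my example, not from the answer; it assumes you link against Rpcrt4.lib and Ole32.lib and only need the ANSI variant):

#include <windows.h>   // GUID, CoCreateGuid
#include <rpc.h>       // UuidToStringA, RpcStringFreeA
#include <iostream>

int main() {
    GUID guid;                  // UUID is a typedef for GUID on Windows, so a CLSID works directly
    CoCreateGuid(&guid);        // any GUID will do for the demonstration
    RPC_CSTR str = nullptr;
    if (UuidToStringA(&guid, &str) == RPC_S_OK) {
        std::cout << reinterpret_cast<char*>(str) << '\n';   // e.g. 75b22630-668e-11cf-a6d9-00aa0062ce6c
        RpcStringFreeA(&str);   // the RPC runtime allocated the buffer; release it here
    }
    return 0;
}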
Can someone please post simple code that would convert,
System::String^
To,
C++ std::string
I.e., I just want to assign the value of,
String^ originalString;
To,
std::string newString;
Don't roll your own; use these handy (and extensible) wrappers provided by Microsoft.
For example:
#include <msclr\marshal_cppstd.h>
System::String^ managed = "test";
std::string unmanaged = msclr::interop::marshal_as<std::string>(managed);
You can easily do this as follows:
#include <msclr/marshal_cppstd.h>
System::String^ xyz = "Hi boys";
std::string converted_xyz = msclr::interop::marshal_as<std::string>(xyz);
Check out System::Runtime::InteropServices::Marshal::StringToCoTaskMemUni() and its friends.
Sorry can't post code now; I don't have VS on this machine to check it compiles before posting.
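For reference, a minimal untested sketch of that approach (the helper name ToStdWString is mine, not from the answer; it assumes a /clr compilation unit):

#include <string>
using namespace System;
using namespace System::Runtime::InteropServices;

// StringToCoTaskMemUni copies the managed string into a CoTaskMem-allocated
// UTF-16 buffer; build the std::wstring from it, then free the unmanaged copy.
std::wstring ToStdWString(String^ s) {
    IntPtr ptr = Marshal::StringToCoTaskMemUni(s);
    std::wstring result(static_cast<const wchar_t*>(ptr.ToPointer()));
    Marshal::FreeCoTaskMem(ptr);
    return result;
}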
This worked for me:
#include <stdlib.h>
#include <string.h>
#include <msclr\marshal_cppstd.h>
//..
using namespace msclr::interop;
//..
System::String^ clrString = (TextoDeBoton);
std::string stdString = marshal_as<std::string>(clrString); //String^ to std
//System::String^ myString = marshal_as<System::String^>(MyBasicStirng); //std to String^
prueba.CopyInfo(stdString); //MyMethod
//..
//Where: String^ = TextoDeBoton;
//and stdString is a "normal" string;
Here are some conversion routines I wrote many years ago for a C++/CLI project; they should still work.
using namespace System;
using namespace System::Runtime::InteropServices;

void StringToStlWString(System::String const^ s, std::wstring& os)
{
    String^ string = const_cast<String^>(s);
    // Copy the managed string into a temporary unmanaged UTF-16 buffer, then free it.
    const wchar_t* chars = reinterpret_cast<const wchar_t*>((Marshal::StringToHGlobalUni(string)).ToPointer());
    os = chars;
    Marshal::FreeHGlobal(IntPtr((void*)chars));
}

System::String^ StlWStringToString(std::wstring const& os)
{
    String^ str = gcnew String(os.c_str());
    return str;
}

System::String^ WPtrToString(wchar_t const* pData, int length)
{
    if (length == 0) {
        // use null termination
        length = wcslen(pData);
        if (length == 0) {
            System::String^ ret = "";
            return ret;
        }
    }
    System::IntPtr bfr = System::IntPtr(const_cast<wchar_t*>(pData));
    System::String^ ret = System::Runtime::InteropServices::Marshal::PtrToStringUni(bfr, length);
    return ret;
}

void Utf8ToStlWString(char const* pUtfString, std::wstring& stlString)
{
    // MAKE_WIDEPTR_FROMUTF8 declares pString as a wide copy of pUtfString
    MAKE_WIDEPTR_FROMUTF8(pString, pUtfString);
    stlString = pString;
}

void Utf8ToStlWStringN(char const* pUtfString, std::wstring& stlString, ULONG length)
{
    MAKE_WIDEPTR_FROMUTF8N(pString, pUtfString, length);
    stlString = pString;
}
I found that an easy way to get a std::string from a String^ is to use sprintf().
char cStr[50] = { 0 };
String^ clrString = "Hello";
if (clrString->Length < sizeof(cStr))
    sprintf(cStr, "%s", clrString);
std::string stlString(cStr);
No need to call the Marshal functions!
UPDATE: Thanks to Eric, I've modified the sample code to check the size of the input string to prevent a buffer overflow.
I spent hours trying to convert a Windows Forms listbox ToString value to a standard string so that I could use it with fstream to output to a .txt file. My Visual Studio didn't come with the marshal header files that several answers I found said to use. After much trial and error I finally found a solution to the problem that just uses System::Runtime::InteropServices:
void MarshalString(String^ s, string& os) {
    using namespace Runtime::InteropServices;
    const char* chars =
        (const char*)(Marshal::StringToHGlobalAnsi(s)).ToPointer();
    os = chars;
    Marshal::FreeHGlobal(IntPtr((void*)chars));
}

//this is the code to use the function:
scheduleBox->SetSelected(0, true);
string a = "test";
String^ c = gcnew String(scheduleBox->SelectedItem->ToString());
MarshalString(c, a);
filestream << a;
And here is the MSDN page with the example:
http://msdn.microsoft.com/en-us/library/1b4az623(v=vs.80).aspx
I know it's a pretty simple solution, but this took me hours of troubleshooting and visits to several forums to finally find something that worked.
C# uses the UTF-16 format for its strings.
So, besides just converting the types, you should also be conscious of the string's actual encoding.
When compiling for Multi-byte Character Set, Visual Studio and the Win API assume the ANSI code page (for example Windows-1252), not UTF-8.
When compiling for Unicode Character Set, Visual Studio and the Win API assume UTF-16.
So you must convert the string from UTF-16 to UTF-8, not just convert the type to std::string.
This becomes necessary when working with multi-byte character formats such as some non-Latin languages.
The idea is to decide that std::wstring always represents UTF-16 and std::string always represents UTF-8.
This isn't enforced by the compiler; it's more of a good policy to have.
#include "stdafx.h"
#include <string>
#include <codecvt>
#include <msclr\marshal_cppstd.h>
using namespace System;
int main(array<System::String ^> ^args)
{
System::String^ managedString = "test";
msclr::interop::marshal_context context;
//Actual format is UTF16, so represent as wstring
std::wstring utf16NativeString = context.marshal_as<std::wstring>(managedString);
//C++11 format converter
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> convert;
//convert to UTF8 and std::string
std::string utf8NativeString = convert.to_bytes(utf16NativeString);
return 0;
}
Or, using a more compact syntax:
int main(array<System::String ^> ^args)
{
    System::String^ managedString = "test";

    msclr::interop::marshal_context context;
    std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> convert;
    std::string utf8NativeString = convert.to_bytes(context.marshal_as<std::wstring>(managedString));

    return 0;
}
I like to stay away from the marshaller.
Using CString newString(originalString); seems much cleaner and faster to me. There is no need to worry about creating and deleting a context.
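A rough sketch of that usage in context (my code, not the answer's: it assumes an ATL/MFC project built with /clr, where CString has a constructor overload taking a System::String^, and it adds a hypothetical CT2A-based narrowing step to reach std::string):

#include <atlstr.h>    // CString
#include <atlconv.h>   // CT2A
#include <string>

std::string ToStdString(System::String^ originalString) {
    CString newString(originalString);      // managed string -> CString, no marshal_context needed
    return std::string(CT2A(newString));    // CString -> narrow std::string via the ANSI code page
}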
// I used VS2012 to write the code below -- convert System::String to a standard string
#include "stdafx.h"
#include <iostream>
#include <string>

using namespace System;
using namespace Runtime::InteropServices;

void MarshalString(String^ s, std::string& outputstring)
{
    const char* kPtoC = (const char*)(Marshal::StringToHGlobalAnsi(s)).ToPointer();
    outputstring = kPtoC;
    Marshal::FreeHGlobal(IntPtr((void*)kPtoC));
}

int _tmain(int argc, _TCHAR* argv[])
{
    std::string strNativeString;
    String^ strManagedString = "Temp";

    MarshalString(strManagedString, strNativeString);
    std::cout << strNativeString << std::endl;

    return 0;
}
For me, some of these answers were giving errors. I had a std::string and needed to convert it to String^; to do that I had to write String^ sysString = gcnew String(stdStr.c_str());, where sysString is a System::String^ and stdStr is a std::string. Hope this helps someone.
You may have to #include <string> for this to work