I need to send binary data over a serial port, without any bytes getting reinterpreted as control characters along the way. I'm currently setting up my serial port as follows:
#include <windows.h>
#include <iostream>
using namespace std;

// open serial port
HANDLE hSerial;
hSerial = CreateFile("COM1", GENERIC_READ | GENERIC_WRITE, 0, 0, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, 0);

// get serial parameters
DCB dcbSerialParams = {0};
dcbSerialParams.DCBlength = sizeof(dcbSerialParams);
if (!GetCommState(hSerial, &dcbSerialParams)) {
    cout << "error getting state\n";
    exit(0);
}

// set serial params
dcbSerialParams.BaudRate = CBR_115200;
dcbSerialParams.ByteSize = 8;
dcbSerialParams.StopBits = ONESTOPBIT;
dcbSerialParams.Parity = NOPARITY;
if (!SetCommState(hSerial, &dcbSerialParams)) {
    cout << "error setting parameters\n";
    exit(0);
}

// set time outs
COMMTIMEOUTS timeouts = {0};
timeouts.ReadIntervalTimeout = 50;
timeouts.ReadTotalTimeoutConstant = 10;
timeouts.ReadTotalTimeoutMultiplier = 10;
timeouts.WriteTotalTimeoutConstant = 10;
timeouts.WriteTotalTimeoutMultiplier = 10;
if (!SetCommTimeouts(hSerial, &timeouts)) {
    cout << "problem setting timeout values\n";
    exit(0);
} else cout << "timeouts set\n";
When I issue ReadFile commands, I can get and display bytes from 0 to 255 with no problem, but I'm having no such luck with WriteFile. Is there a way to explicitly set a binary write mode?
EDIT
OK, here's some more info. I have a Windows machine and a Linux single-board computer hooked up through serial; the code above on the Windows side is followed by:
unsigned char temp = 0;
bool keepReading = true;
while (keepReading) {
    DWORD dwBytesRead = 0;
    ReadFile(hSerial, &temp, 1, &dwBytesRead, NULL);
    if (1 == dwBytesRead) cout << (unsigned int) temp << " ";
    if (255 == temp) keepReading = false;
}
cout << endl;

bool keepWriting = true;
unsigned char send = 0;
while (keepWriting) {
    DWORD dwBytesWritten = 0;
    WriteFile(hSerial, &send, 1, &dwBytesWritten, NULL);
    if (255 == send) keepWriting = false; // stop once the value 255 has been written
    send++;
}
My code on the Linux side looks like this:
#include <termios.h>
#include <fcntl.h>
#include <unistd.h>
#include <strings.h>
#include <iostream>
using namespace std;

int fd = open("/dev/ttymxc0", O_RDWR | O_NOCTTY);
struct termios options;
bzero(&options, sizeof(options));
options.c_cflag = B115200 | CS8 | CLOCAL | CREAD;
options.c_iflag = IGNPAR;
options.c_oflag = 0;
options.c_lflag = ICANON;
options.c_cc[VMIN] = 1;
options.c_cc[VTIME] = 0;
tcflush(fd, TCIFLUSH);
tcsetattr(fd, TCSANOW, &options);
bool keepWriting = true;
unsigned char send = 0;
while (keepWriting) {
    write(fd, &send, 1);
    if (255 == send) keepWriting = false; // stop once the value 255 has been written
    send++;
}
bool keepReading = true;
while (keepReading) {
    unsigned char temp = 0;
    int n = read(fd, &temp, 1);
    if (-1 == n) {
        perror("Read error");
        keepReading = false;
    } else if (1 == n) {
        cout << (unsigned int) temp << " ";
    }
    if (255 == temp) keepReading = false;
}
cout << endl;
close(fd);
I start up the code on both machines, and the first set of while loops runs fine. The terminal on the Windows side displays 0 through 255. Then it just sits there. If I output the number of bytes read on the Linux side for the second set of while loops, it continually gives me 0 bytes. That would normally indicate a closed port, but I just sent a bunch of data through it, so how can that be?
As Jonathan Potter mentions, most likely the problem is that you don't have XON/XOFF flow control turned off. Add these lines before the call to SetCommState:
dcbSerialParams.fOutX = 0;
dcbSerialParams.fInX = 0;
Some other fields which you may need to set:
dcbSerialParams.fNull = 0;
dcbSerialParams.fDtrControl = DTR_CONTROL_DISABLE;
dcbSerialParams.fRtsControl = RTS_CONTROL_DISABLE;
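For a fully transparent 8-bit path you may also want the hardware-handshake inputs ignored. Here is a sketch of the combined DCB settings, placed before SetCommState (the CTS/DSR lines are my addition, not something the original answer called for):
dcbSerialParams.fOutX = FALSE;            // no outgoing XON/XOFF
dcbSerialParams.fInX = FALSE;             // no incoming XON/XOFF
dcbSerialParams.fNull = FALSE;            // don't silently drop NUL bytes
dcbSerialParams.fOutxCtsFlow = FALSE;     // don't gate output on CTS
dcbSerialParams.fOutxDsrFlow = FALSE;     // don't gate output on DSR
dcbSerialParams.fDsrSensitivity = FALSE;  // receive even when DSR is low
dcbSerialParams.fDtrControl = DTR_CONTROL_DISABLE;
dcbSerialParams.fRtsControl = RTS_CONTROL_DISABLE;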
I think what may be happening is that Linux is detecting a break and resetting the port, or that canonical mode is getting in the way. Try these settings in addition to what you already have:
options.c_iflag |= IGNBRK;
options.c_iflag &= ~BRKINT;
options.c_iflag &= ~ICRNL;
options.c_oflag = 0;
options.c_lflag = 0;
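On the Linux side, an equivalent shortcut is cfmakeraw(), which puts the termios structure into a byte-transparent (raw) mode in one call. A minimal sketch, assuming the same fd and 115200 baud as in the question:
struct termios raw;
tcgetattr(fd, &raw);             // start from the current settings
cfmakeraw(&raw);                 // clears ICANON, ECHO, ISIG and all CR/NL and XON/XOFF handling
raw.c_cflag |= CLOCAL | CREAD;   // ignore modem control lines, enable the receiver
cfsetispeed(&raw, B115200);
cfsetospeed(&raw, B115200);
raw.c_cc[VMIN] = 1;              // block until at least one byte is available
raw.c_cc[VTIME] = 0;
tcsetattr(fd, TCSANOW, &raw);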
All right, I figured it out; or rather, a co-worker did. On the Linux side, in the file /etc/inittab, I had to comment out the line:
T0:23:respawn:/sbin/getty -L ttymxc0 115200 vt100
This was grabbing the serial port in a way that made it unusable for receiving bytes. I now see the expected output.
Related
I'm making frames with the bytes that I read from a controller.
The first 100-200 frames are good, but after that I start to get bad frames. I have checked 10,000 times that my class's frame parsing is correct. That's why I'm asking whether there might be a problem with the way I read from the device.
This is my code:
int ConnectDevice::getBytes() {
    DCB dcb;
    BYTE Byte;
    DWORD dwBytesTransferred;
    DWORD dwCommModemStatus;

    HANDLE hPort = CreateFile(
        chosePort().c_str(),
        GENERIC_READ,
        0,
        NULL,
        OPEN_EXISTING,
        0,
        NULL);

    if (!GetCommState(hPort, &dcb)) {
        std::cout.flush();
        system("cls");
        getBytes();
    }

    dcb.BaudRate = 115200;
    dcb.ByteSize = 8;
    dcb.StopBits = ONESTOPBIT;
    dcb.Parity = NOPARITY;
    dcb.fDtrControl = DTR_CONTROL_DISABLE;

    COMMTIMEOUTS timeout;
    timeout.ReadIntervalTimeout = 50;
    timeout.ReadTotalTimeoutConstant = 50;
    timeout.ReadTotalTimeoutMultiplier = 50;
    timeout.WriteTotalTimeoutConstant = 50;
    timeout.WriteTotalTimeoutMultiplier = 10;
    SetCommTimeouts(&Byte, &timeout);

    if (!SetCommState(hPort, &dcb))
        return false;

    while (1) {
        SetCommMask(hPort, EV_RXCHAR | EV_ERR);
        if (_deviceProtocol->IsFrameReady != 1) {
            WaitCommEvent(hPort, &dwCommModemStatus, 0);
            if (dwCommModemStatus & EV_RXCHAR)
                ReadFile(hPort, &Byte, 1, &dwBytesTransferred, NULL);
            else if (dwCommModemStatus & EV_ERR)
                return 0x101;
            _parseFrame->ParseGCSFrame(Byte);
        } else if (_deviceProtocol->IsFrameReady == 1) {
            _deviceProtocol->IsFrameReady = 0;
            _kalmanFilter->displayKalman();
        }
    }
    return 0;
}
I'm using Ubuntu as the host and C++ as the language.
I'm communicating with a serial device that uses line-by-line commands. For that purpose I opted for canonical mode of operation, and my termios flags are:
fd = open(portName.c_str(), O_RDWR | O_NOCTTY | O_NDELAY);
struct termios config = { 0 };
config.c_iflag = IGNPAR | ICRNL;
config.c_oflag = 0;
config.c_lflag = ICANON;
config.c_lflag &= ~(ECHO | ECHONL | IEXTEN | ISIG);
config.c_cc[VINTR] = 0;
config.c_cc[VQUIT] = 0;
config.c_cc[VERASE] = 0;
config.c_cc[VKILL] = 0;
config.c_cc[VEOF] = 4;
config.c_cc[VTIME] = 0;
config.c_cc[VMIN] = 0;
config.c_cc[VSWTC] = 0;
config.c_cc[VSTART] = 0;
config.c_cc[VSTOP] = 0;
config.c_cc[VSUSP] = 0;
config.c_cc[VEOL] = 0;
config.c_cc[VREPRINT] = 0;
config.c_cc[VDISCARD] = 0;
config.c_cc[VWERASE] = 0;
config.c_cc[VLNEXT] = 0;
config.c_cc[VEOL2] = 0;
if (tcsetattr(fd, TCSAFLUSH, &config) < 0)
{
    std::cout << "Error configuring serial." << std::endl;
}
And a simple read is done as:
char buffer[1024]; // 1024 bytes
read(fd, &buffer[0], 1024);
My problem is that if I lose communication (i.e. when the connected device powers off), my read statement hangs and the program gets blocked.
I need to implement a timeout so that the call returns with an error after a few seconds. As far as I know, VTIME and VMIN have no effect on canonical reads.
Help appreciated on how to enable a timeout for such a read call.
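One way to get that behaviour is to wait on the descriptor with select() before calling read(); in canonical mode the descriptor only becomes readable once a whole line (or EOF) is available. A minimal sketch, assuming the fd from the code above (readLineWithTimeout is just an illustrative helper, not an existing API):
#include <sys/select.h>
#include <unistd.h>

// Wait up to 'seconds' for a complete line, then read it; returns -1 on timeout or error.
ssize_t readLineWithTimeout(int fd, char *buffer, size_t size, int seconds)
{
    fd_set readSet;
    FD_ZERO(&readSet);
    FD_SET(fd, &readSet);

    struct timeval timeout;
    timeout.tv_sec = seconds;
    timeout.tv_usec = 0;

    int ready = select(fd + 1, &readSet, NULL, NULL, &timeout);
    if (ready <= 0)
        return -1;                    // 0 = timed out, -1 = select() failed

    return read(fd, buffer, size);    // a full line is waiting, so this will not block
}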
I have copied this code to write through the serial port in C++. It works properly, but what I need now is to know how to convert a string into an array of bytes, because I want to write several strings to communicate with an Arduino. I paste the code here:
#include <windows.h>
#include <stdio.h>
int main()
{
// Define the five bytes to send ("hello")
char bytes_to_send[5];
bytes_to_send[0] = 104;
bytes_to_send[1] = 101;
bytes_to_send[2] = 108;
bytes_to_send[3] = 108;
bytes_to_send[4] = 111;
// Declare variables and structures
HANDLE hSerial;
DCB dcbSerialParams = {0};
COMMTIMEOUTS timeouts = {0};
// Open the highest available serial port number
fprintf(stderr, "Opening serial port...");
hSerial = CreateFile(
"\\\\.\\COM1", GENERIC_READ|GENERIC_WRITE, 0, NULL,
OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL );
if (hSerial == INVALID_HANDLE_VALUE)
{
fprintf(stderr, "Error\n");
return 1;
}
else fprintf(stderr, "OK\n");
// Set device parameters (38400 baud, 1 start bit,
// 1 stop bit, no parity)
dcbSerialParams.DCBlength = sizeof(dcbSerialParams);
if (GetCommState(hSerial, &dcbSerialParams) == 0)
{
fprintf(stderr, "Error getting device state\n");
CloseHandle(hSerial);
return 1;
}
dcbSerialParams.BaudRate = CBR_38400;
dcbSerialParams.ByteSize = 8;
dcbSerialParams.StopBits = ONESTOPBIT;
dcbSerialParams.Parity = NOPARITY;
if(SetCommState(hSerial, &dcbSerialParams) == 0)
{
fprintf(stderr, "Error setting device parameters\n");
CloseHandle(hSerial);
return 1;
}
// Set COM port timeout settings
timeouts.ReadIntervalTimeout = 50;
timeouts.ReadTotalTimeoutConstant = 50;
timeouts.ReadTotalTimeoutMultiplier = 10;
timeouts.WriteTotalTimeoutConstant = 50;
timeouts.WriteTotalTimeoutMultiplier = 10;
if(SetCommTimeouts(hSerial, &timeouts) == 0)
{
fprintf(stderr, "Error setting timeouts\n");
CloseHandle(hSerial);
return 1;
}
// Send specified text (remaining command line arguments)
DWORD bytes_written, total_bytes_written = 0;
fprintf(stderr, "Sending bytes...");
if(!WriteFile(hSerial, bytes_to_send, 5, &bytes_written, NULL))
{
fprintf(stderr, "Error\n");
CloseHandle(hSerial);
return 1;
}
fprintf(stderr, "%d bytes written\n", bytes_written);
// Close serial port
fprintf(stderr, "Closing serial port...");
if (CloseHandle(hSerial) == 0)
{
fprintf(stderr, "Error\n");
return 1;
}
fprintf(stderr, "OK\n");
// exit normally
return 0;
}
Here is the part I want to change:
// Define the five bytes to send ("hello")
char bytes_to_send[5];
bytes_to_send[0] = 104;
bytes_to_send[1] = 101;
bytes_to_send[2] = 108;
bytes_to_send[3] = 108;
bytes_to_send[4] = 111;
I want to use my own strings and a method to transform them into bytes, so that I can send them through the serial port.
A string already is an array of bytes.
char bytes_to_send[] = "hello";
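If the text lives in a std::string, its internal buffer can be handed to WriteFile directly. A minimal sketch using the hSerial handle opened in the program above (sendString is just an illustrative helper name, not part of that program):
#include <string>

bool sendString(HANDLE hSerial, const std::string &text)
{
    DWORD bytes_written = 0;
    return WriteFile(hSerial, text.c_str(), (DWORD)text.size(), &bytes_written, NULL)
        && bytes_written == text.size();
}

// Usage, for example: sendString(hSerial, "hello");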
When I connect my embedded device to my system, I run my program, which writes to the port the device is connected to and prints the reply to the console.
When I connect my device and run this program, it gives no output.
But when I connect my device, use PuTTY to send some commands first, and then run my program, it works.
Maybe there is a problem in the way I am starting communication?
My source code is:
#include "stdafx.h"
#include <iostream>
//#include <windows.h>
#include <afx.h>
int main()
{
using namespace std;
int i=0;
// cout << "Hello world!" << endl;
HANDLE hSerial;
hSerial = CreateFile("COM5",
GENERIC_READ | GENERIC_WRITE,
FILE_SHARE_WRITE | FILE_SHARE_READ,
0,
OPEN_EXISTING,
FILE_ATTRIBUTE_NORMAL,
0);
if(hSerial==INVALID_HANDLE_VALUE)
{
if(GetLastError()==ERROR_FILE_NOT_FOUND)
{
// TRACE("serial port does not exist for reading\n");
//serial port does not exist. Inform user.
}
// TRACE("some other error,serial port does not exist for reading\n");
//some other error occurred. Inform user.
}
DCB dcbSerialParams = {0};
dcbSerialParams.DCBlength=sizeof(dcbSerialParams);
if (!GetCommState(hSerial, &dcbSerialParams))
{
// TRACE("error getting state for reading\n");
//error getting state
}
dcbSerialParams.BaudRate=9600;
dcbSerialParams.ByteSize=8;
dcbSerialParams.StopBits=ONESTOPBIT;
dcbSerialParams.Parity=NOPARITY;
if(!SetCommState(hSerial, &dcbSerialParams))
{
//TRACE("error setting state for reading\n");
//error setting serial port state
}
COMMTIMEOUTS timeouts={0};
timeouts.ReadIntervalTimeout=50;
timeouts.ReadTotalTimeoutConstant=50;
timeouts.ReadTotalTimeoutMultiplier=10;
timeouts.WriteTotalTimeoutConstant=50;
timeouts.WriteTotalTimeoutMultiplier=10;
if(!SetCommTimeouts(hSerial, &timeouts))
{
// TRACE("some error occured for reading\n");
//error occureed. Inform user
}
int n=100,n1=100;
char szBuff[100];
DWORD dwBytesRead = 0;
char szBuff1[100];
DWORD dwByteswrote = 0;
memset(szBuff1,0,100);
memcpy(szBuff1,"LIST\r",5);
if(!WriteFile(hSerial, szBuff1,5, &dwByteswrote, NULL))
{
cout << "error writing" ;
}
cout << szBuff1 << endl;
cout << dwByteswrote << endl;
while(1)
{
if(!ReadFile(hSerial, szBuff, n1, &dwBytesRead, NULL))
{
cout << "error reading";
break;
}
else
{
cout << dwBytesRead << endl;
szBuff[dwBytesRead]='\0';
if(dwBytesRead>0)
{
cout << (szBuff);
break;
}
}
}
cin >> i;
}
Try this... you will probably need to add code for the exceptional cases (e.g. if the response is bigger than 2024 bytes):
bool SendModemATCommand(const string &strCommand, int iModemPort, string &strRetValue)
{
    bool bRetValue = false;
    strRetValue = "";
    char cBuffer[2024];
    HANDLE hCom = NULL;
    char cComPort[64];
    sprintf_s(cComPort, "\\\\.\\COM%d", iModemPort);

    hCom = CreateFile( cComPort,
                       GENERIC_READ | GENERIC_WRITE,
                       0,              // must be opened with exclusive-access
                       NULL,           // no security attributes
                       OPEN_EXISTING,  // must use OPEN_EXISTING
                       0,              // not overlapped I/O
                       NULL            // hTemplate must be NULL for comm devices
                       );
    if (hCom != INVALID_HANDLE_VALUE)
    {
        COMMTIMEOUTS comTimeOuts;
        comTimeOuts.ReadIntervalTimeout = MAXDWORD;
        comTimeOuts.ReadTotalTimeoutMultiplier = MAXDWORD;
        comTimeOuts.ReadTotalTimeoutConstant = 0; //MAXDWORD;
        comTimeOuts.WriteTotalTimeoutMultiplier = 0;
        comTimeOuts.WriteTotalTimeoutConstant = 0;
        if (SetCommTimeouts(hCom, &comTimeOuts))
        {
            DCB dcb;
            dcb.DCBlength = sizeof(DCB);
            if (GetCommState(hCom, &dcb))
            {
                DWORD dwBytesWritten = 0;
                DWORD dwBytesRead = 0;
                DWORD dwBytesTotal = 0;
                if (WriteFile(hCom, strCommand.c_str(), (int)strCommand.size(), &dwBytesWritten, NULL))
                {
                    if (dwBytesWritten == strCommand.size())
                    {
                        dwBytesRead = 0;
                        DWORD tickStart = GetTickCount();
                        bool bTimeOut = false;
                        while (true)
                        {
                            while (ReadFile(hCom, cBuffer + dwBytesTotal, 1, &dwBytesRead, NULL))
                            {
                                if (dwBytesRead == 0 && dwBytesTotal != dwBytesWritten)
                                    break;
                                dwBytesTotal += dwBytesRead;
                            }
                            if (dwBytesTotal == 0)
                            {
                                // timeout
                                if (GetTickCount() - tickStart > 10000) // 10 seconds
                                {
                                    bTimeOut = true;
                                    break;
                                }
                            }
                            else
                                break;
                        }
                        cBuffer[dwBytesTotal] = '\0';
                        strRetValue = cBuffer;
                        if (bTimeOut)
                            strRetValue = "Timed out:" + strCommand;
                        else
                            bRetValue = true;
                    }
                }
            }
        }
        CloseHandle(hCom);
    }
    return bRetValue;
}
Most likely the problem is with your initialization.
I recall having this type of trouble before, and the COMMTIMEOUTS structure was particularly troublesome.
I suggest you get a null modem cable from COM5 to another port on the machine (if you have one), or to another computer. Then use a terminal program to open the other port and check whether you can see the "LIST" command coming through when you run the program. If not, then it's very likely related to the way you are initializing and opening the COM port.
This link may prove useful. Just strip out the Afx stuff and look particularly at the initialization.
http://www.codeproject.com/KB/system/chaiyasit_t.aspx
One other suggestion: you only send LIST once. If the device is not already plugged in and ready, nothing will happen. Maybe you should keep sending the LIST command until you get a response, as sketched below.
Also, do you need "LIST\r\n" or just "LIST\r"? What is the other end expecting?
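For what it's worth, the retry idea could look something like this, reusing the hSerial handle and timeouts from your code (ten attempts and the plain "LIST\r" terminator are assumptions; adjust them to whatever the device expects):
char reply[100];
DWORD bytesRead = 0;
for (int attempt = 0; attempt < 10 && bytesRead == 0; ++attempt)
{
    DWORD bytesWritten = 0;
    if (!WriteFile(hSerial, "LIST\r", 5, &bytesWritten, NULL))
        break;                                   // write failed, give up
    if (!ReadFile(hSerial, reply, sizeof(reply) - 1, &bytesRead, NULL))
        break;                                   // read failed, give up
}
if (bytesRead > 0)
{
    reply[bytesRead] = '\0';                     // terminate and print whatever came back
    cout << reply << endl;
}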