Qthread signal emit not connecting - c++

I have two signals that are connecting and one that will not.
I have a mainwindow widget to display images processed by a QThread, which emits the original and processed images to my mainwindow, where they are displayed; this works. I would also like to emit a frames-per-second calculation from this thread, but it will not connect.
all code here:
https://github.com/ianzur/qtGui_imageProcessing
mainwindow.cpp:
// Constructs the main window: wires the UI-refresh timer, initialises the
// ROI sliders, and connects the ProcessorThread signals to the display widgets.
MainWindow::MainWindow(QWidget *parent) :
QMainWindow(parent),
ui(new Ui::MainWindow)
{
ui->setupUi(this);
// Timer for UI responsiveness
tmrTimer = new QTimer(this);
connect(tmrTimer,SIGNAL(timeout()),this,SLOT(UpdateGUI()));
tmrTimer->start(20);
// Set initial slider positions (for selecting ROI)
ui->SldLower->setSliderPosition(0);
ui->SldUpper->setSliderPosition(480);
ui->SldLeft->setSliderPosition(0);
ui->SldRight->setSliderPosition(640);
//connections to video processor thread
// NOTE(review): this connect returns false. String-based connections require
// a slot whose signature matches exactly; there is no setNum(float) slot --
// QLabel only offers setNum(int)/setNum(double), and QPlainTextEdit has no
// setNum at all. Emit a double, or use a new-style connect with a lambda.
const bool c = connect(&processor,SIGNAL(outFPS(float)),ui->FPS,SLOT(setNum(float)));
connect(&processor,SIGNAL(inDisplay(QPixmap)),ui->inVideo,SLOT(setPixmap(QPixmap)));
connect(&processor,SIGNAL(outDisplay(QPixmap)),ui->outVideo,SLOT(setPixmap(QPixmap)));
qDebug() << "connected" << c;
}
Thread declaration
// Worker thread that captures camera frames, inverts a selectable ROI, and
// publishes the original/processed frames plus an FPS estimate.
class ProcessorThread : public QThread
{
Q_OBJECT
public:
explicit ProcessorThread(QObject *parent = 0);
// Sets the ROI and band limits; called from the GUI thread (see race note below).
void update(QRect r, float low, float high);
// Monotonic clock in milliseconds, used only for FPS bookkeeping.
int CLOCK();
// Exponentially smoothed frames-per-second estimate.
float avgfps();
signals:
void inDisplay(QPixmap pixmap);
void outDisplay(QPixmap pixmap);
// NOTE(review): a string-based connect to this signal needs a slot taking
// exactly `float`; QLabel::setNum only exists for int and double.
void outFPS(float fps);
protected:
void run() override;
private:
cv::VideoCapture capture; // NOTE(review): unused -- run() opens its own local capture
//Region of interest
cv::Rect myROI; // written by update() from the GUI thread without locking
float lowHz;
float highHz;
//float fps;
int _fpsstart=0;
float _avgfps=0;
float _fps1sec=0;
};
Thread definition
// The thread object itself lives in the creating (GUI) thread; run() executes
// in the new thread once start() is called.
ProcessorThread::ProcessorThread(QObject *parent) : QThread(parent)
{
}
// Monotonic wall-clock time in milliseconds.
// Fixed: the original computed `t.tv_sec * 1000` in (32-bit) int, which
// overflows once uptime exceeds ~24.8 days, and mixed int with a double
// term (`t.tv_nsec*1e-6`). 64-bit integer arithmetic avoids both.
// NOTE(review): the int return value still wraps eventually; callers only
// use differences, so a wrap merely skews one FPS sample.
int ProcessorThread::CLOCK()
{
    struct timespec t;
    clock_gettime(CLOCK_MONOTONIC, &t);
    return static_cast<int>(t.tv_sec * 1000LL + t.tv_nsec / 1000000);
}
// Returns an exponentially smoothed FPS estimate: once per second the frame
// count accumulated in _fps1sec is folded into _avgfps (90% old, 10% new).
float ProcessorThread::avgfps()
{
if(CLOCK()-_fpsstart>1000)
{
_fpsstart=CLOCK();
_avgfps=0.9*_avgfps+0.1*_fps1sec;
_fps1sec=0;
}
// every call counts one processed frame
_fps1sec++;
return _avgfps;
}
// Thread body: grabs frames from the default camera, inverts the selected
// ROI, and emits the original and processed frames plus an FPS reading.
void ProcessorThread::run()
{
// NOTE(review): a local capture is opened here; the `capture` member is unused.
VideoCapture camera(0);
cv::Mat inFrame, outFrame, cropped, bit;
while(camera.isOpened() && !isInterruptionRequested())
{
camera >> inFrame;
if(inFrame.empty())
continue;
outFrame = inFrame.clone();
// NOTE(review): myROI is written by update() from the GUI thread with no
// synchronisation -- a data race; guard it with a mutex or a queued signal.
cropped = inFrame(myROI);
bitwise_not(cropped, bit);
bit.copyTo(outFrame(myROI));
//qDebug() << avgfps();
emit outFPS(avgfps());
// NOTE(review): QPixmap is documented as usable only in the GUI thread;
// emitting QImage and converting in the receiving slot is the safe pattern.
emit inDisplay(QPixmap::fromImage(QImage(inFrame.data,inFrame.cols,inFrame.rows,inFrame.step,QImage::Format_RGB888).rgbSwapped()));
emit outDisplay(QPixmap::fromImage(QImage(outFrame.data,outFrame.cols,outFrame.rows,outFrame.step,QImage::Format_RGB888).rgbSwapped()));
}
}
// Stores the ROI (converted from Qt to OpenCV coordinates) and the band limits.
// NOTE(review): runs on the GUI thread while run() reads these members in the
// worker thread -- unsynchronised access; protect with a mutex.
void ProcessorThread::update(QRect r, float low, float high)
{
this->myROI = cv::Rect(r.x(),r.y(),r.width(),r.height());
this->lowHz = low;
this->highHz = high;
}
I cannot figure out why only the outFPS will not connect.

My first thoughts, looking at this line:
const bool c = connect(&processor,SIGNAL(outFPS(float)),ui->FPS, SLOT(setNum(float)));
In your UI, FPS looks like a QPlainTextEdit. I don't see a setNum() slot documented for that class. There is one for QLabel (it takes an int or a double) — is that what you meant to use?

Related

How do I use paintEvent to paint a QImage from video data?

I construct a QImage with data I get from an API call. This QImage I will draw with the overridden paintEvent(QPaintEvent*) method into a widget. From the function that does the API call I emit a signal that sends the grabbed data to a slot that constructs the QImage. With this process there is no problem. The data is transfered and the construct of the image works.
My question is how can I use the constucted image in the overridden paintEvent? I can open a image from the disk and draw it into the widget, but this works from inside the paintEvent() method. With the API call I got the data from outside.
class GrabberClass : public QObject
{
Q_OBJECT
public:
GrabberClass(QObject* parent = nullptr);
virtual ~GrabberClass();
bool applyFrameCallback();
bool getFrameData(DtMxData* data);
bool startProcessing();
private:
RenderClass *m_renderClass;
DtMxProcess *m_process;
DTAPI_RESULT m_result;
DtFrame *m_Frame
signals:
void sendFrameData(uchar* pData, int width, int height, int bytesPerRow);
};
//GrabberClass.cpp
//constructor
//GrabberClass.cpp
// Constructs the grabber and wires its frame signal straight into the renderer.
// Fixed: the parent parameter was accepted but never forwarded to QObject,
// leaving the grabber without an owner (lifetime/cleanup bug).
GrabberClass::GrabberClass(QObject *parent) :
    QObject(parent),
    m_renderClass(new RenderClass)
{
    // Qt::DirectConnection: the slot runs in the emitting (callback) thread.
    const bool connected = QObject::connect(this, &GrabberClass::sendFrameData, m_renderClass, &RenderClass::receiveFrameData, Qt::DirectConnection);
    Q_ASSERT(connected); //value is true.
}
bool GrabberClass::applyFrameCallback()
{
auto frameCallback = [](DataContainer* frameData, void* pContext)
{
((GrabberClass*)pContext)->getFrameData(frameData);
};
m_result = m_process->AddMatrixCbFunc(frameCallback, this);
if (m_result != DTAPI_OK) {
return false;
}
return true;
}
// Extracts the current frame's plane buffer and geometry and emits them.
// NOTE(review): declared as getFrameData() in the header but defined here as
// grabFrame() -- one of the two names is stale.
bool GrabberClass::grabFrame(DataContainer* frameData)
{
m_Frame = frameData->m_Rows[IN_ROW].m_CurFrame;
// pData points into the API's own frame buffer; it is only valid for the
// duration of this callback, so receivers must deep-copy (QImage::copy()).
uchar* pData = m_Frame->m_Video->m_Planes->m_pBuf;
int bytesPerRow = m_Frame->m_Video->m_Planes->m_Stride;
int frameWidth = m_Frame->m_Video->m_Width;
int frameHeight = m_Frame->m_Video->m_Height;
emit sendFrameData(pData, frameWidth, frameHeight, bytesPerRow);
return true;
}
//Render Class.h
// Widget that stores the latest received frame and paints it on demand.
class RenderClass : public QOpenGLWidget {
Q_OBJECT
public:
RenderClass(QWidget* parent = nullptr);
virtual ~RenderClass();
private:
// Last frame delivered to receiveFrameData(); null until the first frame.
QImage m_srcImage;
protected:
void paintEvent(QPaintEvent*);
public slots:
// Receives a raw (non-owned) pixel buffer from GrabberClass.
void receiveFrameData(uchar* pData, int width, int height, int bytesPerRow);
};
//RenderClass.cpp
VideoMonitor::VideoMonitor(QWidget* parent) :
QOpenGLWidget(parent),
m_srcImage(nullptr)
{
}
// Builds the displayable image from the raw callback buffer and schedules a repaint.
// Fixed: QImage(uchar*, ...) only WRAPS the caller's buffer -- it never copies.
// The grabber's buffer is recycled once the callback returns, so by the time
// the (asynchronous) paintEvent ran, the pixel data was gone and the image
// appeared null/garbage. .copy() makes the QImage own its data.
void RenderClass::receiveFrameData(uchar* pData, int width, int height, int bytesPerRow)
{
    m_srcImage = QImage(pData, width, height, bytesPerRow, QImage::Format_Indexed8).copy();
    qDebug() << "Slot is called." << m_srcImage; //QImage is constructed with valid data
    QWidget::update();
}
// Paints the most recently received frame, scaled to the widget rectangle.
void RenderClass::paintEvent(QPaintEvent*)
{
    if (m_srcImage.isNull()) {
        qDebug() << "QImage is null";
        return;
    }
    QPainter painter(this);
    painter.drawImage(rect(), m_srcImage);
    painter.end();
    qDebug() << "Draw image.";
}
I'm new to C++, so it would be nice if someone could give me some advice and an explanation.

Many active QTimers timekeeping causing GUI lag

I have created a simple app for monitoring the connected devices. This app shows the connection status of 25 client devices.
This app implements a TCP server and listens at port 7777 and as many as 25 clients can be connected to this application. If no data is received from a client for 30 seconds, the app marks the device as "Offline".
For this purpose, QTimer for each connected device is started for 30 sec when some client connects and the payload is received. Each timer is connected to a common SLOT refreshOfflineDevices() Soon as any timer timeout occurs, refreshOfflineDevices() is called and the non-running timers corresponding to the device are marked as "Offline" in the GUI.
The app works fine and the GUI is updated instantly when the connected device count is not more than 4 or 5. As the connected devices rise, (greater than 8 or 9) the lag in the GUI update becomes obvious.
After some desk research, I assume that the parallel timers would need to be moved to a thread to avoid GUI lags. For that, I created a CyclicWorker class for separating the QTimer but not sure how this will work in this case
I need help with moving and managing all timekeeping events to a thread. Also, I need advice on whether my assumption about the cause of the GUI lag is correct.
my app GUI
monitor.h
#ifndef CENTRALMONITOR_H
#define CENTRALMONITOR_H
#include <QtCore>
#include <QMainWindow>
#include "ui_device_display.h"
#include "tcp_server.h"
#include "cyclic_worker.h"
#define MAX_DEVICES (25)
#define DEVICE_KEEP_ALIVE_MS (30*1000) // keeps track of the connection before marking "Offline"
namespace Ui
{
class MainWindow;
}
// Central monitor window: shows 25 device tiles and tracks each device's
// liveness with one single-shot keep-alive timer per device.
class MainWindow : public QMainWindow
{
Q_OBJECT
private:
Ui::MainWindow *ui;
TCPServer *ptr_server = nullptr;
// One single-shot timer per device; "active" means the device is online.
QTimer *ptr_deviceTimer[MAX_DEVICES] = {nullptr};
void GUI_update(const int device_number, const QString device_status);
// NOTE(review): started but never given real work; its run() only spins.
CyclicWorker timerThread;
public:
explicit MainWindow(QWidget *parent = 0);
~MainWindow();
UIDeviceDisplay *ptr_devices[MAX_DEVICES] = {nullptr};
public slots:
// Parses {"device_number", "device_status"} payloads from the TCP server.
void parseJSON(QString response);
// Marks every device whose keep-alive timer expired as "Offline".
void refreshOfflineDevices();
};
#endif // CENTRALMONITOR_H
monitor.cpp
#include "monitor.h"
#include "ui_monitor.h"
// Builds the monitor UI, starts the TCP server, creates one keep-alive timer
// per device, and launches the worker thread.
MainWindow::MainWindow(QWidget *parent) :
QMainWindow(parent),
ui(new Ui::MainWindow)
{
system("clear"); // NOTE(review): shells out just to clear the console; non-portable.
ui->setupUi(this);
// MyServer instance (TCPServer contains TCPClient instance).
ptr_server = new TCPServer();
connect(ptr_server, SIGNAL(uiPayloadReady(QString)), this, SLOT(parseJSON(QString)));
// draw (MAX_DEVICES) (initialize as offline)
for(int i=0 ; i<5 ; i++) // 5 rows
{
for(int j=0 ; j<5 ; j++) // 5 columns
{
ptr_devices[i*5 + j] = new UIDeviceDisplay(this, j, i);
ptr_devices[i*5 + j]->show();
QString text_device_number = QString("").append(QString::number( i*5 + j + 1) );
ptr_devices[i*5 + j]->ptr_label_device_number->setText(text_device_number);
}
}
// connect keep alive timers to use later
for(int device_idx=0; device_idx<MAX_DEVICES; device_idx++)
{
// Fixed: parented to the window so the timers are released with it
// (they were previously allocated without a parent and leaked).
ptr_deviceTimer[device_idx] = new QTimer(this);
connect(ptr_deviceTimer[device_idx], SIGNAL(timeout()), this, SLOT(refreshOfflineDevices()));
this->ptr_deviceTimer[device_idx]->setSingleShot(true);
ptr_deviceTimer[device_idx]->setTimerType(Qt::PreciseTimer);
}
// Fixed: the flag must be raised BEFORE start(). The original started the
// thread first, so run() could observe threadFlag == false and exit at once.
timerThread.threadFlag = 1;
timerThread.start();
}
// Fixed: the worker thread was left running -- destroying a QThread that is
// still running is undefined behaviour. Signal it to stop and wait, then
// release the unparented server and timers (they leaked in the original).
MainWindow::~MainWindow()
{
timerThread.threadFlag = 0;
timerThread.wait();
for(int device_idx=0; device_idx<MAX_DEVICES; device_idx++)
delete ptr_deviceTimer[device_idx];
delete ptr_server;
delete ui;
}
/*
#brief This slot is emitted by ptr_socket readReady() signal by the TCP client handler
#param The received payload for updating GUI
*/
void MainWindow::parseJSON(const QString response)
{
const QJsonDocument jsonDocument = QJsonDocument::fromJson(response.toUtf8());
const QJsonObject jsonObjectRecords = jsonDocument.object();
const int device_number = jsonObjectRecords.value("device_number").toInt();
const QString device_status = jsonObjectRecords.value("device_status").toString();
// start time keeper for current device.
ptr_deviceTimer[device_number-1]->start(DEVICE_KEEP_ALIVE_MS);
GUI_update(device_number, device_status);
}
/*
#brief Updates one device's status label and refreshes the online-device count.
#param device_number 1-based device index.
#param device_status text shown on the device tile.
*/
void MainWindow::GUI_update(const int device_number, const QString device_status)
{
const int device_idx = device_number-1;
// update device label.
ptr_devices[device_idx]->ptr_label_device_status->setText(device_status);
// refresh online devices label: a device counts as online while its
// keep-alive timer is still running.
int onlineCount = 0;
// Fixed: the loop variable used to shadow `device_idx` above.
for(int idx=0; idx<MAX_DEVICES; idx++)
{
if( ptr_deviceTimer[idx]->isActive() )
onlineCount++;
}
ui->label_online_devices->setText(QString("Online devices: %1").arg(onlineCount));
}
/*
#brief Runs on every keep-alive timeout; every device whose timer has expired
       (no payload within the keep-alive window) is marked "Offline".
*/
void MainWindow::refreshOfflineDevices()
{
for (int idx = 0; idx < MAX_DEVICES; ++idx)
{
// an active timer means the device reported recently -- leave it alone
if (ptr_deviceTimer[idx]->isActive())
continue;
GUI_update(idx + 1, "Offline");
}
}
cyclic_worker.h
#include <QDebug>
#include <QThread>
// Thread wrapper intended to host the keep-alive timekeeping off the GUI thread.
class CyclicWorker : public QThread
{
Q_OBJECT
public:
// NOTE(review): read in run() and written from the GUI thread with no
// synchronisation -- should be std::atomic<bool>.
bool threadFlag; // variable used to control thread execution
CyclicWorker();
void run();
// NOTE(review): hides the non-virtual QThread::quit(); a call through a
// QThread pointer will not reach this function.
void quit();
private:
};
cyclic_worker.cpp
#include "cyclic_worker.h"
// Constructs the worker with its control flag lowered; run() only loops
// while threadFlag is raised.
CyclicWorker::CyclicWorker()
    : threadFlag(false)
{
    qDebug() << "\nCyclicWorker object created";
}
/*----------------------------------------------------------------------------
* void run()
*
* Return value : none
*
* Description : this function runs after thread start
*----------------------------------------------------------------------------*/
void CyclicWorker::run()
{
qDebug("Thread invoked . . .");
while(threadFlag)
{
}
}
/*----------------------------------------------------------------------------
* void quit()
*
* Return value : none
*
* Description : this function stops the running thread
*----------------------------------------------------------------------------*/
void CyclicWorker::quit()
{
qDebug() << "Thread stopped . . .";
}
ui_device_display.h
#ifndef UI_DEVICE_DISPLAY_H
#define UI_DEVICE_DISPLAY_H
#include <QtWidgets>
#define X_PADDING (30) // this is the base container widget co-ordinates
#define Y_PADDING (110)
// One tile of the monitor grid: a frame holding a device-number label and a
// status label, positioned on a fixed grid by the (x, y) cell indices.
class UIDeviceDisplay : public QFrame
{
Q_OBJECT
public:
UIDeviceDisplay(QWidget *parent = nullptr, int x=0, int y=0);
QLabel *ptr_label_device_number = nullptr;
QLabel *ptr_label_device_status = nullptr;
// fixed tile and label geometry, in pixels
static const int frameWidth = 240;
static const int frameHeight = 190;
static const int deviceLabelWidth = 70;
static const int deviceLabelHeight = 50;
static const int statusLabelWidth = 150;
static const int statusLabelHeight = 30;
};
#endif
ui_device_display.cpp
#include "ui_device_display.h"
// Builds one device tile at grid cell (x, y); starts hidden and "Offline".
UIDeviceDisplay::UIDeviceDisplay(QWidget *parent, int x, int y) : QFrame(parent)
{
    // Frame that groups the per-device labels, placed on the fixed grid.
    setGeometry(QRect(X_PADDING + frameWidth * x, Y_PADDING + frameHeight * y,
                      frameWidth, frameHeight));
    hide();
    // Device-number label, horizontally centred near the top of the tile.
    ptr_label_device_number = new QLabel(this);
    ptr_label_device_number->setGeometry(QRect((frameWidth - deviceLabelWidth) / 2, 20,
                                               deviceLabelWidth, deviceLabelHeight));
    ptr_label_device_number->setAlignment(Qt::AlignCenter);
    // Status label, initialised to "Offline".
    ptr_label_device_status = new QLabel(this);
    ptr_label_device_status->setText("Offline");
    ptr_label_device_status->setGeometry(QRect(45, 90, statusLabelWidth, statusLabelHeight));
    ptr_label_device_status->setAlignment(Qt::AlignCenter);
}
tcp_server.h
#ifndef TCP_SERVER_H
#define TCP_SERVER_H
#include <QTcpServer>
#include <QTcpSocket>
#include <QAbstractSocket>
#include "tcp_client_handler.h"
// Listens on port 7777 and relays each client's payload to the GUI.
class TCPServer : public QTcpServer
{
Q_OBJECT
public:
explicit TCPServer(QObject *parent=0);
protected:
// NOTE(review): in Qt 5 the base virtual is incomingConnection(qintptr);
// this int overload does NOT override it and is never called by Qt.
void incomingConnection(int handle);
signals:
void uiPayloadReady(QString uiPayload);
public slots:
// Re-emits a client handler's payload under the server's own signal.
void payloadReady(QString payload);
};
#endif
tcp_server.cpp
#include "tcp_server.h"
// Starts listening on all interfaces, port 7777, as soon as it is constructed.
TCPServer::TCPServer(QObject *parent) :
    QTcpServer(parent)
{
    const bool listening = listen(QHostAddress::Any, 7777);
    if (listening)
        qDebug("DEBUG: Server listening at 7777");
    else
        qDebug("DEBUG: Could not start server");
}
// Wraps an accepted socket descriptor in a per-client handler object.
// NOTE(review): the signature must be incomingConnection(qintptr) (with
// override) for Qt 5 to invoke it; as written it only shadows the base member.
void TCPServer::incomingConnection(int handle)
{
// parented to the server, so handlers are released with it -- but they are
// never deleted on client disconnect and accumulate over time.
TCPClientHandler *ptr_client = new TCPClientHandler(this);
ptr_client->SetSocket(handle);
connect(ptr_client, SIGNAL(payloadReady(QString)), this, SLOT(payloadReady(QString)));
}
// Forwards a handler's payload to the GUI via uiPayloadReady.
void TCPServer::payloadReady(QString payload)
{
emit uiPayloadReady(payload);
}
tcp_client_handler.h
#ifndef TCP_CLIENT_HANDLER_H
#define TCP_CLIENT_HANDLER_H
#include <QObject>
#include <QTcpSocket>
#include <QDebug>
// Owns one client QTcpSocket and re-emits everything received on it.
class TCPClientHandler : public QObject
{
Q_OBJECT
public:
explicit TCPClientHandler(QObject *parent = nullptr);
// Adopts an accepted socket descriptor and wires its signals.
void SetSocket(int Descriptor);
QTcpSocket *ptr_socket = nullptr;
signals:
void payloadReady(QString payload);
public slots:
void connected();
void disconnected();
void readReady();
private:
};
#endif
tcp_client_handler.cpp
#include "tcp_client_handler.h"
// The socket itself is created later, in SetSocket().
TCPClientHandler::TCPClientHandler(QObject *parent) : QObject(parent)
{
}
// Adopts an accepted socket descriptor. The connects are wired BEFORE
// setSocketDescriptor() so the connected() signal it emits is not missed.
void TCPClientHandler::SetSocket(int Descriptor)
{
ptr_socket = new QTcpSocket(this);
connect(ptr_socket, SIGNAL(connected()), this, SLOT(connected()));
connect(ptr_socket, SIGNAL(disconnected()), this, SLOT(disconnected()));
connect(ptr_socket, SIGNAL(readyRead()), this, SLOT(readReady()));
// NOTE(review): return value unchecked; it is false for an invalid descriptor.
ptr_socket->setSocketDescriptor(Descriptor);
}
// Placeholder for the socket's connected() signal (debug logging disabled).
void TCPClientHandler::connected()
{
//qDebug("DEBUG: client connect event");
}
// Placeholder for the socket's disconnected() signal (debug logging disabled).
void TCPClientHandler::disconnected()
{
//qDebug("DEBUG: client disconnect event");
}
// Drains everything currently readable and forwards it upstream as a QString.
void TCPClientHandler::readReady()
{
    emit payloadReady(QString(ptr_socket->readAll()));
}
main.cpp
#include "monitor.h"
#include <QApplication>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
MainWindow w;
w.setWindowFlags(Qt::Window | Qt::FramelessWindowHint);
w.show();
return a.exec();
}

QT Update variable of worker thread from mainwindow

I am displaying opencv video using QT multithreading concept as suggested here
I also wish to update private "path" variable of worker thread class on button click. So I added
connect(this, SIGNAL(send_to_worker(QString)),workers[view], SLOT(get_from_main(QString)));
However, get_from_main(QString) function never gets called.
May I please know what is the safest method to update worker thread class variable from mainwindow ?
Here is full code..
// Decodes one video file and emits each frame tagged with a display index.
class Worker : public QObject
{
Q_OBJECT
public:
Worker(QString path, int id);
~Worker();
public slots:
// Blocking decode loop -- see NOTE on the definition: while it runs, queued
// slot calls on this worker (e.g. get_from_main) are not delivered.
void readVideo(QString path = "");
void get_from_main(QString path);
signals:
// frame and index of label which frame will be displayed
void frameFinished(cv::Mat frame, int index);
void finished(int index);
private:
QString filepath;
int index;
};
//worker.cpp
#include "worker.h"
#include <QDebug>
#include <QThread>
#include <QTime>
// Stores the source path and the display-label index for this worker.
Worker::Worker(QString path, int id) : filepath(path), index(id)
{
}
// Nothing to release; cv::VideoCapture is a local in readVideo().
Worker::~Worker()
{
}
// Slot meant to let the GUI thread change this worker's video path.
// Fixed: the received path was logged but never stored, so the "update"
// had no effect even when the slot did run.
void Worker::get_from_main(QString path)
{
    qDebug() << "updating";
    filepath = path;
}
// Decodes the whole video in one blocking loop, emitting each frame.
// NOTE(review): while this loop runs, the worker thread never returns to its
// event loop, so queued slot invocations (e.g. get_from_main from the GUI)
// are NOT delivered until the video ends -- this is why the question's
// connect appears to "never fire".
void Worker::readVideo(QString path)
{
if (path.length() > 0)
filepath = path;
cv::VideoCapture cap(filepath.toStdString());
if (! cap.isOpened())
{
qDebug() << "Can't open video file " << filepath;
emit finished(index);
return;
}
cv::Mat frame;
while (true)
{
cap >> frame;
if (frame.empty())
{
// end of stream: emit a solid placeholder frame, then leave the loop
frame = cv::Mat(cv::Size(720, 576), CV_8UC3, cv::Scalar(192, 0, 0));
emit frameFinished(frame, index);
break;
}
// clone() so the receiver owns its pixels; `frame` is reused next pass
emit frameFinished(frame.clone(), index);
QThread::msleep(30);
}
emit finished(index);
}
//mainwindow.h
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include <opencv2/opencv.hpp>
#include "worker.h"
#define MAX_NUM_CAM 8
namespace Ui {
class MainWindow;
}
class QThread;
class QLabel;
// Main window: a grid of QLabels, one per camera, each fed by a Worker
// running in its own QThread.
class MainWindow : public QMainWindow
{
Q_OBJECT
public:
explicit MainWindow(QWidget *parent = 0);
~MainWindow();
void init();
private slots:
void displayFrame(cv::Mat frame, int index);
void file_open_clicked();
signals:
// Fixed: a signal declaration needs an explicit void return type; the
// original `send_to_worker(QString path);` is ill-formed C++.
void send_to_worker(QString path);
private:
Ui::MainWindow *ui;
int numCams;
QLabel *labels[MAX_NUM_CAM];
QThread* threads[MAX_NUM_CAM];
Worker* workers[MAX_NUM_CAM];
};
#endif // MAINWINDOW_H
//mainwindow.cpp
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QDebug>
#include <QThread>
#include <QLabel>
#include <QGridLayout>
// Registers cv::Mat with the meta-type system so it can cross thread
// boundaries in queued signal/slot connections, then builds the video grid.
MainWindow::MainWindow(QWidget *parent) :
QMainWindow(parent),
ui(new Ui::MainWindow)
{
ui->setupUi(this);
qRegisterMetaType< cv::Mat >("cv::Mat");
qDebug() << "Main thread " << QThread::currentThreadId();
init();
}
// Workers/threads clean themselves up via the deleteLater() chain in init().
MainWindow::~MainWindow()
{
delete ui;
}
// Lays out one QLabel per camera and spins up a worker/thread pair per video.
void MainWindow::init()
{
QGridLayout *grid = new QGridLayout;
int numCols = 2;
numCams = 4;
int row = 0, col = 0;
for (int i = 0; i < numCams; i++)
{
labels[i] = new QLabel;
row = i / numCols;
col = i % numCols;
grid->addWidget(labels[i], row, col);
threads[i] = new QThread;
workers[i] = new Worker(QString("/home/shang/Videos/%1.mp4").arg(i+1), i);
workers[i]->moveToThread(threads[i]);
connect(workers[i], SIGNAL(frameFinished(cv::Mat, int)), this, SLOT(displayFrame(cv::Mat,int)));
// started() -> readVideo(): the whole video decodes inside this one slot,
// blocking the worker's event loop until the file ends (see readVideo).
connect(threads[i], SIGNAL(started()), workers[i], SLOT(readVideo()));
// self-cleanup chain: worker finishes -> thread quits -> both deleteLater.
// NOTE(review): once this fires, workers[i] becomes a dangling pointer.
connect(workers[i], SIGNAL(finished(int)), threads[i], SLOT(quit()));
connect(workers[i], SIGNAL(finished(int)), workers[i], SLOT(deleteLater()));
connect(threads[i], SIGNAL(finished()), threads[i], SLOT(deleteLater()));
threads[i]->start();
}
this->centralWidget()->setLayout(grid);
}
void MainWindow::file_open_clicked(){
QString Path = QFileDialog::getSaveFileName( this,tr("OpenVideo"),"","Video (*.avi)");
if(Path.isEmpty())
return;
view =3;
connect(this, SIGNAL(send_to_worker(QString)),workers[view], SLOT(get_from_main(QString)));
emit this->send_to_worker(recorder_Path);
}
// Converts a BGR cv::Mat to a QPixmap, halves its size, and shows it on the
// label that matches the worker's index.
void MainWindow::displayFrame(cv::Mat frame, int index)
{
    const QImage image(frame.data, frame.cols, frame.rows, frame.step, QImage::Format_RGB888);
    QPixmap pixmap = QPixmap::fromImage(image.rgbSwapped());
    pixmap = pixmap.scaled(QSize(frame.cols / 2, frame.rows / 2));
    labels[index]->setPixmap(pixmap);
}
Qt + OpenCV play videos with std::thread
You're trying to perform an operation periodically whilst still processing events -- that's simply calling out for a QTimer.
(Note that the following code is untested.)
Change your Worker class to make use of a QTimer rather than a loop that blocks the event queue...
// Timer-driven video worker: reads one frame per QTimer tick so the thread's
// event loop stays responsive between frames.
class Worker: public QObject {
Q_OBJECT;
public:
Worker (QString path, int id);
~Worker();
public slots:
// Fixed: the declaration still carried the old `QString path = ""` parameter.
// The timer-driven definition takes no arguments, and the pointer-to-member
// connect in the constructor cannot match &Worker::readVideo against the
// parameterless QTimer::timeout when the slot expects a QString (default
// arguments are not applied by new-style connects).
void readVideo();
void get_from_main(QString path);
signals:
// frame and index of label which frame will be displayed
void frameFinished(cv::Mat frame, int index);
void finished(int index);
private:
QString filepath;
int index;
QTimer timer;
cv::VideoCapture cap;
};
// Opens the capture immediately and drives readVideo() from a 30 ms timer.
Worker::Worker (QString path, int id)
: filepath(path)
, index(id)
, timer(this)
, cap(filepath.toStdString())
{
/*
* Connect QTimer::timeout to the readVideo slot that will read a
* single frame on each signal at 30ms intervals.
*/
// NOTE(review): this pointer-to-member connect only compiles if readVideo
// takes no arguments; a QString parameter (even defaulted) cannot be
// matched against the parameterless timeout() signal.
connect(&timer, &QTimer::timeout, this, &Worker::readVideo);
timer.start(30);
}
// timer and cap are value members; their destructors handle cleanup.
Worker::~Worker ()
{
}
// Replaces the current capture source with a newly supplied file path.
void Worker::get_from_main (QString path)
{
    qDebug() << "updating";
    filepath = path;
    // Fixed: cv::VideoCapture has no QString constructor -- convert first.
    cap = cv::VideoCapture(filepath.toStdString());
    if (!cap.isOpened()) {
        qDebug() << "Can't open video file " << filepath;
        emit finished(index);
    }
}
void Worker::readVideo ()
{
cv::Mat frame;
cap >> frame;
if (frame.empty())
{
frame = cv::Mat(cv::Size(720, 576), CV_8UC3, cv::Scalar(192, 0, 0));
emit frameFinished(frame, index);
break;
}
emit frameFinished(frame.clone(), index);
}
Now Worker::readVideo simply reads a single frame from the capture and then returns to the event loop.
Also remove the line...
connect(threads[i], SIGNAL(started()), workers[i], SLOT(readVideo()));
from MainWindow::init. As I stated above this is untested and probably needs a lot more error checking. But it should give you a good idea as to what's required.

Display a sequence of images on a QGraphicsView/QGraphicsScene

I am developing an application in which I have to display a sequence of images and draw some rectangles on it. That's why I have to chosen to display my images in a QGraphicsScene/QGraphicsView.
Before showing you my code I will detail some of the functions I have developed :
void equalizeHist_16U_linear(Mat &img, int Tolmin, int Tolmax);
and void equalizeHist_16U_linear(Mat &img, int Tolmin, int Tolmax, int x1, int x2, int y1, int y2);
=> Does the linear transformation of an image (img), on all the image,or a specified part of the image (x1->x2, y1->y2),with an upper (Tolmax) and lower (Tolmin) tolerance
void equalizeHist_16U(Mat &img); and void equalizeHist_16U(Mat &img, int x1, int x2, int y1, int y2);
=> Does the equalization of a 16-bit grayscale image, on a specified part of the image (x1->x2, y1->y2) or on the whole image.
ZonePoints getZonePoints();
=> Function used to get the zone of the image on which transformations will be done (the points comes from mouse positions), returns a ZonePoints object (int xmin, xma, ymin, ymax)
QPixmap convert16uc1(const cv::Mat& source);
=> Converts an OpenCV 16bits grayscale image (Mat) to a RGB32 QPixmap imgage
I have a folder containing numerous images labelled "ImageRaw_00000.png" to "ImageRaw_02999.png". My images are 16bits grayscale, I open them using OpenCV (as Qt cannot read 16bits images), I want to do some process on it (with OpenCV), convert them to QPixmap, add a rectangle (which represents the zone) on it and display them.
I also need to display 2 images at the same time (on the first I do the transformation on the zone given by "getZonePoints", on the second I display a rectangle corresponding to the zone points). My application looks like this : PrintScreen of the application
This is what I have so far :
Class declaration :
// Main window: shows a source/destination image pair in two QGraphicsScenes
// and applies 16-bit histogram equalisation to a user-selected zone.
class FenetrePrinc : public QMainWindow
{
Q_OBJECT
public:
explicit FenetrePrinc(QWidget *parent = 0);
~FenetrePrinc();
void compute_hist_16U(Mat &img, long* hist, bool cumul);
void equalizeHist_16U(Mat &img, int x1, int x2, int y1, int y2);
void equalizeHist_16U(Mat &img);
void equalizeHist_16U_linear(Mat &img, int Tolmin, int Tolmax, int x1, int x2, int y1, int y2);
void equalizeHist_16U_linear(Mat &img, int Tolmin, int Tolmax);
QPixmap convert16uc1(const cv::Mat& source);
public slots:
virtual void openFile();
virtual void start();
private:
QString filename;
QGraphicsScene *scene_src, *scene_dst;
// NOTE(review): never initialised before start() calls removeItem() on it.
QGraphicsItem *item_rect;
QGraphicsItem *img_src, *img_dst;
VideoCapture sequence;
Mat src, dst;
bool transfo_lineaire;
bool coupling;
int Tolmin, Tolmax;
int impsec; // target playback rate in frames per second
ZonePoints zpoints;
};
Class definition :
// Fixes: ui->setupUi(this) was never called, so every ui-> access below used
// widgets that were never created; item_rect was left uninitialised until
// start() called removeItem() on the garbage pointer.
FenetrePrinc::FenetrePrinc(QWidget *parent) : QMainWindow(parent), ui(new Ui::FenetrePrinc)
{
ui->setupUi(this);
item_rect = nullptr;
scene_src = new QGraphicsScene();
img_src = scene_src->addPixmap(QPixmap("vide.jpg"));
img_src->setZValue(1);
ui->view_src->setScene(scene_src);
scene_dst = new QGraphicsScene();
img_dst = scene_dst->addPixmap(QPixmap("vide.jpg"));
img_dst->setZValue(1);
ui->view_dst->setScene(scene_dst);
}
// Lets the user pick the image sequence; Start is enabled only on success.
void FenetrePrinc::openFile()
{
    filename = QFileDialog::getOpenFileName(this, tr("Open Video file"), "C:/", tr("Image Files (*.png)"));
    sequence.open(filename.toStdString());
    ui->buttonStart->setEnabled(sequence.isOpened());
}
// NOTE(review): this is the version the question identifies as broken -- the
// for(;;) loop never yields to the Qt event loop, so the scene updates are
// only painted after the loop exits (hence "only the last image is shown").
// cv::waitKey() pumps OpenCV HighGUI events, not Qt's. See the timer-driven
// tick() version later for the working approach.
void FenetrePrinc::start()
{
char key;
for(;;)
{
// the same capture feeds both Mats, so src and dst are consecutive frames
sequence >> src;
sequence >> dst;
if(src.empty() || dst.empty())
{
cout << "End of sequence" << endl;
break;
} zpoints = getZonePoints();
key = (char)waitKey(1000);
if(transfo_lineaire)
{
equalizeHist_16U_linear(src, Tolmin, Tolmax, zpoints.xmin, zpoints.xmax, zpoints.ymin, zpoints.ymax);
equalizeHist_16U_linear(dst, Tolmin, Tolmax);
}
else
{
equalizeHist_16U(src, zpoints.xmin, zpoints.xmax, zpoints.ymin, zpoints.ymax);
equalizeHist_16U(dst);
}
// replace the previous pixmap items rather than stacking new ones
scene_src->removeItem(img_src);
img_src = scene_src->addPixmap( convert16uc1(src));
img_src->setZValue(1);
scene_dst->removeItem(img_dst);
img_dst = scene_dst->addPixmap( convert16uc1(dst));
img_dst->setZValue(1);
// NOTE(review): item_rect is uninitialised on the first pass (see header).
scene_dst->removeItem(item_rect);
item_rect = scene_dst->addRect(zpoints.xmin, zpoints.ymin, zpoints.xmax-zpoints.xmin+1, zpoints.ymax-zpoints.ymin+1, QPen(Qt::red, 2, Qt::SolidLine));
item_rect->setZValue(2);
if(key == 'q' || key == 'Q' || key == 27)
break;
}
}
I had done an working application with the same behavior in which I did not used Qt, I used OpenCV for the image processing and the display. I tried to use the same technique :
Mat img;
VideoCapture sequence;
sequence.open(filename.toStdString());
for(;;)
{
sequence >> img;
... //Process images
... //Display images
}
But it does not work. Only the last image and rectangle is displayed. I guess the technique is similar with Qt but I cannot find examples of code
Thank you in advance
I found a way to do what I want, in the end it is really simple.
I used a timer and signal/slot mechanism.
My new class declaration is the following :
class FenetrePrinc : public QMainWindow
{
Q_OBJECT
public:
explicit FenetrePrinc(QWidget *parent = 0);
~FenetrePrinc();
void compute_hist_16U(Mat &img, long* hist, bool cumul);
void equalizeHist_16U(Mat &img, int x1, int x2, int y1, int y2);
void equalizeHist_16U(Mat &img);
void equalizeHist_16U_linear(Mat &img, int Tolmin, int Tolmax, int x1, int x2, int y1, int y2);
void equalizeHist_16U_linear(Mat &img, int Tolmin, int Tolmax);
QPixmap convert16uc1(const cv::Mat& source);
public slots:
virtual void openFile();
virtual void start();
virtual void tick(); //Added a 'tick()' slot
private:
QString filename;
QGraphicsScene *scene_src, *scene_dst;
QGraphicsItem *item_rect;
QGraphicsItem *img_src, *img_dst;
VideoCapture sequence, sequence2; //Declared a 2nd sequence
//If there's only 1 sequence, both images will be the same
Mat src, dst;
QTimer *timer //Added a QTimer
bool transfo_lineaire;
bool coupling;
int Tolmin, Tolmax;
int impsec;
ZonePoints zpoints;
};
Class definition :
// Fixes: ui->setupUi(this) was missing (the ui widgets were never created)
// and item_rect started uninitialised although tick() calls removeItem() on it.
FenetrePrinc::FenetrePrinc(QWidget *parent) : QMainWindow(parent), ui(new Ui::FenetrePrinc)
{
ui->setupUi(this);
item_rect = nullptr;
scene_src = new QGraphicsScene();
img_src = scene_src->addPixmap(QPixmap("vide.jpg"));
img_src->setZValue(1);
ui->view_src->setScene(scene_src);
scene_dst = new QGraphicsScene();
img_dst = scene_dst->addPixmap(QPixmap("vide.jpg"));
img_dst->setZValue(1);
ui->view_dst->setScene(scene_dst);
timer = new QTimer(this); //timer instantiation
}
// Opens the same file into both capture sequences; Start enables on success.
void FenetrePrinc::openFile()
{
    filename = QFileDialog::getOpenFileName(this, tr("Open Video file"), "C:/", tr("Image Files (*.png)"));
    sequence.open(filename.toStdString());
    sequence2.open(filename.toStdString());
    ui->buttonStart->setEnabled(sequence.isOpened());
}
void FenetrePrinc::start()
{
connect(timer, SIGNAL(timeout()), this, SLOT(tick()));
timer->start(1000/impsec);
}
// One playback step: reads the next frame of each sequence, equalises the
// configured zone, and refreshes both scenes plus the zone rectangle.
void FenetrePrinc::tick()
{
sequence >> src;
sequence2 >> dst;
if(src.empty() || dst.empty())
{
stop_timer();
// Fixed: without this return the function went on to process the empty
// Mats, which crashes inside the equalisation/convert calls.
return;
}
zpoints = mgaze->getZonePoints();
zpoints = mgaze->applyOFFSET(zpoints);
zpoints = mgaze->fixZonePoints(zpoints);
if(transfo_lineaire)
{
equalizeHist_16U_linear(src, Tolmin, Tolmax, zpoints.xmin, zpoints.xmax, zpoints.ymin, zpoints.ymax);
equalizeHist_16U_linear(dst, Tolmin, Tolmax);
}
else
{
equalizeHist_16U(src, zpoints.xmin, zpoints.xmax, zpoints.ymin, zpoints.ymax);
equalizeHist_16U(dst);
}
// replace the previous pixmap items rather than stacking new ones
scene_src->removeItem(img_src);
img_src = scene_src->addPixmap(convert16uc1(src));
img_src->setZValue(1);
scene_dst->removeItem(img_dst);
img_dst = scene_dst->addPixmap(convert16uc1(dst));
img_dst->setZValue(1);
scene_dst->removeItem(item_rect);
item_rect = scene_dst->addRect(zpoints.xmin, zpoints.ymin, zpoints.xmax-zpoints.xmin+1, zpoints.ymax-zpoints.ymin+1, QPen(Qt::red, 2, Qt::SolidLine));
item_rect->setZValue(2);
}
This works well, I just have to create a 'stop' slot in case I want to stop the video and that'll be good

How to calculate total distance covered by a sensor from its set of locations which are 3D points?

I am using Qt GUI to track the motion of a sensor. The mainwindow.cpp file is:
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include "ATC3DG.h"
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/core/core.hpp>
#include "QTimer"
#include "qtimer.h"
#include "math.h"
// Returns the square of its argument (helper for squared-distance sums).
double square(double value)
{
    return value * value;
}
// Sensor hardware is initialised lazily in on_start_clicked(), not here.
MainWindow::MainWindow(QWidget *parent) :
QMainWindow(parent),
ui(new Ui::MainWindow)
{
ui->setupUi(this);
}
// EM_time is parented to this window, so Qt releases it with the window.
MainWindow::~MainWindow()
{
delete ui;
}
// Initialises the ATC3DG tracker and begins polling it via EM_time.
void MainWindow::on_start_clicked()
{
points.clear(); //points is a global std::vector<cv::Point3> declared in mainwindow.h
errorCode = InitializeBIRDSystem();
errorCode = GetBIRDSystemConfiguration(&ATC3DG.m_config);
id = 0;
errorCode = SetSystemParameter(SELECT_TRANSMITTER, &id, sizeof(id));
// NOTE(review): a fresh QTimer is allocated on every click (the previous one
// leaks with its connection still live), and start() with no interval fires
// with a 0 ms timeout, i.e. as fast as the event loop can spin.
EM_time = new QTimer(this);
connect(EM_time, SIGNAL(timeout()), this, SLOT(showValues()));
EM_time->start();
}
// Polls the sensor once, appends the 3D sample to `points`, shows it in the UI.
void MainWindow::showValues()
{
// NOTE(review): stopping/restarting the timer around the body is unnecessary;
// slots on the same thread are never re-entered mid-execution.
EM_time->stop();
pRecord = &record;
{ // NOTE(review): these bare braces add nothing (flagged in the review below)
sensorID = 0;
{
errorCode = GetAsynchronousRecord(sensorID, pRecord, sizeof(record));
unsigned int status = GetSensorStatus(sensorID);
if ( status == VALID_STATUS )
{
// record.x/y/z are the sensor position in inches (per the question text)
points.push_back(cv::Point3f(record.x, record.y, record.z));
QString str;
str.sprintf("%f, %f, %f",record.x, record.y, record.z );
this->ui->label->setText(str);
}
}
}
EM_time->start();
}
// Stops polling and reports the path length integrated over all samples.
void MainWindow::on_stop_clicked()
{
EM_time->stop();
double sum = 0;
QString str;
// Fixed: with fewer than two samples, `points.size()-1` underflowed the
// unsigned size type and the loop indexed far out of bounds.
for (std::size_t i = 0; i + 1 < points.size(); i++)
{
const double dist = sqrt(square(points[i].x - points[i+1].x) + square(points[i].y - points[i+1].y) + square(points[i].z - points[i+1].z));
sum = sum + dist;
}
// Fixed: "%d" with a double argument is undefined behaviour in sprintf and
// printed garbage -- part of the "weird values" reported in the question.
// NOTE(review): sensor noise also inflates the sum; low-pass filter the
// samples before integrating distance (see the review notes below).
str.sprintf("%.2f cm", sum*2.54);
this->ui->distance->setText(str);
}
ATC3DG.h is the header file of the sensor. record.x, record.y, record.z gives the 3D location of x, y and z location of the sensor in inches. Basically what I am doing is, when I click the start button, the sensor is switched on and the QTimer starts with its signal emitted during timeouts and the showvalues() function will start to execute. This function displays the position of the sensor in label of the Qt GUI. During this loop, points will be filled with all the position values of the sensor.
The stop button stops the timer and calculates the distance using all the points containing in the points vector. This is done using:
double sum=0;
double dist;
for (int i=0; i<points.size()-1; i++)
{
dist = sqrt(square(points[i].x - points[i+1].x) + square((int)points[i].y - (int)points[i+1].y) + square(points[i].z - points[i+1].z));
sum = sum+dist;
}
The sum is giving me totally weird values. For example, when the sensor has moved only about 5 or 6 inches, it is showing values in the range of 100s and like that.
My mainwindow.h file is:
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include "ATC3DG.h"
#include "QTimer"
#include <opencv2/core/core.hpp>
namespace Ui {
class MainWindow;
}
// Thin wrappers pairing each ATC3DG configuration struct with a class name.
class CSystem
{
public:
SYSTEM_CONFIGURATION m_config;
};
class CSensor
{
public: SENSOR_CONFIGURATION m_config;
};
class CXmtr
{
public: TRANSMITTER_CONFIGURATION m_config;
};
// GUI for tracking an ATC3DG sensor: Start begins polling, Stop reports the
// total distance covered by the recorded 3D samples.
class MainWindow : public QMainWindow
{
Q_OBJECT
public:
explicit MainWindow(QWidget *parent = 0);
~MainWindow();
public slots:
void on_start_clicked();
// Timer slot: polls one sample and appends it to `points`.
void showValues();
void on_stop_clicked();
private:
Ui::MainWindow *ui;
private:
DOUBLE_POSITION_ANGLES_RECORD record, *pRecord;
CSystem ATC3DG;
CSensor *pSensor;
CXmtr *pXmtr;
int errorCode;
int sensorID;
int i;
short id;
QTimer *EM_time;
// recorded sensor positions, in inches
std::vector<cv::Point3f> points;
};
#endif // MAINWINDOW_H
issues I can see in your code:
overuse of braces (they do nothing) - this looks strange and may lead to errors
GetAsynchronousRecord suggests an asynchronous action, yet you are using the value immediately! I don't know this library, but this looks suspicious.
start and stop timer in same method.
you are calculating sum of distances from probably very noisy data. This means that when you do not move sensor over a time you are calculating sum of noise and as a result you have large distance when sensor is not moved at all. You have to filter data before calculating such distance (the easiest is low pass filter).