Commit 84c7fa5b authored by Thiago Santini's avatar Thiago Santini

Refactoring data classes into their own files

parent 5aa822e0
......@@ -37,8 +37,8 @@ SOURCES +=\
$${TOP}/src/pupil-detection/ElSe.cpp \
$${TOP}/src/pupil-detection/ExCuSe.cpp \
$${TOP}/src/CameraWidget.cpp \
$${TOP}/src/InputWidget.cpp \
$${TOP}/src/FieldImageProcessor.cpp \
$${TOP}/src/InputWidget.cpp \
$${TOP}/src/FieldImageProcessor.cpp \
$${TOP}/src/Synchronizer.cpp \
$${TOP}/src/GazeEstimationWidget.cpp \
$${TOP}/src/GazeEstimation.cpp \
......@@ -50,17 +50,18 @@ SOURCES +=\
$${TOP}/src/LogWidget.cpp \
$${TOP}/src/PerformanceMonitor.cpp \
$${TOP}/src/PerformanceMonitorWidget.cpp \
$${TOP}/src/CameraCalibration.cpp \
$${TOP}/src/pupil-detection/PupilDetectionMethod.cpp \
$${TOP}/src/Overlay.cpp \
$${TOP}/src/CommandManager.cpp \
$${TOP}/src/ERWidget.cpp \
$${TOP}/src/pupil-tracking/PuReST.cpp \
$${TOP}/src/pupil-detection/PuRe.cpp \
$${TOP}/src/pupil-tracking/PupilTrackingMethod.cpp \
$${TOP}/src/ocv_utils.cpp \
$${TOP}/src/globals.cpp \
$${TOP}/src/AudioRecorder.cpp
$${TOP}/src/CameraCalibration.cpp \
$${TOP}/src/pupil-detection/PupilDetectionMethod.cpp \
$${TOP}/src/Overlay.cpp \
$${TOP}/src/CommandManager.cpp \
$${TOP}/src/ERWidget.cpp \
$${TOP}/src/pupil-tracking/PuReST.cpp \
$${TOP}/src/pupil-detection/PuRe.cpp \
$${TOP}/src/pupil-tracking/PupilTrackingMethod.cpp \
$${TOP}/src/ocv_utils.cpp \
$${TOP}/src/globals.cpp \
$${TOP}/src/AudioRecorder.cpp \
$${TOP}/src/data/common.cpp
HEADERS += \
$${TOP}/src/MainWindow.h\
......@@ -87,23 +88,29 @@ HEADERS += \
$${TOP}/src/LogWidget.h \
$${TOP}/src/PerformanceMonitor.h \
$${TOP}/src/PerformanceMonitorWidget.h \
$${TOP}/src/CameraCalibration.h \
$${TOP}/src/Overlay.h \
$${TOP}/src/CommandManager.h \
$${TOP}/src/ERWidget.h \
$${TOP}/src/pupil-tracking/PupilTrackingMethod.h \
$${TOP}/src/pupil-detection/PuRe.h \
$${TOP}/src/pupil-tracking/PuReST.h \
$${TOP}/src/ocv_utils.h \
$${TOP}/src/globals.h \
$${TOP}/src/AudioRecorder.h
$${TOP}/src/CameraCalibration.h \
$${TOP}/src/Overlay.h \
$${TOP}/src/CommandManager.h \
$${TOP}/src/ERWidget.h \
$${TOP}/src/pupil-tracking/PupilTrackingMethod.h \
$${TOP}/src/pupil-detection/PuRe.h \
$${TOP}/src/pupil-tracking/PuReST.h \
$${TOP}/src/ocv_utils.h \
$${TOP}/src/globals.h \
$${TOP}/src/AudioRecorder.h \
$${TOP}/src/data/CollectionTuple.h \
$${TOP}/src/data/DataTuple.h \
$${TOP}/src/data/FieldData.h \
$${TOP}/src/data/InputData.h \
$${TOP}/src/data/EyeData.h \
$${TOP}/src/data/Marker.h
FORMS += \
$${TOP}/src/MainWindow.ui \
$${TOP}/src/CameraWidget.ui \
$${TOP}/src/GazeEstimationWidget.ui \
$${TOP}/src/LogWidget.ui \
$${TOP}/src/PerformanceMonitorWidget.ui
$${TOP}/src/PerformanceMonitorWidget.ui
RESOURCES += \
$${TOP}/resources.qrc
......@@ -133,27 +140,27 @@ LIBS += \
-lopencv_imgcodecs$${CV_SUFFIX} \
-lopencv_imgproc$${CV_SUFFIX} \
-lopencv_videoio$${CV_SUFFIX} \
-lopencv_video$${CV_SUFFIX} \
-lopencv_aruco$${CV_SUFFIX}
-lopencv_video$${CV_SUFFIX} \
-lopencv_aruco$${CV_SUFFIX}
# JPEG-TURBO
contains(DEFINES, TURBOJPEG) {
INCLUDEPATH += "$${TURBOJPEGPATH}/include/"
INCLUDEPATH += "$${TURBOJPEGPATH}/include/"
win32:contains(QMAKE_HOST.arch, x86_64) {
LIBS += "-L$${TURBOJPEGPATH}/lib/"
LIBS += "-L$${TURBOJPEGPATH}/lib/"
} else {
LIBS += "-L$${TURBOJPEGPATH}/lib/"
LIBS += "-L$${TURBOJPEGPATH}/lib/"
}
LIBS += -lturbojpeg
}
contains(DEFINES, STARBURST) {
SOURCES += $${TOP}/src/pupil-detection/Starburst.cpp
HEADERS += $${TOP}/src/pupil-detection/Starburst.h
SOURCES += $${TOP}/src/pupil-detection/Starburst.cpp
HEADERS += $${TOP}/src/pupil-detection/Starburst.h
}
contains(DEFINES, SWIRSKI) {
SOURCES += $${TOP}/src/pupil-detection/Swirski.cpp
HEADERS += $${TOP}/src/pupil-detection/Swirski.h
SOURCES += $${TOP}/src/pupil-detection/Swirski.cpp
HEADERS += $${TOP}/src/pupil-detection/Swirski.h
TBB_INC_DIR = $${TOP}/deps/tbb43/include
TBB_LIB_DIR = $${TOP}/deps/tbb43/lib/ia32/vc11
......@@ -189,11 +196,11 @@ unix{
}
system("git --version"):{
GIT_BRANCH=$$system(git rev-parse --abbrev-ref HEAD)
GIT_COMMIT_HASH=$$system(git rev-parse --short HEAD)
GIT_BRANCH=$$system(git rev-parse --abbrev-ref HEAD)
GIT_COMMIT_HASH=$$system(git rev-parse --short HEAD)
} else {
GIT_BRANCH="unknown"
GIT_COMMIT_HASH="unknown"
GIT_BRANCH="unknown"
GIT_COMMIT_HASH="unknown"
}
DEFINES += VERSION=\\\"$$VERSION\\\"
DEFINES += GIT_BRANCH=\\\"$$GIT_BRANCH\\\"
......
......@@ -3,10 +3,10 @@
#include "globals.h"
static int gQCameraInfoMetaTypeId = qRegisterMetaType<QCameraInfo>("QCameraInfo");
static int gMatMetaTypeId = qRegisterMetaType<cv::Mat>("cv::Mat");
static int gTimestampMetaTypeId = qRegisterMetaType<Timestamp>("Timestamp");
static int gQListQCameraViewfinderSettingsId = qRegisterMetaType<QList<QCameraViewfinderSettings>>("QList<QCameraViewfinderSettings>");
static const int gQCameraInfoMetaTypeId = qRegisterMetaType<QCameraInfo>("QCameraInfo");
static const int gMatMetaTypeId = qRegisterMetaType<cv::Mat>("cv::Mat");
static const int gTimestampMetaTypeId = qRegisterMetaType<Timestamp>("Timestamp");
static const int gQListQCameraViewfinderSettingsId = qRegisterMetaType<QList<QCameraViewfinderSettings>>("QList<QCameraViewfinderSettings>");
QMutex Camera::setCameraMutex;
......
......@@ -3,8 +3,6 @@
using namespace cv;
static int gEyeDataId = qRegisterMetaType<EyeData>("EyeData");
EyeImageProcessor::EyeImageProcessor(QString id, QObject* parent)
: QObject(parent)
, cameraCalibration(nullptr)
......
......@@ -19,7 +19,7 @@
#include <opencv/cv.h>
#include "InputWidget.h"
#include "data/EyeData.h"
#include "pupil-detection/ElSe.h"
#include "pupil-detection/ExCuSe.h"
#include "pupil-detection/PuRe.h"
......@@ -29,63 +29,14 @@
#ifdef SWIRSKI
#include "pupil-detection/Swirski.h"
#endif
#include "pupil-detection/PupilDetectionMethod.h"
#include "pupil-tracking/PuReST.h"
#include "pupil-tracking/PupilTrackingMethod.h"
#include "CameraCalibration.h"
#include "globals.h"
#include "ocv_utils.h"
#include "pupil-detection/PupilDetectionMethod.h"
#include "pupil-tracking/PuReST.h"
#include "pupil-tracking/PupilTrackingMethod.h"
#include "utils.h"
// Per-eye-camera processing result: the input frame, the detected pupil, and
// the calibration used to produce it. Journaled to disk via header()/toQString().
class EyeData : public InputData {
public:
// Default state: empty frame, default Pupil, and validPupil == false.
// (timestamp/processingTimestamp are set to maxTimestamp by InputData().)
EyeData()
: InputData()
, input()
, pupil()
, validPupil(false)
{
}
cv::Mat input; // eye camera frame this result was computed from
Pupil pupil; // detection result (center/size/angle/confidence — see Pupil class)
bool validPupil; // whether 'pupil' holds a usable detection
cv::Rect coarseROI; // coarse pupil search region; semantics defined by the detector — TODO confirm
cv::Mat gazeEstimationVisualization; // NOTE: not present here; see DataTuple
std::shared_ptr<CameraCalibration> cameraCalibration; // may be null (uncalibrated)
// TODO: header, toQString, and the reading from file (see the Calibration class) should be unified
// to avoid placing things in the wrong order / with the wrong string
// Column names for the journal row written by toQStringImpl(); 'prefix'
// namespaces the columns (e.g. "left."). Order MUST match toQStringImpl().
QString header(QString prefix = "") const override
{
return prefix % journalField(QStringLiteral("timestamp"))
% prefix % journalField(QStringLiteral("pupil.x"))
% prefix % journalField(QStringLiteral("pupil.y"))
% prefix % journalField(QStringLiteral("pupil.width"))
% prefix % journalField(QStringLiteral("pupil.height"))
% prefix % journalField(QStringLiteral("pupil.angle"))
% prefix % journalField(QStringLiteral("pupil.confidence"))
% prefix % journalField(QStringLiteral("pupil.valid"))
% prefix % journalField(QStringLiteral("processingTime"));
}
// Appends one journal row; field order mirrors header() column order.
void toQStringImpl(QString& str) const override
{
str.append(journalField(timestamp));
str.append(journalField(pupil.center.x));
str.append(journalField(pupil.center.y));
str.append(journalField(pupil.size.width));
str.append(journalField(pupil.size.height));
str.append(journalField(pupil.angle));
str.append(journalField(pupil.confidence));
str.append(journalField(validPupil));
str.append(journalField(processingTimestamp));
}
};
// Allows EyeData to be carried in QVariant and queued Qt signal/slot connections.
Q_DECLARE_METATYPE(EyeData)
class EyeImageProcessorConfig {
public:
EyeImageProcessorConfig()
......
......@@ -5,9 +5,6 @@ using namespace std;
using namespace cv;
using namespace aruco;
static int gFieldDataId = qRegisterMetaType<FieldData>("FieldData");
int Marker::cachedStrSize = 0;
FieldImageProcessor::FieldImageProcessor(QString id, QObject* parent)
: id(id)
, sROI(QPointF(0, 0))
......
......@@ -22,126 +22,13 @@
#include <opencv2/aruco/charuco.hpp>
#include "CameraCalibration.h"
#include "GazeEstimation.h"
#include "InputWidget.h"
#include "data/FieldData.h"
#include "globals.h"
#include "ocv_utils.h"
#include "utils.h"
// A fiducial marker detected in the field image: its 2D corner points, an
// estimated 3D center, and (optionally) its pose as rotation/translation vectors.
class Marker {
public:
// Invalid marker sentinel: id == -1, no geometry.
Marker()
: corners(std::vector<cv::Point2f>())
, center(cv::Point3f(0, 0, 0))
, id(-1)
, rv(cv::Mat())
, tv(cv::Mat())
{
}
// Marker with known image corners and id; 3D center/pose filled in later.
Marker(std::vector<cv::Point2f> corners, int id)
: corners(corners)
, center(cv::Point3f(0, 0, 0))
, id(id)
, rv(cv::Mat())
, tv(cv::Mat())
{
}
// Serializes as "<id><MarkerIdDelimiter><x><MarkerValueDelimiter><y>...<MarkerEnd>".
// cachedStrSize remembers the largest string produced so far, so reserve()
// usually avoids reallocation on subsequent calls (mutating a static from a
// const method is legal; note it is not thread-safe).
QString str() const
{
QString tmp;
tmp.reserve(cachedStrSize);
tmp.append(idStr());
tmp.append(valueStr());
tmp.append(Token::MarkerEnd);
tmp.squeeze();
cachedStrSize = std::max(cachedStrSize, tmp.size());
return tmp;
}
std::vector<cv::Point2f> corners; // image-space corner points
cv::Point3f center; // estimated 3D center
// Not exported atm
int id;
cv::Mat rv; // rotation vector (pose) — empty if pose was not estimated
cv::Mat tv; // translation vector (pose) — empty if pose was not estimated
private:
static int cachedStrSize; // high-water mark for str() reserve(); defined in the .cpp
QString idStr() const { return QString("%1%2").arg(id).arg(Token::MarkerIdDelimiter); }
// Fixed-precision formatting so journal files are stable across locales/platforms.
QString strWithPrecision(const double v) const { return QString::number(v, 'g', Token::Precision); }
QString strWithPrecision(const float v) const { return strWithPrecision(static_cast<double>(v)); }
QString valueStr() const { return QString("%2%1%3%1%4").arg(Token::MarkerValueDelimiter).arg(strWithPrecision(center.x)).arg(strWithPrecision(center.y)).arg(strWithPrecision(center.z)); }
};
class FieldData : public InputData {
public:
FieldData()
: InputData()
, input()
, gazeEstimate(0, 0, 0)
, validGazeEstimate(false)
, extrapolatedGazeEstimate(0)
, collectionMarker()
, markers()
, undistorted(false)
, width(0)
, height(0)
{
}
cv::Mat input;
cv::Point3f gazeEstimate;
bool validGazeEstimate;
int extrapolatedGazeEstimate;
Marker collectionMarker;
std::vector<Marker> markers;
bool undistorted;
unsigned int width;
unsigned int height;
std::shared_ptr<CameraCalibration> cameraCalibration;
QString
header(QString prefix = "") const override
{
return prefix % journalField(QStringLiteral("timestamp"))
% prefix % journalField(QStringLiteral("gaze.x"))
% prefix % journalField(QStringLiteral("gaze.y"))
% prefix % journalField(QStringLiteral("gaze.z"))
% prefix % journalField(QStringLiteral("gaze.valid"))
% prefix % journalField(QStringLiteral("collectionMarker.id"))
% prefix % journalField(QStringLiteral("collectionMarker.x"))
% prefix % journalField(QStringLiteral("collectionMarker.y"))
% prefix % journalField(QStringLiteral("collectionMarker.z"))
% prefix % journalField(QStringLiteral("undistorted"))
% prefix % journalField(QStringLiteral("width"))
% prefix % journalField(QStringLiteral("height"))
% prefix % journalField(QStringLiteral("markers"))
% prefix % journalField(QStringLiteral("processingTime"));
}
void toQStringImpl(QString& str) const override
{
str.append(journalField(timestamp));
str.append(journalField(gazeEstimate.x));
str.append(journalField(gazeEstimate.y));
str.append(journalField(gazeEstimate.z));
str.append(journalField(validGazeEstimate));
str.append(journalField(collectionMarker.id));
str.append(journalField(collectionMarker.center.x));
str.append(journalField(collectionMarker.center.y));
str.append(journalField(collectionMarker.center.z));
str.append(journalField(undistorted));
str.append(journalField(width));
str.append(journalField(height));
for (unsigned int i = 0; i < markers.size(); i++)
str.append(markers[i].str());
str.append(Token::Delimiter);
}
};
Q_DECLARE_METATYPE(FieldData)
class FieldImageProcessorConfig {
public:
FieldImageProcessorConfig()
......
......@@ -11,18 +11,16 @@ GazeEstimation::GazeEstimation(QObject* parent)
, lastOverlayIdx(0)
, settings(nullptr)
{
availableGazeEstimationMethods.push_back(new PolyFit(PolyFit::POLY_1_X_Y_XY_XX_YY_XYY_YXX_XXYY));
availableGazeEstimationMethods.push_back(new PolyFit(PolyFit::POLY_1_X_Y_XY));
availableGazeEstimationMethods.push_back(new PolyFit(PolyFit::POLY_1_X_Y_XY_XX_YY));
availableGazeEstimationMethods.push_back(new PolyFit(PolyFit::POLY_1_X_Y_XY_XX_YY_XXYY));
availableGazeEstimationMethods.push_back(new PolyFit(PolyFit::POLY_1_X_Y_XY_XX_YY_XYY_YXX));
availableGazeEstimationMethods.push_back(new PolyFit(PolyFit::POLY_1_X_Y_XY_XX_YY_XYY_YXX_XXX_YYY));
availableGazeEstimationMethods.push_back(new Homography());
availableGazeEstimationMethods.push_back(make_shared<PolyFit>(PolyFit::POLY_1_X_Y_XY_XX_YY_XYY_YXX_XXYY));
availableGazeEstimationMethods.push_back(make_shared<PolyFit>(PolyFit::POLY_1_X_Y_XY));
availableGazeEstimationMethods.push_back(make_shared<PolyFit>(PolyFit::POLY_1_X_Y_XY_XX_YY));
availableGazeEstimationMethods.push_back(make_shared<PolyFit>(PolyFit::POLY_1_X_Y_XY_XX_YY_XXYY));
availableGazeEstimationMethods.push_back(make_shared<PolyFit>(PolyFit::POLY_1_X_Y_XY_XX_YY_XYY_YXX));
availableGazeEstimationMethods.push_back(make_shared<PolyFit>(PolyFit::POLY_1_X_Y_XY_XX_YY_XYY_YXX_XXX_YYY));
availableGazeEstimationMethods.push_back(make_shared<Homography>());
}
GazeEstimation::~GazeEstimation()
{
for (int i = 0; i < availableGazeEstimationMethods.size(); i++)
delete availableGazeEstimationMethods[i];
availableGazeEstimationMethods.clear();
}
......
......@@ -3,15 +3,17 @@
#include <deque>
#include <map>
#include <memory>
#include <vector>
#include <QObject>
#include "Reference.h"
#include "data/DataTuple.h"
#include "gaze-estimation/GazeEstimationMethod.h"
#include "gaze-estimation/Homography.h"
#include "gaze-estimation/PolyFit.h"
#include "Reference.h"
#include "globals.h"
/*******************************************************************************
* Helper Classes
......@@ -197,7 +199,7 @@ public:
explicit GazeEstimation(QObject* parent = 0);
virtual ~GazeEstimation();
QSettings* settings;
std::vector<GazeEstimationMethod*> availableGazeEstimationMethods;
std::vector<std::shared_ptr<GazeEstimationMethod>> availableGazeEstimationMethods;
signals:
void gazeEstimationDone(DataTuple dataTuple);
......@@ -229,7 +231,7 @@ private:
std::vector<CollectionTuple*> calibrationTuples;
std::vector<CollectionTuple*> evaluationTuples;
GazeEstimationMethod* gazeEstimationMethod;
std::shared_ptr<GazeEstimationMethod> gazeEstimationMethod;
QMutex cfgMutex;
float centralHullCoverage;
......
......@@ -4,10 +4,6 @@
using namespace std;
using namespace cv;
static int gCollectionTupleId = qRegisterMetaType<CollectionTuple>("CollectionTuple");
static int gVectorCollectionTupleId = qRegisterMetaType<std::vector<CollectionTuple>>("std::vector<CollectionTuple>");
static int gCollectionTupleTypeId = qRegisterMetaType<CollectionTuple::TupleType>("CollectionTuple::TupleType");
GazeEstimationWidget::GazeEstimationWidget(QString id, QWidget* parent)
: ERWidget(id, parent)
, isCollecting(false)
......
#include "InputWidget.h"
#include "data/InputData.h"
int InputData::cachedSize = 0;
#ifndef INPUTWIDGET_H
#define INPUTWIDGET_H
#include "utils.h"
// Abstract base for timestamped processor outputs (EyeData, FieldData).
// Defines the journaling contract: header() names the columns, toQString()
// produces one matching data row.
class InputData {
public:
// TODO: in the future we will have an enum for the type of data derived
// (e.g., Left Eye, Right Eye) and make the synchronizer generic
// (i.e., taking InputData and operating over a vector of these).
// This would allow us to dynamically add input widgets
// Both timestamps start at maxTimestamp, marking the data as "not yet produced".
explicit InputData()
{
timestamp = maxTimestamp;
processingTimestamp = maxTimestamp;
}
virtual ~InputData() {}
Timestamp timestamp; // acquisition time of the underlying frame
// TODO: Timestamp dt;
Timestamp processingTimestamp; // time at which processing of this data finished
// Column names for the journal row; 'prefix' namespaces them (e.g. "left.").
// Implementations must keep field order identical to toQStringImpl().
virtual QString header(QString prefix) const = 0;
// Builds one journal row. cachedSize remembers the largest row produced so
// far so reserve() usually avoids reallocation (static: shared across all
// subclasses, not thread-safe).
QString toQString() const
{
QString tmp;
tmp.reserve(cachedSize);
toQStringImpl(tmp);
cachedSize = std::max(cachedSize, tmp.size());
tmp.squeeze();
return tmp;
}
private:
// Subclass hook: append this object's fields to 'str', matching header() order.
virtual void toQStringImpl(QString& str) const = 0;
static int cachedSize; // high-water mark for toQString() reserve(); defined in the .cpp
};
class InputWidget {
// When we have the generic synchronizer, we should enforce a signal
// void newData(InputData) here.
......
......@@ -474,14 +474,12 @@ void MainWindow::freezeCameraImages()
{
disconnect(gazeEstimationWidget, SIGNAL(outDataTuple(DataTuple)),
fieldWidget, SLOT(preview(DataTuple)));
// TODO: freeze eye cameras
}
void MainWindow::unfreezeCameraImages()
{
connect(gazeEstimationWidget, SIGNAL(outDataTuple(DataTuple)),
fieldWidget, SLOT(preview(DataTuple)));
// TODO: unfreeze eye cameras
}
void MainWindow::menuOption(QAction* action)
......
#include "Synchronizer.h"
#include <opencv2/highgui.hpp>
static int gDataTupleId = qRegisterMetaType<DataTuple>("DataTuple");
using namespace std;
......
......@@ -7,43 +7,8 @@
#include <QSize>
#include <QTimer>
#include "EyeImageProcessor.h"
#include "FieldImageProcessor.h"
#include "utils.h"
/*
 * One synchronized sample: both eye results and the field result that share a
 * common timestamp. This is the unit passed between the Synchronizer, gaze
 * estimation, and the UI preview.
 */
class DataTuple {
public:
    DataTuple()
        : timestamp(maxTimestamp) // maxTimestamp marks "no data yet"
        , lEye()
        , rEye()
        , field()
        , showGazeEstimationVisualization(false)
    {
    }
    explicit DataTuple(Timestamp timestamp, EyeData lEye, EyeData rEye, FieldData field)
        : timestamp(timestamp)
        , lEye(lEye)
        , rEye(rEye)
        , field(field)
        // Bug fix: this constructor previously left the flag uninitialized,
        // so reading it was undefined behavior; match the default ctor.
        , showGazeEstimationVisualization(false)
    {
    }
    Timestamp timestamp; // synchronization timestamp shared by the three inputs
    EyeData lEye; // left eye result
    EyeData rEye; // right eye result
    FieldData field; // field (scene) camera result
    cv::Mat gazeEstimationVisualization; // optional debug overlay image
    bool showGazeEstimationVisualization; // whether to display the overlay
    // Journal header: column order mirrors toQString() (sync, field, left, right).
    static QString header()
    {
        return journalField(QStringLiteral("sync.timestamp")) % FieldData().header(QStringLiteral("field.")) % EyeData().header(QStringLiteral("left.")) % EyeData().header(QStringLiteral("right."));
    }
    // One journal row; field order must mirror header().
    QString toQString()
    {
        return journalField(timestamp) % field.toQString() % lEye.toQString() % rEye.toQString();
    }
};
#include "data/DataTuple.h"
#include "globals.h"
class Synchronizer : public QObject {
Q_OBJECT
......
#ifndef COLLECTIONTUPLE_H
#define COLLECTIONTUPLE_H
#include "data/DataTuple.h"
class CollectionTuple : public DataTuple {
public:
CollectionTuple()
{
tupleType = UNKNOWN;
}
explicit CollectionTuple(DataTuple dataTuple)
: DataTuple(dataTuple)
{
auto fakeMat = [](cv::Mat in) {
cv::Mat tmp;
tmp.rows = in.rows;
tmp.cols = in.cols;
return tmp;
};
// Keep everything but the input images for calibration.
// This allows us to gather significantly more points without running out of memory
lEye.input = fakeMat(lEye.input);
rEye.input = fakeMat(rEye.input);