Commit ba515798 authored by Thiago Santini

Fixes timer offset estimation and updates uvcengine/libuvc

libuvc: 69e45682220ff93abc6d675acfaa8fc6de8f5fca

uvcengine: fce1f275eb53cc87725f7232944813bc36da75cf
parent 3ae7a8e0
......@@ -47,7 +47,8 @@ SOURCES +=\
$${TOP}/src/ERWidget.cpp \
src/pupil-tracking/PupilTrackingMethod.cpp \
src/pupil-tracking/PuReTy.cpp \
src/pupil-detection/PuRe.cpp
src/pupil-detection/PuRe.cpp \
src/Evaluation.cpp
HEADERS += \
$${TOP}/src/MainWindow.h\
......@@ -80,14 +81,16 @@ HEADERS += \
$${TOP}/src/ERWidget.h \
$${TOP}/src/pupil-tracking/PupilTrackingMethod.h \
src/pupil-tracking/PuReTy.h \
src/pupil-detection/PuRe.h
src/pupil-detection/PuRe.h \
src/Evaluation.h
FORMS += \
$${TOP}/src/MainWindow.ui \
$${TOP}/src/CameraWidget.ui \
$${TOP}/src/GazeEstimationWidget.ui \
$${TOP}/src/LogWidget.ui \
$${TOP}/src/PerformanceMonitorWidget.ui
$${TOP}/src/PerformanceMonitorWidget.ui \
src/Evaluation.ui
RESOURCES += \
$${TOP}/resources.qrc
......@@ -117,6 +120,7 @@ LIBS += \
-lopencv_imgcodecs$${CV_SUFFIX} \
-lopencv_imgproc$${CV_SUFFIX} \
-lopencv_videoio$${CV_SUFFIX} \
-lopencv_video$${CV_SUFFIX} \
-lopencv_aruco$${CV_SUFFIX}
# JPEG-TURBO
......
......@@ -11,14 +11,14 @@ public:
explicit ERWidget(QWidget *parent = 0);
signals:
void closed();
void closed(bool b);
void keyPress(QKeyEvent *event);
void keyRelease(QKeyEvent *event);
public slots:
protected:
void closeEvent(QCloseEvent *event) Q_DECL_OVERRIDE { Q_UNUSED(event) emit closed(); }
void closeEvent(QCloseEvent *event) Q_DECL_OVERRIDE { Q_UNUSED(event) emit closed(false); }
void keyPressEvent(QKeyEvent *event) Q_DECL_OVERRIDE { emit keyPress(event); }
void keyReleaseEvent(QKeyEvent *event) Q_DECL_OVERRIDE { emit keyRelease(event); }
};
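The closed() signal now carries a bool, so a widget's visibility toggle can be wired to it directly instead of needing a dedicated slot per widget (this is how MainWindow::setupWidget uses it further down). A minimal sketch, where widget and toggle are placeholders for an ERWidget and a checkable button:
// Uncheck the corresponding toolbar/menu toggle whenever the widget is closed.
QObject::connect(widget, SIGNAL(closed(bool)), toggle, SLOT(setChecked(bool)));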
......
......@@ -54,8 +54,8 @@ EyeImageProcessor::~EyeImageProcessor()
void EyeImageProcessor::process(Timestamp timestamp, const Mat &frame)
{
// TODO: parametrize frame drop due to lack of processing power
if ( gPerformanceMonitor.shouldDrop(pmIdx, gTimer.elapsed() - timestamp, 50) )
return;
//if ( gPerformanceMonitor.shouldDrop(pmIdx, gTimer.elapsed() - timestamp, 50) )
// return;
QMutexLocker locker(&cfgMutex);
......@@ -109,8 +109,7 @@ void EyeImageProcessor::process(Timestamp timestamp, const Mat &frame)
} else
data.coarseROI = Rect();
bool tracking = true;
if (tracking && pupilTrackingMethod) {
if (cfg.tracking && pupilTrackingMethod) {
pupilTrackingMethod->run(timestamp, downscaled, coarseROI, data.pupil, *pupilDetectionMethod);
} else {
pupilDetectionMethod->run( downscaled, coarseROI, data.pupil );
......
......@@ -111,7 +111,8 @@ public:
undistort(false),
coarseDetection(true),
processingDownscalingFactor(2),
pupilDetectionMethod(PuRe::desc.c_str())
pupilDetectionMethod(PuRe::desc.c_str()),
tracking(true)
{}
cv::Size inputSize;
......@@ -120,6 +121,7 @@ public:
bool coarseDetection;
double processingDownscalingFactor;
QString pupilDetectionMethod;
bool tracking;
void save(QSettings *settings)
{
......@@ -131,6 +133,7 @@ public:
settings->setValue("coarseDetection", coarseDetection);
settings->setValue("processingDownscalingFactor", processingDownscalingFactor);
settings->setValue("pupilDetectionMethod", pupilDetectionMethod);
settings->setValue("tracking", tracking);
}
void load(QSettings *settings)
......@@ -143,6 +146,7 @@ public:
set(settings, "coarseDetection", coarseDetection);
set(settings, "processingDownscalingFactor", processingDownscalingFactor);
set(settings, "pupilDetectionMethod", pupilDetectionMethod);
set(settings, "tracking", tracking);
}
};
......@@ -216,6 +220,10 @@ public:
formLayout->addRow( new QLabel("Coarse Detection:"), coarseDetectionBox );
pupilDetectionComboBox = new QComboBox();
formLayout->addRow(pupilDetectionComboBox);
trackingBox = new QCheckBox();
trackingBox->setWhatsThis("Track the pupil after detection.");
trackingBox->setToolTip(trackingBox->whatsThis());
formLayout->addRow( new QLabel("Tracking:"), trackingBox );
layout->addWidget(box);
applyButton = new QPushButton("Apply");
......@@ -248,6 +256,7 @@ public slots:
for (int i=0; i<pupilDetectionComboBox->count(); i++)
if (pupilDetectionComboBox->itemData(i).toString() == cfg.pupilDetectionMethod)
pupilDetectionComboBox->setCurrentIndex(i);
trackingBox->setChecked(cfg.tracking);
move(pos);
show();
}
......@@ -260,6 +269,7 @@ public slots:
cfg.flip = (CVFlip) flipComboBox->currentData().toInt();
cfg.coarseDetection = coarseDetectionBox->isChecked();
cfg.pupilDetectionMethod = pupilDetectionComboBox->currentData().toString();
cfg.tracking = trackingBox->isChecked();
cfg.save(settings);
emit updateConfig();
}
......@@ -271,6 +281,7 @@ private:
QCheckBox *coarseDetectionBox;
QComboBox *flipComboBox;
QDoubleSpinBox *downscalingSB;
QCheckBox *trackingBox;
};
class EyeImageProcessor : public QObject
......@@ -281,6 +292,8 @@ public:
~EyeImageProcessor();
QSettings *settings;
QVector<PupilDetectionMethod*> availablePupilDetectionMethods;
EyeImageProcessorConfig cfg;
EyeData data;
signals:
void newData(EyeData data);
......@@ -292,9 +305,7 @@ public slots:
private:
QString id;
EyeImageProcessorConfig cfg;
QMutex cfgMutex;
EyeData data;
QPointF sROI, eROI;
PupilDetectionMethod *pupilDetectionMethod;
......
......@@ -83,7 +83,7 @@ void FieldImageProcessor::process(Timestamp timestamp, const Mat &frame)
downscaled = data.input;
}
if (cfg.markerDetectionMethod == "aruco") {
if (cfg.markerDetectionMethod == "aruco" || gCalibrating) {
detectMarkers(downscaled, dict, corners, ids, detectorParameters);
......
......@@ -261,8 +261,8 @@ void GazeEstimation::estimate(DataTuple dataTuple)
if (calibrated) {
GazeEstimationMethod::InputType inputType = cfg.inputType;
bool lValid = dataTuple.lEye.pupil.center.x > 0 && dataTuple.lEye.pupil.center.y > 0;
bool rValid = dataTuple.rEye.pupil.center.x > 0 && dataTuple.rEye.pupil.center.y > 0;
bool lValid = dataTuple.lEye.pupil.center.x > 0 && dataTuple.lEye.pupil.center.y > 0 && dataTuple.lEye.pupil.confidence > 0.66;
bool rValid = dataTuple.rEye.pupil.center.x > 0 && dataTuple.rEye.pupil.center.y > 0 && dataTuple.rEye.pupil.confidence > 0.66;
switch(inputType) {
case GazeEstimationMethod::BINOCULAR:
......
......@@ -255,6 +255,7 @@ void GazeEstimationWidget::on_loadTuples_clicked()
void GazeEstimationWidget::on_startFinishButton_toggled(bool checked)
{
if (checked) {
gCalibrating = true;
ui->startFinishButton->setText("Finish");
statusBarLabel->setText("Calibrating: click on the field widget\nor hold 'c' to collect from marker.");
statusBarLabel->setStyleSheet("QLabel { color : black; font : bold }");
......@@ -267,6 +268,7 @@ void GazeEstimationWidget::on_startFinishButton_toggled(bool checked)
startSound.play();
isCollecting = true;
} else {
gCalibrating = false;
ui->startFinishButton->setText("Start");
ui->loadTuples->setEnabled(true);
ui->saveTuples->setEnabled(true);
......
......@@ -18,6 +18,8 @@ MainWindow::MainWindow(QWidget *parent) :
ui(new Ui::MainWindow)
{
ui->setupUi(this);
//#define EYEREC
#ifdef EYEREC
createExtraMenus();
connect(ui->menuBar, SIGNAL(triggered(QAction*)), this, SLOT(menuOption(QAction*)) );
......@@ -36,8 +38,6 @@ MainWindow::MainWindow(QWidget *parent) :
logWidget = new LogWidget();
setupWidget(logWidget, cfg.logWidgetPos, cfg.logWidgetSize, cfg.logWidgetVisible, ui->log);
connect(logWidget, SIGNAL(closed()),
this, SLOT(logWidgetClosed()) );
gLogWidget = logWidget;
/*
......@@ -62,8 +62,6 @@ MainWindow::MainWindow(QWidget *parent) :
fieldWidget = new CameraWidget("Field", ImageProcessor::Field);
fieldWidget->setWindowIcon(QIcon(":/icons/fieldWidget.png"));
setupWidget(fieldWidget, cfg.fieldWidgetPos, cfg.fieldWidgetSize, cfg.fieldWidgetVisible, ui->fieldCam);
connect(fieldWidget, SIGNAL(closed()),
this, SLOT(fieldWidgetClosed()) );
/*
* Synchronizer
......@@ -85,8 +83,6 @@ MainWindow::MainWindow(QWidget *parent) :
gazeEstimationWidget, SIGNAL(inDataTuple(DataTuple)) );
connect(fieldWidget, SIGNAL(newClick(Timestamp,QPoint,QSize)),
gazeEstimationWidget, SIGNAL(newClick(Timestamp,QPoint,QSize)) );
connect(gazeEstimationWidget, SIGNAL(closed()),
this, SLOT(gazeEstimationWidgetClosed()) );
connect(gazeEstimationWidget, SIGNAL(outDataTuple(DataTuple)),
fieldWidget, SLOT(preview(DataTuple)) );
......@@ -106,8 +102,6 @@ MainWindow::MainWindow(QWidget *parent) :
performanceMonitorWidget = new PerformanceMonitorWidget();
setupWidget(performanceMonitorWidget, cfg.performanceMonitorWidgetPos, cfg.performanceMonitorWidgetSize, cfg.performanceMonitorWidgetVisible, ui->performanceMonitor);
connect(performanceMonitorWidget, SIGNAL(closed()),
this, SLOT(performanceMonitorWidgetClosed()) );
// GUI to Widgets signals
connect(this, SIGNAL(startRecording()),
......@@ -147,6 +141,11 @@ MainWindow::MainWindow(QWidget *parent) :
this, SLOT(freezeCameraImages()) );
connect(&commandManager, SIGNAL(unfreezeCameraImages()),
this, SLOT(unfreezeCameraImages()) );
#else
evaluation = new Evaluation();
evaluation->show();
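// Queued connection, so Evaluation::run() is invoked from the event loop after show() has returned.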
QMetaObject::invokeMethod(evaluation, "run", Qt::QueuedConnection);
#endif
}
MainWindow::~MainWindow()
......@@ -157,6 +156,7 @@ MainWindow::~MainWindow()
void MainWindow::closeEvent(QCloseEvent *event)
{
#ifdef EYEREC
if (ui->recordingToggle->isChecked()) {
ui->recordingToggle->setChecked(false);
on_recordingToggle_clicked();
......@@ -251,6 +251,13 @@ void MainWindow::closeEvent(QCloseEvent *event)
settings->deleteLater();
settings = NULL;
}
#else
if ( evaluation ) {
evaluation->close();
evaluation->deleteLater();
evaluation = NULL;
}
#endif
event->accept();
}
......@@ -556,21 +563,18 @@ void MainWindow::setupWidget(ERWidget *widget, QPoint &position, const QSize &si
if (visible)
widget->show();
if (button)
if (button) {
button->setChecked(visible);
connect(widget, SIGNAL(closed(bool)),
button, SLOT(setChecked(bool)) );
}
connect(widget, SIGNAL(keyPress(QKeyEvent*)),
&commandManager, SLOT(keyPress(QKeyEvent*)) );
connect(widget, SIGNAL(keyRelease(QKeyEvent*)),
&commandManager, SLOT(keyRelease(QKeyEvent*)) );
}
void MainWindow::logWidgetClosed() { ui->log->setChecked(false); }
void MainWindow::lEyeWidgetClosed() { ui->leftEyeCam->setChecked(false); }
void MainWindow::rEyeWidgetClosed() { ui->rightEyeCam->setChecked(false); }
void MainWindow::fieldWidgetClosed() { ui->fieldCam->setChecked(false); }
void MainWindow::gazeEstimationWidgetClosed() { ui->gazeEstimation->setChecked(false); }
void MainWindow::performanceMonitorWidgetClosed() { ui->performanceMonitor->setChecked(false); }
}
void MainWindow::toggleRecording()
{
......
......@@ -31,6 +31,8 @@
#include "utils.h"
#include "Evaluation.h"
class MainWindowConfig
{
public:
......@@ -166,6 +168,7 @@ private:
LogWidget *logWidget;
PerformanceMonitorWidget *performanceMonitorWidget;
CommandManager commandManager;
Evaluation *evaluation;
QElapsedTimer elapsedTime;
int elapsedTimeUpdateTimer;
......@@ -205,12 +208,6 @@ private slots:
void freezeCameraImages();
void unfreezeCameraImages();
void logWidgetClosed();
void lEyeWidgetClosed();
void rEyeWidgetClosed();
void fieldWidgetClosed();
void gazeEstimationWidgetClosed();
void performanceMonitorWidgetClosed();
};
#endif // MAINWINDOW_H
......@@ -72,12 +72,14 @@ void FieldOverlay::drawGaze()
painter.scale(scale.width(), scale.height());
painter.setPen(QPen( QColor(0, 255, 0, alpha), refPx, Qt::SolidLine));
painter.setPen(QPen( QColor(0, 255, 0, alpha), 2*refPx, Qt::SolidLine));
double radius = 6*refPx; // TODO: change based on evaluation error :-)
if (radius > 0) {
radius *= 1.25;
painter.drawEllipse(dataTuple->field.gazeEstimate.x - radius, dataTuple->field.gazeEstimate.y - radius , 2*radius, 2*radius);
painter.drawLine(dataTuple->field.gazeEstimate.x, 0, dataTuple->field.gazeEstimate.x, dataTuple->field.width );
painter.drawLine(0, dataTuple->field.gazeEstimate.y, dataTuple->field.width, dataTuple->field.gazeEstimate.y );
//painter.drawEllipse(dataTuple->field.gazeEstimate.x - radius, dataTuple->field.gazeEstimate.y - radius , 2*radius, 2*radius);
//painter.drawLine(dataTuple->field.gazeEstimate.x, 0, dataTuple->field.gazeEstimate.x, dataTuple->field.width );
//painter.drawLine(0, dataTuple->field.gazeEstimate.y, dataTuple->field.width, dataTuple->field.gazeEstimate.y );
}
painter.resetTransform();
......
......@@ -404,3 +404,15 @@ float PupilDetectionMethod::outlineContrastConfidence(const Mat &frame, const Pu
return validCount / (float) evaluated;
}
float PupilDetectionMethod::edgeRatioConfidence(const Mat &edgeImage, const Pupil &pupil, vector<Point> &edgePoints, const int &band)
{
if (!pupil.valid())
return NO_CONFIDENCE;
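// Rasterize a band of thickness 'band' around the fitted outline, keep only the edge pixels that fall inside it,
// and report their count relative to the expected outline length (capped at 1).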
Mat outlineMask = Mat::zeros(edgeImage.rows, edgeImage.cols, CV_8U);
ellipse(outlineMask, pupil, Scalar(255), band);
Mat inBandEdges = edgeImage.clone();
inBandEdges.setTo(0, 255 - outlineMask);
findNonZero(inBandEdges, edgePoints);
return min<float>( edgePoints.size() / pupil.circumference(), 1.0 );
}
}
......@@ -59,6 +59,11 @@ public:
int majorAxis() const { return std::max<int>(size.width, size.height); }
int minorAxis() const { return std::min<int>(size.width, size.height); }
int diameter() const { return majorAxis(); }
float circumference() const {
float a = 0.5*majorAxis();
float b = 0.5*minorAxis();
return CV_PI * abs( 3*(a+b) - sqrt( 10*a*b + 3*( pow(a,2) + pow(b,2) ) ) );
}
};
Q_DECLARE_METATYPE(Pupil);
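For reference, the circumference() added above is Ramanujan's first approximation of the perimeter of an ellipse with semi-axes a and b:
C ≈ π * ( 3*(a + b) - sqrt( (3a + b)*(a + 3b) ) )
Expanding (3a + b)*(a + 3b) gives 10ab + 3(a^2 + b^2), which matches the term under the square root in the code.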
......@@ -86,19 +91,23 @@ public:
run(frame, pupil);
}
// Pupil detection interface used in the tracking; uses an homogeneous confidence measure
// Pupil detection interface used in the tracking
Pupil runWithConfidence(const cv::Mat &frame, const cv::Rect &roi, const float &minPupilDiameterPx=-1, const float &maxPupilDiameterPx=-1) {
Pupil pupil;
run(frame, roi, pupil, minPupilDiameterPx, maxPupilDiameterPx);
if ( ! hasConfidence() )
pupil.confidence = outlineContrastConfidence(frame, pupil);
return pupil;
}
virtual Pupil getNextCandidate() { return Pupil(); }
// Generic coarse pupil detection
static cv::Rect coarsePupilDetection(const cv::Mat &frame, const float &minCoverage=0.5f, const int &workingWidth=80, const int &workingHeight=60);
// Generic confidence metrics
static float outlineContrastConfidence(const cv::Mat &frame, const Pupil &pupil, const int &bias=5);
static float edgeRatioConfidence(const cv::Mat &edgeImage, const Pupil &pupil, std::vector<cv::Point> &edgePoints, const int &band=5);
//Pupil test(const cv::Mat &frame, const cv::Rect &roi, Pupil pupil) { return pupil; }
protected:
......
......@@ -10,6 +10,31 @@ void PupilTrackingMethod::reset()
{
previousPupils.clear();
previousPupil = TrackedPupil();
pupilDiameterKf.statePost.ptr<float>(0)[0] = 0.5*expectedFrameSize.width;
}
void PupilTrackingMethod::registerPupil( const Timestamp &ts, Pupil &pupil ) {
Mat measurement = ( Mat_<float>(1,1) << pupil.majorAxis() );
//if (predictedMaxPupilDiameter > 0) {
// float &majorAxis = measurement.ptr<float>(0)[0];
// if ( majorAxis > predictedMaxPupilDiameter) {
// pupil.clear();
// return;
// }
//}
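// Only high-confidence detections extend the pupil history and correct the diameter Kalman filter;
// otherwise the previous pupil is reset (see the NO_CONFIDENCE check in run()).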
if (pupil.confidence > minDetectionConfidence) {
previousPupil = TrackedPupil(ts, pupil);
previousPupils.emplace_back( previousPupil );
pupilDiameterKf.correct(measurement);
} else
previousPupil = TrackedPupil();
//if (pupil.confidence > minDetectionConfidence) {
// previousPupil = TrackedPupil(ts, pupil);
// previousPupils.push_back( previousPupil );
//} else
// previousPupil = TrackedPupil();
}
void PupilTrackingMethod::run(const Timestamp &ts, const cv::Mat &frame, const cv::Rect &roi, Pupil &pupil, PupilDetectionMethod &pupilDetectionMethod)
......@@ -29,7 +54,20 @@ void PupilTrackingMethod::run(const Timestamp &ts, const cv::Mat &frame, const c
break;
}
pupil.clear();
predictMaxPupilDiameter();
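// Without a trusted previous pupil, fall back to detection bounded by the predicted diameter; otherwise track from the previous result.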
if ( previousPupil.confidence == NO_CONFIDENCE ) {
pupil = pupilDetectionMethod.runWithConfidence(frame, roi, -1, predictedMaxPupilDiameter);
} else {
run(frame, roi, previousPupil, pupil);
}
registerPupil(ts, pupil);
return;
/*
int minPupilDiameterPx = -1;
int maxPupilDiameterPx = -1;
if ( ! previousPupils.empty() ) {
......@@ -80,24 +118,5 @@ void PupilTrackingMethod::run(const Timestamp &ts, const cv::Mat &frame, const c
} else
updatePreviousPupil(ts, pupil);
}
/*
// Track
Pupil previousPupil = previousPupils.back();
run(frame, previousPupil, pupil);
if ( ! shouldTrack(pupil, minTrackConfidence) ) {
//qDebug() << pupil.confidence << pupil.valid();
// Tracking failed, run detection
//qDebug() << "tracking failed, detect";
detectAndTrack(ts, frame, pupil, pupilDetectionMethod);
return;
}
redetect(ts, frame, pupil, pupilDetectionMethod);
//qDebug() << (redetect(ts, frame, pupil, pupilDetectionMethod) ? "redetected" : "tracked");
track(ts, pupil);
*/
}
......@@ -5,7 +5,9 @@
#include <deque>
#include <QFuture>
#include <opencv2/core.hpp>
#include "opencv2/core.hpp"
#include "opencv2/video/tracking.hpp"
#include "pupil-detection/PupilDetectionMethod.h"
#include "utils.h"
......@@ -29,7 +31,14 @@ public:
class PupilTrackingMethod
{
public:
PupilTrackingMethod() {}
PupilTrackingMethod() {
pupilDiameterKf.init(1, 1);
pupilDiameterKf.transitionMatrix = ( cv::Mat_<float>(1, 1) << 1 );
cv::setIdentity( pupilDiameterKf.measurementMatrix );
cv::setIdentity( pupilDiameterKf.processNoiseCov, cv::Scalar::all(1e-4) );
cv::setIdentity( pupilDiameterKf.measurementNoiseCov, cv::Scalar::all(1e-2) );
cv::setIdentity( pupilDiameterKf.errorCovPost, cv::Scalar::all(1e-1) );
}
~PupilTrackingMethod() {}
// Tracking and detection logic
......@@ -56,13 +65,15 @@ protected:
float minDetectionConfidence = 0.7f;
float minTrackConfidence = 0.9f;
void updatePreviousPupil( const Timestamp &ts, const Pupil &pupil ) {
if (pupil.confidence > minDetectionConfidence) {
previousPupil = TrackedPupil(ts, pupil);
previousPupils.push_back( previousPupil );
} else
previousPupil = TrackedPupil();
cv::KalmanFilter pupilDiameterKf;
float predictedMaxPupilDiameter = -1;
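// Upper bound handed to the detector, taken as 1.5x the Kalman prediction; left at -1 (disabled) until enough pupils have been observed.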
void predictMaxPupilDiameter() {
predictedMaxPupilDiameter = 1.5*pupilDiameterKf.predict().ptr<float>(0)[0];
if (previousPupils.size() < 20)
predictedMaxPupilDiameter = -1;
}
void registerPupil(const Timestamp &ts, Pupil &pupil);
void reset();
};
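The constructor above configures a one-state, one-measurement cv::KalmanFilter over the pupil major axis, and predictMaxPupilDiameter() turns its prediction into an upper bound for the detector. A minimal, self-contained sketch of that predict/correct cycle; the frame width and the measured diameter below are placeholder values, not taken from the commit:
#include <opencv2/video/tracking.hpp>
#include <iostream>
int main() {
    cv::KalmanFilter kf;
    kf.init(1, 1);                                            // one state, one measurement: the diameter in pixels
    kf.transitionMatrix = ( cv::Mat_<float>(1, 1) << 1 );     // diameter modeled as constant between frames
    cv::setIdentity( kf.measurementMatrix );
    cv::setIdentity( kf.processNoiseCov, cv::Scalar::all(1e-4) );
    cv::setIdentity( kf.measurementNoiseCov, cv::Scalar::all(1e-2) );
    cv::setIdentity( kf.errorCovPost, cv::Scalar::all(1e-1) );
    kf.statePost.ptr<float>(0)[0] = 0.5f * 640;               // initial guess: half an assumed 640 px frame width
    float predicted = kf.predict().ptr<float>(0)[0];          // prior estimate for this frame
    float maxPupilDiameterPx = 1.5f * predicted;              // bound that would be passed to the pupil detector
    cv::Mat measurement = ( cv::Mat_<float>(1, 1) << 42.0f ); // placeholder major axis from a detection
    kf.correct(measurement);
    std::cout << predicted << " " << maxPupilDiameterPx << std::endl;
    return 0;
}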
......
......@@ -23,6 +23,8 @@ std::vector<QString> gLogBuffer;
PerformanceMonitor gPerformanceMonitor;
bool gCalibrating = false;
/*
* Utility functions
*/
......
......@@ -110,5 +110,6 @@ extern std::vector<QString> gLogBuffer;
extern PerformanceMonitor gPerformanceMonitor;
extern bool gCalibrating;
#endif // UTILS_H