Commit e0749d56 authored by Thiago Santini's avatar Thiago Santini

Scrolls log on new messages

parent ea1d372e
......@@ -46,7 +46,7 @@ SOURCES +=\
$${TOP}/src/CommandManager.cpp \
$${TOP}/src/ERWidget.cpp \
src/pupil-tracking/PupilTrackingMethod.cpp \
src/pupil-tracking/PuReTy.cpp \
src/pupil-tracking/PuReST.cpp \
src/pupil-detection/PuRe.cpp \
src/Evaluation.cpp
......@@ -80,9 +80,9 @@ HEADERS += \
$${TOP}/src/CommandManager.h \
$${TOP}/src/ERWidget.h \
$${TOP}/src/pupil-tracking/PupilTrackingMethod.h \
src/pupil-tracking/PuReTy.h \
src/pupil-detection/PuRe.h \
src/Evaluation.h
src/Evaluation.h \
src/pupil-tracking/PuReST.h
FORMS += \
$${TOP}/src/MainWindow.ui \
......
......@@ -12,7 +12,7 @@ CameraWidget::CameraWidget(QString id, ImageProcessor::Type type, QWidget *paren
eROI(QPoint(0,0)),
settingROI(false),
lastUpdate(0),
updateIntervalMs(50),
updateIntervalMs(80),
maxAgeMs(300),
cameraCalibrationSampleRequested(false),
ui(new Ui::CameraWidget)
......@@ -24,7 +24,7 @@ CameraWidget::CameraWidget(QString id, ImageProcessor::Type type, QWidget *paren
switch (type) {
case ImageProcessor::Eye:
ui->viewFinder->setToolTip("You can select a region of interest by clicking and holding the right mouse button.");
ui->viewFinder->setToolTip("You can select a region of interest by clicking and holding the left mouse button.");
break;
case ImageProcessor::Field:
ui->viewFinder->setToolTip("After starting the collection you can select points by clicking in this view with the left mouse button.");
......@@ -123,7 +123,7 @@ CameraWidget::CameraWidget(QString id, ImageProcessor::Type type, QWidget *paren
imageProcessor, SIGNAL(newROI(QPointF,QPointF)) );
// Initial roi
setROI( QPointF(0, 0), QPointF(1, 1) );
setROI( QPointF(0.15, 0.1), QPointF(0.85, 0.9) );
QMetaObject::invokeMethod(camera, "loadCfg");
}
......@@ -322,10 +322,10 @@ void CameraWidget::stopRecording()
void CameraWidget::mousePressEvent(QMouseEvent *event)
{
if (ui->viewFinder->underMouse()) {
if (event->button() == Qt::LeftButton)
if (event->button() == Qt::LeftButton && type == ImageProcessor::Field)
emit newClick( gTimer.elapsed(), ui->viewFinder->mapFromGlobal(this->mapToGlobal(event->pos())), ui->viewFinder->size());
if (event->button() == Qt::RightButton) {
if (event->button() == Qt::LeftButton && type == ImageProcessor::Eye) {
sROI = ui->viewFinder->mapFrom(this, event->pos());
validatePoint(sROI);
sROI.setX( sROI.x() / ui->viewFinder->width());
......@@ -350,7 +350,7 @@ void CameraWidget::mouseMoveEvent(QMouseEvent *event)
void CameraWidget::mouseReleaseEvent(QMouseEvent *event)
{
if (event->button() == Qt::RightButton) {
if (event->button() == Qt::LeftButton && type == ImageProcessor::Eye) {
eROI = ui->viewFinder->mapFrom(this, event->pos());
validatePoint(eROI);
eROI.setX( eROI.x() / ui->viewFinder->width());
......@@ -471,6 +471,12 @@ void CameraWidget::onCameraCalibrationFinished(bool success)
void CameraWidget::updateWidgetSize(const int &width, const int &height)
{
if ( this->type == ImageProcessor::Eye)
this->setMaximumSize(320, 240);
if ( this->type == ImageProcessor::Field)
this->setMaximumSize(640, 360);
return;
// Logic to limit the size of the camera widgets
QSize newFrameSize = { width, height };
if (frameSize == newFrameSize)
......
......@@ -133,7 +133,7 @@ void DataRecorder::storeData(T &data)
videoIdx++;
}
if ( gPerformanceMonitor.shouldDrop(pmIdx, gTimer.elapsed() - data.timestamp, 1000) )
if ( gPerformanceMonitor.shouldDrop(pmIdx, gTimer.elapsed() - data.timestamp, 2000) )
return;
if (videoWriter->isOpened())
......
......@@ -27,7 +27,7 @@ EyeImageProcessor::EyeImageProcessor(QString id, QObject *parent)
pmIdx = gPerformanceMonitor.enrol(id, "Image Processor");
pupilTrackingMethod = new PuReTy();
pupilTrackingMethod = new PuReST();
}
void EyeImageProcessor::updateConfig()
......@@ -54,8 +54,8 @@ EyeImageProcessor::~EyeImageProcessor()
void EyeImageProcessor::process(Timestamp timestamp, const Mat &frame)
{
// TODO: parametrize frame drop due to lack of processing power
//if ( gPerformanceMonitor.shouldDrop(pmIdx, gTimer.elapsed() - timestamp, 50) )
// return;
if ( gPerformanceMonitor.shouldDrop(pmIdx, gTimer.elapsed() - timestamp, 50) )
return;
QMutexLocker locker(&cfgMutex);
......@@ -79,7 +79,6 @@ void EyeImageProcessor::process(Timestamp timestamp, const Mat &frame)
data.pupil = Pupil();
data.validPupil = false;
if (pupilDetectionMethod != NULL) {
Rect userROI = Rect(
Point(sROI.x() * data.input.cols, sROI.y() * data.input.rows),
Point( eROI.x() * data.input.cols, eROI.y() * data.input.rows)
......
......@@ -29,7 +29,7 @@
#endif
#include "pupil-detection/PupilDetectionMethod.h"
#include "pupil-tracking/PuReTy.h"
#include "pupil-tracking/PuReST.h"
#include "pupil-tracking/PupilTrackingMethod.h"
#include "utils.h"
......
......@@ -7,6 +7,9 @@ using namespace aruco;
static int gFieldDataId = qRegisterMetaType<FieldData>("FieldData");
static int markerDetectionCounter = 0;
static int redetectMarkerCounter = 1;
FieldImageProcessor::FieldImageProcessor(QString id, QObject *parent)
: id(id),
sROI(QPointF(0,0)),
......@@ -91,6 +94,20 @@ void FieldImageProcessor::process(Timestamp timestamp, const Mat &frame)
}
if (cfg.markerDetectionMethod == "aruco" || gCalibrating) {
#define LIMIT_MARKER_DETECTION
#ifdef LIMIT_MARKER_DETECTION
if ( markerDetectionCounter == redetectMarkerCounter ) {
detectMarkers(downscaled, dict, corners, ids, detectorParameters);
if (cfg.processingDownscalingFactor > 1) { // Upscale if necessary
for (unsigned int i=0; i<ids.size(); i++)
for (unsigned int j=0; j<corners[i].size(); j++)
corners[i][j] = cfg.processingDownscalingFactor*corners[i][j];
}
markerDetectionCounter = 0;
} else
markerDetectionCounter++;
#else
detectMarkers(downscaled, dict, corners, ids, detectorParameters);
if (cfg.processingDownscalingFactor > 1) { // Upscale if necessary
......@@ -98,6 +115,7 @@ void FieldImageProcessor::process(Timestamp timestamp, const Mat &frame)
for (unsigned int j=0; j<corners[i].size(); j++)
corners[i][j] = cfg.processingDownscalingFactor*corners[i][j];
}
#endif
}
// Filling the marker data
......
......@@ -18,7 +18,8 @@ LogWidget::~LogWidget()
void LogWidget::appendMessage(const QString &msg)
{
    // Append the trimmed message once (the scrape duplicated the pre-change
    // appendPlainText line; only one append is intended) and keep the log
    // view pinned to the newest entry, per this commit's purpose
    // ("Scrolls log on new messages").
    ui->log->appendPlainText( msg.trimmed() );
    ui->log->verticalScrollBar()->setValue( ui->log->verticalScrollBar()->maximum() );
}
void LogWidget::on_usrMsg_returnPressed()
......
......@@ -3,6 +3,7 @@
#include <QMainWindow>
#include <QKeyEvent>
#include <QScrollBar>
#include <QDebug>
#include "ERWidget.h"
......
......@@ -48,6 +48,8 @@ MainWindow::MainWindow(QWidget *parent) :
volatile QList<QCameraInfo> tmp = QCameraInfo::availableCameras();
Q_UNUSED(tmp);
gPerformanceMonitor.setFrameDrop(true);
/*
* Asynchronous elements
*/
......@@ -142,6 +144,7 @@ MainWindow::MainWindow(QWidget *parent) :
connect(&commandManager, SIGNAL(unfreezeCameraImages()),
this, SLOT(unfreezeCameraImages()) );
#else
gPerformanceMonitor.setFrameDrop(false);
evaluation = new Evaluation();
evaluation->show();
QMetaObject::invokeMethod(evaluation, "run", Qt::QueuedConnection);
......
......@@ -12,7 +12,7 @@ Rect PupilDetectionMethod::coarsePupilDetection(const Mat &frame, const float &m
// We can afford to work on a very small input for haar features, but retain the aspect ratio
float xr = frame.cols / (float) workingWidth;
float yr = frame.rows / (float) workingHeight;
float r = min( xr, yr );
float r = max( xr, yr );
Mat downscaled;
resize(frame, downscaled, Size(), 1/r, 1/r, CV_INTER_LINEAR);
......@@ -405,6 +405,38 @@ float PupilDetectionMethod::outlineContrastConfidence(const Mat &frame, const Pu
return validCount / (float) evaluated;
}
float PupilDetectionMethod::angularSpreadConfidence(const vector<Point> &points, const Point2f &center)
{
    // Confidence based on angular coverage: the fraction of the four
    // quadrants around `center` that contain at least one point
    // (1.0 = points spread all around the center, 0.25 = all points
    // clustered in a single quadrant).
    std::bitset<4> coveredQuadrants;
    for (const auto &pt : points) {
        const bool left  = pt.x - center.x < 0;
        const bool above = pt.y - center.y < 0;
        // Quadrant labels match the original mapping:
        // left/above -> 0, right/above -> 1, right/below -> 2, left/below -> 3
        std::size_t quadrant;
        if (left)
            quadrant = above ? 0 : 3;
        else
            quadrant = above ? 1 : 2;
        coveredQuadrants.set(quadrant);
    }
    return coveredQuadrants.count() / (float) coveredQuadrants.size();
}
float PupilDetectionMethod::aspectRatioConfidence(const Pupil &pupil)
{
    // Confidence from the ellipse aspect ratio: 1 for a perfect circle,
    // approaching 0 as the fitted ellipse flattens.
    const float minor = pupil.minorAxis();
    const float major = pupil.majorAxis();
    return minor / major;
}
float PupilDetectionMethod::edgeRatioConfidence(const Mat &edgeImage, const Pupil &pupil, vector<Point> &edgePoints, const int &band)
{
if (!pupil.valid())
......@@ -416,3 +448,4 @@ float PupilDetectionMethod::edgeRatioConfidence(const Mat &edgeImage, const Pupi
findNonZero(inBandEdges, edgePoints);
return min<float>( edgePoints.size() / pupil.circumference(), 1.0 );
}
......@@ -6,6 +6,7 @@
#include <string>
#include <deque>
#include <bitset>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
......@@ -108,6 +109,8 @@ public:
// Generic confidence metrics
static float outlineContrastConfidence(const cv::Mat &frame, const Pupil &pupil, const int &bias=5);
static float edgeRatioConfidence(const cv::Mat &edgeImage, const Pupil &pupil, std::vector<cv::Point> &edgePoints, const int &band=5);
static float angularSpreadConfidence(const std::vector<cv::Point> &points, const cv::Point2f &center);
static float aspectRatioConfidence(const Pupil &pupil);
//Pupil test(const cv::Mat &frame, const cv::Rect &roi, Pupil pupil) { return pupil; }
protected:
......
......@@ -64,7 +64,6 @@ void PupilTrackingMethod::run(const Timestamp &ts, const cv::Mat &frame, const c
predictMaxPupilDiameter();
if ( previousPupil.confidence == NO_CONFIDENCE ) {
//pupil = pupilDetectionMethod.runWithConfidence(frame, roi, -1, predictedMaxPupilDiameter);
pupil = pupilDetectionMethod.runWithConfidence(frame, roi, -1, -1);
} else {
run(frame, roi, previousPupil, pupil);
......@@ -72,58 +71,4 @@ void PupilTrackingMethod::run(const Timestamp &ts, const cv::Mat &frame, const c
registerPupil(ts, pupil);
return;
/*
int minPupilDiameterPx = -1;
int maxPupilDiameterPx = -1;
if ( ! previousPupils.empty() ) {
float meanDiameterPx = std::accumulate( previousPupils.begin(), previousPupils.end(), 0,
[](int sum, const TrackedPupil &p) { return sum + max<int>( p.size.width, p.size.height ); } ) / (float) previousPupils.size();
minPupilDiameterPx = 0.75 * meanDiameterPx;
maxPupilDiameterPx = 1.25 * meanDiameterPx;
}
if ( previousPupil.confidence == NO_CONFIDENCE ) {
pupil = pupilDetectionMethod.runWithConfidence(frame, roi, minPupilDiameterPx, maxPupilDiameterPx);
if (previousPupils.size() > 15) {
if (pupil.diameter() <= maxPupilDiameterPx)
updatePreviousPupil(ts, pupil);
else
pupil.clear();
} else
updatePreviousPupil(ts, pupil);
return;
}
QFuture<Pupil> future;
if (ts - lastDetection > maxTrackingWithoutDetectionTime) {
float previousDiameter = max<float>( previousPupil.size.width, previousPupil.size.height );
future = QtConcurrent::run(&pupilDetectionMethod, &PupilDetectionMethod::runWithConfidence, frame, roi, 0.9*previousDiameter, 1.1*previousDiameter);
parallelDetection = true;
}
run(frame, roi, previousPupil, pupil);
if (parallelDetection) {
parallelDetection = false;
lastDetection = ts;
Pupil detectedPupil = future.result();
if (detectedPupil.confidence > pupil.confidence)
pupil = detectedPupil;
} else {
if ( pupil.confidence < minDetectionConfidence)
pupil = pupilDetectionMethod.runWithConfidence(frame, roi);
}
if ( pupil.confidence > minDetectionConfidence) {
if (previousPupils.size() > 15) {
if (pupil.diameter() <= maxPupilDiameterPx)
updatePreviousPupil(ts, pupil);
else
pupil.clear();
} else
updatePreviousPupil(ts, pupil);
}
*/
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment