Commit 06c6fb67 authored by Thiago Santini

Improves calibration overlay

parent b6802aee
@@ -7,7 +7,8 @@ GazeEstimation::GazeEstimation(QObject *parent)
: QObject(parent),
calibrated(false),
isCalibrating(false),
gazeEstimationMethod(NULL),
gazeEstimationMethod(NULL),
lastOverlayIdx(0),
settings(NULL)
{
availableGazeEstimationMethods.push_back( new PolyFit(PolyFit::POLY_1_X_Y_XY_XX_YY_XYY_YXX_XXYY) );
@@ -37,7 +38,7 @@ void GazeEstimation::reset(CollectionTuple::TupleType type)
{
for (size_t i=collectedTuples.size(); i-->0;)
if (collectedTuples[i].tupleType == type)
collectedTuples.erase(collectedTuples.begin() + i);
collectedTuples.erase(collectedTuples.begin() + i);
}
bool GazeEstimation::isPupilOutlineValid(const EyeData &cur)
@@ -595,52 +596,66 @@ void GazeEstimation::drawGazeEstimationInfo(DataTuple &dataTuple)
if (!shouldDisplay)
return;
// avoid drawing every single frame
static Timestamp lastGazeEstimationVisualizationTimestamp = 0;
Timestamp current = gTimer.elapsed();
bool shouldDraw = current - lastGazeEstimationVisualizationTimestamp > 40;
if (!shouldDraw)
return;
dataTuple.showGazeEstimationVisualization = true;
dataTuple.showGazeEstimationVisualization = true;
dataTuple.gazeEstimationVisualization = vis;
// frame is already old; display old visualization instead to save processing
if (current - dataTuple.field.timestamp > 100)
return;
// avoid drawing every single frame
static Timestamp lastGazeEstimationVisualizationTimestamp = 0;
Timestamp current = gTimer.elapsed();
bool shouldDraw = current - lastGazeEstimationVisualizationTimestamp > 40;
if (!shouldDraw)
return;
lastGazeEstimationVisualizationTimestamp = current;
vis = dataTuple.field.input.clone();
vis = dataTuple.field.input.clone();
int r = max<int>( 1, 0.003125*max<int>(vis.rows, vis.cols) );
if (isCalibrating) {
for (size_t i=0; i<collectedTuples.size(); i++) {
circle(vis, to2D(collectedTuples[i].field.collectionMarker.center), r, CV_BLACK, -1);
if (collectedTuples[i].isCalibration())
circle(vis, to2D(collectedTuples[i].field.collectionMarker.center), r+1, CV_GREEN, 0.5*r);
else
circle(vis, to2D(collectedTuples[i].field.collectionMarker.center), r+1, CV_CYAN, 0.5*r);
}
} else {
if (!interpolationHull.empty()) {
if (isCalibrating) {
if (lastOverlayIdx > collectedTuples.size()) // sample removed, restart
lastOverlayIdx = 0;
if (lastOverlayIdx == 0)
overlay = Mat::zeros( vis.rows, vis.cols, CV_8UC3);
for ( ; lastOverlayIdx < collectedTuples.size(); lastOverlayIdx++) {
circle(overlay, to2D(collectedTuples[lastOverlayIdx].field.collectionMarker.center), r, CV_ALMOST_BLACK, -1);
if (collectedTuples[lastOverlayIdx].isCalibration())
circle(overlay, to2D(collectedTuples[lastOverlayIdx].field.collectionMarker.center), r+1, CV_GREEN, 0.5*r);
else
circle(overlay, to2D(collectedTuples[lastOverlayIdx].field.collectionMarker.center), r+1, CV_CYAN, 0.5*r);
}
} else {
// Calibration finished; overlay results and restart
overlay = Mat::zeros( vis.rows, vis.cols, CV_8UC3);
lastOverlayIdx = 0;
if (!interpolationHull.empty()) {
vector< vector<Point> > contours;
contours.push_back(interpolationHull);
drawContours(vis, contours, 0, CV_GREEN, r);
drawContours(overlay, contours, 0, CV_GREEN, r);
}
for (size_t i=0; i<errorVectors.size(); i++)
errorVectors[i].draw(vis, 2, CV_RED);
errorVectors[i].draw(overlay, 2, CV_RED);
if (cfg.autoEvaluation)
for (size_t i=0; i<evaluationRegions.size(); i++)
evaluationRegions[i].draw(vis, r);
evaluationRegions[i].draw(overlay, r);
for (size_t i=0; i<collectedTuples.size(); i++) {
if (!collectedTuples[i].isCalibration()) {
circle(vis, to2D(collectedTuples[i].field.collectionMarker.center), r, CV_BLACK, -1);
circle(vis, to2D(collectedTuples[i].field.collectionMarker.center), r+1, CV_CYAN, 0.5*r);
}
}
}
dataTuple.gazeEstimationVisualization = vis;
for (size_t i=0; i<collectedTuples.size(); i++) {
if (!collectedTuples[i].isCalibration()) {
circle(overlay, to2D(collectedTuples[i].field.collectionMarker.center), r, CV_BLACK, -1);
circle(overlay, to2D(collectedTuples[i].field.collectionMarker.center), r+1, CV_CYAN, 0.5*r);
}
}
}
// Overlay on visualization image; notice we use CV_ALMOST_BLACK instead of
// CV_BLACK so we don't need to create an additional mask :-)
overlay.copyTo(vis, overlay);
}
void GazeEstimation::setCalibrating(bool v)
......
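The hunk above replaces per-refresh redrawing of every collected sample with a persistent overlay image plus a progress index lastOverlayIdx: only tuples collected since the last refresh are drawn, the overlay is rebuilt from zero when a sample is removed or calibration finishes, and the result is composited onto vis at the end. A minimal sketch of that pattern, assuming plain OpenCV (the OverlayDemo struct and its refresh method are illustrative stand-ins, not the actual EyeRecToo API):

#include <opencv2/imgproc.hpp>

#include <algorithm>
#include <vector>

// Sketch of the incremental-overlay idea; names are illustrative.
struct OverlayDemo {
    cv::Mat overlay;            // persistent drawing layer (CV_8UC3, zero = transparent)
    size_t lastOverlayIdx = 0;  // how many markers have already been drawn

    void refresh(const std::vector<cv::Point> &markers, cv::Mat &vis, int r) {
        if (lastOverlayIdx > markers.size()) // a sample was removed; start over
            lastOverlayIdx = 0;
        if (lastOverlayIdx == 0)
            overlay = cv::Mat::zeros(vis.rows, vis.cols, CV_8UC3);

        // Draw only the markers added since the previous refresh.
        for (; lastOverlayIdx < markers.size(); lastOverlayIdx++) {
            // Near-black fill so the dot still passes the mask in the copyTo below.
            cv::circle(overlay, markers[lastOverlayIdx], r, cv::Scalar(1, 1, 1), -1);
            cv::circle(overlay, markers[lastOverlayIdx], r + 1, cv::Scalar(0, 255, 0), std::max(1, r / 2));
        }

        // Composite onto the visualization; the overlay doubles as its own mask,
        // so never-drawn (zero) pixels leave vis untouched.
        overlay.copyTo(vis, overlay);
    }
};

Because each refresh draws only the delta, the drawing cost stays roughly constant as calibration samples accumulate; the masked copy at the end is what motivates the near-black marker color introduced further below.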
@@ -39,7 +39,7 @@ public:
cv::RotatedRect el(cv::Point(x,y), cv::Size(w,h), 0);
if (selected){
cv::ellipse(in, el, CV_GREEN, 0.5*r);
cv::circle( in, to2D(selected->field.collectionMarker.center), r, CV_BLACK, -1);
cv::circle( in, to2D(selected->field.collectionMarker.center), r, CV_ALMOST_BLACK, -1);
cv::circle( in, to2D(selected->field.collectionMarker.center), r+1, CV_GREEN, r);
} else
cv::ellipse(in, el, CV_RED, 0.5*r);
@@ -207,6 +207,9 @@ private:
std::vector<ErrorVector> errorVectors;
QElapsedTimer autoVisualizationTimer;
cv::Mat vis;
cv::Mat overlay;
int lastOverlayIdx;
private slots:
void detectOutliers();
......
@@ -93,6 +93,7 @@ Q_DECLARE_METATYPE(enum CVFlip)
#define CV_MAGENT cv::Scalar(0x81,0x40,0xff)
#define CV_WHITE cv::Scalar(0xff,0xff,0xff)
#define CV_BLACK cv::Scalar(0x00,0x00,0x00)
#define CV_ALMOST_BLACK cv::Scalar(0x01,0x01,0x01)
template<typename T> void set(const QSettings *settings, const QString key, T &v)
{
......
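The new CV_ALMOST_BLACK define is what makes the overlay.copyTo(vis, overlay) call in the GazeEstimation::drawGazeEstimationInfo hunk above work: the overlay image is used as its own copy mask, so anything drawn in true CV_BLACK (all zeros) would count as "never drawn" and be dropped, while 0x01,0x01,0x01 looks identical on screen but passes the mask. Presumably for the same reason, the selected-marker fill in the header hunk above switches from CV_BLACK to CV_ALMOST_BLACK. A toy, self-contained illustration, assuming only OpenCV (the gray test frame and the printed pixel values are purely for demonstration):

#include <opencv2/imgproc.hpp>

#include <iostream>

#define CV_BLACK cv::Scalar(0x00,0x00,0x00)
#define CV_ALMOST_BLACK cv::Scalar(0x01,0x01,0x01)

int main() {
    cv::Mat vis(100, 100, CV_8UC3, cv::Scalar(128, 128, 128)); // stand-in for the camera frame
    cv::Mat overlay = cv::Mat::zeros(vis.size(), CV_8UC3);     // zero = transparent

    cv::circle(overlay, cv::Point(30, 50), 10, CV_BLACK, -1);        // lost: mask is zero here
    cv::circle(overlay, cv::Point(70, 50), 10, CV_ALMOST_BLACK, -1); // kept: mask is non-zero

    // The overlay doubles as its own copy mask: only non-zero pixels reach vis,
    // so no separate mask image is needed.
    overlay.copyTo(vis, overlay);

    std::cout << (int)vis.at<cv::Vec3b>(50, 30)[0] << " "   // 128: pure-black circle vanished
              << (int)vis.at<cv::Vec3b>(50, 70)[0] << "\n"; // 1: almost-black circle survived
    return 0;
}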