#include "ImageAcquisition.h" #include static bool isMaster(string str) { if (settings.value("eyeCam/name").toString().toStdString().compare(str) == 0) return true; else return false; } static bool isSlave(string str) { if (settings.value("fieldCam/name").toString().toStdString().compare(str) == 0) return true; else return false; } // This is a workaround to videoman getFrame not respecting the wait parameter. // As a result, some eye trackers will block while no new image is available whereas others won't. // Basicaly, we pool the slave camera on a separate thread as not to block the master callback. static void updateSlave(ImageAcquisition *imageAcquisition) { QElapsedTimer counter; Q_ASSERT(counter.isMonotonic()); double period = 1000/imageAcquisition->slave->format.fps; while (imageAcquisition->running) { imageAcquisition->slave->buffer.imageData = imageAcquisition->videoManControl.getFrame(imageAcquisition->slave->id, true); counter.start(); Mat tmp; if (imageAcquisition->slave->buffer.imageData != NULL) { tmp = cvarrToMat(&imageAcquisition->slave->buffer, true); flip(tmp, tmp, settings.value("fieldCam/flip").toInt()); // flip before undistorting remap(tmp, tmp, imageAcquisition->map1, imageAcquisition->map2, cv::INTER_CUBIC); if ( settings.value("fieldCam/outResX").toInt() != tmp.cols || settings.value("fieldCam/outResY").toInt() != tmp.rows ) { resize( tmp, tmp, Size(settings.value("fieldCam/outResX").toInt(), settings.value("fieldCam/outResY").toInt()), INTER_CUBIC ); } } imageAcquisition->videoManControl.releaseFrame( imageAcquisition->slave->id ); imageAcquisition->slaveMutex.lock(); tmp.copyTo(imageAcquisition->latestSlaveFrame); imageAcquisition->slaveMutex.unlock(); double sleepPeriod = period - counter.elapsed(); if (sleepPeriod > 0) QThread::msleep(sleepPeriod); } } void ImageAcquisition::onThreadStarted(){ running = true; if (slave) slaveThread = QtConcurrent::run(updateSlave, this); } void ImageAcquisition::onThreadFinishing() { running = false; slaveThread.waitForFinished(); qDebug() << QThread::currentThread()->objectName() << " done."; } ImageAcquisition::ImageAcquisition(QObject *parent) : QObject(parent), videoManControl(VideoManControl::NO_RENDERER) { running = false; ready = false; master = NULL; slave = NULL; VMInputIdentification *devList; int devCount; PerfCounter perfCounter; vector tmpList; // Update the device list videoManControl.getAvailableDevices( &devList, devCount ); qDebug() << "Found " << devCount << " devices."; cameras.clear(); tmpList.clear(); for (int i=0; iformat.width, camera->format.height), camera->format.depth, camera->format.nChannels ); tmp->origin = 0; camera->buffer = *tmp; cvReleaseImage(&tmp); if ( isMaster(camera->name) ) master = camera; if ( isSlave(camera->name )) slave = camera; } if ( !master || (!slave && !isSlave("dummy")) ) { QString masterName = settings.value("eyeCam/name").toString(); if (masterName.isEmpty()) masterName = "Empty"; QString tmp(QString("Current master: %1\n").arg(masterName)); QString slaveName = settings.value("fieldCam/name").toString(); if (slaveName.isEmpty()) slaveName = "Empty"; tmp.append(QString("Current slave: %1\n").arg(slaveName)); tmp.append("\nThese are the cameras I found:\n\n"); for (unsigned int i=0; iformat.width, slave->format.height) ); latestSlaveFrame = Mat::zeros(slave->format.width, slave->format.height, CV_8UC3); putText(latestSlaveFrame, "No valid frame so far...", Point(40,latestSlaveFrame.cols/2), FONT_HERSHEY_SIMPLEX, 1, Scalar(255,255,255), 2); } else { qDebug() << "Using dummy 
slave"; latestSlaveFrame = Mat::zeros(settings.value("fieldCam/outResY").toInt(), settings.value("fieldCam/outResX").toInt(), CV_8UC3); putText(latestSlaveFrame, "No valid frame so far...", Point(40,latestSlaveFrame.cols/2), FONT_HERSHEY_SIMPLEX, 1, Scalar(255,255,255), 2); } if (master) { qDebug() << "Master is " << camToStr(*master); // support frame callback does not work properly so we test the callback ourselves // if( videoManControl.supportFrameCallback(master->id) ) //videoManControl.setFrameCallback(master->id, &ImageAcquisition::frameCallback, this); videoManControl.setFrameCallback(master->id, this->frameCallback, (void*)this); qDebug() << "Testing callback..."; QElapsedTimer timer; timer.start(); while (!ready && !timer.hasExpired(settings.value("eyeCam/callbackMilliSecondTimeout").toInt()) ) QThread::msleep(100); if (ready) qDebug() << "Callback works!"; else { qDebug() << "Callback seems broken!"; QString tmp; tmp.append(QString("The master camera callback did not function properly within %1 milliseconds.").arg(settings.value("eyeCam/callbackMilliSecondTimeout").toInt())); tmp.append("\nPossible solutions:\n"); tmp.append("1) Turn off the cameras and replug.\n"); tmp.append("2) Reboot your computer.\n"); tmp.append("3) Try a different master.\n"); tmp.append("4) Increase the callbackMilliSecondTimeout parameter.\n"); QMessageBox::critical(NULL, "Master callback failed.", tmp, QMessageBox::Ok, QMessageBox::NoButton); } } } ImageAcquisition::~ImageAcquisition() { if (master) { videoManControl.setFrameCallback(master->id, NULL, NULL); QThread::msleep(100); // Just in case a frame callback was being processed while we are finishing, give it some time to finish } videoManControl.deleteInputs(); cameras.clear(); } void ImageAcquisition::frameCallback(char *pixelBuffer, size_t input, double timestamp, void *data) { ImageAcquisition* imageAcquisition = static_cast(data); imageAcquisition->frameCallbackInternal(pixelBuffer, input, timestamp); } void ImageAcquisition::frameCallbackInternal(char *pixelBuffer, size_t input, double timestamp) { if (!ready) { recordingStarted = GetTickCount64(); gTimer.setOffset(recordingStarted+1e3*timestamp); } ready = true; if (running && pixelBuffer != NULL) { JournalEntry entry(recordingStarted+timestamp*1e3); entry.perfCounter.start(); master->buffer.imageData = pixelBuffer; Mat tmp = cvarrToMat(&master->buffer, false); if ( settings.value("eyeCam/deinterlace").toBool() ) naiveDeinterlace(tmp); resize( tmp, entry.leftEyeFrame, Size(settings.value("eyeCam/outResX").toInt(), settings.value("eyeCam/outResY").toInt()), INTER_CUBIC ); cvtColor(entry.leftEyeFrame, entry.leftEyeFrame, CV_RGB2GRAY); // Assumes the pupil detection uses a grayscale input, may have to be changed later on flip(entry.leftEyeFrame, entry.leftEyeFrame, settings.value("eyeCam/flip").toInt()); // videoMan getFrame function does not respect the wait=false parameter, so we have to update the image on another thread (see updateSlave) slaveMutex.lock(); latestSlaveFrame.copyTo(entry.fieldFrame); slaveMutex.unlock(); entry.incIdx(); emit imageAcquisitionDone(entry); } videoManControl.releaseFrame( master->id ); } void ImageAcquisition::setUndistortionMaps( Size size ) { /* Reference: http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html * * | Fx, 0, Cx | * | 0, Fy, Cy | * | 0. 0, 1 | * * Fx and Fy are the focal lengths (in pixels) * ( Cx , Cy ) is the principal point. 
     */
    Mat cameraCalibration = (Mat_<double>(3,3) <<
        settings.value("fieldCam/Fx").toFloat(), 0.0, settings.value("fieldCam/Cx").toFloat(),
        0.0, settings.value("fieldCam/Fy").toFloat(), settings.value("fieldCam/Cy").toFloat(),
        0.0, 0.0, 1.0 );

    /* Reference: http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html
     *
     * (k1, k2, p1, p2, k3, k4, k5, k6)
     *
     * k1, k2, k3, k4, k5, and k6 are radial distortion coefficients
     * p1 and p2 are tangential distortion coefficients
     */
    Mat cameraDistortion = (Mat_<double>(1,8) <<
        settings.value("fieldCam/k1").toFloat(),
        settings.value("fieldCam/k2").toFloat(),
        settings.value("fieldCam/p1").toFloat(),
        settings.value("fieldCam/p2").toFloat(),
        settings.value("fieldCam/k3").toFloat(),
        settings.value("fieldCam/k4").toFloat(),
        settings.value("fieldCam/k5").toFloat(),
        settings.value("fieldCam/k6").toFloat() );

    initUndistortRectifyMap( cameraCalibration, cameraDistortion, Mat(), cameraCalibration, size, CV_32FC1, map1, map2 );
}

QString ImageAcquisition::camToStr(Camera cam)
{
    QString fps = cam.format.fps == 0 ? "unknown" : QString::number(cam.format.fps);
    return QString("'%1' %2x%3 @ %4 fps").arg(cam.name.c_str()).arg(cam.format.width).arg(cam.format.height).arg(fps);
}

void ImageAcquisition::naiveDeinterlace(Mat &in)
{
    // NOTE: the original loop body was truncated; a simple line-averaging deinterlace is
    // sketched here: each odd row is replaced by the mean of its two even neighbours.
    for (int i = 2; i < in.rows; i += 2) {
        Mat target = in.row(i - 1); // header sharing data with 'in'
        addWeighted(in.row(i - 2), 0.5, in.row(i), 0.5, 0.0, target);
    }
}