#include "PostProcessingWidget.h"
#include "ui_PostProcessingWidget.h"

/*******************************************************************************
 *
 * WARNING: This is beta! Here be dragons!
 *
 *******************************************************************************/

// TODO: refactor this into a QStateMachine

using namespace std;

PostProcessingWidget::PostProcessingWidget(QString id, QWidget* parent)
    : ERWidget(id, parent)
    , ui(new Ui::PostProcessingWidget)
    , postVideoProcessor(nullptr)
    , videoSource(nullptr)
    , videoThread(nullptr)
    , processing(false)
    , injectionPaused(false)
    , playbackStarted(false)
    , presentIdx(0)
    , calibrated(false)
    , leftEyeVideoSource(nullptr)
    , rightEyeVideoSource(nullptr)
    , postDirBaseName("post")
    , videoPostProcessingId("VideoPostProcessing")
    , doneStr("Ready.")
{
    ui->setupUi(this);
    ui->synchronize->setVisible(false); // Turn on for debugging
    ui->tabWidget->setEnabled(false);
}

PostProcessingWidget::~PostProcessingWidget()
{
    on_actionClose_Recording_triggered();
    delete ui;
}

// Rescans the recording directory and repopulates the data-file combo boxes,
// preserving the current selection of each box when the file still exists.
void PostProcessingWidget::updateSelections()
{
    auto updateComboBox = [](const QDir& dir, const QString& pattern, QComboBox& comboBox) {
        QString current = comboBox.currentData().toString();
        comboBox.clear();
        QDirIterator it(dir.absolutePath(), QDir::Files, QDirIterator::Subdirectories);
        QRegularExpression re(pattern, QRegularExpression::CaseInsensitiveOption);
        while (it.hasNext()) {
            QFileInfo info(it.next());
            if (!re.match(info.fileName()).hasMatch())
                continue;
            // Display paths relative to the recording root, but store the absolute path.
            QString relative = info.absoluteFilePath().replace(dir.absolutePath(), "");
            if (relative.startsWith('/'))
                relative.remove(0, 1);
            comboBox.addItem(relative, info.absoluteFilePath());
            if (current == info.absoluteFilePath())
                comboBox.setCurrentText(relative);
        }
    };

    updateComboBox(dir, ".*\\.tup", *ui->calibrationSelection);
    updateComboBox(dir, ".*left.*\\.(tsv|csv)", *ui->leftEyeDataSelection);
    updateComboBox(dir, ".*right.*\\.(tsv|csv)", *ui->rightEyeDataSelection);
    updateComboBox(dir, ".*field.*\\.(tsv|csv)", *ui->fieldDataSelection);
    updateComboBox(dir, ".*journal.*\\.(tsv|csv)", *ui->journalSelection);
    updateComboBox(dir, ".*\\.tup", *ui->eventSelection);
}

// Locates the left/right/field videos in the recording directory, updates the
// file-name labels, and fills the video-processing combo box.
void PostProcessingWidget::fillInfos()
{
    ui->videoProcessingSelection->clear();

    auto find = [](const QFileInfoList& entries, const QString& pattern) {
        for (const auto& entry : entries) {
            if (entry.fileName().contains(pattern, Qt::CaseInsensitive))
                return entry;
        }
        return QFileInfo();
    };

    auto entries = dir.entryInfoList(QStringList() << "*.avi" << "*.mp4");

    auto set = [this](const QFileInfo& info, QLabel& label) {
        if (info.isFile()) {
            label.setText(info.fileName());
            if (isEyeVideo(info) || isFieldVideo(info))
                this->ui->videoProcessingSelection->addItem(info.fileName(), info.absoluteFilePath());
        } else {
            label.setText("N/A");
        }
    };

    leftEyeInfo = find(entries, "left");
    set(leftEyeInfo, *ui->leftEyeFileName);
    rightEyeInfo = find(entries, "right");
    set(rightEyeInfo, *ui->rightEyeFileName);
    fieldInfo = find(entries, "field");
    set(fieldInfo, *ui->fieldFileName);

    updateSelections();
}

// Stops any ongoing processing/playback and clears all recording-related UI state.
void PostProcessingWidget::on_actionClose_Recording_triggered()
{
    if (processing) {
        if (ui->asyncStartStopToggle->isChecked())
            ui->asyncStartStopToggle->click();
        if (playbackStarted)
            ui->stop->click();
        QCoreApplication::processEvents();
    }

    if (gPostProcessing) {
        ui->tabWidget->setEnabled(false);
        ui->recording->clear();
        ui->leftEyeFileName->clear();
        ui->rightEyeFileName->clear();
        ui->fieldFileName->clear();
        ui->videoProcessingSelection->clear();
        ui->calibrationSelection->clear();
        ui->leftEyeDataSelection->clear();
        ui->rightEyeDataSelection->clear();
        ui->fieldDataSelection->clear();
        ui->journalSelection->clear();
        emit restorePreviousPwd();
        gPostProcessing = false;
        emit recordingClosed();
    }
}

void PostProcessingWidget::on_actionOpen_Recording_triggered()
{
    if (processing) {
        qInfo() << "Can't open recording while processing";
        return;
    }

    QString dirName = QFileDialog::getExistingDirectory(this, "Open Recording", "",
        QFileDialog::ShowDirsOnly | QFileDialog::DontResolveSymlinks);
    if (dirName.isEmpty())
        return;

    openRecording(dirName);
}

// Opens a recording directory: closes any previous one, creates the "post"
// output subdirectory, and populates the UI from the directory contents.
void PostProcessingWidget::openRecording(const QString& dirName)
{
    on_actionClose_Recording_triggered();

    ui->tabWidget->setEnabled(true);
    gPostProcessing = true;
    dir.setPath(dirName);
    postDir.setPath(dir.absolutePath() + "/" + postDirBaseName);
    postDir.mkpath(postDir.absolutePath());
    emit setWorkingDirectory(postDir.absolutePath());
    ui->recording->setText(dir.absolutePath());
    ui->recording->setToolTip(dir.absolutePath());
    fillInfos();
    emit recordingOpened();
}

// Spawns a CameraWidget processor for the given video and wires the
// frame-injection pipeline: videoSource -> newInjectionFrame -> injectFrame
// -> postVideoProcessor -> newData -> progress tracking.
void PostProcessingWidget::startVideoProcessing(const QFileInfo& info)
{
    qInfo() << "Starting to process" << info.absoluteFilePath();
    currentVideoInfo = info;
    if (isEyeVideo(currentVideoInfo))
        postVideoProcessor = new CameraWidget(videoPostProcessingId, ImageProcessor::Eye);
    if (isFieldVideo(currentVideoInfo))
        postVideoProcessor = new CameraWidget(videoPostProcessingId, ImageProcessor::Field);
    postVideoProcessor->setWindowTitle(videoPostProcessingId);
    postVideoProcessor->show();
    postVideoProcessor->move(QPoint(pos().x() + width(), pos().y()));

    allocateVideoSource(currentVideoInfo);
    connect(this, SIGNAL(requestNextFrame()), videoSource, SLOT(next()));
    connect(videoSource, SIGNAL(newFrame(VideoFrame)), this, SLOT(newInjectionFrame(VideoFrame)));
    // Signal-to-signal connection: forwards our injectFrame into the processor.
    connect(this, SIGNAL(injectFrame(Timestamp, cv::Mat)), postVideoProcessor, SIGNAL(injectFrame(Timestamp, cv::Mat)));
    connect(postVideoProcessor, SIGNAL(newData(EyeData)), this, SLOT(newData(EyeData)));
    connect(postVideoProcessor, SIGNAL(newData(FieldData)), this, SLOT(newData(FieldData)));
    connect(postVideoProcessor, SIGNAL(destroyed(QObject*)), this, SLOT(finishVideoProcessing(QObject*)));

    processedFrames = 0;
    injectedFrames = 0;
    framesToProcess = videoSource->getFrameCount();
    lastTimestamp = videoSource->lastTimestamp();
}

// Returns the absolute path of the "<video base name>Data.(tsv|csv)" file that
// accompanies a video, or an empty string if none exists.
QString PostProcessingWidget::findAssociatedDataFile(const QDir& dir, const QFileInfo& info)
{
    for (const auto& entry : dir.entryInfoList(QStringList() << "*.tsv" << "*.csv")) {
        if (entry.baseName() == info.baseName() + "Data")
            return entry.absoluteFilePath();
    }
    return QString();
}

// Creates the VideoSource in its own thread and initializes it synchronously.
void PostProcessingWidget::allocateVideoSource(const QFileInfo& info)
{
    videoSource = new VideoSource();
    videoThread = new QThread();
    videoThread->start();
    videoSource->moveToThread(videoThread);
    QMetaObject::invokeMethod(videoSource, "init", Qt::BlockingQueuedConnection,
        Q_ARG(QString, info.absoluteFilePath()),
        Q_ARG(QString, findAssociatedDataFile(dir, info)));
    on_realTimeCheckBox_toggled(ui->realTimeCheckBox->isChecked());
}

// Tears down the video source and its thread.
// Order matters: videoSource lives in videoThread, so its deleteLater() must be
// queued while the thread's event loop is still running; only then do we quit
// and wait for the thread before deleting the QThread object itself.
void PostProcessingWidget::deallocateVideoSource()
{
    if (videoSource) {
        videoSource->deleteLater();
        videoSource = nullptr;
    }
    if (videoThread) {
        videoThread->quit();
        videoThread->wait();
        videoThread->deleteLater();
        videoThread = nullptr;
    }
}

void PostProcessingWidget::stopVideoProcessing()
{
    deallocateVideoSource();
    // Guard: postVideoProcessor may already be gone (e.g., window closed by user).
    if (postVideoProcessor) {
        QMetaObject::invokeMethod(postVideoProcessor, "stopRecording");
        postVideoProcessor->deleteLater();
        postVideoProcessor = nullptr;
    }
}

// Invoked when the processor widget is destroyed: renames the temporary output
// file to its prefixed final name and resets the processing UI.
void PostProcessingWidget::finishVideoProcessing(QObject* obj)
{
    Q_UNUSED(obj);
    QString temporaryVideoFile = QString("%1/%2Data.tsv").arg(postDir.absolutePath()).arg(videoPostProcessingId);
    QString target = QString("%1/%2%3Data.tsv").arg(postDir.absolutePath()).arg(ui->prefix->text()).arg(currentVideoInfo.baseName());
    ui->statusbar->showMessage("Renaming file...");
    postDir.remove(target);
    postDir.rename(temporaryVideoFile, target);
    postDir.remove(temporaryVideoFile);
    processing = false;
    ui->progressBar->setValue(0);
    ui->asyncStartStopToggle->setText("Start");
    updateSelections();
    ui->statusbar->showMessage(doneStr);
    emit videoProcessingFinished();
}

void PostProcessingWidget::newData(FieldData data)
{
    DataTuple tuple;
    tuple.field = data;
    tuple.lEye = EyeData();
    tuple.rEye = EyeData();
    QMetaObject::invokeMethod(postVideoProcessor, "preview", Qt::QueuedConnection, Q_ARG(DataTuple, tuple));
    newProcessedFrame(data.timestamp);
}

void PostProcessingWidget::newData(EyeData data)
{
    newProcessedFrame(data.timestamp);
}

// Updates the progress bar and stops processing once the last frame is seen.
void PostProcessingWidget::newProcessedFrame(Timestamp& timestamp)
{
    processedFrames++;
    ui->progressBar->setValue(100.0 * processedFrames / static_cast<double>(framesToProcess));
    auto stop = processedFrames == framesToProcess || lastTimestamp == timestamp;
    if (stop) {
        if (ui->asyncStartStopToggle->isChecked())
            ui->asyncStartStopToggle->toggle();
    }
}

// Feeds one decoded frame into the processor and requests the next one,
// throttling when the processor falls behind or injection is paused.
void PostProcessingWidget::newInjectionFrame(const VideoFrame& videoFrame)
{
    if (videoFrame.frame.empty())
        return;
    while (injectedFrames - processedFrames > 10 || injectionPaused) // in case processing is slower, we wait
        QCoreApplication::processEvents();
    injectedFrames++;
    emit injectFrame(videoFrame.t, videoFrame.frame);
    emit requestNextFrame();
}

void PostProcessingWidget::on_asyncStartStopToggle_toggled(bool started)
{
    if (started) {
        if (processing)
            return;
        processing = true;
        ui->statusbar->showMessage("Processing video...");
        ui->asyncStartStopToggle->setText("Stop");
        QFileInfo info(ui->videoProcessingSelection->currentData().toString());
        QMetaObject::invokeMethod(this, "startVideoProcessing", Q_ARG(QFileInfo, info));
        QMetaObject::invokeMethod(postVideoProcessor, "startRecordingWithoutVideo");
        emit requestNextFrame(); // Kick off
        // processing will be set to false in finishEyeProcessing either through:
        // 1) user interaction (see below)
        // 2) by detection of last frame injection in newData
    } else {
        if (ui->asyncPauseResumeToggle->isChecked())
            ui->asyncPauseResumeToggle->click();
        stopVideoProcessing();
    }
}

void PostProcessingWidget::on_asyncPauseResumeToggle_toggled(bool paused)
{
    if (paused)
        ui->asyncPauseResumeToggle->setText("Resume");
    else
        ui->asyncPauseResumeToggle->setText("Pause");
    injectionPaused = paused;
}

void PostProcessingWidget::on_setProcessingPrefix_clicked()
{
    QString tmp = QInputDialog::getText(this, "Set processing prefix", "Prefix:", QLineEdit::Normal, QString(),
        nullptr, Qt::CustomizeWindowHint);
    // TODO: prevent user from adding weird prefixes
    // e.g., validate against something like QRegExp("^[a-zA-Z0-9-_]") before accepting.
    ui->prefix->setText(tmp);
}

// Merges the selected left-eye, right-eye, and field data files into
// synchronizedTuples by replaying them in timestamp order through a Synchronizer.
void PostProcessingWidget::on_synchronize_clicked()
{
    if (processing)
        return;
    processing = true;

    ui->statusbar->showMessage("Synchronizing tuples...");
    QElapsedTimer timer;
    timer.start();
    synchronizedTuples.clear();

    TSVReader leftEyeData(ui->leftEyeDataSelection->currentData().toString());
    TSVReader rightEyeData(ui->rightEyeDataSelection->currentData().toString());
    TSVReader fieldData(ui->fieldDataSelection->currentData().toString());

    // (timestamp, source reader, row index) — sorted so samples from all three
    // streams are replayed in global timestamp order.
    vector<tuple<Timestamp, TSVReader*, int>> timestamps;
    auto appendTimestamps = [&timestamps](TSVReader* tsv) {
        for (int idx = 0; idx < tsv->size(); idx++) {
            Timestamp ts;
            if (tsv->getData(idx, "timestamp", ts))
                timestamps.push_back(make_tuple(ts, tsv, idx));
        }
    };
    appendTimestamps(&leftEyeData);
    appendTimestamps(&rightEyeData);
    appendTimestamps(&fieldData);
    sort(timestamps.begin(), timestamps.end());

    Synchronizer synchronizer;
    connect(&synchronizer, SIGNAL(newData(DataTuple)), this, SLOT(newData(DataTuple)), Qt::DirectConnection);

    // The synchronizer expects frames; feed it blank frames of the correct size.
    cv::Mat leftEyeFakeFrame(VideoSource::getVideoFrameSize(leftEyeInfo.absoluteFilePath()), CV_8UC3);
    cv::Mat rightEyeFakeFrame(VideoSource::getVideoFrameSize(rightEyeInfo.absoluteFilePath()), CV_8UC3);
    cv::Mat fieldFakeFrame(VideoSource::getVideoFrameSize(fieldInfo.absoluteFilePath()), CV_8UC3);

    float count = timestamps.size();
    int read = 0;
    for (const auto& ts : timestamps) {
        //auto t = get<0>(ts);
        auto tsvPtr = get<1>(ts);
        auto idx = get<2>(ts);
        if (tsvPtr == &leftEyeData) {
            auto ed = EyeData(*tsvPtr, idx);
            ed.input = leftEyeFakeFrame;
            synchronizer.newLeftEyeData(ed);
        }
        if (tsvPtr == &rightEyeData) {
            auto ed = EyeData(*tsvPtr, idx);
            ed.input = rightEyeFakeFrame;
            synchronizer.newRightEyeData(ed);
        }
        if (tsvPtr == &fieldData) {
            auto fd = FieldData(*tsvPtr, idx);
            fd.input = fieldFakeFrame;
            synchronizer.newFieldData(fd);
        }
        read++;
        ui->progressBar->setValue(100 * read / count);
    }

    ui->statusbar->showMessage(doneStr);
    processing = false;
}

void PostProcessingWidget::newData(DataTuple data)
{
    synchronizedTuples.emplace_back(data);
}

// Injects the calibration tuples that fall within the selected .tup file's
// time span and requests a calibration; completion arrives via calibrationFinished.
void PostProcessingWidget::on_calibrate_clicked()
{
    if (synchronizedTuples.size() == 0)
        on_synchronize_clicked();

    if (processing)
        return;
    processing = true;

    emit resetCalibration(CollectionTuple::CALIBRATION);
    emit resetCalibration(CollectionTuple::EVALUATION);

    TSVReader calibrationTuples(ui->calibrationSelection->currentData().toString());
    Timestamp calibrationStart, calibrationEnd;
    calibrationTuples.getData(calibrationTuples.front(), "sync.timestamp", calibrationStart);
    calibrationTuples.getData(calibrationTuples.back(), "sync.timestamp", calibrationEnd);

    for (const auto& dataTuple : synchronizedTuples) {
        QCoreApplication::processEvents();
        if (dataTuple.timestamp >= calibrationStart && dataTuple.timestamp <= calibrationEnd) {
            if (dataTuple.field.collectionMarker.id == -1)
                continue;
            CollectionTuple ct(dataTuple);
            ct.tupleType = CollectionTuple::CALIBRATION;
            emit injectCalibrationTuple(ct);
        }
    }
    emit requestCalibration();
    //processing will be set to false when the calibration status is received (see calibrationFinished)
}

void PostProcessingWidget::calibrationFinished(bool status, QString msg)
{
    (void)msg;
    if (!processing)
        return;
    calibrated = status;
    processing = false;
    ui->statusbar->showMessage(doneStr);
    emit calibratingFinished();
}

// Re-emits every synchronized tuple through the gaze-estimation pipeline;
// results come back through inDataTuple.
void PostProcessingWidget::on_complete_clicked()
{
    if (!calibrated) {
        QMessageBox msgBox(QMessageBox::Warning, "Uncalibrated.",
            "Either no calibration has been done or the calibration failed.\nI can still generate data, but the gaze information will be null.\nProceed anyway?",
            QMessageBox::Yes | QMessageBox::No);
        msgBox.setDefaultButton(QMessageBox::No);
        if (msgBox.exec() == QMessageBox::No)
            return;
    }

    if (synchronizedTuples.size() == 0)
        on_synchronize_clicked();

    if (processing)
        return;
    processing = true;

    ui->statusbar->showMessage("Estimating gaze...");
    completedTuples.clear();
    for (auto& dataTuple : synchronizedTuples)
        emit outDataTuple(dataTuple);
    //processing will be set to false when the last tuple is received (see inDataTuple)
}

// Collects gaze-estimated tuples; when all have returned, writes the journal.
void PostProcessingWidget::inDataTuple(DataTuple dataTuple)
{
    if (!processing)
        return;

    completedTuples.emplace_back(dataTuple);
    ui->progressBar->setValue(100.0 * completedTuples.size() / static_cast<double>(synchronizedTuples.size()));

    if (completedTuples.size() == synchronizedTuples.size()) {
        ui->statusbar->showMessage("Creating Journal...");
        DataRecorder journal(QString("%1Journal").arg(ui->prefix->text()), DataTuple::header());
        journal.startRecording();
        for (size_t i = 0; i < completedTuples.size(); i++) {
            ui->progressBar->setValue(100.0 * i / static_cast<double>(completedTuples.size() - 1));
            QCoreApplication::processEvents();
            journal.newData(completedTuples[i]);
        }
        journal.stopRecording();
        ui->statusbar->showMessage(doneStr);
        updateSelections();
        processing = false;
        emit completingFinished();
    }
}

void PostProcessingWidget::on_calibrationSelection_currentIndexChanged(int index)
{
    Q_UNUSED(index);
    calibrated = false;
    completedTuples.clear();
}

void PostProcessingWidget::on_leftEyeDataSelection_currentIndexChanged(int index)
{
    Q_UNUSED(index);
    synchronizedTuples.clear();
    completedTuples.clear();
    calibrated = false;
}

void PostProcessingWidget::on_rightEyeDataSelection_currentIndexChanged(int index)
{
    Q_UNUSED(index);
    synchronizedTuples.clear();
    completedTuples.clear();
    calibrated = false;
}

void PostProcessingWidget::on_fieldDataSelection_currentIndexChanged(int index)
{
    Q_UNUSED(index);
    synchronizedTuples.clear();
    completedTuples.clear();
    calibrated = false;
}

// Loads the selected journal and starts playing the given video, also opening
// the eye videos (synchronously, without dedicated threads) for eye previews.
void PostProcessingWidget::startPlayback(QFileInfo& info)
{
    if (processing)
        return;
    processing = true;

    ui->statusbar->showMessage("Reading journal...");
    playbackTuples.clear();
    TSVReader journal(ui->journalSelection->currentData().toString());
    for (size_t idx = 0; idx < journal.size(); idx++)
        playbackTuples.emplace_back(DataTuple(journal, static_cast<int>(idx)));
    playbackStarted = true;

    ui->statusbar->showMessage("Loading video...");
    allocateVideoSource(info);
    connect(videoSource, SIGNAL(newFrame(VideoFrame)), this, SLOT(present(VideoFrame)));
    ui->playbackSlider->setMinimum(0);
    ui->playbackSlider->setMaximum(videoSource->getFrameCount());
    ui->playbackSlider->setValue(0);

    auto openVideo = [&](const QFileInfo& info) {
        VideoSource* vs = nullptr;
        if (info.exists() && info.isFile()) {
            vs = new VideoSource();
            vs->init(info.absoluteFilePath(), findAssociatedDataFile(dir, info));
            if (!vs->isReady()) {
                vs->deleteLater();
                vs = nullptr;
            }
        }
        return vs;
    };
    leftEyeVideoSource = openVideo(leftEyeInfo);
    rightEyeVideoSource = openVideo(rightEyeInfo);

    QMetaObject::invokeMethod(videoSource, "play");
    ui->statusbar->showMessage("Playing...");
}

void PostProcessingWidget::stopPlayback()
{
    playbackStarted = false;
    deallocateVideoSource();
    if (leftEyeVideoSource) {
        leftEyeVideoSource->deleteLater();
        leftEyeVideoSource = nullptr;
    }
    if (rightEyeVideoSource) {
        rightEyeVideoSource->deleteLater();
        rightEyeVideoSource = nullptr;
    }
    processing = false;
    ui->statusbar->showMessage(doneStr);
}

// Displays one playback frame together with its closest journal tuple and,
// optionally, the matching eye images.
void PostProcessingWidget::present(const VideoFrame& videoFrame)
{
    updatePresentIdx(videoFrame.idx);
    ui->playbackSlider->setValue(presentIdx);

    // Note that given a field frame, there might be more than one data tuple that contains it
    // we simply pick one of them
    auto getTimestamp = [](const DataTuple& d) { return d.field.timestamp; };
    auto idx = findClosestIdx(playbackTuples, videoFrame.t, getTimestamp);

    DataTuple tuple;
    if (idx < playbackTuples.size())
        tuple = playbackTuples[idx];
    else
        tuple.field.timestamp = videoFrame.t;
    tuple.field.input = videoFrame.frame;

    if (ui->realTimeCheckBox->isChecked()) {
        // TODO: only show eyes if playback in real time as to not overload the event loop
        if (ui->showEyesCheckBox->isChecked()) {
            // TODO:
            // temporary solution to get the eye projection, fix later
            auto projectEye = [](EyeData& ed, float radius = 12) {
                float focal_length = std::hypot(ed.input.cols, ed.input.rows);
                auto& center = ed.modelData.center;
                auto& projection = ed.modelData.eyeProjection;
                projection.center.x = 0.5 * ed.input.cols + focal_length * center(0) / center(2);
                projection.center.y = 0.5 * ed.input.rows + focal_length * center(1) / center(2);
                projection.size.width = projection.size.height = 2 * focal_length * radius / center(2);
            };
            if (leftEyeVideoSource && leftEyeVideoSource->isReady()) {
                tuple.lEye.input = leftEyeVideoSource->getFrameAt(tuple.lEye.timestamp);
                projectEye(tuple.lEye);
                emit leftEyePresent(tuple.lEye);
            }
            if (rightEyeVideoSource && rightEyeVideoSource->isReady()) {
                tuple.rEye.input = rightEyeVideoSource->getFrameAt(tuple.rEye.timestamp);
                projectEye(tuple.rEye);
                emit rightEyePresent(tuple.rEye);
            }
        }
    }

    emit present(tuple);
}

void PostProcessingWidget::on_playPause_toggled(bool start)
{
    if (start) {
        if (!playbackStarted)
            startPlayback(fieldInfo);
        else
            QMetaObject::invokeMethod(videoSource, "play");
    } else {
        QMetaObject::invokeMethod(videoSource, "pause");
    }
}

void PostProcessingWidget::on_stop_clicked()
{
    if (ui->playPause->isChecked()) {
        on_playPause_toggled(false);
        ui->playPause->setChecked(false);
    }
    stopPlayback();
}

void PostProcessingWidget::on_playbackSlider_valueChanged(int value)
{
    if (std::abs(value - presentIdx) > 100) // in case it changed due to present
        setPosition(value);
}

void PostProcessingWidget::on_realTimeCheckBox_toggled(bool checked)
{
    if (videoSource)
        QMetaObject::invokeMethod(videoSource, "setRealTime", Q_ARG(bool, checked));
}

void PostProcessingWidget::setPosition(int idx)
{
    if (!videoSource)
        return;
    QMetaObject::invokeMethod(videoSource, "setPosition", Qt::BlockingQueuedConnection, Q_ARG(int, idx));
}

void PostProcessingWidget::on_playbackRewind_clicked()
{
    if (videoSource)
        setPosition(presentIdx - 5 * videoSource->getFPS());
}

void PostProcessingWidget::on_playbackForward_clicked()
{
    if (videoSource)
        setPosition(presentIdx + 5 * videoSource->getFPS());
}

// vi-style playback navigation: h/l seek 5 s, j/k step a single frame.
void PostProcessingWidget::keyPressEvent(QKeyEvent* event)
{
    int key = event->key();
    switch (key) {
    case Qt::Key_J:
        setPosition(presentIdx - 1);
        break;
    case Qt::Key_K:
        setPosition(presentIdx + 1);
        break;
    case Qt::Key_L:
        ui->playbackForward->click();
        break;
    case Qt::Key_H:
        ui->playbackRewind->click();
        break;
    }
}

void PostProcessingWidget::on_goToEventButton_clicked()
{
    TSVReader calibrationTuples(ui->eventSelection->currentData().toString());
    Timestamp calibrationStart;
    calibrationTuples.getData(calibrationTuples.front(), "sync.timestamp", calibrationStart);
    QMetaObject::invokeMethod(videoSource, "setPositionByTimestamp", Q_ARG(Timestamp, calibrationStart));
}

void PostProcessingWidget::updatePresentIdx(const int idx)
{
    presentIdx = idx;
    ui->presentIdx->setText(QString::number(presentIdx));
}

// Hard-coded batch driver (video or gaze processing over a list of recordings)
// built as a small QStateMachine. FOR INTERNAL USE ONLY AT THE MOMENT.
void PostProcessingWidget::batchProcess()
{
    // FOR INTERNAL USE ONLY AT THE MOMENT
    QStringList influence = {
        "G:/influence-data/p1/1/",
        "G:/influence-data/p2/1/",
        "G:/influence-data/p3/1/",
        "G:/influence-data/p4/1/",
        "G:/influence-data/p5/1/",
        "G:/influence-data/p6/1/",
        "G:/influence-data/p7/1/",
        "G:/influence-data/p8/1/",
        "G:/influence-data/p9/1/",
        "G:/influence-data/p10/1/",
        "G:/influence-data/p11/1/",
        "G:/influence-data/p12/1/",
        "G:/influence-data/p13/1/",
        "G:/influence-data/p14/1/",
        "G:/influence-data/p15/1/",
        "G:/influence-data/p16/1/",
        "G:/influence-data/g1/1/",
        "G:/influence-data/g5/1/",
        "G:/influence-data/g8/1/",
        "G:/influence-data/g14/1/",
        "G:/influence-data/g15/1/",
        "G:/influence-data/g16/1/",
    };

    //QString belvedere = "/media/santini/SAMSUNG/Belvedere/";
    QString belvedere = "G:/Belvedere/";
    QStringList day22 = {
        belvedere + "/data/2018-01-22/bb-eye-s001/",
        belvedere + "/data/2018-01-22/bb-eye-s002/",
        belvedere + "/data/2018-01-22/bb-eye-s003/",
    };
    QStringList day23 = {
        belvedere + "/data/2018-01-23/bb-eye-s004/",
        belvedere + "/data/2018-01-23/bb-eye-s005/",
        belvedere + "/data/2018-01-23/bb-eye-s006/",
        belvedere + "/data/2018-01-23/bb-eye-s007/",
        belvedere + "/data/2018-01-23/bb-eye-s008/",
        belvedere + "/data/2018-01-23/bb-eye-s009/",
        belvedere + "/data/2018-01-23/bb-eye-s010/",
        belvedere + "/data/2018-01-23/bb-eye-s011/",
        belvedere + "/data/2018-01-23/bb-eye-s012/",
        belvedere + "/data/2018-01-23/bb-eye-s013/",
        //belvedere + "/data/2018-01-23/bb-eye-s014/", // not enough collections
        belvedere + "/data/2018-01-23/bb-eye-s015/",
        belvedere + "/data/2018-01-23/bb-eye-s016/",
        //belvedere + "/data/2018-01-23/bb-eye-s017/", // bad camera and calibration, epicanthic folds
        belvedere + "/data/2018-01-23/bb-eye-s018/",
        //belvedere + "/data/2018-01-23/bb-eye-s019/", // bad camera and calibration, epicanthic folds
    };
    QStringList day24 = {
        belvedere + "/data/2018-01-24/bb-eye-s020/",
        belvedere + "/data/2018-01-24/bb-eye-s021/",
        belvedere + "/data/2018-01-24/bb-eye-s022/",
        belvedere + "/data/2018-01-24/bb-eye-s023/",
        belvedere + "/data/2018-01-24/bb-eye-s024/",
        //belvedere + "/data/2018-01-24/bb-eye-s025/", // Pupil blocked by glasses lenses on right eye
        belvedere + "/data/2018-01-24/bb-eye-s026/",
        //belvedere + "/data/2018-01-24/bb-eye-s027/", // Pupils often out of view during C2
        //belvedere + "/data/2018-01-24/bb-eye-s028/", // not enough collections
        belvedere + "/data/2018-01-24/bb-eye-s029/",
        belvedere + "/data/2018-01-24/bb-eye-s030/",
        //belvedere + "/data/2018-01-24/bb-eye-s031/", // not enough collections
        belvedere + "/data/2018-01-24/bb-eye-s032/",
        belvedere + "/data/2018-01-24/bb-eye-s033/",
        //belvedere + "/data/2018-01-24/bb-eye-s034/", // Unstable pupil detection (slight bright pupil, bad contrast)
        //belvedere + "/data/2018-01-24/bb-eye-s035/", // epicanthic folds, partial bright pupil
        belvedere + "/data/2018-01-24/bb-eye-s036/",
        //belvedere + "/data/2018-01-24/bb-eye-s037/", // not enough collections
        belvedere + "/data/2018-01-24/bb-eye-s038/",
        belvedere + "/data/2018-01-24/bb-eye-s040/",
    };
    QStringList day25 = {
        belvedere + "/data/2018-01-25/bb-eye-s039/",
        belvedere + "/data/2018-01-25/bb-eye-s041/",
        belvedere + "/data/2018-01-25/bb-eye-s042/",
        belvedere + "/data/2018-01-25/bb-eye-s043/",
        belvedere + "/data/2018-01-25/bb-eye-s044/",
        belvedere + "/data/2018-01-25/bb-eye-s045/",
        //belvedere + "/data/2018-01-25/bb-eye-s046/", // glasses (frameless), very bad contrast for left eye
        belvedere + "/data/2018-01-25/bb-eye-s047/",
        //belvedere + "/data/2018-01-25/bb-eye-s048/", // not enough collections
        //belvedere + "/data/2018-01-25/bb-eye-s049/", // Nystagmus
        belvedere + "/data/2018-01-25/bb-eye-s050/",
        belvedere + "/data/2018-01-25/bb-eye-s051/",
        belvedere + "/data/2018-01-25/bb-eye-s052/",
        belvedere + "/data/2018-01-25/bb-eye-s053/",
        belvedere + "/data/2018-01-25/bb-eye-s054/",
        belvedere + "/data/2018-01-25/bb-eye-s055/",
        //belvedere + "/data/2018-01-25/bb-eye-s056/", // not enough collections
        belvedere + "/data/2018-01-25/bb-eye-s057/",
        belvedere + "/data/2018-01-25/bb-eye-s058/",
        //belvedere + "/data/2018-01-25/bb-eye-s059/", // not enough collections
        //belvedere + "/data/2018-01-25/bb-eye-s060/", // Huge pupils, mascara, slight bright pupil, bad calibration
        belvedere + "/data/2018-01-25/bb-eye-s061/",
        belvedere + "/data/2018-01-25/bb-eye-s062/",
        belvedere + "/data/2018-01-25/bb-eye-s063/",
    };
    QStringList day27 = {
        //belvedere + "/data/2018-01-27/bb-eye-s064/", // Bad angle, thick glasses frames
        belvedere + "/data/2018-01-27/bb-eye-s065/",
        belvedere + "/data/2018-01-27/bb-eye-s066/",
        belvedere + "/data/2018-01-27/bb-eye-s067/",
        //belvedere + "/data/2018-01-27/bb-eye-s068/", // not enough collections
        belvedere + "/data/2018-01-27/bb-eye-s069/",
        //belvedere + "/data/2018-01-27/bb-eye-s070/", // not enough collections
        //belvedere + "/data/2018-01-27/bb-eye-s071/", // Nystagmus
        belvedere + "/data/2018-01-27/bb-eye-s072/",
        //belvedere + "/data/2018-01-27/bb-eye-s073/", // Nystagmus
        belvedere + "/data/2018-01-27/bb-eye-s074/",
        belvedere + "/data/2018-01-27/bb-eye-s075/",
        belvedere + "/data/2018-01-27/bb-eye-s076/",
        //belvedere + "/data/2018-01-27/bb-eye-s077/", // Large pupils during calibration, slight bright pupil
        belvedere + "/data/2018-01-27/bb-eye-s078/",
        belvedere + "/data/2018-01-27/bb-eye-s079/",
        belvedere + "/data/2018-01-27/bb-eye-s080/",
        belvedere + "/data/2018-01-27/bb-eye-s081/",
        belvedere + "/data/2018-01-27/bb-eye-s082/",
        //belvedere + "/data/2018-01-27/bb-eye-s083/", // Eye tracker doesn't fit over glasses, big glint reflections blocking the pupil
        belvedere + "/data/2018-01-27/bb-eye-s084/",
    };
    QStringList day28 = {
        //belvedere + "/data/2018-01-28/bb-eye-s085/", // Nystagmus
        //belvedere + "/data/2018-01-28/bb-eye-s086/", // not enough collections
        belvedere + "/data/2018-01-28/bb-eye-s087/",
        belvedere + "/data/2018-01-28/bb-eye-s088/",
        //belvedere + "/data/2018-01-28/bb-eye-s089/", // not enough collections
        //belvedere + "/data/2018-01-28/bb-eye-s090/", // partial nystagmus
        belvedere + "/data/2018-01-28/bb-eye-s091/",
        belvedere + "/data/2018-01-28/bb-eye-s092/",
        belvedere + "/data/2018-01-28/bb-eye-s093/",
        belvedere + "/data/2018-01-28/bb-eye-s094/",
        belvedere + "/data/2018-01-28/bb-eye-s095/",
        belvedere + "/data/2018-01-28/bb-eye-s096/",
        belvedere + "/data/2018-01-28/bb-eye-s097/",
        //belvedere + "/data/2018-01-28/bb-eye-s098/", // Partial nystagmus ?
        belvedere + "/data/2018-01-28/bb-eye-s099/",
        belvedere + "/data/2018-01-28/bb-eye-s100/",
        belvedere + "/data/2018-01-28/bb-eye-s101/",
        belvedere + "/data/2018-01-28/bb-eye-s102/",
        belvedere + "/data/2018-01-28/bb-eye-s103/",
        //belvedere + "/data/2018-01-28/bb-eye-s104/", // Bad ET position over glasses
        belvedere + "/data/2018-01-28/bb-eye-s105/",
        //belvedere + "/data/2018-01-28/bb-eye-s106/", // Slight nystagmus
        belvedere + "/data/2018-01-28/bb-eye-s107/",
        belvedere + "/data/2018-01-28/bb-eye-s108/",
        belvedere + "/data/2018-01-28/bb-eye-s109/",
    };

    enum BatchType {
        VIDEO_BATCH_PROCESSING = 0,
        GAZE_BATCH_PROCESSING = 1,
    };

    /***************************************************************************
     * Batch processing config
     **************************************************************************/
    static QStringList batchRecordings = day22 + day23 + day24 + day25 + day27 + day28;
    //static QStringList batchRecordings = chi;
    //static QStringList batchRecordings = linuxTest;
    static struct {
        // Video processing
        const QString outputPrefix = "";

        // Gaze processing
        const QString gazeInputPrefix = "";
        //const QString outputPrefix = "";

        BatchType type = VIDEO_BATCH_PROCESSING;
        //BatchType type = GAZE_BATCH_PROCESSING;

        // video processing control
        const QString leftEyeVideoFile = "LeftEye.mp4";
        const QString rightEyeVideoFile = "RightEye.mp4";
        const QString fieldVideoFile = "Field.mp4";
        const bool doLeftEyeProcessing = true;
        const bool doRightEyeProcessing = true;
        const bool doFieldProcessing = false;

        // gaze processing control
        const QString calibrationFile = "post/collection1.tup";
        const QString leftEyeDataFile = QString("post/%1LeftEyeData.tsv").arg(gazeInputPrefix);
        const QString rightEyeDataFile = QString("post/%1RightEyeData.tsv").arg(gazeInputPrefix);
        const QString fieldDataFile = QString("post/FieldData.tsv");
    } batchCfg;

    /***************************************************************************
     * Batch processing logic
     **************************************************************************/
    ui->prefix->setText(batchCfg.outputPrefix);
    static int batchIdx = 0;
    switch (batchCfg.type) {
    case VIDEO_BATCH_PROCESSING: {
        ui->tabWidget->setCurrentIndex(1);

        // starting: open the next recording (skipping missing directories).
        QState* starting = new QState();
        connect(starting, &QState::entered, [this] {
            if (!QFileInfo(batchRecordings[batchIdx]).exists()) {
                qDebug() << QString("Non existent recording: %1").arg(batchRecordings[batchIdx]);
                batchIdx++;
                return;
            }
            openRecording(batchRecordings[batchIdx]);
            batchIdx++;
        });

        // TODO: In the future replace this by two states that iterate through all the available videos
        QState* processFirstEye = new QState();
        starting->addTransition(this, SIGNAL(recordingOpened()), processFirstEye);
        starting->addTransition(this, SIGNAL(processNextRecording()), starting);
        connect(processFirstEye, &QState::entered, [this]() {
            int selectionIdx = ui->videoProcessingSelection->findText(batchCfg.leftEyeVideoFile);
            if (selectionIdx >= 0 && batchCfg.doLeftEyeProcessing) {
                ui->videoProcessingSelection->setCurrentIndex(selectionIdx);
                ui->asyncStartStopToggle->click();
            } else
                emit videoProcessingFinished();
        });

        QState* processSecondEye = new QState();
        processFirstEye->addTransition(this, SIGNAL(videoProcessingFinished()), processSecondEye);
        connect(processSecondEye, &QState::entered, [this]() {
            int selectionIdx = ui->videoProcessingSelection->findText(batchCfg.rightEyeVideoFile);
            if (selectionIdx >= 0 && batchCfg.doRightEyeProcessing) {
                ui->videoProcessingSelection->setCurrentIndex(selectionIdx);
                ui->asyncStartStopToggle->click();
            } else
                emit videoProcessingFinished();
        });

        QState* processField = new QState();
        processSecondEye->addTransition(this, SIGNAL(videoProcessingFinished()), processField);
        connect(processField, &QState::entered, [this]() {
            int selectionIdx = ui->videoProcessingSelection->findText(batchCfg.fieldVideoFile);
            if (selectionIdx >= 0 && batchCfg.doFieldProcessing) {
                ui->videoProcessingSelection->setCurrentIndex(selectionIdx);
                ui->asyncStartStopToggle->click();
            } else
                emit videoProcessingFinished();
        });

        QState* goToNextRecording = new QState();
        processField->addTransition(this, SIGNAL(videoProcessingFinished()), goToNextRecording);
        connect(goToNextRecording, &QState::entered, [this]() {
            if (batchIdx < batchRecordings.size())
                emit processNextRecording();
            else {
                on_actionClose_Recording_triggered();
                qInfo() << "Finished batch processing";
            }
        });
        goToNextRecording->addTransition(this, SIGNAL(processNextRecording()), starting);

        batchStateMachine.addState(starting);
        batchStateMachine.addState(processFirstEye);
        batchStateMachine.addState(processSecondEye);
        batchStateMachine.addState(processField);
        batchStateMachine.addState(goToNextRecording);
        batchStateMachine.setInitialState(starting);
        batchStateMachine.start();
    } break;
    case GAZE_BATCH_PROCESSING: {
        ui->tabWidget->setCurrentIndex(2);

        QState* starting = new QState();
        connect(starting, &QState::entered, [this] {
            if (!QFileInfo(batchRecordings[batchIdx]).exists()) {
                qDebug() << QString("Non existent video: %1").arg(batchRecordings[batchIdx]);
                batchIdx++;
                return;
            }
            openRecording(batchRecordings[batchIdx]);
            batchIdx++;
        });

        // calibrate: select the configured data files and trigger calibration.
        QState* calibrate = new QState();
        starting->addTransition(this, SIGNAL(recordingOpened()), calibrate);
        connect(calibrate, &QState::entered, [this]() {
            auto pick = [](QComboBox* box, const QString& which) {
                box->setCurrentIndex(box->findText(which));
            };
            pick(ui->calibrationSelection, batchCfg.calibrationFile);
            pick(ui->leftEyeDataSelection, batchCfg.leftEyeDataFile);
            pick(ui->rightEyeDataSelection, batchCfg.rightEyeDataFile);
            pick(ui->fieldDataSelection, batchCfg.fieldDataFile);
            ui->calibrate->click();
        });

        QState* estimateGaze = new QState();
        calibrate->addTransition(this, SIGNAL(calibratingFinished()), estimateGaze);
        connect(estimateGaze, &QState::entered, [this]() {
            ui->complete->click();
        });

        QState* goToNextRecording = new QState();
        estimateGaze->addTransition(this, SIGNAL(completingFinished()), goToNextRecording);
        connect(goToNextRecording, &QState::entered, [this]() {
            if (batchIdx < batchRecordings.size())
                emit processNextRecording();
            else {
                on_actionClose_Recording_triggered();
                qInfo() << "Finished batch processing";
            }
        });
        goToNextRecording->addTransition(this, SIGNAL(processNextRecording()), starting);

        batchStateMachine.addState(starting);
        batchStateMachine.addState(calibrate);
        batchStateMachine.addState(estimateGaze);
        batchStateMachine.addState(goToNextRecording);
        batchStateMachine.setInitialState(starting);
        batchStateMachine.start();
    } break;
    }
}