Commit d71dfbfc authored by Thiago Santini

Switching from QElapsedTimer to steady_clock + hw timer

parent 027ecdff
#include "uvccamerasession.h"
#include <QThread>
#include <QtConcurrent/QtConcurrent>
#include <QDebug>
#include <QCoreApplication>
#include <QDebug>
#include <QDir>
#include <QSettings>
#include <QThread>
#include <QtConcurrent/QtConcurrent>
// Class-wide bookkeeping shared by all sessions.
QMutex UVCCameraSession::sessionMutex;
QMap<QString, bool> UVCCameraSession::devices; // device name -> currently in use
QMutex UVCCameraSession::devicesMutex;
// Constructs a session and registers per-camera-model default parameters.
// hwts_usec starts at -1 so the first delivered frame carries no previous
// hardware timestamp as its start time.
UVCCameraSession::UVCCameraSession(QObject* parent)
    : QObject(parent)
    , streaming(false)
    , interval(0)
    , bandwidthFactor(1.3f)
    , m_surface(nullptr)
    , hwts_usec(-1)
    , ctx(nullptr)
    , dev(nullptr)
    , devh(nullptr)
{
    DefaultParameters tmp;

    // Pupil Cam1 Eye
    tmp.bandwidthFactor = 1.3;
    tmp.brightness = 0.5;
    tmp.contrast = 0.5;
    tmp.saturation = 0;
    tmp.white_balance = 0.486486;
    tmp.sharpness = 0.333333;
    tmp.backlight_compensation = 1.0;
    tmp.gamma = 0.0654206;
    tmp.gain = 0;
    tmp.exposure_abs = 0.0124025;
    tmp.hue = 0.5;
    tmp.ae_mode = 1;
    tmp.ae_priority = 0;
    tmp.contrast_auto = 0;
    tmp.focus_auto = 0;
    tmp.white_balance_component_auto = 0;
    tmp.white_balance_temperature_auto = 1;
    tmp.hue_auto = 0;
    defaults["Pupil Cam1 ID0"] = tmp;
    defaults["Pupil Cam1 ID1"] = tmp;

    // Pupil Cam1 Field (only fields that differ from the eye cameras are reset)
    tmp.bandwidthFactor = 2.0;
    tmp.brightness = 0.5;
    tmp.contrast = 0.5;
    tmp.saturation = 0.46875;
    tmp.white_balance = 0.486486;
    tmp.sharpness = 0.333333;
    tmp.backlight_compensation = 1.0;
    tmp.gamma = 0.0654206;
    tmp.gain = 0;
    tmp.exposure_abs = 0.0312062;
    tmp.hue = 0.5;
    tmp.ae_mode = 8;
    tmp.ae_priority = 1;
    tmp.contrast_auto = 0;
    tmp.focus_auto = 0;
    tmp.white_balance_component_auto = 0;
    tmp.white_balance_temperature_auto = 1;
    tmp.hue_auto = 0;
    defaults["Pupil Cam1 ID2"] = tmp;

    // Pupil Cam2 Eye
    tmp.bandwidthFactor = 1.3;
    tmp.brightness = 0.5;
    tmp.contrast = 0.5;
    tmp.saturation = 0;
    tmp.white_balance = 0.486486;
    tmp.sharpness = 0.166667;
    tmp.backlight_compensation = 1.0;
    tmp.gamma = 0.5;
    tmp.gain = 0;
    tmp.exposure_abs = 1;
    tmp.hue = 0.5;
    tmp.ae_mode = 1;
    tmp.ae_priority = 0;
    tmp.contrast_auto = 0;
    tmp.focus_auto = 0;
    tmp.white_balance_component_auto = 0;
    tmp.white_balance_temperature_auto = 1;
    tmp.hue_auto = 0;
    defaults["Pupil Cam2 ID0"] = tmp;
    defaults["Pupil Cam2 ID1"] = tmp;
}
// Tears down the session: stops/closes the device handle, frees the device
// name for other sessions, and shuts libuvc down exactly once.
UVCCameraSession::~UVCCameraSession()
{
    unload();
    release(deviceName);
    if (ctx)
        uvc_exit(ctx);
}
bool UVCCameraSession::setDevice(const QString &device)
bool UVCCameraSession::setDevice(const QString& device)
{
if ( ! acquire(device) ) {
qWarning() << device << "already in use.";
return false;
}
uvc_error_t res;
if (!ctx) {
res = uvc_init(&ctx, nullptr);
if (res < 0) {
qWarning() << "uvc_init" << uvc_strerror(res);
ctx = nullptr;
return false;
}
}
uvc_device_t **devList;
res = uvc_get_device_list(ctx, &devList);
if (res < 0) {
qWarning() << "uvc_get_device_list" << uvc_strerror(res);
return false;
}
ProductFactory pf;
bool ret = false;
int idx = 0;
while (1) {
dev = devList[idx];
idx++;
if (dev == nullptr)
break;
uvc_device_descriptor_t *desc;
res = uvc_get_device_descriptor(dev, &desc);
if (res < 0) {
qWarning() << "uvc_get_device_descriptor" << uvc_strerror(res);
return false;
}
QString product = pf.make(desc);
uvc_free_device_descriptor(desc);
if (product.compare(device) == 0) {
deviceName = device;
ret = true;
break;
}
}
uvc_free_device_list(devList, 0);
return ret;
if (!acquire(device)) {
qWarning() << device << "already in use.";
return false;
}
uvc_error_t res;
if (!ctx) {
res = uvc_init(&ctx, nullptr);
if (res < 0) {
qWarning() << "uvc_init" << uvc_strerror(res);
ctx = nullptr;
return false;
}
}
uvc_device_t** devList;
res = uvc_get_device_list(ctx, &devList);
if (res < 0) {
qWarning() << "uvc_get_device_list" << uvc_strerror(res);
return false;
}
ProductFactory pf;
bool ret = false;
int idx = 0;
while (1) {
dev = devList[idx];
idx++;
if (dev == nullptr)
break;
uvc_device_descriptor_t* desc;
res = uvc_get_device_descriptor(dev, &desc);
if (res < 0) {
qWarning() << "uvc_get_device_descriptor" << uvc_strerror(res);
return false;
}
QString product = pf.make(desc);
uvc_free_device_descriptor(desc);
if (product.compare(device) == 0) {
deviceName = device;
ret = true;
break;
}
}
uvc_free_device_list(devList, 0);
return ret;
}
void UVCCameraSession::setSurface(QAbstractVideoSurface* surface)
......@@ -153,102 +154,90 @@ void UVCCameraSession::setSurface(QAbstractVideoSurface* surface)
m_surface = surface;
}
// If the following is defined, frame grabbing is performed by the callback
// thread
//#define BLOCKING_FRAME_GRABBING

// Trampoline registered with libuvc: forwards the frame to the session's
// member callback. `ptr` is the UVCCameraSession* passed at stream start.
void cb(uvc_frame_t* frame, void* ptr)
{
    if (ptr && frame)
        static_cast<UVCCameraSession*>(ptr)->callback(frame);
}
// Per-frame callback: copies the raw libuvc frame into a QVideoFrame, stamps
// it with a steady_clock reading plus the camera's hardware timestamps, and
// delivers it to the surface (directly, or queued to the receiver's thread).
void UVCCameraSession::callback(uvc_frame_t* frame)
{
    using namespace std::chrono;
    auto now = steady_clock::now();

    if (!streaming)
        return;

    QMutexLocker locker(&surfaceMutex);
    if (m_surface) {
        QVideoFrame qFrame;

        // get the data
        switch (frame->frame_format) {
        case UVC_FRAME_FORMAT_MJPEG:
            qFrame = QVideoFrame(static_cast<int>(frame->data_bytes),
                QSize(frame->width, frame->height), 0,
                QVideoFrame::Format_Jpeg);
            qFrame.map(QAbstractVideoBuffer::WriteOnly);
            memcpy(qFrame.bits(), frame->data,
                frame->data_bytes); // copied; safe to return from callback now
            qFrame.unmap();
            break;
        case UVC_FRAME_FORMAT_YUYV:
            qFrame = QVideoFrame(static_cast<int>(frame->data_bytes),
                QSize(frame->width, frame->height), 0,
                QVideoFrame::Format_YUYV);
            qFrame.map(QAbstractVideoBuffer::WriteOnly);
            memcpy(qFrame.bits(), frame->data,
                frame->data_bytes); // copied; safe to return from callback now
            qFrame.unmap();
            break;
        default: // unsupported pixel format; drop the frame
            return;
        }

        // update meta data
        qFrame.setMetaData("steady_clock::now", QVariant::fromValue(now));
        // Start time is the previous frame's hw timestamp (-1 for the first
        // frame); end time is this frame's. Integer arithmetic avoids the
        // rounding of the former 1e6 * tv_sec floating-point expression.
        qFrame.setStartTime(hwts_usec);
        hwts_usec = static_cast<qint64>(frame->capture_time.tv_sec) * 1000000
            + frame->capture_time.tv_usec;
        qFrame.setEndTime(hwts_usec);

#ifdef BLOCKING_FRAME_GRABBING
        m_surface->present(qFrame);
#else
        QMetaObject::invokeMethod(this, "presentFrame", Qt::QueuedConnection,
            Q_ARG(const QVideoFrame&, qFrame));
#endif
    }
}
// Delivers a queued frame to the video surface; invoked on the receiver's
// thread via QMetaObject::invokeMethod from callback().
void UVCCameraSession::presentFrame(QVideoFrame frame) const
{
    if (m_surface)
        m_surface->present(frame);
    // else
    //    qWarning() << "Dropping frame (" << latency << "ms old )";
}
template<typename T> bool set(const QSettings *settings, const QString key, T &v)
template <typename T>
bool set(const QSettings* settings, const QString key, T& v)
{
if (!settings)
return false;
QVariant variant = settings->value(key);
if (variant.isValid()) {
v = qvariant_cast<T>(variant);
return true;
}
return false;
if (!settings)
return false;
QVariant variant = settings->value(key);
if (variant.isValid()) {
v = qvariant_cast<T>(variant);
return true;
}
return false;
}
bool UVCCameraSession::load()
{
QMutexLocker sessionLocker(&sessionMutex);
QMutexLocker sessionLocker(&sessionMutex);
uvc_error_t res;
if (!dev)
......@@ -257,11 +246,11 @@ bool UVCCameraSession::load()
res = uvc_open(dev, &devh);
if (res != UVC_SUCCESS) {
qWarning() << "uvc_open" << uvc_strerror(res);
devh = nullptr;
devh = nullptr;
return false;
}
}
setDefaultParameters();
setDefaultParameters();
updateSourceCapabilities();
......@@ -273,30 +262,29 @@ bool UVCCameraSession::unload()
QMutexLocker sessionLocker(&sessionMutex);
if (streaming)
stopPreview();
stopPreview();
if (devh)
uvc_close(devh);
devh = nullptr;
if (devh)
uvc_close(devh);
devh = nullptr;
m_supportedViewfinderSettings.clear();
return true;
}
// Polling loop used when USE_CUSTOM_CALLBACK is enabled: grabs frames from
// the stream handle on a worker thread and feeds them to session->callback().
// Holds getFrameMutex for the whole streaming period; timeouts are ignored.
void concurrentCustomCallback(UVCCameraSession* session)
{
    QMutexLocker getFrameLocker(&session->getFrameMutex);
    while (session->streaming) {
        if (session->strmh == nullptr)
            continue;
        uvc_frame_t* frame = nullptr;
        uvc_error_t res;
        res = uvc_stream_get_frame(session->strmh, &frame, 0);
        if (res == UVC_SUCCESS && frame != nullptr)
            session->callback(frame);
        else if (res != UVC_ERROR_TIMEOUT)
            qWarning() << session->streaming << uvc_strerror(res) << frame;
    }
}
......@@ -310,7 +298,7 @@ bool UVCCameraSession::startPreview()
if (streaming)
stopPreview();
QMutexLocker getFrameLocker( &getFrameMutex );
QMutexLocker getFrameLocker(&getFrameMutex);
uvc_frame_format format;
if (!qPixelFormat2UVCFrameFormat(settings.pixelFormat(), format)) {
......@@ -318,7 +306,9 @@ bool UVCCameraSession::startPreview()
return false;
}
res = uvc_get_stream_ctrl_format_size(devh, &ctrl, format, settings.resolution().width(), settings.resolution().height(), settings.maximumFrameRate());
res = uvc_get_stream_ctrl_format_size(
devh, &ctrl, format, settings.resolution().width(),
settings.resolution().height(), settings.maximumFrameRate());
if (res != UVC_SUCCESS) {
qWarning() << "uvc_get_stream_ctrl" << uvc_strerror(res);
......@@ -332,9 +322,10 @@ bool UVCCameraSession::startPreview()
}
#ifdef USE_CUSTOM_CALLBACK
res = uvc_stream_start(strmh, nullptr, (void*) this, getBandwidthFactor(), 0);
res = uvc_stream_start(strmh, nullptr, (void*)this, getBandwidthFactor(), 0);
#else
res = uvc_stream_start(strmh, cb, static_cast<void*>(this), getBandwidthFactor(), 0);
res = uvc_stream_start(strmh, cb, static_cast<void*>(this),
getBandwidthFactor(), 0);
#endif
if (res != UVC_SUCCESS) {
qWarning() << "uvc_stream_start" << uvc_strerror(res);
......@@ -352,26 +343,27 @@ bool UVCCameraSession::startPreview()
// Bandwidth factor handed to uvc_stream_start: the user-tunable value for
// compressed formats, or negative to let the hardware estimate it.
float UVCCameraSession::getBandwidthFactor()
{
    switch (settings.pixelFormat()) {
    // compressed; user can change it through the default settings
    case QVideoFrame::Format_Jpeg:
        return bandwidthFactor;
    default:
        return -1; // use hw estimation
    }
}
// Maps a Qt pixel format onto the corresponding libuvc frame format.
// Returns false (leaving uvcFormat untouched) for unsupported formats.
bool UVCCameraSession::qPixelFormat2UVCFrameFormat(
    const QVideoFrame::PixelFormat& qFormat, uvc_frame_format& uvcFormat)
{
    switch (qFormat) {
    case QVideoFrame::Format_Jpeg:
        uvcFormat = UVC_FRAME_FORMAT_MJPEG;
        return true;
    case QVideoFrame::Format_YUYV:
        uvcFormat = UVC_FRAME_FORMAT_YUYV;
        return true;
    default:
        return false;
    }
}
......@@ -383,39 +375,41 @@ void UVCCameraSession::updateSourceCapabilities()
return;
const uvc_format_desc_t* formatDesc = uvc_get_format_descs(devh);
while (formatDesc != nullptr) {
while (formatDesc != nullptr) {
QVideoFrame::PixelFormat qFormat;
QString formatStr;
switch (formatDesc->bDescriptorSubtype) {
case UVC_VS_FORMAT_MJPEG:
qFormat = QVideoFrame::Format_Jpeg;
QVideoFrame::PixelFormat qFormat;
QString formatStr;
switch (formatDesc->bDescriptorSubtype) {
case UVC_VS_FORMAT_MJPEG:
qFormat = QVideoFrame::Format_Jpeg;
break;
case UVC_VS_FORMAT_UNCOMPRESSED:
qFormat = fourcc2PixelFormat(formatDesc->fourccFormat, formatStr);
if (qFormat != QVideoFrame::Format_Invalid)
break;
case UVC_VS_FORMAT_UNCOMPRESSED:
qFormat = fourcc2PixelFormat(formatDesc->fourccFormat, formatStr);
if ( qFormat != QVideoFrame::Format_Invalid )
break;
default: // format not supported, next!
qFormat = fourcc2PixelFormat(formatDesc->fourccFormat, formatStr);
qInfo() << "UVCEngine: format not supported:" << formatDesc->bDescriptorSubtype << formatStr;
formatDesc = formatDesc->next;
continue;
default: // format not supported, next!
qFormat = fourcc2PixelFormat(formatDesc->fourccFormat, formatStr);
qInfo() << "UVCEngine: format not supported:"
<< formatDesc->bDescriptorSubtype << formatStr;
formatDesc = formatDesc->next;
continue;
}
const uvc_frame_desc *frameDesc = formatDesc->frame_descs;
while (frameDesc!=nullptr) {
const uvc_frame_desc* frameDesc = formatDesc->frame_descs;
while (frameDesc != nullptr) {
QCameraViewfinderSettings settings;
settings.setResolution(frameDesc->wWidth, frameDesc->wHeight);
settings.setPixelAspectRatio(frameDesc->wWidth, frameDesc->wHeight);
settings.setPixelAspectRatio(formatDesc->bAspectRatioX,formatDesc->bAspectRatioY);
settings.setPixelAspectRatio(formatDesc->bAspectRatioX,
formatDesc->bAspectRatioY);
settings.setPixelFormat(qFormat);
uint32_t *intervals = frameDesc->intervals;
uint32_t* intervals = frameDesc->intervals;
while (*intervals) {
// libuvc uses 100ns units
double fps = 1.0e7 / (*intervals);
settings.setMinimumFrameRate( fps );
settings.setMaximumFrameRate( fps );
settings.setMinimumFrameRate(fps);
settings.setMaximumFrameRate(fps);
m_supportedViewfinderSettings.push_back(settings);
intervals++;
}
......@@ -424,8 +418,7 @@ void UVCCameraSession::updateSourceCapabilities()
}
formatDesc = formatDesc->next;
}
}
}
bool UVCCameraSession::stopPreview()
......@@ -434,19 +427,20 @@ bool UVCCameraSession::stopPreview()
streaming = false;
// for some reason libuvc seems to timeout on uvc_stream_stop
for (unsigned int i=0; i<100;i++)
if ( (res = uvc_stream_stop(strmh)) != UVC_ERROR_TIMEOUT)
for (unsigned int i = 0; i < 100; i++)
if ((res = uvc_stream_stop(strmh)) != UVC_ERROR_TIMEOUT)
break;
if (res != UVC_SUCCESS && res != UVC_ERROR_INVALID_PARAM)
qWarning() << "uvc_stream_stop" << uvc_strerror(res);
uvc_stream_close(strmh);
strmh = nullptr;
uvc_stream_close(strmh);
strmh = nullptr;
frameReference.invalidate();
return true;
}
// Returns the capability list cached by updateSourceCapabilities().
QList<QCameraViewfinderSettings>
UVCCameraSession::supportedViewfinderSettings()
{
    return m_supportedViewfinderSettings;
}
......@@ -456,253 +450,280 @@ QCameraViewfinderSettings UVCCameraSession::viewfinderSettings()
return QCameraViewfinderSettings();
}
void UVCCameraSession::setViewfinderSettings(const QCameraViewfinderSettings &settings)
void UVCCameraSession::setViewfinderSettings(
const QCameraViewfinderSettings& settings)
{
this->settings = settings;
interval = 1 / settings.maximumFrameRate();
}
bool UVCCameraSession::acquire(const QString &device)
bool UVCCameraSession::acquire(const QString& device)
{
QMutexLocker locker(&devicesMutex);
if (devices.contains(device))
return false;
devices[device] = true;
return true;
QMutexLocker locker(&devicesMutex);
if (devices.contains(device))
return false;
devices[device] = true;
return true;
}
void UVCCameraSession::release(const QString &device)
void UVCCameraSession::release(const QString& device)
{
QMutexLocker locker(&devicesMutex);
if (devices.contains(device))
devices.remove(device);
QMutexLocker locker(&devicesMutex);
if (devices.contains(device))
devices.remove(device);
}