Browse Source

Switch VideoReceiver to new API (big refactoring)

QGC4.4
Andrew Voznytsa 5 years ago
parent
commit
fb03713fc4
  1. 8
      src/Camera/QGCCameraControl.cc
  2. 8
      src/FlightDisplay/FlightDisplayViewVideo.qml
  3. 10
      src/FlightMap/Widgets/VideoPageWidget.qml
  4. 63
      src/VideoStreaming/SubtitleWriter.cc
  5. 13
      src/VideoStreaming/SubtitleWriter.h
  6. 331
      src/VideoStreaming/VideoManager.cc
  7. 29
      src/VideoStreaming/VideoManager.h
  8. 1953
      src/VideoStreaming/VideoReceiver.cc
  9. 274
      src/VideoStreaming/VideoReceiver.h
  10. 3
      src/VideoStreaming/gstqgcvideosinkbin.c

8
src/Camera/QGCCameraControl.cc

@@ -388,11 +388,11 @@ QGCCameraControl::takePhoto()
_setPhotoStatus(PHOTO_CAPTURE_IN_PROGRESS);
_captureInfoRetries = 0;
//-- Capture local image as well
if(qgcApp()->toolbox()->videoManager()->videoReceiver()) {
if(qgcApp()->toolbox()->videoManager()) {
QString photoPath = qgcApp()->toolbox()->settingsManager()->appSettings()->savePath()->rawValue().toString() + QStringLiteral("/Photo");
QDir().mkpath(photoPath);
photoPath += + "/" + QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss.zzz") + ".jpg";
qgcApp()->toolbox()->videoManager()->videoReceiver()->grabImage(photoPath);
qgcApp()->toolbox()->videoManager()->grabImage(photoPath);
}
return true;
}
@@ -1542,11 +1542,11 @@ QGCCameraControl::handleCaptureStatus(const mavlink_camera_capture_status_t& cap
//-- Time Lapse
if(photoStatus() == PHOTO_CAPTURE_INTERVAL_IDLE || photoStatus() == PHOTO_CAPTURE_INTERVAL_IN_PROGRESS) {
//-- Capture local image as well
if(qgcApp()->toolbox()->videoManager()->videoReceiver()) {
if(qgcApp()->toolbox()->videoManager()) {
QString photoPath = qgcApp()->toolbox()->settingsManager()->appSettings()->savePath()->rawValue().toString() + QStringLiteral("/Photo");
QDir().mkpath(photoPath);
photoPath += + "/" + QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss.zzz") + ".jpg";
qgcApp()->toolbox()->videoManager()->videoReceiver()->grabImage(photoPath);
qgcApp()->toolbox()->videoManager()->grabImage(photoPath);
}
}
}

8
src/FlightDisplay/FlightDisplayViewVideo.qml

@@ -40,7 +40,7 @@ Item {
id: noVideo
anchors.fill: parent
color: Qt.rgba(0,0,0,0.75)
visible: !(_videoReceiver && _videoReceiver.videoRunning)
visible: !(_videoReceiver && _videoReceiver.decoding)
QGCLabel {
text: QGroundControl.settingsManager.videoSettings.streamEnabled.rawValue ? qsTr("WAITING FOR VIDEO") : qsTr("VIDEO DISABLED")
font.family: ScreenTools.demiboldFontFamily
@@ -58,7 +58,7 @@ Item {
Rectangle {
anchors.fill: parent
color: "black"
visible: _videoReceiver && _videoReceiver.videoRunning
visible: _videoReceiver && _videoReceiver.decoding
function getWidth() {
//-- Fit Width or Stretch
if(_fitMode === 0 || _fitMode === 2) {
@@ -86,7 +86,7 @@ Item {
target: _videoReceiver
onImageFileChanged: {
videoContent.grabToImage(function(result) {
if (!result.saveToFile(_videoReceiver.imageFile)) {
if (!result.saveToFile(QGroundControl.videoManager.imageFile)) {
console.error('Error capturing video frame');
}
});
@@ -130,7 +130,7 @@ Item {
height: parent.getHeight()
width: parent.getWidth()
anchors.centerIn: parent
visible: _videoReceiver && _videoReceiver.videoRunning
visible: _videoReceiver && _videoReceiver.decoding
sourceComponent: videoBackgroundComponent
property bool videoDisabled: QGroundControl.settingsManager.videoSettings.videoSource.rawValue === QGroundControl.settingsManager.videoSettings.disabledVideoSource

10
src/FlightMap/Widgets/VideoPageWidget.qml

@@ -33,7 +33,7 @@ Item {
property bool _communicationLost: activeVehicle ? activeVehicle.connectionLost : false
property var _videoReceiver: QGroundControl.videoManager.videoReceiver
property bool _recordingVideo: _videoReceiver && _videoReceiver.recording
property bool _videoRunning: _videoReceiver && _videoReceiver.videoRunning
property bool _decodingVideo: _videoReceiver && _videoReceiver.decoding
property bool _streamingEnabled: QGroundControl.settingsManager.videoSettings.streamConfigured
property var _dynamicCameras: activeVehicle ? activeVehicle.dynamicCameras : null
property int _curCameraIndex: _dynamicCameras ? _dynamicCameras.currentCamera : 0
@@ -136,7 +136,7 @@ Item {
anchors.bottom: parent.bottom
width: height
radius: _recordingVideo ? 0 : height
color: (_videoRunning && _streamingEnabled) ? "red" : "gray"
color: (_decodingVideo && _streamingEnabled) ? "red" : "gray"
SequentialAnimation on opacity {
running: _recordingVideo
loops: Animation.Infinite
@@ -157,14 +157,14 @@ }
}
MouseArea {
anchors.fill: parent
enabled: _videoRunning && _streamingEnabled
enabled: _decodingVideo && _streamingEnabled
onClicked: {
if (_recordingVideo) {
_videoReceiver.stopRecording()
QGroundControl.videoManager.stopRecording()
// reset blinking animation
recordBtnBackground.opacity = 1
} else {
_videoReceiver.startRecording(videoFileName.text)
QGroundControl.videoManager.startRecording(videoFileName.text)
}
}
}

63
src/VideoStreaming/SubtitleWriter.cc

@@ -15,9 +15,6 @@
*/
#include "SubtitleWriter.h"
#include "SettingsManager.h"
#include "VideoReceiver.h"
#include "VideoManager.h"
#include "QGCApplication.h"
#include "QGCCorePlugin.h"
#include <QDateTime>
@@ -31,48 +28,11 @@ const int SubtitleWriter::_sampleRate = 1; // Sample rate in Hz for getting tele
SubtitleWriter::SubtitleWriter(QObject* parent)
: QObject(parent)
{
connect(&_timer, &QTimer::timeout, this, &SubtitleWriter::_captureTelemetry);
}
void SubtitleWriter::setVideoReceiver(VideoReceiver* videoReceiver)
void SubtitleWriter::startCapturingTelemetry(const QString& videoFile)
{
if(!videoReceiver) {
qCWarning(SubtitleWriterLog) << "Invalid VideoReceiver pointer! Aborting subtitle capture!";
return;
}
_videoReceiver = videoReceiver;
#if defined(QGC_GST_STREAMING)
// Only start writing subtitles once the recording pipeline actually starts
connect(_videoReceiver, &VideoReceiver::gotFirstRecordingKeyFrame, this, &SubtitleWriter::_startCapturingTelemetry);
// Captures recordingChanged() signals to stop writing subtitles
connect(_videoReceiver, &VideoReceiver::recordingChanged, this, &SubtitleWriter::_onVideoRecordingChanged);
#endif
// Timer for telemetry capture and writing to file
connect(&_timer, &QTimer::timeout, this, &SubtitleWriter::_captureTelemetry);
}
void SubtitleWriter::_onVideoRecordingChanged()
{
#if defined(QGC_GST_STREAMING)
// Stop capturing data if recording stopped
if(!_videoReceiver->recording()) {
qCDebug(SubtitleWriterLog) << "Stopping writing";
_timer.stop();
_file.close();
}
#endif
}
void SubtitleWriter::_startCapturingTelemetry()
{
if(!_videoReceiver) {
qCWarning(SubtitleWriterLog) << "Invalid VideoReceiver pointer! Aborting subtitle capture!";
_timer.stop();
return;
}
// Get the facts displayed in the values widget and capture them, removing the "Vehicle." prefix.
QSettings settings;
settings.beginGroup("ValuesWidget");
@@ -81,8 +41,8 @@ void SubtitleWriter::_startCapturingTelemetry()
_startTime = QDateTime::currentDateTime();
QFileInfo videoFile(_videoReceiver->videoFile());
QString subtitleFilePath = QStringLiteral("%1/%2.ass").arg(videoFile.path(), videoFile.completeBaseName());
QFileInfo videoFileInfo(videoFile);
QString subtitleFilePath = QStringLiteral("%1/%2.ass").arg(videoFileInfo.path(), videoFileInfo.completeBaseName());
qCDebug(SubtitleWriterLog) << "Writing overlay to file:" << subtitleFilePath;
_file.setFileName(subtitleFilePath);
@@ -118,14 +78,17 @@ void SubtitleWriter::_startCapturingTelemetry()
_timer.start(1000/_sampleRate);
}
void SubtitleWriter::_captureTelemetry()
void SubtitleWriter::stopCapturingTelemetry()
{
if(!_videoReceiver) {
qCWarning(SubtitleWriterLog) << "Invalid VideoReceiver pointer! Aborting subtitle capture!";
_timer.stop();
return;
}
#if defined(QGC_GST_STREAMING)
qCDebug(SubtitleWriterLog) << "Stopping writing";
_timer.stop();
_file.close();
#endif
}
void SubtitleWriter::_captureTelemetry()
{
static const float nRows = 3; // number of rows used for displaying data
static const int offsetFactor = 700; // Used to simulate a larger resolution and reduce the borders in the layout

13
src/VideoStreaming/SubtitleWriter.h

@@ -17,7 +17,6 @@
#pragma once
#include "QGCLoggingCategory.h"
#include "VideoReceiver.h"
#include <QObject>
#include <QTimer>
#include <QDateTime>
@@ -33,25 +32,19 @@ public:
explicit SubtitleWriter(QObject* parent = nullptr);
~SubtitleWriter() = default;
void setVideoReceiver(VideoReceiver* videoReceiver);
// starts capturing vehicle telemetry.
void startCapturingTelemetry(const QString& videoFile);
void stopCapturingTelemetry();
private slots:
// Fires with every "videoRecordingChanged() signal, stops capturing telemetry if video stopped."
void _onVideoRecordingChanged();
// Captures a snapshot of telemetry data from vehicle into the subtitles file.
void _captureTelemetry();
// starts capturing vehicle telemetry.
void _startCapturingTelemetry();
private:
QTimer _timer;
QStringList _values;
QDateTime _startTime;
QFile _file;
VideoReceiver* _videoReceiver;
static const int _sampleRate;
};

331
src/VideoStreaming/VideoManager.cc

@@ -30,6 +30,12 @@
QGC_LOGGING_CATEGORY(VideoManagerLog, "VideoManagerLog")
static const char* kFileExtension[VideoReceiver::FILE_FORMAT_MAX - VideoReceiver::FILE_FORMAT_MIN] = {
"mkv",
"mov",
"mp4"
};
//-----------------------------------------------------------------------------
VideoManager::VideoManager(QGCApplication* app, QGCToolbox* toolbox)
: QGCTool(app, toolbox)
@@ -43,6 +49,17 @@ VideoManager::~VideoManager()
_videoReceiver = nullptr;
delete _thermalVideoReceiver;
_thermalVideoReceiver = nullptr;
#if defined(QGC_GST_STREAMING)
if (_thermalVideoSink != nullptr) {
gst_object_unref(_thermalVideoSink);
_thermalVideoSink = nullptr;
}
if (_videoSink != nullptr) {
gst_object_unref(_videoSink);
_videoSink = nullptr;
}
#endif
}
//-----------------------------------------------------------------------------
@@ -74,60 +91,22 @@ VideoManager::setToolbox(QGCToolbox *toolbox)
emit isGStreamerChanged();
qCDebug(VideoManagerLog) << "New Video Source:" << videoSource;
_videoReceiver = toolbox->corePlugin()->createVideoReceiver(this);
_videoReceiver->setUnittestMode(qgcApp()->runningUnitTests());
_thermalVideoReceiver = toolbox->corePlugin()->createVideoReceiver(this);
_thermalVideoReceiver->setUnittestMode(qgcApp()->runningUnitTests());
_videoReceiver->moveToThread(qgcApp()->thread());
_thermalVideoReceiver->moveToThread(qgcApp()->thread());
// Those connects are temporary: In a perfect world those connections are going to be done on the Qml
// but because currently the videoReceiver is created in the C++ world, this is easier.
// The fact returning a QVariant is a quite annoying to use proper signal / slot connection.
_updateSettings();
auto appSettings = toolbox->settingsManager()->appSettings();
for (auto *videoReceiver : { _videoReceiver, _thermalVideoReceiver}) {
// First, Setup the current values from the settings.
videoReceiver->setRtspTimeout(_videoSettings->rtspTimeout()->rawValue().toInt());
videoReceiver->setStreamEnabled(_videoSettings->streamEnabled()->rawValue().toBool());
videoReceiver->setRecordingFormatId(_videoSettings->recordingFormat()->rawValue().toInt());
videoReceiver->setStreamConfigured(_videoSettings->streamConfigured());
connect(_videoSettings->rtspTimeout(), &Fact::rawValueChanged,
videoReceiver, [videoReceiver](const QVariant &value) {
videoReceiver->setRtspTimeout(value.toInt());
}
);
connect(_videoSettings->streamEnabled(), &Fact::rawValueChanged,
videoReceiver, [videoReceiver](const QVariant &value) {
videoReceiver->setStreamEnabled(value.toBool());
}
);
connect(_videoSettings->recordingFormat(), &Fact::rawValueChanged,
videoReceiver, [videoReceiver](const QVariant &value) {
videoReceiver->setRecordingFormatId(value.toInt());
}
);
// Why some options are facts while others aren't?
connect(_videoSettings, &VideoSettings::streamConfiguredChanged, videoReceiver, &VideoReceiver::setStreamConfigured);
connect(_videoReceiver, &VideoReceiver::timeout, this, &VideoManager::_restartVideo);
connect(_videoReceiver, &VideoReceiver::streamingChanged, this, &VideoManager::_streamingChanged);
connect(_videoReceiver, &VideoReceiver::recordingStarted, this, &VideoManager::_recordingStarted);
connect(_videoReceiver, &VideoReceiver::recordingChanged, this, &VideoManager::_recordingChanged);
connect(_videoReceiver, &VideoReceiver::screenshotComplete, this, &VideoManager::_screenshotComplete);
// Fix those.
// connect(appSettings, &Fact::rawValueChanged, videoReceiver, &VideoReceiver::setVideoPath);
// connect(appSettings->videoSavePath(), &Fact::rawValueChanged, videoReceiver, &VideoReceiver::setImagePath);
// Connect the video receiver with the rest of the app.
connect(videoReceiver, &VideoReceiver::restartTimeout, this, &VideoManager::restartVideo);
connect(videoReceiver, &VideoReceiver::sendMessage, qgcApp(), &QGCApplication::showMessage);
connect(videoReceiver, &VideoReceiver::beforeRecording, this, &VideoManager::cleanupOldVideos);
}
// FIXME: AV: I believe _thermalVideoReceiver should be handled just like _videoReceiver in terms of event
// and I expect that it will be changed during multiple video stream activity
connect(_thermalVideoReceiver, &VideoReceiver::timeout, this, &VideoManager::_restartVideo);
connect(_thermalVideoReceiver, &VideoReceiver::streamingChanged, this, &VideoManager::_streamingChanged);
_updateSettings();
if(isGStreamer()) {
startVideo();
_subtitleWriter.setVideoReceiver(_videoReceiver);
} else {
stopVideo();
}
@@ -135,17 +114,7 @@ VideoManager::setToolbox(QGCToolbox *toolbox)
#endif
}
QStringList VideoManager::videoMuxes()
{
return {"matroskamux", "qtmux", "mp4mux"};
}
QStringList VideoManager::videoExtensions()
{
return {"mkv", "mov", "mp4"};
}
void VideoManager::cleanupOldVideos()
void VideoManager::_cleanupOldVideos()
{
#if defined(QGC_GST_STREAMING)
//-- Only perform cleanup if storage limit is enabled
@@ -158,9 +127,11 @@ void VideoManager::cleanupOldVideos()
videoDir.setSorting(QDir::Time);
QStringList nameFilters;
for(const QString& extension : videoExtensions()) {
nameFilters << QString("*.") + extension;
for(size_t i = 0; i < sizeof(kFileExtension) / sizeof(kFileExtension[0]); i += 1) {
nameFilters << QString("*.") + kFileExtension[i];
}
videoDir.setNameFilters(nameFilters);
//-- get the list of videos stored
QFileInfoList vidList = videoDir.entryInfoList();
@@ -188,18 +159,114 @@ void
void
VideoManager::startVideo()
{
if(_videoReceiver) _videoReceiver->start();
if(_thermalVideoReceiver) _thermalVideoReceiver->start();
if (qgcApp()->runningUnitTests()) {
return;
}
if(!_videoSettings->streamEnabled()->rawValue().toBool() || !_videoSettings->streamConfigured()) {
qCDebug(VideoReceiverLog) << "Stream not enabled/configured";
return;
}
#if defined(QGC_GST_STREAMING)
const unsigned timeout = _videoSettings->rtspTimeout()->rawValue().toUInt();
if(_videoReceiver != nullptr) {
_videoReceiver->start(_videoUri, timeout);
if (_videoSink != nullptr) {
_videoReceiver->startDecoding(_videoSink);
}
}
if(_thermalVideoReceiver != nullptr) {
_thermalVideoReceiver->start(_thermalVideoUri, timeout);
if (_thermalVideoSink != nullptr) {
_thermalVideoReceiver->startDecoding(_thermalVideoSink);
}
}
#endif
}
//-----------------------------------------------------------------------------
void
VideoManager::stopVideo()
{
if (qgcApp()->runningUnitTests()) {
return;
}
if(_videoReceiver) _videoReceiver->stop();
if(_thermalVideoReceiver) _thermalVideoReceiver->stop();
}
void
VideoManager::startRecording(const QString& videoFile)
{
if (qgcApp()->runningUnitTests()) {
return;
}
if (!_videoReceiver) {
qgcApp()->showMessage(tr("Video receiver is not ready."));
return;
}
const VideoReceiver::FILE_FORMAT fileFormat = static_cast<VideoReceiver::FILE_FORMAT>(_videoSettings->recordingFormat()->rawValue().toInt());
if(fileFormat < VideoReceiver::FILE_FORMAT_MIN || fileFormat >= VideoReceiver::FILE_FORMAT_MAX) {
qgcApp()->showMessage(tr("Invalid video format defined."));
return;
}
//-- Disk usage maintenance
_cleanupOldVideos();
QString savePath = qgcApp()->toolbox()->settingsManager()->appSettings()->videoSavePath();
if(savePath.isEmpty()) {
qgcApp()->showMessage(tr("Unabled to record video. Video save path must be specified in Settings."));
return;
}
_videoFile = savePath + "/"
+ (videoFile.isEmpty() ? QDateTime::currentDateTime().toString("yyyy-MM-dd_hh.mm.ss") : videoFile)
+ "." + kFileExtension[fileFormat - VideoReceiver::FILE_FORMAT_MIN];
_videoReceiver->startRecording(_videoFile, fileFormat);
}
void
VideoManager::stopRecording()
{
if (qgcApp()->runningUnitTests()) {
return;
}
if (!_videoReceiver) {
return;
}
_videoReceiver->stopRecording();
}
void
VideoManager::grabImage(const QString& imageFile)
{
if (qgcApp()->runningUnitTests()) {
return;
}
if (!_videoReceiver) {
return;
}
_imageFile = imageFile;
emit imageFileChanged();
_videoReceiver->takeScreenshot(_imageFile);
}
//-----------------------------------------------------------------------------
double VideoManager::aspectRatio()
{
@@ -210,6 +277,7 @@ double VideoManager::aspectRatio()
return pInfo->aspectRatio();
}
}
// FIXME: AV: use _videoReceiver->videoSize() to calculate AR (if AR is not specified in the settings?)
return _videoSettings->aspectRatio()->rawValue().toDouble();
}
@@ -264,6 +332,13 @@ VideoManager::hasThermal()
}
//-----------------------------------------------------------------------------
QString
VideoManager::imageFile()
{
return _imageFile;
}
//-----------------------------------------------------------------------------
bool
VideoManager::autoStreamConfigured()
{
@@ -304,28 +379,28 @@ VideoManager::_videoSourceChanged()
emit hasVideoChanged();
emit isGStreamerChanged();
emit isAutoStreamChanged();
restartVideo();
_restartVideo();
}
//-----------------------------------------------------------------------------
void
VideoManager::_udpPortChanged()
{
restartVideo();
_restartVideo();
}
//-----------------------------------------------------------------------------
void
VideoManager::_rtspUrlChanged()
{
restartVideo();
_restartVideo();
}
//-----------------------------------------------------------------------------
void
VideoManager::_tcpUrlChanged()
{
restartVideo();
_restartVideo();
}
//-----------------------------------------------------------------------------
@@ -390,7 +465,7 @@ VideoManager::_makeVideoSink(gpointer widget)
if ((sink = gst_element_factory_make("qgcvideosinkbin", nullptr)) != nullptr) {
g_object_set(sink, "widget", widget, NULL);
} else {
qCritical() << "VideoManager::_makeVideoSink() failed. Error with gst_element_factory_make('qgcvideosinkbin')";
qCritical() << "gst_element_factory_make('qgcvideosinkbin') failed";
}
return sink;
@@ -405,24 +480,32 @@ VideoManager::_initVideo()
QQuickItem* root = qgcApp()->mainRootWindow();
if (root == nullptr) {
qCDebug(VideoManagerLog) << "VideoManager::_makeVideoSink() failed. No root window";
qCDebug(VideoManagerLog) << "mainRootWindow() failed. No root window";
return;
}
QQuickItem* widget = root->findChild<QQuickItem*>("videoContent");
if (widget != nullptr) {
_videoReceiver->setVideoSink(_makeVideoSink(widget));
if (widget != nullptr && _videoReceiver != nullptr) {
if ((_videoSink = _makeVideoSink(widget)) != nullptr) {
_videoReceiver->startDecoding(_videoSink);
} else {
qCDebug(VideoManagerLog) << "_makeVideoSink() failed";
}
} else {
qCDebug(VideoManagerLog) << "VideoManager::_makeVideoSink() failed. 'videoContent' widget not found";
qCDebug(VideoManagerLog) << "video receiver disabled";
}
widget = root->findChild<QQuickItem*>("thermalVideo");
if (widget != nullptr) {
_thermalVideoReceiver->setVideoSink(_makeVideoSink(widget));
if (widget != nullptr && _thermalVideoReceiver != nullptr) {
if ((_thermalVideoSink = _makeVideoSink(widget)) != nullptr) {
_thermalVideoReceiver->startDecoding(_thermalVideoSink);
} else {
qCDebug(VideoManagerLog) << "_makeVideoSink() failed";
}
} else {
qCDebug(VideoManagerLog) << "VideoManager::_makeVideoSink() failed. 'thermalVideo' widget not found";
qCDebug(VideoManagerLog) << "thermal video receiver disabled";
}
#endif
}
@@ -440,23 +523,23 @@ VideoManager::_updateSettings()
qCDebug(VideoManagerLog) << "Configure primary stream: " << pInfo->uri();
switch(pInfo->type()) {
case VIDEO_STREAM_TYPE_RTSP:
_videoReceiver->setUri(pInfo->uri());
_setVideoUri(pInfo->uri());
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceRTSP);
break;
case VIDEO_STREAM_TYPE_TCP_MPEG:
_videoReceiver->setUri(pInfo->uri());
_setVideoUri(pInfo->uri());
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceTCP);
break;
case VIDEO_STREAM_TYPE_RTPUDP:
_videoReceiver->setUri(QStringLiteral("udp://0.0.0.0:%1").arg(pInfo->uri()));
_setVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(pInfo->uri()));
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceUDPH264);
break;
case VIDEO_STREAM_TYPE_MPEG_TS_H264:
_videoReceiver->setUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(pInfo->uri()));
_setVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(pInfo->uri()));
_toolbox->settingsManager()->videoSettings()->videoSource()->setRawValue(VideoSettings::videoSourceMPEGTS);
break;
default:
_videoReceiver->setUri(pInfo->uri());
_setVideoUri(pInfo->uri());
break;
}
//-- Thermal stream (if any)
@@ -466,16 +549,16 @@ VideoManager::_updateSettings()
switch(pTinfo->type()) {
case VIDEO_STREAM_TYPE_RTSP:
case VIDEO_STREAM_TYPE_TCP_MPEG:
_thermalVideoReceiver->setUri(pTinfo->uri());
_setThermalVideoUri(pTinfo->uri());
break;
case VIDEO_STREAM_TYPE_RTPUDP:
_thermalVideoReceiver->setUri(QStringLiteral("udp://0.0.0.0:%1").arg(pTinfo->uri()));
_setThermalVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(pTinfo->uri()));
break;
case VIDEO_STREAM_TYPE_MPEG_TS_H264:
_thermalVideoReceiver->setUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(pTinfo->uri()));
_setThermalVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(pTinfo->uri()));
break;
default:
_thermalVideoReceiver->setUri(pTinfo->uri());
_setThermalVideoUri(pTinfo->uri());
break;
}
}
@@ -484,20 +567,62 @@ VideoManager::_updateSettings()
}
QString source = _videoSettings->videoSource()->rawValue().toString();
if (source == VideoSettings::videoSourceUDPH264)
_videoReceiver->setUri(QStringLiteral("udp://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
_setVideoUri(QStringLiteral("udp://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
else if (source == VideoSettings::videoSourceUDPH265)
_videoReceiver->setUri(QStringLiteral("udp265://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
_setVideoUri(QStringLiteral("udp265://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
else if (source == VideoSettings::videoSourceMPEGTS)
_videoReceiver->setUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
_setVideoUri(QStringLiteral("mpegts://0.0.0.0:%1").arg(_videoSettings->udpPort()->rawValue().toInt()));
else if (source == VideoSettings::videoSourceRTSP)
_videoReceiver->setUri(_videoSettings->rtspUrl()->rawValue().toString());
_setVideoUri(_videoSettings->rtspUrl()->rawValue().toString());
else if (source == VideoSettings::videoSourceTCP)
_videoReceiver->setUri(QStringLiteral("tcp://%1").arg(_videoSettings->tcpUrl()->rawValue().toString()));
_setVideoUri(QStringLiteral("tcp://%1").arg(_videoSettings->tcpUrl()->rawValue().toString()));
}
void
VideoManager::_setVideoUri(const QString& uri)
{
#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
//-- Taisync on iOS or Android sends a raw h.264 stream
if (isTaisync()) {
_videoUri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
return;
}
#endif
_videoUri = uri;
}
void
VideoManager::_setThermalVideoUri(const QString& uri)
{
#if defined(QGC_GST_TAISYNC_ENABLED) && (defined(__android__) || defined(__ios__))
//-- Taisync on iOS or Android sends a raw h.264 stream
if (isTaisync()) {
// FIXME: AV: TAISYNC_VIDEO_UDP_PORT is used by video stream, thermal stream should go via its own proxy
_thermalVideoUri = QString("tsusb://0.0.0.0:%1").arg(TAISYNC_VIDEO_UDP_PORT);
return;
}
#endif
_thermalVideoUri = uri;
}
//-----------------------------------------------------------------------------
void
VideoManager::_streamingChanged()
{
#if defined(QGC_GST_STREAMING)
// FIXME: AV: we need VideoReceiver::running() to avoid restarting if one of streams is not active
// but since VideoManager is going to be relpaced by Video Model during multiple video streaming development activity
// I'll leave it as is for week or two
if ((_videoReceiver && !_videoReceiver->streaming())
/*|| (_thermalVideoReceiver && !_thermalVideoReceiver->streaming())*/) {
_restartVideo();
}
#endif
}
//-----------------------------------------------------------------------------
void
VideoManager::restartVideo()
VideoManager::_restartVideo()
{
#if defined(QGC_GST_STREAMING)
qCDebug(VideoManagerLog) << "Restart video streaming";
@@ -508,6 +633,28 @@ VideoManager::restartVideo()
#endif
}
//-----------------------------------------------------------------------------
void
VideoManager::_recordingStarted()
{
_subtitleWriter.startCapturingTelemetry(_videoFile);
}
//-----------------------------------------------------------------------------
void
VideoManager::_recordingChanged()
{
if (_videoReceiver && !_videoReceiver->recording()) {
_subtitleWriter.stopCapturingTelemetry();
}
}
//----------------------------------------------------------------------------------------
void
VideoManager::_screenshotComplete()
{
}
//----------------------------------------------------------------------------------------
void
VideoManager::_setActiveVehicle(Vehicle* vehicle)
@@ -519,14 +666,14 @@ VideoManager::_setActiveVehicle(Vehicle* vehicle)
if(pCamera) {
pCamera->stopStream();
}
disconnect(_activeVehicle->dynamicCameras(), &QGCCameraManager::streamChanged, this, &VideoManager::restartVideo);
disconnect(_activeVehicle->dynamicCameras(), &QGCCameraManager::streamChanged, this, &VideoManager::_restartVideo);
}
}
_activeVehicle = vehicle;
if(_activeVehicle) {
connect(_activeVehicle, &Vehicle::connectionLostChanged, this, &VideoManager::_connectionLostChanged);
if(_activeVehicle->dynamicCameras()) {
connect(_activeVehicle->dynamicCameras(), &QGCCameraManager::streamChanged, this, &VideoManager::restartVideo);
connect(_activeVehicle->dynamicCameras(), &QGCCameraManager::streamChanged, this, &VideoManager::_restartVideo);
QGCCameraControl* pCamera = _activeVehicle->dynamicCameras()->currentCameraInstance();
if(pCamera) {
pCamera->resumeStream();
@@ -537,7 +684,7 @@ VideoManager::_setActiveVehicle(Vehicle* vehicle)
setfullScreen(false);
}
emit autoStreamConfiguredChanged();
restartVideo();
_restartVideo();
}
//----------------------------------------------------------------------------------------

29
src/VideoStreaming/VideoManager.h

@@ -50,6 +50,7 @@ public:
Q_PROPERTY(double thermalHfov READ thermalHfov NOTIFY aspectRatioChanged)
Q_PROPERTY(bool autoStreamConfigured READ autoStreamConfigured NOTIFY autoStreamConfiguredChanged)
Q_PROPERTY(bool hasThermal READ hasThermal NOTIFY aspectRatioChanged)
Q_PROPERTY(QString imageFile READ imageFile NOTIFY imageFileChanged)
virtual bool hasVideo ();
virtual bool isGStreamer ();
@@ -62,14 +63,12 @@ public:
virtual double thermalHfov ();
virtual bool autoStreamConfigured();
virtual bool hasThermal ();
virtual void restartVideo ();
virtual QString imageFile ();
virtual VideoReceiver* videoReceiver () { return _videoReceiver; }
virtual VideoReceiver* thermalVideoReceiver () { return _thermalVideoReceiver; }
QStringList videoExtensions();
QStringList videoMuxes();
#if defined(QGC_DISABLE_UVC)
virtual bool uvcEnabled () { return false; }
#else
@@ -85,7 +84,10 @@ public:
Q_INVOKABLE void startVideo ();
Q_INVOKABLE void stopVideo ();
void cleanupOldVideos();
Q_INVOKABLE void startRecording (const QString& videoFile = QString());
Q_INVOKABLE void stopRecording ();
Q_INVOKABLE void grabImage(const QString& imageFile);
signals:
void hasVideoChanged ();
@@ -96,6 +98,7 @@ signals:
void isTaisyncChanged ();
void aspectRatioChanged ();
void autoStreamConfiguredChanged();
void imageFileChanged ();
protected slots:
void _videoSourceChanged ();
@@ -115,13 +118,29 @@ protected:
#endif
void _initVideo ();
void _updateSettings ();
void _setVideoUri (const QString& uri);
void _setThermalVideoUri (const QString& uri);
void _cleanupOldVideos ();
void _restartVideo ();
void _streamingChanged ();
void _recordingStarted ();
void _recordingChanged ();
void _screenshotComplete ();
protected:
QString _videoFile;
QString _imageFile;
SubtitleWriter _subtitleWriter;
bool _isTaisync = false;
VideoReceiver* _videoReceiver = nullptr;
VideoReceiver* _thermalVideoReceiver = nullptr;
#if defined(QGC_GST_STREAMING)
GstElement* _videoSink = nullptr;
GstElement* _thermalVideoSink = nullptr;
#endif
VideoSettings* _videoSettings = nullptr;
QString _videoUri;
QString _thermalVideoUri;
QString _videoSourceID;
bool _fullScreen = false;
Vehicle* _activeVehicle = nullptr;

1953
src/VideoStreaming/VideoReceiver.cc

File diff suppressed because it is too large Load Diff

274
src/VideoStreaming/VideoReceiver.h

@@ -17,183 +17,159 @@
#include "QGCLoggingCategory.h"
#include <QObject>
#include <QSize>
#include <QTimer>
#include <QTcpSocket>
#include <QThread>
#include <QWaitCondition>
#include <QMutex>
#include <QQueue>
#if defined(QGC_GST_STREAMING)
#include <gst/gst.h>
typedef GstElement VideoSink;
#else
typedef void VideoSink;
#endif
Q_DECLARE_LOGGING_CATEGORY(VideoReceiverLog)
class VideoSettings;
class VideoReceiver : public QObject
class VideoReceiver : public QThread
{
Q_OBJECT
public:
#if defined(QGC_GST_STREAMING)
Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
#endif
Q_PROPERTY(bool videoRunning READ videoRunning NOTIFY videoRunningChanged)
Q_PROPERTY(QString imageFile READ imageFile NOTIFY imageFileChanged)
Q_PROPERTY(QString videoFile READ videoFile NOTIFY videoFileChanged)
Q_PROPERTY(QString imagePath READ imagePath NOTIFY imagePathChanged)
Q_PROPERTY(QString videoPath READ videoPath NOTIFY videoPathChanged)
Q_PROPERTY(bool showFullScreen READ showFullScreen WRITE setShowFullScreen NOTIFY showFullScreenChanged)
Q_PROPERTY(bool streamEnabled READ streamEnabled WRITE setStreamEnabled NOTIFY streamEnabledChanged)
Q_PROPERTY(bool streamConfigured READ streamConfigured WRITE setStreamConfigured NOTIFY streamConfiguredChanged)
Q_PROPERTY(bool isTaisync READ isTaisync WRITE setIsTaysinc NOTIFY isTaisyncChanged)
Q_PROPERTY(int recordingFormatId READ recordingFormatId WRITE setRecordingFormatId NOTIFY recordingFormatIdChanged)
Q_PROPERTY(int rtspTimeout READ rtspTimeout WRITE setRtspTimeout NOTIFY rtspTimeoutChanged)
public:
explicit VideoReceiver(QObject* parent = nullptr);
~VideoReceiver();
bool streamEnabled() const;
Q_SLOT void setStreamEnabled(bool enabled);
Q_SIGNAL void streamEnabledChanged();
bool streamConfigured() const;
Q_SLOT void setStreamConfigured(bool enabled);
Q_SIGNAL void streamConfiguredChanged();
bool isTaisync() const;
Q_SLOT void setIsTaysinc(bool value);
Q_SIGNAL void isTaisyncChanged();
QString videoPath() const;
Q_SLOT void setVideoPath(const QString& path);
Q_SIGNAL void videoPathChanged();
QString imagePath() const;
Q_SLOT void setImagePath(const QString& path);
Q_SIGNAL void imagePathChanged();
int recordingFormatId() const;
Q_SLOT void setRecordingFormatId(int value);
Q_SIGNAL void recordingFormatIdChanged();
int rtspTimeout() const;
Q_SLOT void setRtspTimeout(int value);
Q_SIGNAL void rtspTimeoutChanged();
Q_SIGNAL void restartTimeout();
Q_SIGNAL void sendMessage(const QString& message);
// Emitted before recording starts.
Q_SIGNAL void beforeRecording();
void setUnittestMode(bool runUnitTests);
#if defined(QGC_GST_STREAMING)
virtual bool recording () { return _recording; }
#endif
virtual bool videoRunning () { return _videoRunning; }
virtual QString imageFile () { return _imageFile; }
virtual QString videoFile () { return _videoFile; }
virtual bool showFullScreen () { return _showFullScreen; }
virtual void grabImage (QString imageFile);
virtual void setShowFullScreen (bool show) { _showFullScreen = show; emit showFullScreenChanged(); }
#if defined(QGC_GST_STREAMING)
void setVideoSink (GstElement* videoSink);
#endif
~VideoReceiver(void);
typedef enum {
FILE_FORMAT_MIN = 0,
FILE_FORMAT_MKV = FILE_FORMAT_MIN,
FILE_FORMAT_MOV,
FILE_FORMAT_MP4,
FILE_FORMAT_MAX
} FILE_FORMAT;
Q_PROPERTY(bool streaming READ streaming NOTIFY streamingChanged)
Q_PROPERTY(bool decoding READ decoding NOTIFY decodingChanged)
Q_PROPERTY(bool recording READ recording NOTIFY recordingChanged)
Q_PROPERTY(QSize videoSize READ videoSize NOTIFY videoSizeChanged)
// Returns true while the pipeline is receiving stream data.
// NOTE(review): read from the GUI thread while set on the receiver thread —
// presumably backed by std::atomic<bool> in the new API; confirm against the
// full header (the member list below shows both a plain bool and an atomic).
bool streaming(void) {
return _streaming;
}
// Returns true while a decoding branch is attached and producing frames.
// NOTE(review): cross-thread read — presumably std::atomic<bool>; confirm
// against the full header.
bool decoding(void) {
return _decoding;
}
// Returns true while the recording (file sink) branch is active.
// NOTE(review): cross-thread read — presumably std::atomic<bool>; confirm
// against the full header.
bool recording(void) {
return _recording;
}
// Current video frame size, unpacked from the single atomically-updated
// 32-bit member (width in the high 16 bits, height in the low 16 bits).
// Packing both dimensions into one word lets _setVideoSize() publish the
// size to the GUI thread without a lock.
QSize videoSize(void) {
    const quint32 packed = _videoSize;
    const int width  = static_cast<int>((packed >> 16) & 0xFFFF);
    const int height = static_cast<int>(packed & 0xFFFF);
    return QSize(width, height);
}
signals:
void videoRunningChanged ();
void imageFileChanged ();
void videoFileChanged ();
void showFullScreenChanged ();
#if defined(QGC_GST_STREAMING)
void recordingChanged ();
void msgErrorReceived ();
void msgEOSReceived ();
void msgStateChangedReceived ();
void gotFirstRecordingKeyFrame ();
#endif
void timeout(void);
void streamingChanged(void);
void decodingChanged(void);
void recordingChanged(void);
void recordingStarted(void);
void videoSizeChanged(void);
void screenshotComplete(void);
public slots:
virtual void start ();
virtual void stop ();
virtual void setUri (const QString& uri);
virtual void stopRecording ();
virtual void startRecording (const QString& videoFile = QString());
virtual void start(const QString& uri, unsigned timeout);
virtual void stop(void);
virtual void startDecoding(VideoSink* videoSink);
virtual void stopDecoding(void);
virtual void startRecording(const QString& videoFile, FILE_FORMAT format);
virtual void stopRecording(void);
virtual void takeScreenshot(const QString& imageFile);
protected slots:
virtual void _updateTimer ();
#if defined(QGC_GST_STREAMING)
GstElement* _makeSource (const QString& uri);
GstElement* _makeFileSink (const QString& videoFile, unsigned format);
virtual void _handleError ();
virtual void _handleEOS ();
virtual void _handleStateChanged ();
#endif
protected slots:
virtual void _watchdog(void);
virtual void _handleEOS(void);
protected:
#if defined(QGC_GST_STREAMING)
typedef struct
{
GstPad* teepad;
GstElement* queue;
GstElement* filesink;
gboolean removing;
} Sink;
bool _running;
bool _recording;
bool _streaming;
bool _starting;
bool _stopping;
bool _stop;
Sink* _sink;
void _setVideoSize(const QSize& size) {
_videoSize = ((quint32)size.width() << 16) | (quint32)size.height();
emit videoSizeChanged();
}
virtual GstElement* _makeSource(const QString& uri);
virtual GstElement* _makeDecoder(GstCaps* caps, GstElement* videoSink);
virtual GstElement* _makeFileSink(const QString& videoFile, FILE_FORMAT format);
virtual void _onNewSourcePad(GstPad* pad);
virtual void _onNewDecoderPad(GstPad* pad);
virtual bool _addDecoder(GstElement* src);
virtual bool _addVideoSink(GstPad* pad);
virtual void _noteTeeFrame(void);
virtual void _noteVideoSinkFrame(void);
virtual void _noteEndOfStream(void);
virtual void _unlinkBranch(GstElement* from);
virtual void _shutdownDecodingBranch (void);
virtual void _shutdownRecordingBranch(void);
typedef std::function<void(void)> Task;
bool _isOurThread(void);
void _post(Task t);
void run(void);
private:
static gboolean _onBusMessage(GstBus* bus, GstMessage* message, gpointer user_data);
static void _onNewPad(GstElement* element, GstPad* pad, gpointer data);
static void _wrapWithGhostPad(GstElement* element, GstPad* pad, gpointer data);
static void _linkPadWithOptionalBuffer(GstElement* element, GstPad* pad, gpointer data);
static gboolean _padProbe(GstElement* element, GstPad* pad, gpointer user_data);
static gboolean _autoplugQueryCaps(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data);
static gboolean _autoplugQueryContext(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data);
static gboolean _autoplugQuery(GstElement* bin, GstPad* pad, GstElement* element, GstQuery* query, gpointer data);
static GstPadProbeReturn _teeProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
static GstPadProbeReturn _videoSinkProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
static GstPadProbeReturn _eosProbe(GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
static GstPadProbeReturn _keyframeWatch(GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
bool _removingDecoder;
bool _removingRecorder;
GstElement* _source;
GstElement* _tee;
GstElement* _decoderValve;
GstElement* _recorderValve;
GstElement* _decoder;
GstElement* _videoSink;
GstElement* _fileSink;
GstElement* _pipeline;
void _noteVideoSinkFrame ();
qint64 _lastSourceFrameTime;
qint64 _lastVideoFrameTime;
bool _resetVideoSink;
gulong _videoSinkProbeId;
static gboolean _onBusMessage (GstBus* bus, GstMessage* message, gpointer user_data);
static GstPadProbeReturn _unlinkCallBack (GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
static GstPadProbeReturn _videoSinkProbe (GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
static GstPadProbeReturn _keyframeWatch (GstPad* pad, GstPadProbeInfo* info, gpointer user_data);
QTimer _watchdogTimer;
virtual void _unlinkRecordingBranch (GstPadProbeInfo* info);
virtual void _shutdownRecordingBranch();
virtual void _shutdownPipeline ();
//-- RTSP UDP reconnect timeout
uint64_t _udpReconnect_us;
GstElement* _pipeline;
GstElement* _videoSink;
guint64 _lastFrameId;
qint64 _lastFrameTime;
unsigned _timeout;
//-- Wait for Video Server to show up before starting
QTimer _frameTimer;
QTimer _restart_timer;
int _restart_time_ms;
QWaitCondition _taskQueueUpdate;
QMutex _taskQueueSync;
QQueue<Task> _taskQueue;
bool _shutdown;
//-- RTSP UDP reconnect timeout
uint64_t _udpReconnect_us;
static const char* _kFileMux[FILE_FORMAT_MAX - FILE_FORMAT_MIN];
#else
private:
#endif
QString _uri;
QString _imageFile;
QString _videoFile;
QString _videoPath;
QString _imagePath;
bool _videoRunning;
bool _showFullScreen;
bool _streamEnabled;
bool _streamConfigured;
bool _storageLimit;
bool _unittTestMode;
bool _isTaisync;
int _recordingFormatId; // 0 - 2, defined in VideoReceiver.cc / kVideoExtensions. TODO: use a better representation.
int _rtspTimeout;
std::atomic<bool> _streaming;
std::atomic<bool> _decoding;
std::atomic<bool> _recording;
std::atomic<quint32>_videoSize;
};

3
src/VideoStreaming/gstqgcvideosinkbin.c

@@ -125,6 +125,9 @@ _vsb_init(GstQgcVideoSinkBin *vsb)
break;
}
// FIXME: AV: temporally disable sync due to MPEG2-TS sync issues
g_object_set(vsb->qmlglsink, "sync", FALSE, NULL);
if ((glcolorconvert = gst_element_factory_make("glcolorconvert", NULL)) == NULL) {
GST_ERROR_OBJECT(vsb, "gst_element_factory_make('glcolorconvert' failed)");
break;

Loading…
Cancel
Save