diff --git a/qgcimages.qrc b/qgcimages.qrc
index 7ae70c7..e300b5f 100644
--- a/qgcimages.qrc
+++ b/qgcimages.qrc
@@ -185,6 +185,7 @@
resources/CogWheels.png
src/FlightMap/Images/sub.png
src/ui/toolbar/Images/TelemRSSI.svg
+ src/ui/toolbar/Images/TrackingIcon.svg
src/AutoPilotPlugins/Common/Images/TuningComponentIcon.png
src/FlightMap/Images/vehicleArrowOpaque.svg
src/FlightMap/Images/vehicleArrowOutline.svg
diff --git a/src/Camera/QGCCameraControl.cc b/src/Camera/QGCCameraControl.cc
index 8560142..a7b2757 100644
--- a/src/Camera/QGCCameraControl.cc
+++ b/src/Camera/QGCCameraControl.cc
@@ -187,6 +187,15 @@ QGCCameraControl::QGCCameraControl(const mavlink_camera_information_t *info, Veh
_recTimer.setSingleShot(false);
_recTimer.setInterval(333);
connect(&_recTimer, &QTimer::timeout, this, &QGCCameraControl::_recTimerHandler);
+ //-- Tracking
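+ //   Tracking capability is advertised by the camera in the CAMERA_INFORMATION capability flags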
+ if(_info.flags & CAMERA_CAP_FLAGS_HAS_TRACKING_RECTANGLE) {
+ _trackingStatus = static_cast<TrackingStatus>(_trackingStatus | TRACKING_RECTANGLE);
+ _trackingStatus = static_cast<TrackingStatus>(_trackingStatus | TRACKING_SUPPORTED);
+ }
+ if(_info.flags & CAMERA_CAP_FLAGS_HAS_TRACKING_POINT) {
+ _trackingStatus = static_cast<TrackingStatus>(_trackingStatus | TRACKING_POINT);
+ _trackingStatus = static_cast<TrackingStatus>(_trackingStatus | TRACKING_SUPPORTED);
+ }
}
//-----------------------------------------------------------------------------
@@ -1614,6 +1623,45 @@ QGCCameraControl::handleVideoStatus(const mavlink_video_stream_status_t* vs)
//-----------------------------------------------------------------------------
void
+QGCCameraControl::handleTrackingImageStatus(const mavlink_camera_tracking_image_status_t *tis)
+{
+ if (tis) {
+ _trackingImageStatus = *tis;
+
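+ // CAMERA_TRACKING_IMAGE_STATUS uses normalized [0..1] image coordinates, (0,0) = top-left;
+ // tracking_status: 0 = idle, 1 = active, 2 = error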
+ if (_trackingImageStatus.tracking_status == 0 || !trackingEnabled()) {
+ _trackingImageRect = {};
+ qCDebug(CameraControlLog) << "Tracking off";
+ } else {
+ if (_trackingImageStatus.tracking_mode == 2) { // CAMERA_TRACKING_MODE_RECTANGLE
+ _trackingImageRect = QRectF(QPointF(_trackingImageStatus.rec_top_x, _trackingImageStatus.rec_top_y),
+ QPointF(_trackingImageStatus.rec_bottom_x, _trackingImageStatus.rec_bottom_y));
+ } else {
+ float r = _trackingImageStatus.radius;
+ if (qIsNaN(r) || r <= 0) {
+ r = 0.05f;
+ }
+ // Bottom is left as NaN so the consumer can draw a perfect square using the video aspect ratio
+ _trackingImageRect = QRectF(QPointF(_trackingImageStatus.point_x - r, _trackingImageStatus.point_y - r),
+ QPointF(_trackingImageStatus.point_x + r, NAN));
+ }
+ // Clamp rectangle to [0..1] boundaries
+ _trackingImageRect.setLeft(std::min(std::max(_trackingImageRect.left(), 0.0), 1.0));
+ _trackingImageRect.setTop(std::min(std::max(_trackingImageRect.top(), 0.0), 1.0));
+ _trackingImageRect.setRight(std::min(std::max(_trackingImageRect.right(), 0.0), 1.0));
+ _trackingImageRect.setBottom(std::min(std::max(_trackingImageRect.bottom(), 0.0), 1.0));
+
+ qCDebug(CameraControlLog) << "Tracking Image Status [left:" << _trackingImageRect.left()
+ << "top:" << _trackingImageRect.top()
+ << "right:" << _trackingImageRect.right()
+ << "bottom:" << _trackingImageRect.bottom() << "]";
+ }
+
+ emit trackingImageStatusChanged();
+ }
+}
+
+//-----------------------------------------------------------------------------
+void
QGCCameraControl::setCurrentStream(int stream)
{
if(stream != _currentStream && stream >= 0 && stream < _streamLabels.count()) {
@@ -2275,3 +2323,93 @@ QGCVideoStreamInfo::update(const mavlink_video_stream_status_t* vs)
}
return changed;
}
+
+//-----------------------------------------------------------------------------
+void
+QGCCameraControl::setTrackingEnabled(bool set)
+{
+ if(set) {
+ _trackingStatus = static_cast<TrackingStatus>(_trackingStatus | TRACKING_ENABLED);
+ } else {
+ _trackingStatus = static_cast<TrackingStatus>(_trackingStatus & ~TRACKING_ENABLED);
+ }
+ emit trackingEnabledChanged();
+}
+
+//-----------------------------------------------------------------------------
+void
+QGCCameraControl::startTracking(QRectF rec)
+{
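+ // 'rec' is expected in normalized [0..1] video-frame coordinates;
+ // MAV_CMD_CAMERA_TRACK_RECTANGLE takes top-left x/y (param1/2) and bottom-right x/y (param3/4)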
+ if(_trackingMarquee != rec) {
+ _trackingMarquee = rec;
+
+ qCDebug(CameraControlLog) << "Start Tracking (Rectangle: ["
+ << static_cast<float>(rec.x()) << ", "
+ << static_cast<float>(rec.y()) << "] - ["
+ << static_cast<float>(rec.x() + rec.width()) << ", "
+ << static_cast<float>(rec.y() + rec.height()) << "])";
+
+ _vehicle->sendMavCommand(_compID,
+ MAV_CMD_CAMERA_TRACK_RECTANGLE,
+ true,
+ static_cast<float>(rec.x()),
+ static_cast<float>(rec.y()),
+ static_cast<float>(rec.x() + rec.width()),
+ static_cast<float>(rec.y() + rec.height()));
+ }
+}
+
+//-----------------------------------------------------------------------------
+void
+QGCCameraControl::startTracking(QPointF point, double radius)
+{
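+ // MAV_CMD_CAMERA_TRACK_POINT: param1/2 = point x/y, param3 = radius, all normalized [0..1]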
+ if(_trackingPoint != point || _trackingRadius != radius) {
+ _trackingPoint = point;
+ _trackingRadius = radius;
+
+ qCDebug(CameraControlLog) << "Start Tracking (Point: ["
+ << static_cast<float>(point.x()) << ", "
+ << static_cast<float>(point.y()) << "], Radius: "
+ << static_cast<float>(radius) << ")";
+
+ _vehicle->sendMavCommand(_compID,
+ MAV_CMD_CAMERA_TRACK_POINT,
+ true,
+ static_cast<float>(point.x()),
+ static_cast<float>(point.y()),
+ static_cast<float>(radius));
+ }
+}
+
+//-----------------------------------------------------------------------------
+void
+QGCCameraControl::stopTracking()
+{
+ qCDebug(CameraControlLog) << "Stop Tracking";
+
+ //-- Stop Tracking
+ _vehicle->sendMavCommand(_compID,
+ MAV_CMD_CAMERA_STOP_TRACKING,
+ true);
+
+ //-- Stop Sending Tracking Status
+ _vehicle->sendMavCommand(_compID,
+ MAV_CMD_SET_MESSAGE_INTERVAL,
+ true,
+ MAVLINK_MSG_ID_CAMERA_TRACKING_IMAGE_STATUS,
+ -1);
+
+ // reset tracking image rectangle
+ _trackingImageRect = {};
+}
+
+//-----------------------------------------------------------------------------
+void
+QGCCameraControl::_requestTrackingStatus()
+{
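+ // Ask the camera to stream CAMERA_TRACKING_IMAGE_STATUS
+ // (MAV_CMD_SET_MESSAGE_INTERVAL: param1 = message ID, param2 = interval in microseconds)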
+ _vehicle->sendMavCommand(_compID,
+ MAV_CMD_SET_MESSAGE_INTERVAL,
+ true,
+ MAVLINK_MSG_ID_CAMERA_TRACKING_IMAGE_STATUS,
+ 500000); // 500 ms interval (2 Hz), in microseconds
+}
\ No newline at end of file
diff --git a/src/Camera/QGCCameraControl.h b/src/Camera/QGCCameraControl.h
index b9e13bf..1b389bc 100644
--- a/src/Camera/QGCCameraControl.h
+++ b/src/Camera/QGCCameraControl.h
@@ -138,12 +138,21 @@ public:
THERMAL_PIP,
};
+ enum TrackingStatus {
+ TRACKING_UNKNOWN = 0,
+ TRACKING_SUPPORTED = 1,
+ TRACKING_ENABLED = 2,
+ TRACKING_RECTANGLE = 4,
+ TRACKING_POINT = 8
+ };
+
Q_ENUM(CameraMode)
Q_ENUM(VideoStatus)
Q_ENUM(PhotoStatus)
Q_ENUM(PhotoMode)
Q_ENUM(StorageStatus)
Q_ENUM(ThermalViewMode)
+ Q_ENUM(TrackingStatus)
Q_PROPERTY(int version READ version NOTIFY infoChanged)
Q_PROPERTY(QString modelName READ modelName NOTIFY infoChanged)
@@ -158,6 +167,7 @@ public:
Q_PROPERTY(bool hasZoom READ hasZoom NOTIFY infoChanged)
Q_PROPERTY(bool hasFocus READ hasFocus NOTIFY infoChanged)
Q_PROPERTY(bool hasVideoStream READ hasVideoStream NOTIFY infoChanged)
+ Q_PROPERTY(bool hasTracking READ hasTracking NOTIFY infoChanged)
Q_PROPERTY(bool photosInVideoMode READ photosInVideoMode NOTIFY infoChanged)
Q_PROPERTY(bool videoInPhotoMode READ videoInPhotoMode NOTIFY infoChanged)
Q_PROPERTY(bool isBasic READ isBasic NOTIFY infoChanged)
@@ -197,6 +207,10 @@ public:
Q_PROPERTY(QStringList streamLabels READ streamLabels NOTIFY streamLabelsChanged)
Q_PROPERTY(ThermalViewMode thermalMode READ thermalMode WRITE setThermalMode NOTIFY thermalModeChanged)
Q_PROPERTY(double thermalOpacity READ thermalOpacity WRITE setThermalOpacity NOTIFY thermalOpacityChanged)
+ Q_PROPERTY(bool trackingEnabled READ trackingEnabled WRITE setTrackingEnabled NOTIFY trackingEnabledChanged)
+ Q_PROPERTY(TrackingStatus trackingStatus READ trackingStatus NOTIFY trackingEnabledChanged)
+ Q_PROPERTY(bool trackingImageStatus READ trackingImageStatus NOTIFY trackingImageStatusChanged)
+ Q_PROPERTY(QRectF trackingImageRect READ trackingImageRect NOTIFY trackingImageStatusChanged)
Q_INVOKABLE virtual void setVideoMode ();
Q_INVOKABLE virtual void setPhotoMode ();
@@ -213,6 +227,9 @@ public:
Q_INVOKABLE virtual void stopZoom ();
Q_INVOKABLE virtual void stopStream ();
Q_INVOKABLE virtual void resumeStream ();
+ Q_INVOKABLE virtual void startTracking (QRectF rec);
+ Q_INVOKABLE virtual void startTracking (QPointF point, double radius);
+ Q_INVOKABLE virtual void stopTracking ();
virtual int version () { return _version; }
virtual QString modelName () { return _modelName; }
@@ -226,6 +243,7 @@ public:
virtual bool hasModes () { return _info.flags & CAMERA_CAP_FLAGS_HAS_MODES; }
virtual bool hasZoom () { return _info.flags & CAMERA_CAP_FLAGS_HAS_BASIC_ZOOM; }
virtual bool hasFocus () { return _info.flags & CAMERA_CAP_FLAGS_HAS_BASIC_FOCUS; }
+ virtual bool hasTracking () { return _trackingStatus & TRACKING_SUPPORTED; }
virtual bool hasVideoStream () { return _info.flags & CAMERA_CAP_FLAGS_HAS_VIDEO_STREAM; }
virtual bool photosInVideoMode () { return _info.flags & CAMERA_CAP_FLAGS_CAN_CAPTURE_IMAGE_IN_VIDEO_MODE; }
virtual bool videoInPhotoMode () { return _info.flags & CAMERA_CAP_FLAGS_CAN_CAPTURE_VIDEO_IN_IMAGE_MODE; }
@@ -287,9 +305,18 @@ public:
virtual void handleParamValue (const mavlink_param_ext_value_t& value);
virtual void handleStorageInfo (const mavlink_storage_information_t& st);
virtual void handleBatteryStatus (const mavlink_battery_status_t& bs);
+ virtual void handleTrackingImageStatus(const mavlink_camera_tracking_image_status_t *tis);
virtual void handleVideoInfo (const mavlink_video_stream_information_t *vi);
virtual void handleVideoStatus (const mavlink_video_stream_status_t *vs);
+ virtual bool trackingEnabled () { return _trackingStatus & TRACKING_ENABLED; }
+ virtual void setTrackingEnabled (bool set);
+
+ virtual TrackingStatus trackingStatus () { return _trackingStatus; }
+
+ virtual bool trackingImageStatus() { return _trackingImageStatus.tracking_status == 1; }
+ virtual QRectF trackingImageRect() { return _trackingImageRect; }
+
/// Notify controller a parameter has changed
virtual void factChanged (Fact* pFact);
/// Allow controller to modify or invalidate incoming parameter
@@ -328,6 +355,8 @@ signals:
void autoStreamChanged ();
void recordTimeChanged ();
void streamLabelsChanged ();
+ void trackingEnabledChanged ();
+ void trackingImageStatusChanged ();
void thermalModeChanged ();
void thermalOpacityChanged ();
void storageStatusChanged ();
@@ -338,6 +367,7 @@ protected:
virtual void _setCameraMode (CameraMode mode);
virtual void _requestStreamInfo (uint8_t streamID);
virtual void _requestStreamStatus (uint8_t streamID);
+ virtual void _requestTrackingStatus ();
virtual QGCVideoStreamInfo* _findStream (uint8_t streamID, bool report = true);
virtual QGCVideoStreamInfo* _findStream (const QString name);
@@ -428,4 +458,10 @@ protected:
QStringList _streamLabels;
ThermalViewMode _thermalMode = THERMAL_BLEND;
double _thermalOpacity = 85.0;
+ TrackingStatus _trackingStatus = TRACKING_UNKNOWN;
+ QRectF _trackingMarquee;
+ QPointF _trackingPoint;
+ double _trackingRadius = 0.0;
+ mavlink_camera_tracking_image_status_t _trackingImageStatus = {};
+ QRectF _trackingImageRect;
};
diff --git a/src/Camera/QGCCameraManager.cc b/src/Camera/QGCCameraManager.cc
index ef36857..5c9ee5b 100644
--- a/src/Camera/QGCCameraManager.cc
+++ b/src/Camera/QGCCameraManager.cc
@@ -101,6 +101,9 @@ QGCCameraManager::_mavlinkMessageReceived(const mavlink_message_t& message)
case MAVLINK_MSG_ID_BATTERY_STATUS:
_handleBatteryStatus(message);
break;
+ case MAVLINK_MSG_ID_CAMERA_TRACKING_IMAGE_STATUS:
+ _handleTrackingImageStatus(message);
+ break;
}
}
}
@@ -205,6 +208,7 @@ QGCCameraManager::_findCamera(int id)
void
QGCCameraManager::_handleCameraInfo(const mavlink_message_t& message)
{
+ qCDebug(CameraManagerLog) << "_handleCameraInfo";
//-- Have we requested it?
QString sCompID = QString::number(message.compid);
if(_cameraInfoRequest.contains(sCompID) && !_cameraInfoRequest[sCompID]->infoReceived) {
@@ -371,6 +375,18 @@ QGCCameraManager::_handleBatteryStatus(const mavlink_message_t& message)
//-----------------------------------------------------------------------------
void
+QGCCameraManager::_handleTrackingImageStatus(const mavlink_message_t& message)
+{
+ QGCCameraControl* pCamera = _findCamera(message.compid);
+ if(pCamera) {
+ mavlink_camera_tracking_image_status_t tis;
+ mavlink_msg_camera_tracking_image_status_decode(&message, &tis);
+ pCamera->handleTrackingImageStatus(&tis);
+ }
+}
+
+//-----------------------------------------------------------------------------
+void
QGCCameraManager::_requestCameraInfo(int compID)
{
qCDebug(CameraManagerLog) << "_requestCameraInfo(" << compID << ")";
diff --git a/src/Camera/QGCCameraManager.h b/src/Camera/QGCCameraManager.h
index 5e51501..91c726b 100644
--- a/src/Camera/QGCCameraManager.h
+++ b/src/Camera/QGCCameraManager.h
@@ -85,6 +85,7 @@ protected:
virtual void _handleVideoStreamInfo (const mavlink_message_t& message);
virtual void _handleVideoStreamStatus(const mavlink_message_t& message);
virtual void _handleBatteryStatus (const mavlink_message_t& message);
+ virtual void _handleTrackingImageStatus(const mavlink_message_t& message);
protected:
diff --git a/src/FlightDisplay/FlightDisplayViewVideo.qml b/src/FlightDisplay/FlightDisplayViewVideo.qml
index 05913d3..bfd2918 100644
--- a/src/FlightDisplay/FlightDisplayViewVideo.qml
+++ b/src/FlightDisplay/FlightDisplayViewVideo.qml
@@ -36,6 +36,13 @@ Item {
property bool _hasZoom: _camera && _camera.hasZoom
property int _fitMode: QGroundControl.settingsManager.videoSettings.videoFit.rawValue
+ function getWidth() {
+ return videoBackground.getWidth()
+ }
+ function getHeight() {
+ return videoBackground.getHeight()
+ }
+
property double _thermalHeightFactor: 0.85 //-- TODO
Image {
@@ -65,6 +72,7 @@ Item {
}
Rectangle {
+ id: videoBackground
anchors.fill: parent
color: "black"
visible: QGroundControl.videoManager.decoding
diff --git a/src/FlightDisplay/FlyViewVideo.qml b/src/FlightDisplay/FlyViewVideo.qml
index 109e69e..25337f1 100644
--- a/src/FlightDisplay/FlyViewVideo.qml
+++ b/src/FlightDisplay/FlyViewVideo.qml
@@ -18,6 +18,9 @@ Item {
id: _root
visible: QGroundControl.videoManager.hasVideo
+ property int _track_rec_x: 0
+ property int _track_rec_y: 0
+
property Item pipState: videoPipState
QGCPipState {
id: videoPipState
@@ -86,11 +89,149 @@ Item {
}
MouseArea {
- id: flyViewVideoMouseArea
- anchors.fill: parent
- enabled: pipState.state === pipState.fullState
- hoverEnabled: true
- onDoubleClicked: QGroundControl.videoManager.fullScreen = !QGroundControl.videoManager.fullScreen
+ id: flyViewVideoMouseArea
+ anchors.fill: parent
+ enabled: pipState.state === pipState.fullState
+ hoverEnabled: true
+
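+ // Click-and-drag on the video selects a tracking target: press anchors the marquee,
+ // move resizes it, release converts the selection to normalized video coordinates
+ // and sends a point or rectangle track command to the camera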
+ property double x0: 0
+ property double x1: 0
+ property double y0: 0
+ property double y1: 0
+ property double offset_x: 0
+ property double offset_y: 0
+ property double radius: 20
+ property var trackingROI: null
+ property var trackingStatus: trackingStatusComponent.createObject(flyViewVideoMouseArea, {})
+
+ onDoubleClicked: QGroundControl.videoManager.fullScreen = !QGroundControl.videoManager.fullScreen
+
+ onPressed: {
+ _track_rec_x = mouse.x
+ _track_rec_y = mouse.y
+
+ //create a new rectangle at the wanted position
+ if(videoStreaming._camera) {
+ if (videoStreaming._camera.trackingEnabled) {
+ trackingROI = trackingROIComponent.createObject(flyViewVideoMouseArea, {
+ "x": mouse.x,
+ "y": mouse.y
+ });
+ }
+ }
+ }
+ onPositionChanged: {
+ //on move, update the width of rectangle
+ if (trackingROI !== null) {
+ if (mouse.x < trackingROI.x) {
+ trackingROI.x = mouse.x
+ trackingROI.width = Math.abs(mouse.x - _track_rec_x)
+ } else {
+ trackingROI.width = Math.abs(mouse.x - trackingROI.x)
+ }
+ if (mouse.y < trackingROI.y) {
+ trackingROI.y = mouse.y
+ trackingROI.height = Math.abs(mouse.y - _track_rec_y)
+ } else {
+ trackingROI.height = Math.abs(mouse.y - trackingROI.y)
+ }
+ }
+ }
+ onReleased: {
+ //if there is already a selection, delete it
+ if (trackingROI !== null) {
+ trackingROI.destroy();
+ }
+
+ if(videoStreaming._camera) {
+ if (videoStreaming._camera.trackingEnabled) {
+ // order coordinates --> top/left and bottom/right
+ x0 = Math.min(_track_rec_x, mouse.x)
+ x1 = Math.max(_track_rec_x, mouse.x)
+ y0 = Math.min(_track_rec_y, mouse.y)
+ y1 = Math.max(_track_rec_y, mouse.y)
+
+ //calculate offset between video stream rect and background (black stripes)
+ offset_x = (parent.width - videoStreaming.getWidth()) / 2
+ offset_y = (parent.height - videoStreaming.getHeight()) / 2
+
+ //convert absolute coords in background to absolute video stream coords
+ x0 = x0 - offset_x
+ x1 = x1 - offset_x
+ y0 = y0 - offset_y
+ y1 = y1 - offset_y
+
+ //convert absolute to relative coordinates and limit range to 0...1
+ x0 = Math.max(Math.min(x0 / videoStreaming.getWidth(), 1.0), 0.0)
+ x1 = Math.max(Math.min(x1 / videoStreaming.getWidth(), 1.0), 0.0)
+ y0 = Math.max(Math.min(y0 / videoStreaming.getHeight(), 1.0), 0.0)
+ y1 = Math.max(Math.min(y1 / videoStreaming.getHeight(), 1.0), 0.0)
+
+ //use point message if rectangle is very small
+ if (Math.abs(_track_rec_x - mouse.x) < 10 && Math.abs(_track_rec_y - mouse.y) < 10) {
+ var pt = Qt.point(x0, y0)
+ videoStreaming._camera.startTracking(pt, radius / videoStreaming.getWidth())
+ } else {
+ var rec = Qt.rect(x0, y0, x1 - x0, y1 - y0)
+ videoStreaming._camera.startTracking(rec)
+ }
+ _track_rec_x = 0
+ _track_rec_y = 0
+ }
+ }
+ }
+
+ Component {
+ id: trackingROIComponent
+
+ Rectangle {
+ color: Qt.rgba(0.1,0.85,0.1,0.25)
+ border.color: "green"
+ border.width: 1
+ }
+ }
+
+ Component {
+ id: trackingStatusComponent
+
+ Rectangle {
+ color: "transparent"
+ border.color: "red"
+ border.width: 5
+ radius: 5
+ }
+ }
+
+ Timer {
+ id: trackingStatusTimer
+ interval: 50
+ repeat: true
+ running: true
+ onTriggered: {
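+ // Map the camera-reported normalized tracking rectangle back to screen pixels,
+ // compensating for the letterbox margins around the video stream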
+ if (videoStreaming._camera) {
+ if (videoStreaming._camera.trackingEnabled && videoStreaming._camera.trackingImageStatus) {
+ var margin_hor = (parent.parent.width - videoStreaming.getWidth()) / 2
+ var margin_ver = (parent.parent.height - videoStreaming.getHeight()) / 2
+ var left = margin_hor + videoStreaming.getWidth() * videoStreaming._camera.trackingImageRect.left
+ var top = margin_ver + videoStreaming.getHeight() * videoStreaming._camera.trackingImageRect.top
+ var right = margin_hor + videoStreaming.getWidth() * videoStreaming._camera.trackingImageRect.right
+ var bottom = !isNaN(videoStreaming._camera.trackingImageRect.bottom) ? margin_ver + videoStreaming.getHeight() * videoStreaming._camera.trackingImageRect.bottom : top + (right - left)
+ var width = right - left
+ var height = bottom - top
+
+ flyViewVideoMouseArea.trackingStatus.x = left
+ flyViewVideoMouseArea.trackingStatus.y = top
+ flyViewVideoMouseArea.trackingStatus.width = width
+ flyViewVideoMouseArea.trackingStatus.height = height
+ } else {
+ flyViewVideoMouseArea.trackingStatus.x = 0
+ flyViewVideoMouseArea.trackingStatus.y = 0
+ flyViewVideoMouseArea.trackingStatus.width = 0
+ flyViewVideoMouseArea.trackingStatus.height = 0
+ }
+ }
+ }
+ }
}
ProximityRadarVideoView{
diff --git a/src/FlightMap/Widgets/PhotoVideoControl.qml b/src/FlightMap/Widgets/PhotoVideoControl.qml
index 73eeee4..b51c73c 100644
--- a/src/FlightMap/Widgets/PhotoVideoControl.qml
+++ b/src/FlightMap/Widgets/PhotoVideoControl.qml
@@ -284,6 +284,42 @@ Rectangle {
}
}
+ // Tracking button
+ Rectangle {
+ Layout.alignment: Qt.AlignHCenter
+ color: _mavlinkCamera && _mavlinkCamera.trackingEnabled ? qgcPal.colorRed : qgcPal.windowShadeLight
+ width: ScreenTools.defaultFontPixelWidth * 6
+ height: width
+ radius: width * 0.5
+ border.color: qgcPal.buttonText
+ border.width: 3
+ visible: _mavlinkCamera && _mavlinkCamera.hasTracking
+ QGCColoredImage {
+ height: parent.height * 0.5
+ width: height
+ anchors.centerIn: parent
+ source: "/qmlimages/TrackingIcon.svg"
+ fillMode: Image.PreserveAspectFit
+ sourceSize.height: height
+ color: qgcPal.text
+ MouseArea {
+ anchors.fill: parent
+ onClicked: {
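+ // Toggle tracking; when disabling, also tell the camera to drop the current target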
+ _mavlinkCamera.trackingEnabled = !_mavlinkCamera.trackingEnabled;
+ if(!_mavlinkCamera.trackingEnabled) {
+ _mavlinkCamera.stopTracking()
+ }
+ }
+ }
+ }
+ }
+ QGCLabel {
+ Layout.alignment: Qt.AlignHCenter
+ text: qsTr("Camera Tracking")
+ font.pointSize: ScreenTools.defaultFontPointSize
+ visible: _mavlinkCamera && _mavlinkCamera.hasTracking
+ }
+
//-- Status Information
ColumnLayout {
Layout.alignment: Qt.AlignHCenter
diff --git a/src/ui/toolbar/Images/TrackingIcon.svg b/src/ui/toolbar/Images/TrackingIcon.svg
new file mode 100644
index 0000000..a324008
--- /dev/null
+++ b/src/ui/toolbar/Images/TrackingIcon.svg
@@ -0,0 +1,13 @@
+
+
+
\ No newline at end of file