diff --git a/CMakeLists.txt b/CMakeLists.txt
index a31d2bbbadcd6fdefacd0f50a5c9b7f018555d0c..cf953db20bb7510ec6f664b37b3a48ebecf1e07b 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -31,6 +31,7 @@ include(KDECompilerSettings NO_POLICY_SCOPE)
 include(ECMAddAppIcon)
 include(KDEGitCommitHooks)
 include(ECMCheckOutboundLicense)
+include(ECMQtDeclareLoggingCategory)
 
 if(NEOCHAT_FLATPAK)
     include(cmake/Flatpak.cmake)
@@ -123,6 +124,14 @@ if(ANDROID)
     configure_file(${CMAKE_CURRENT_SOURCE_DIR}/android/version.gradle.in ${CMAKE_BINARY_DIR}/version.gradle)
 endif()
 
+include(FindPkgConfig)
+pkg_check_modules(GSTREAMER IMPORTED_TARGET gstreamer-sdp-1.0>=1.18 gstreamer-webrtc-1.0>=1.18)
+if (TARGET PkgConfig::GSTREAMER)
+    add_feature_info(voip ON "GStreamer found. Call support is enabled.")
+else()
+    add_feature_info(voip OFF "GStreamer could not be found on your system. As a consequence, call support has been disabled. If you don't want that, make sure gstreamer-sdp-1.0>=1.18 and gstreamer-webrtc-1.0>=1.18 can be found via pkgconfig.")
+endif()
+
 ki18n_install(po)
 
 install(FILES org.kde.neochat.desktop DESTINATION ${KDE_INSTALL_APPDIR})
diff --git a/imports/NeoChat/Component/Call/CallConfigurationSheet.qml b/imports/NeoChat/Component/Call/CallConfigurationSheet.qml
new file mode 100644
index 0000000000000000000000000000000000000000..7f4d0af7d3ce7b861373584dd7f64f177f1e634a
--- /dev/null
+++ b/imports/NeoChat/Component/Call/CallConfigurationSheet.qml
@@ -0,0 +1,65 @@
+// SPDX-FileCopyrightText: 2021 Tobias Fella
+// SPDX-License-Identifier: LGPL-2.0-or-later
+
+import QtQuick 2.15
+import QtQuick.Controls 2.15 as QQC2
+
+import org.kde.kirigami 2.15 as Kirigami
+
+import org.kde.neochat 1.0
+
+Kirigami.OverlaySheet {
+    id: sheet
+    property var room: undefined
+
+    title: i18n("Start call")
+
+    Kirigami.FormLayout {
+        visible: Controller.callsSupported
+        QQC2.ComboBox {
+            id: audioSource
+            Kirigami.FormData.label: i18n("Audio Source:")
+            model: Controller.callsSupported ? AudioSources : []
+            currentIndex: Controller.callsSupported ? AudioSources.currentIndex : -1
+            onCurrentIndexChanged: AudioSources.currentIndex = currentIndex
+        }
+        QQC2.ComboBox {
+            id: videoSource
+            Kirigami.FormData.label: i18n("Video Source:")
+            model: Controller.callsSupported ? VideoSources : []
+            currentIndex: Controller.callsSupported ? VideoSources.currentIndex : -1
+            onCurrentIndexChanged: VideoSources.currentIndex = currentIndex
+        }
+        //QQC2.ComboBox {
+            //id: caps
+            //onModelChanged: currentIndex = VideoSources.capsIndex
+            //enabled: sendVideo.checked
+            //Kirigami.FormData.label: i18n("Framerate:")
+        //}
+    }
+    footer: Kirigami.ActionToolBar {
+        actions: [
+            Kirigami.Action {
+                text: i18n("Close")
+                icon.name: "window-close-symbolic"
+                onTriggered: sheet.close()
+            },
+            Kirigami.Action {
+                text: i18n("Video")
+                icon.name: "camera-video-symbolic"
+                onTriggered: {
+                    CallManager.startCall(room, true);
+                    sheet.close();
+                }
+            },
+            Kirigami.Action {
+                text: i18n("Audio")
+                icon.name: "audio-input-microphone"
+                onTriggered: {
+                    CallManager.startCall(room, false);
+                    sheet.close();
+                }
+            }
+        ]
+    }
+}
diff --git a/imports/NeoChat/Component/Call/CallPageButton.qml b/imports/NeoChat/Component/Call/CallPageButton.qml
new file mode 100644
index 0000000000000000000000000000000000000000..fc0dcd47badbcca88ba82237f8637f849f2c5f1d
--- /dev/null
+++ b/imports/NeoChat/Component/Call/CallPageButton.qml
@@ -0,0 +1,87 @@
+// SPDX-FileCopyrightText: 2022 Carson Black
+// SPDX-License-Identifier: LGPL-2.0-or-later
+
+import QtQuick 2.0
+import QtQuick.Controls 2.15 as QQC2
+import QtQuick.Layouts 1.1
+import org.kde.kirigami 2.13 as Kirigami
+import NeoChat.Component 1.0
+
+QQC2.AbstractButton {
+    id: control
+
+    property int temperament: CallPageButton.Neutral
+    property bool shimmering: false
+
+    enum Temperament {
+        Neutral,
+        Constructive,
+        Destructive
+    }
+
+    padding: Kirigami.Units.largeSpacing
+    contentItem: ColumnLayout {
+        QQC2.Control {
+            padding: Kirigami.Units.gridUnit
+
+            Kirigami.Theme.colorSet: Kirigami.Theme.Button
+            Layout.alignment: Qt.AlignHCenter
+
+            contentItem: Kirigami.Icon {
+                implicitHeight: Kirigami.Units.iconSizes.medium
+                implicitWidth: Kirigami.Units.iconSizes.medium
+                source: control.icon.name
+            }
+            background: Rectangle {
+                Kirigami.Theme.colorSet: Kirigami.Theme.Button
+
+                ShimmerGradient {
+                    id: shimmerGradient
+                    color: {
+                        switch (control.temperament) {
+                        case CallPageButton.Neutral:
+                            return Kirigami.Theme.textColor
+                        case CallPageButton.Constructive:
+                            return Kirigami.Theme.positiveTextColor
+                        case CallPageButton.Destructive:
+                            return Kirigami.Theme.negativeTextColor
+                        }
+                    }
+                }
+
+                color: {
+                    if (control.checked) {
+                        return Kirigami.Theme.focusColor
+                    }
+
+                    switch (control.temperament) {
+                    case CallPageButton.Neutral:
+                        return Kirigami.Theme.backgroundColor
+                    case CallPageButton.Constructive:
+                        return Kirigami.Theme.positiveBackgroundColor
+                    case CallPageButton.Destructive:
+                        return Kirigami.Theme.negativeBackgroundColor
+                    }
+                }
+                border.color: Kirigami.Theme.focusColor
+                border.width: control.visualFocus ? 2 : 0
+                radius: height/2
+
+                Rectangle {
+                    visible: control.shimmering
+                    anchors.fill: parent
+                    radius: height/2
+
+                    gradient: control.shimmering ? shimmerGradient : null
+                }
+            }
+        }
+        QQC2.Label {
+            text: control.text
+            font: Kirigami.Theme.smallFont
+
+            horizontalAlignment: Qt.AlignHCenter
+            Layout.fillWidth: true
+        }
+    }
+}
diff --git a/imports/NeoChat/Component/Call/qmldir b/imports/NeoChat/Component/Call/qmldir
new file mode 100644
index 0000000000000000000000000000000000000000..58508814de6097a7ff82b27225f57049f5254b0c
--- /dev/null
+++ b/imports/NeoChat/Component/Call/qmldir
@@ -0,0 +1,3 @@
+module NeoChat.Component.Call
+CallPageButton 1.0 CallPageButton.qml
+CallConfigurationSheet 1.0 CallConfigurationSheet.qml
diff --git a/imports/NeoChat/Component/Timeline/CallInviteDelegate.qml b/imports/NeoChat/Component/Timeline/CallInviteDelegate.qml
new file mode 100644
index 0000000000000000000000000000000000000000..61c94c47c7dcb5f631ad9dff84e2979238754ac1
--- /dev/null
+++ b/imports/NeoChat/Component/Timeline/CallInviteDelegate.qml
@@ -0,0 +1,22 @@
+// SPDX-FileCopyrightText: 2022 Carson Black
+// SPDX-License-Identifier: GPL-2.0-or-later
+
+import QtQuick 2.15
+import QtQuick.Controls 2.15 as QQC2
+import QtQuick.Layouts 1.15
+
+import org.kde.kirigami 2.15 as Kirigami
+import org.kde.neochat 1.0
+
+TimelineContainer {
+    width: ListView.view.width
+
+    innerObject: QQC2.Control {
+        Layout.leftMargin: Config.showAvatarInTimeline ? Kirigami.Units.largeSpacing : 0
+        padding: Kirigami.Units.gridUnit*2
+
+        contentItem: QQC2.Label {
+            text: model.author.isLocalUser ? i18n("Outgoing Call") : i18n("Incoming Call")
+        }
+    }
+}
diff --git a/imports/NeoChat/Component/Timeline/EventDelegate.qml b/imports/NeoChat/Component/Timeline/EventDelegate.qml
index 49542d8b4c5269e660da577e465e119df723a1eb..9fd2d0407c8256b2bd122a6010490bd38d6638ec 100644
--- a/imports/NeoChat/Component/Timeline/EventDelegate.qml
+++ b/imports/NeoChat/Component/Timeline/EventDelegate.qml
@@ -18,6 +18,11 @@
         delegate: StateDelegate {}
     }
 
+    DelegateChoice {
+        roleValue: "call-invite"
+        delegate: CallInviteDelegate {}
+    }
+
     DelegateChoice {
         roleValue: "emote"
         delegate: MessageDelegate {
diff --git a/imports/NeoChat/Page/CallPage.qml b/imports/NeoChat/Page/CallPage.qml
new file mode 100644
index 0000000000000000000000000000000000000000..763daf5b6a5ccdffef9dfba0d15136b27b36bd74
--- /dev/null
+++ b/imports/NeoChat/Page/CallPage.qml
@@ -0,0 +1,180 @@
+/* SPDX-FileCopyrightText: 2021 Tobias Fella
+ *
+ * SPDX-License-Identifier: GPL-2.0-or-later
+ */
+
+import QtQuick 2.12
+import QtQuick.Controls 2.12 as QQC2
+import QtQuick.Layouts 1.12
+
+import org.kde.kirigami 2.14 as Kirigami
+import org.freedesktop.gstreamer.GLVideoItem 1.0
+
+import QtGraphicalEffects 1.15
+
+import org.kde.neochat 1.0
+import NeoChat.Component.Call 1.0
+
+Kirigami.Page {
+    id: page
+
+    title: CallManager.hasInvite ? i18n("Incoming Call")
+         : CallManager.isInviting ? i18n("Calling")
+         : CallManager.state == CallSession.INITIATING ? i18n("Configuring Call")
i18n("Configuring Call") + : i18n("Call") + + ColumnLayout { + anchors.fill: parent + + ColumnLayout { + Layout.alignment: Qt.AlignHCenter | Qt.AlignVCenter + + GstGLVideoItem { + id: videoItem + //Layout.fillWidth: true + Layout.fillHeight: true + Component.onCompleted: CallManager.item = this + z: 10 + MouseArea { + anchors.fill: parent + drag.target: parent + } + //layer.enabled: true + //layer.effect: OpacityMask { + //maskSource: Item { + //width: videoItem.width + //height: videoItem.height + //Rectangle { + //id: mask + //anchors.centerIn: parent + //width: /*videoItem.adapt ?*/ videoItem.width //: Math.min(videoItem.width, videoItem.height) + //height: /*videoItem.adapt ?*/ videoItem.height //: width + //radius: Kirigami.Units.smallSpacing - borderWidth + //} + //} + //} + } + + Kirigami.Avatar { + Layout.preferredWidth: Kirigami.Units.iconSizes.huge + Layout.preferredHeight: Kirigami.Units.iconSizes.huge + Layout.alignment: Qt.AlignHCenter + + name: CallManager.room.displayName + source: "image://mxc/" + CallManager.room.avatarMediaId + } + + QQC2.Label { + text: CallManager.remoteUser.displayName + + horizontalAlignment: Text.AlignHCenter + Layout.fillWidth: true + } + QQC2.Label { + text: CallManager.room.displayName + + horizontalAlignment: Text.AlignHCenter + Layout.fillWidth: true + } + + Item { implicitHeight: Kirigami.Units.gridUnit * 2 } + + RowLayout { + Layout.alignment: Qt.AlignHCenter + + id: buttonRow + spacing: Kirigami.Units.gridUnit + + CallPageButton { + text: i18n("Accept") + icon.name: "call-start" + shimmering: true + temprament: CallPageButton.Constructive + visible: CallManager.globalState === CallManager.INCOMING + + onClicked: { + visible = false; //TODO declarify + CallManager.acceptCall() + } + } + CallPageButton { + text: checked ? i18n("Enable Camera") : i18n("Disable Camera") + icon.name: checked ? "camera-off" : "camera-on" + checkable: true + } + CallPageButton { + text: checked ? i18n("Unmute Speaker") : i18n("Mute Speaker") + icon.name: checked ? "audio-volume-muted" : "audio-speakers-symbolic" + checkable: true + } + CallPageButton { + text: checked ? i18n("Unmute Microphone") : i18n("Mute Microphone") + icon.name: checked ? "microphone-sensitivity-muted" : "microphone-sensitivity-high" + checkable: true + checked: CallManager.muted + + onToggled: CallManager.muted = !CallManager.muted + } + CallPageButton { + text: i18n("Configure Devices") + icon.name: "settings-configure" + onClicked: callConfigurationSheet.open() + } + CallPageButton { + id: denyButton + visible: CallManager.globalState === CallManager.INCOMING + text: i18n("Deny") + icon.name: "call-stop" + shimmering: true + temprament: CallPageButton.Destructive + + onClicked: CallManager.hangupCall() + } + CallPageButton { + visible: !denyButton.visible + text: CallManager.isInviting ? 
i18n("Cancel") : i18n("Hang Up") + icon.name: "call-stop" + shimmering: CallManager.isInviting + temprament: CallPageButton.Destructive + + onClicked: CallManager.hangupCall() + } + } + } + } + + Timer { + id: lifeTimer + interval: CallManager.lifetime + onTriggered: { + page.closeDialog() + } + } + + Connections { + target: CallManager + function onCallEnded() { + page.closeDialog() + } + function onHasInviteChanged() { + if(!CallManager.hasInvite) { + lifeTimer.stop() + } + } + function onIsInvitingChanged() { + if(!CallManager.isInviting) { + lifeTimer.stop() + } + } + } + + Component.onCompleted: { + if(CallManager.hasInvite || CallManager.isInviting) { + lifeTimer.start() + } + } + + CallConfigurationSheet { + id: callConfigurationSheet + } +} diff --git a/imports/NeoChat/Page/RoomPage.qml b/imports/NeoChat/Page/RoomPage.qml index bd9fc505c3e8155e69335a327cd953b4d9fc3df1..d33198be0c1041eb178bf541d1cef586a7b32fe4 100644 --- a/imports/NeoChat/Page/RoomPage.qml +++ b/imports/NeoChat/Page/RoomPage.qml @@ -17,6 +17,7 @@ import NeoChat.Component.ChatBox 1.0 import NeoChat.Component.Timeline 1.0 import NeoChat.Dialog 1.0 import NeoChat.Menu.Timeline 1.0 +import NeoChat.Component.Call 1.0 Kirigami.ScrollablePage { id: page @@ -76,6 +77,16 @@ Kirigami.ScrollablePage { } } + actions.main: Kirigami.Action { + text: i18n("Call") + icon.name: "call-start" + visible: Controller.callsSupported && page.currentRoom.joinedCount === 2 + onTriggered: { + callConfigurationSheet.room = page.currentRoom + callConfigurationSheet.open() + } + } + Connections { target: actionsHandler function onShowMessage(messageType, message) { @@ -615,4 +626,7 @@ Kirigami.ScrollablePage { }); contextMenu.open(); } + CallConfigurationSheet { + id: callConfigurationSheet + } } diff --git a/qml/main.qml b/qml/main.qml index 490bcd80837a82204f195f1ecb5532606d05ead6..1374b910a4fd28f1eb55a3cf2ab34c1aac948707 100644 --- a/qml/main.qml +++ b/qml/main.qml @@ -44,6 +44,16 @@ Kirigami.ApplicationWindow { source: Qt.resolvedUrl("qrc:/imports/NeoChat/Menu/GlobalMenu.qml") } + Connections { + target: Controller.callsSupported ? 
+
+        function onGlobalStateChanged() {
+            if (CallManager.globalState === CallManager.OUTGOING || CallManager.globalState === CallManager.INCOMING) {
+                pageStack.pushDialogLayer("qrc:/imports/NeoChat/Page/CallPage.qml")
+            }
+        }
+    }
+
     // This timer allows to batch update the window size change to reduce
     // the io load and also work around the fact that x/y/width/height are
     // changed when loading the page and overwrite the saved geometry from
@@ -349,6 +359,10 @@ Kirigami.ApplicationWindow {
             showPassiveNotification(i18n("%1: %2", error, detail));
         }
 
+        function onErrorOccured(error) {
+            showPassiveNotification(error);
+        }
+
         function onShowWindow(token = null) {
             root.showWindow()
             if (token && KWindowSystem) {
diff --git a/res.qrc b/res.qrc
index b0ff13313b47d68cd477a7857b34c593ff0e8c8c..fb7c1d3969874f1603ef585bbc608ca429c03c8b 100644
--- a/res.qrc
+++ b/res.qrc
@@ -13,6 +13,7 @@
         <file>imports/NeoChat/Page/StartChatPage.qml</file>
         <file>imports/NeoChat/Page/ImageEditorPage.qml</file>
         <file>imports/NeoChat/Page/WelcomePage.qml</file>
+        <file>imports/NeoChat/Page/CallPage.qml</file>
         <file>imports/NeoChat/RoomSettings/General.qml</file>
         <file>imports/NeoChat/RoomSettings/Security.qml</file>
        <file>imports/NeoChat/RoomSettings/Categories.qml</file>
@@ -22,7 +23,9 @@
         <file>imports/NeoChat/Component/TypingPane.qml</file>
         <file>imports/NeoChat/Component/ShimmerGradient.qml</file>
         <file>imports/NeoChat/Component/QuickSwitcher.qml</file>
-        <file>imports/NeoChat/Component/ChatBox</file>
+        <file>imports/NeoChat/Component/Call/qmldir</file>
+        <file>imports/NeoChat/Component/Call/CallPageButton.qml</file>
+        <file>imports/NeoChat/Component/Call/CallConfigurationSheet.qml</file>
         <file>imports/NeoChat/Component/ChatBox/ChatBox.qml</file>
         <file>imports/NeoChat/Component/ChatBox/ChatBar.qml</file>
         <file>imports/NeoChat/Component/ChatBox/AttachmentPane.qml</file>
@@ -46,6 +49,7 @@
         <file>imports/NeoChat/Component/Timeline/EventDelegate.qml</file>
         <file>imports/NeoChat/Component/Timeline/MessageDelegate.qml</file>
         <file>imports/NeoChat/Component/Timeline/ReadMarkerDelegate.qml</file>
+        <file>imports/NeoChat/Component/Timeline/CallInviteDelegate.qml</file>
         <file>imports/NeoChat/Component/Login/qmldir</file>
         <file>imports/NeoChat/Component/Login/LoginStep.qml</file>
         <file>imports/NeoChat/Component/Login/Login.qml</file>
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 3787ace99fe9d950f7f11a5b446562f34fb790cb..a15bce575b0dd43ede1ebb5262e86afaaa4d70da 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -45,6 +45,13 @@ else()
     target_sources(neochat PRIVATE neochataccountregistry.cpp)
 endif()
 
+ecm_qt_declare_logging_category(neochat
+    HEADER "voiplogging.h"
+    IDENTIFIER "voip"
+    CATEGORY_NAME "org.kde.neochat.voip"
+)
+
 ecm_add_app_icon(NEOCHAT_ICON ICONS ${CMAKE_SOURCE_DIR}/128-logo.png)
 target_sources(neochat PRIVATE ${NEOCHAT_ICON})
 
@@ -75,6 +82,18 @@ if(TARGET QCoro5::Coro)
 else()
     target_link_libraries(neochat PRIVATE QCoro::QCoro)
 endif()
+
+if (TARGET PkgConfig::GSTREAMER)
+    target_link_libraries(neochat PRIVATE PkgConfig::GSTREAMER)
+    target_sources(neochat PRIVATE
+        call/callmanager.cpp
+        call/callsession.cpp
+        call/audiosources.cpp
+        call/videosources.cpp
+        call/devicemonitor.cpp
+    )
+    target_compile_definitions(neochat PRIVATE GSTREAMER_AVAILABLE)
+endif()
 
 kconfig_add_kcfg_files(neochat GENERATE_MOC neochatconfig.kcfgc)
 if(NEOCHAT_FLATPAK)
diff --git a/src/call/audiodevicesmodel.cpp b/src/call/audiodevicesmodel.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..cbf2d5450c55ebd45976031da834c73e96c617f1
--- /dev/null
+++ b/src/call/audiodevicesmodel.cpp
@@ -0,0 +1,86 @@
+// SPDX-FileCopyrightText: 2021 Nheko Contributors
+// SPDX-FileCopyrightText: 2021 Tobias Fella
+// SPDX-FileCopyrightText: 2021 Carl Schwan
+//
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "audiodevicesmodel.h"
+#include "neochatconfig.h"
+#include <QDebug>
+
+#ifdef GSTREAMER_AVAILABLE
+extern "C" {
+#include "gst/gst.h"
+}
+#endif
+
+AudioDevicesModel::AudioDevicesModel(QObject *parent)
+    : QAbstractListModel(parent)
+{
+}
+
+QVariant AudioDevicesModel::data(const QModelIndex &index, int role) const
+{
+    const auto row = index.row();
+    if (role == Qt::DisplayRole) {
+        return m_audioSources[row].name;
+    }
+    return {};
+}
+
+int AudioDevicesModel::rowCount(const QModelIndex &parent) const
+{
+    return m_audioSources.size();
+}
+
+GstDevice *AudioDevicesModel::currentDevice() const
+{
+    const auto config = NeoChatConfig::self();
+    const QString name = config->microphone();
+    for (const auto &audioSource : m_audioSources) {
+        if (audioSource.name == name) {
+            qDebug() << "WebRTC: microphone:" << name;
+            return audioSource.device;
+        }
+    }
+    qCritical() << "WebRTC: unknown microphone:" << name;
+    return nullptr;
+}
+
+bool AudioDevicesModel::removeDevice(GstDevice *device, bool changed)
+{
+    for (int i = 0; i < m_audioSources.size(); i++) {
+        if (m_audioSources[i].device == device) {
+            beginRemoveRows({}, i, i);
+            m_audioSources.removeAt(i);
+            endRemoveRows();
+            return true;
+        }
+    }
+    return false;
+}
+
+bool AudioDevicesModel::hasMicrophone() const
+{
+    return !m_audioSources.empty();
+}
+
+void AudioDevicesModel::addDevice(GstDevice *device)
+{
+    auto _name = gst_device_get_display_name(device);
+    QString name(_name);
+    g_free(_name);
+
+    qWarning() << "CallDevices: Audio device added:" << name;
+
+    beginInsertRows({}, m_audioSources.size(), m_audioSources.size());
+    m_audioSources.append(AudioSource{name, device});
+    endInsertRows();
+}
+
+void AudioDevicesModel::setDefaultDevice() const
+{
+    if (NeoChatConfig::self()->microphone().isEmpty()) {
+        NeoChatConfig::self()->setMicrophone(m_audioSources.front().name);
+    }
+}
diff --git a/src/call/audiodevicesmodel.h b/src/call/audiodevicesmodel.h
new file mode 100644
index 0000000000000000000000000000000000000000..25a2e279c23e91fd8a88f42c85584717555511fe
--- /dev/null
+++ b/src/call/audiodevicesmodel.h
@@ -0,0 +1,39 @@
+// SPDX-FileCopyrightText: 2021 Nheko Contributors
+// SPDX-FileCopyrightText: 2021 Tobias Fella
+// SPDX-FileCopyrightText: 2021 Carl Schwan
+//
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#pragma once
+
+#include <QAbstractListModel>
+
+typedef struct _GstDevice GstDevice;
+
+class AudioDevicesModel : public QAbstractListModel
+{
+    Q_OBJECT
+
+public:
+    struct AudioSource {
+        QString name;
+        GstDevice *device;
+    };
+
+    AudioDevicesModel(QObject *parent = nullptr);
+    ~AudioDevicesModel() = default;
+
+    int rowCount(const QModelIndex &parent = QModelIndex()) const override;
+    QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override;
+    QVector<AudioSource> audioSources() const;
+
+    void addDevice(GstDevice *device);
+    bool removeDevice(GstDevice *device, bool changed);
+    bool hasMicrophone() const;
+    void setDefaultDevice() const;
+
+    GstDevice *currentDevice() const;
+
+private:
+    QVector<AudioSource> m_audioSources;
+};
diff --git a/src/call/audiosources.cpp b/src/call/audiosources.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..b55f39217a6098f8975f3cd14d1ead0bce931c02
--- /dev/null
+++ b/src/call/audiosources.cpp
@@ -0,0 +1,89 @@
+// SPDX-FileCopyrightText: 2021 Tobias Fella
+// SPDX-License-Identifier: LGPL-2.0-or-later
+
+#include "audiosources.h"
+
+#include <QDebug>
+
+#include <QVector>
+#include <gst/gst.h>
+
+#include "devicemonitor.h"
"devicemonitor.h" + +#include "neochatconfig.h" + +int AudioSources::rowCount(const QModelIndex &parent) const +{ + Q_UNUSED(parent); + return DeviceMonitor::instance().audioSources().size(); +} + +QVariant AudioSources::data(const QModelIndex &index, int role) const +{ + if (index.row() >= DeviceMonitor::instance().audioSources().size()) { + return QVariant(QStringLiteral("DEADBEEF")); + } + if (role == TitleRole) { + return DeviceMonitor::instance().audioSources()[index.row()].title; + } + return QVariant(); +} + +QHash AudioSources::roleNames() const +{ + return { + {TitleRole, "title"}, + }; +} + +AudioSources::AudioSources() + : QAbstractListModel() +{ + connect(&DeviceMonitor::instance(), &DeviceMonitor::audioSourceAdded, this, [this]() { + beginResetModel(); + endResetModel(); + Q_EMIT currentIndexChanged(); + }); + connect(&DeviceMonitor::instance(), &DeviceMonitor::audioSourceRemoved, this, [this]() { + beginResetModel(); + endResetModel(); + Q_EMIT currentIndexChanged(); + }); +} + +GstDevice *AudioSources::currentDevice() const +{ + const auto config = NeoChatConfig::self(); + const QString name = config->microphone(); + for (const auto &audioSource : DeviceMonitor::instance().audioSources()) { + if (audioSource.title == name) { + qDebug() << "WebRTC: microphone:" << name; + return audioSource.device; + } + } + return DeviceMonitor::instance().audioSources()[0].device; +} + +void AudioSources::setCurrentIndex(int index) const +{ + if (DeviceMonitor::instance().audioSources().size() == 0) { + return; + } + NeoChatConfig::setMicrophone(DeviceMonitor::instance().audioSources()[index].title); + NeoChatConfig::self()->save(); +} + +int AudioSources::currentIndex() const +{ + const auto config = NeoChatConfig::self(); + const QString name = config->microphone(); + for (auto i = 0; i < DeviceMonitor::instance().audioSources().size(); i++) { + if (DeviceMonitor::instance().audioSources()[i].title == name) { + return i; + } + } + return 0; +} diff --git a/src/call/audiosources.h b/src/call/audiosources.h new file mode 100644 index 0000000000000000000000000000000000000000..9e6d38969d0deea30bc86d606dca314b803ee39d --- /dev/null +++ b/src/call/audiosources.h @@ -0,0 +1,40 @@ +// SPDX-FileCopyrightText: 2021 Tobias Fella +// SPDX-License-Identifier: LGPL-2.0-or-later + +#pragma once + +#include + +#include + +class AudioSources : public QAbstractListModel +{ + Q_OBJECT + Q_PROPERTY(int currentIndex READ currentIndex WRITE setCurrentIndex NOTIFY currentIndexChanged) + +public: + enum Roles { + TitleRole = Qt::UserRole + 1, + }; + + static AudioSources &instance() + { + static AudioSources _instance; + return _instance; + } + + int rowCount(const QModelIndex &parent = QModelIndex()) const override; + QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override; + QHash roleNames() const override; + + GstDevice *currentDevice() const; + + void setCurrentIndex(int index) const; + int currentIndex() const; + +Q_SIGNALS: + void currentIndexChanged(); + +private: + AudioSources(); +}; diff --git a/src/call/calldevices.cpp b/src/call/calldevices.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a5b949c7379bd19b062219ef0932f25cafd9f3cb --- /dev/null +++ b/src/call/calldevices.cpp @@ -0,0 +1,199 @@ +// SPDX-FileCopyrightText: 2021 Nheko Contributors +// SPDX-FileCopyrightText: 2021 Tobias Fella +// SPDX-FileCopyrightText: 2021 Carl Schwan +// +// SPDX-License-Identifier: GPL-3.0-or-later + +#include "calldevices.h" +#include "audiodevicesmodel.h" +#include 
"neochatconfig.h" +#include "videodevicesmodel.h" +#include +#include +#include + +#include "voiplogging.h" + +#ifdef GSTREAMER_AVAILABLE +extern "C" { +#include "gst/gst.h" +} +#endif + +#ifdef GSTREAMER_AVAILABLE + +CallDevices::CallDevices() + : QObject() + , m_audioDevicesModel(new AudioDevicesModel(this)) + , m_videoDevicesModel(new VideoDevicesModel(this)) +{ + init(); +} + +AudioDevicesModel *CallDevices::audioDevicesModel() const +{ + return m_audioDevicesModel; +} + +VideoDevicesModel *CallDevices::videoDevicesModel() const +{ + return m_videoDevicesModel; +} + +void CallDevices::addDevice(GstDevice *device) +{ + if (!device) + return; + + gchar *type = gst_device_get_device_class(device); + bool isVideo = !std::strncmp(type, "Video", 5); + g_free(type); + if (isVideo) { + m_videoDevicesModel->addDevice(device); + m_videoDevicesModel->setDefaultDevice(); + } else { + m_audioDevicesModel->addDevice(device); + m_audioDevicesModel->setDefaultDevice(); + } +} + +void CallDevices::removeDevice(GstDevice *device, bool changed) +{ + if (device) { + if (m_audioDevicesModel->removeDevice(device, changed) || m_videoDevicesModel->removeDevice(device, changed)) + return; + } +} + +namespace +{ +gboolean newBusMessage(GstBus *bus, GstMessage *msg, gpointer user_data) +{ + Q_UNUSED(bus) + Q_UNUSED(user_data) + + switch (GST_MESSAGE_TYPE(msg)) { + case GST_MESSAGE_DEVICE_ADDED: { + GstDevice *device; + gst_message_parse_device_added(msg, &device); + CallDevices::instance().addDevice(device); + Q_EMIT CallDevices::instance().devicesChanged(); + break; + } + case GST_MESSAGE_DEVICE_REMOVED: { + GstDevice *device; + gst_message_parse_device_removed(msg, &device); + CallDevices::instance().removeDevice(device, false); + Q_EMIT CallDevices::instance().devicesChanged(); + break; + } + case GST_MESSAGE_DEVICE_CHANGED: { + GstDevice *device; + GstDevice *oldDevice; + gst_message_parse_device_changed(msg, &device, &oldDevice); + CallDevices::instance().removeDevice(oldDevice, true); + CallDevices::instance().addDevice(device); + Q_EMIT CallDevices::instance().devicesChanged(); + break; + } + default: + break; + } + return true; +} +} + +void CallDevices::init() +{ + static GstDeviceMonitor *monitor = nullptr; + if (!monitor) { + monitor = gst_device_monitor_new(); + Q_ASSERT(monitor); + GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw"); + gst_device_monitor_add_filter(monitor, "Audio/Source", caps); + gst_device_monitor_add_filter(monitor, "Audio/Duplex", caps); + gst_caps_unref(caps); + caps = gst_caps_new_empty_simple("video/x-raw"); + gst_device_monitor_add_filter(monitor, "Video/Source", caps); + gst_device_monitor_add_filter(monitor, "Video/Duplex", caps); + gst_caps_unref(caps); + + GstBus *bus = gst_device_monitor_get_bus(monitor); + gst_bus_add_watch(bus, newBusMessage, nullptr); + gst_object_unref(bus); + if (!gst_device_monitor_start(monitor)) { + qCCritical(voip) << "Failed to start device monitor"; + return; + } else { + qCDebug(voip) << "Device monitor started"; + } + } +} + +bool CallDevices::hasMicrophone() const +{ + return m_audioDevicesModel->hasMicrophone(); +} + +bool CallDevices::hasCamera() const +{ + return m_videoDevicesModel->hasCamera(); +} + +QStringList CallDevices::resolutions(const QString &cameraName) const +{ + return m_videoDevicesModel->resolutions(cameraName); +} + +QStringList CallDevices::frameRates(const QString &cameraName, const QString &resolution) const +{ + if (auto s = m_videoDevicesModel->getVideoSource(cameraName); s) { + if (auto it = 
std::find_if(s->caps.cbegin(), + s->caps.cend(), + [&](const auto &c) { + return c.resolution == resolution; + }); + it != s->caps.cend()) + return it->frameRates; + } + return {}; +} + +GstDevice *CallDevices::audioDevice() const +{ + return m_audioDevicesModel->currentDevice(); +} + +GstDevice *CallDevices::videoDevice(QPair &resolution, QPair &frameRate) const +{ + return m_videoDevicesModel->currentDevice(resolution, frameRate); +} + +#else + +bool CallDevices::hasMicrophone() const +{ + return false; +} + +bool CallDevices::hasCamera() const +{ + return false; +} + +QStringList CallDevices::names(bool, const QString &) const +{ + return {}; +} + +QStringList CallDevices::resolutions(const QString &) const +{ + return {}; +} + +QStringList CallDevices::frameRates(const QString &, const QString &) const +{ + return {}; +} + +#endif diff --git a/src/call/calldevices.h b/src/call/calldevices.h new file mode 100644 index 0000000000000000000000000000000000000000..64f38cf025c509644339ae8d0d4aa5d12c394607 --- /dev/null +++ b/src/call/calldevices.h @@ -0,0 +1,64 @@ +// SPDX-FileCopyrightText: 2021 Contributors +// SPDX-FileCopyrightText: 2021 Tobias Fella +// SPDX-FileCopyrightText: 2021 Carl Schwan +// +// SPDX-License-Identifier: GPL-3.0-or-later + +#pragma once + +#include +#include +#include + +#include + +typedef struct _GstDevice GstDevice; + +class CallDevices; +class AudioDevicesModel; +class VideoDevicesModel; + +class CallDevices : public QObject +{ + Q_OBJECT + + Q_PROPERTY(AudioDevicesModel *audioDevices READ audioDevicesModel CONSTANT); + Q_PROPERTY(VideoDevicesModel *videoDevices READ videoDevicesModel CONSTANT); + +public: + static CallDevices &instance() + { + static CallDevices instance; + return instance; + } + CallDevices(CallDevices const &) = delete; + void operator=(CallDevices const &) = delete; + + bool hasMicrophone() const; + bool hasCamera() const; + QStringList names(bool isVideo, const QString &defaultDevice) const; + QStringList resolutions(const QString &cameraName) const; + QStringList frameRates(const QString &cameraName, const QString &resolution) const; + + AudioDevicesModel *audioDevicesModel() const; + VideoDevicesModel *videoDevicesModel() const; + + void addDevice(GstDevice *device); + void removeDevice(GstDevice *device, bool changed); + +Q_SIGNALS: + void devicesChanged(); + +private: + CallDevices(); + + void init(); + GstDevice *audioDevice() const; + GstDevice *videoDevice(QPair &resolution, QPair &frameRate) const; + + AudioDevicesModel *m_audioDevicesModel; + VideoDevicesModel *m_videoDevicesModel; + + friend class CallSession; + friend class Audio; +}; diff --git a/src/call/callmanager.cpp b/src/call/callmanager.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ec779873f0128f33e5804eec18766c60d1a10a23 --- /dev/null +++ b/src/call/callmanager.cpp @@ -0,0 +1,421 @@ +// SPDX-FileCopyrightText: 2020-2021 Nheko Authors +// SPDX-FileCopyrightText: 2021-2022 Tobias Fella +// SPDX-License-Identifier: GPL-3.0-or-later + +#include "callmanager.h" + +#include "controller.h" + +#include + +#include "voiplogging.h" +#include +#include + +#include + +CallManager::CallManager() +{ + init(); + connect(&Controller::instance(), &Controller::activeConnectionChanged, this, [this] { + updateTurnServers(); + }); +} + +QCoro::Task CallManager::updateTurnServers() +{ + qDebug() << m_cachedTurnUrisValidUntil << QDateTime::currentDateTime(); + if (m_cachedTurnUrisValidUntil > QDateTime::currentDateTime()) { + co_return; + } + 
Controller::instance().activeConnection()->getTurnServers(); + + auto servers = co_await qCoro(Controller::instance().activeConnection(), &Connection::turnServersChanged); + m_cachedTurnUrisValidUntil = QDateTime::currentDateTime().addSecs(servers["ttl"].toInt()); + + auto password = servers["password"].toString(); + auto username = servers["username"].toString(); + auto uris = servers["uris"].toArray(); + + m_cachedTurnUris.clear(); + for (const auto &u : uris) { + QString uri = u.toString(); + auto c = uri.indexOf(':'); + if (c == -1) { + qDebug() << "Invalid TURN URI:" << uri; + continue; + } + QString scheme = uri.left(c); + if (scheme != "turn" && scheme != "turns") { + qDebug() << "Invalid TURN scheme:" << scheme; + continue; + } + m_cachedTurnUris += scheme + QStringLiteral("://") + QUrl::toPercentEncoding(username) + QStringLiteral(":") + QUrl::toPercentEncoding(password) + + QStringLiteral("@") + uri.mid(c + 1); + } +} + +QString CallManager::callId() const +{ + return m_callId; +} + +void CallManager::handleCallEvent(NeoChatRoom *room, const Quotient::RoomEvent *event) +{ + if (const auto &inviteEvent = eventCast(event)) { + handleInvite(room, inviteEvent); + } else if (const auto &hangupEvent = eventCast(event)) { + handleHangup(room, hangupEvent); + } else if (const auto &candidatesEvent = eventCast(event)) { + handleCandidates(room, candidatesEvent); + } else if (const auto &answerEvent = eventCast(event)) { + handleAnswer(room, answerEvent); + } +} + +void CallManager::handleAnswer(NeoChatRoom *room, const Quotient::CallAnswerEvent *event) +{ + if (globalState() != OUTGOING) { + qCDebug(voip) << "Not inviting; irrelevant answer"; + return; + } + // if this isn't our call, then we don't care + if (event->callId() != m_callId) { + return; + } + // if this is something we sent out... 
+ if (event->senderId() == room->localUser()->id()) { + if (state() == CallSession::DISCONNECTED) { + // this is us from another device, so we handled it from elsewhere + // + // TODO: Show the user that the call was answered on another device + // TODO: Stop ringing + } else { + // this is the answer we sent out, so we don't need to handle it + } + return; + } + + // if we're actually calling, accept the answer + if (state() != CallSession::DISCONNECTED) { + // TODO wait until candidates are here + m_session->acceptAnswer(event->sdp(), m_incomingCandidates); + return; + } + m_incomingCandidates.clear(); + setGlobalState(ACTIVE); +} + +void CallManager::handleCandidates(NeoChatRoom *room, const Quotient::CallCandidatesEvent *event) +{ + if (event->senderId() == room->localUser()->id()) { + return; + } + if (!m_callId.isEmpty() && event->callId() != m_callId) { // temp: don't accept candidates if there is a callId + qCDebug(voip) << "Candidates not for this call; Skipping"; + return; + } + for (const auto &candidate : event->candidates()) { + m_incomingCandidates += + Candidate{candidate.toObject()["candidate"].toString(), candidate.toObject()["sdpMLineIndex"].toInt(), candidate.toObject()["sdpMid"].toString()}; + } +} + +void CallManager::handleInvite(NeoChatRoom *room, const Quotient::CallInviteEvent *event) +{ + if (globalState() != IDLE) { + // TODO handle glare + qCDebug(voip) << "Already in a call"; + return; + } + + if (event->originTimestamp() < QDateTime::currentDateTime().addSecs(-60)) { + return; + } + if (event->senderId() == room->localUser()->id()) { + qCDebug(voip) << "Sent by this user"; + return; + } + setGlobalState(INCOMING); + + m_incomingSDP = event->sdp(); + m_remoteUser = dynamic_cast(room->user(event->senderId())); + Q_EMIT remoteUserChanged(); + m_room = room; + Q_EMIT roomChanged(); + m_callId = event->callId(); + Q_EMIT callIdChanged(); + Q_EMIT incomingCall(static_cast(room->user(event->senderId())), room, event->lifetime(), event->callId()); + // TODO: Start ringing; + m_hasInvite = true; + Q_EMIT hasInviteChanged(); + m_lifetime = event->lifetime(); + Q_EMIT lifetimeChanged(); + QTimer::singleShot(event->lifetime(), this, [this]() { + m_hasInvite = false; + Q_EMIT hasInviteChanged(); + }); + // acceptCall(); //TODO remove +} + +void CallManager::handleHangup(NeoChatRoom *room, const Quotient::CallHangupEvent *event) +{ + if (globalState() == IDLE) { + qCDebug(voip) << "No call; irrelevant hangup"; + return; + } + + if (event->senderId() == room->localUser()->id()) { + return; + } + if (event->callId() != m_callId) { + return; + } + if (m_session) { + m_session->end(); + } + setGlobalState(IDLE); + Q_EMIT callEnded(); +} + +void CallManager::acceptCall() +{ + if (!hasInvite()) { + return; + } + + // TODO check plugins + // TODO wait until candidates are here + + updateTurnServers(); + + // TODO make video configurable + // change true to false if you don't have a camera + m_session = CallSession::acceptCall(true, m_incomingSDP, m_incomingCandidates, m_cachedTurnUris, this); + connect(m_session, &CallSession::stateChanged, this, [this] { + Q_EMIT stateChanged(); + if (state() == CallSession::ICEFAILED) { + Q_EMIT callEnded(); + } + }); // TODO refactor away? 
+    m_incomingCandidates.clear();
+    connectSingleShot(m_session, &CallSession::answerCreated, this, [this](const QString &sdp, const QVector<Candidate> &candidates) {
+        m_room->answerCall(m_callId, sdp);
+        qCDebug(voip) << "Sending Answer";
+        m_room->sendCallCandidates(m_callId, candidatesToJson(candidates));
+        qCDebug(voip) << "Sending Candidates";
+        setGlobalState(ACTIVE);
+    });
+    m_hasInvite = false;
+    Q_EMIT hasInviteChanged();
+}
+
+void CallManager::hangupCall()
+{
+    if (m_session) {
+        m_session->end();
+    }
+    m_room->hangupCall(m_callId);
+    setGlobalState(IDLE);
+    Q_EMIT callEnded();
+    m_isInviting = false;
+    m_hasInvite = false;
+    Q_EMIT isInvitingChanged();
+    Q_EMIT hasInviteChanged();
+}
+
+void CallManager::checkPlugins(bool isVideo, QString &errorMessage)
+{
+    m_session->havePlugins(isVideo, errorMessage);
+}
+
+NeoChatUser *CallManager::remoteUser() const
+{
+    return m_remoteUser;
+}
+
+NeoChatRoom *CallManager::room() const
+{
+    return m_room;
+}
+
+bool CallManager::hasInvite() const
+{
+    return m_hasInvite;
+}
+
+CallSession::State CallManager::state() const
+{
+    if (!m_session) {
+        return CallSession::DISCONNECTED;
+    }
+    return m_session->state();
+}
+
+int CallManager::lifetime() const
+{
+    return m_lifetime;
+}
+
+void CallManager::ignoreCall()
+{
+    m_lifetime = 0;
+    Q_EMIT lifetimeChanged();
+    m_callId = QString();
+    Q_EMIT callIdChanged();
+    m_hasInvite = false;
+    Q_EMIT hasInviteChanged();
+    m_room = nullptr;
+    Q_EMIT roomChanged();
+    m_remoteUser = nullptr;
+    Q_EMIT remoteUserChanged();
+}
+
+void CallManager::startCall(NeoChatRoom *room, bool sendVideo)
+{
+    if (m_session) {
+        // Don't start calls if there already is one
+        Q_EMIT Controller::instance().errorOccured(i18n("A call is already in progress"));
+        return;
+    }
+    if (room->users().size() != 2) {
+        // Don't start calls if the room doesn't have exactly two members
+        Q_EMIT Controller::instance().errorOccured(i18n("Calls are limited to 1:1 rooms"));
+        return;
+    }
+
+    QString errorString;
+    // FIXME: m_session is still null at this point, so these checks dereference a null pointer
+    if (!m_session->havePlugins(false, errorString) ||
+        (sendVideo && !m_session->havePlugins(true, errorString))) {
+        Q_EMIT Controller::instance().errorOccured(errorString);
+        return;
+    }
+
+    setLifetime(60000);
+    setRoom(room);
+    setRemoteUser(otherUser(room));
+
+    updateTurnServers();
+
+    setCallId(generateCallId());
+
+    m_session = CallSession::startCall(sendVideo, m_cachedTurnUris, this);
+    connect(m_session, &CallSession::stateChanged, this, [this] {
+        Q_EMIT stateChanged();
+        if (state() == CallSession::ICEFAILED) {
+            Q_EMIT callEnded();
+        }
+    }); // TODO refactor away?
+
+    connectSingleShot(m_session, &CallSession::offerCreated, this, [this](const QString &sdp, const QVector<Candidate> &candidates) {
+        m_room->inviteCall(callId(), lifetime(), sdp);
+        qCDebug(voip) << "Sending Invite";
+        m_room->sendCallCandidates(callId(), candidatesToJson(candidates));
+        qCDebug(voip) << "Sending Candidates";
+    });
+
+    // mark the call as outgoing so the UI (main.qml) opens the call page
+    m_isInviting = true;
+    Q_EMIT isInvitingChanged();
+    setGlobalState(OUTGOING);
+}
+
+QString CallManager::generateCallId()
+{
+    return QDateTime::currentDateTime().toString("yyyyMMddhhmmsszzz");
+}
+
+void CallManager::setCallId(const QString &callId)
+{
+    m_callId = callId;
+    Q_EMIT callIdChanged();
+}
+
+bool CallManager::isInviting() const
+{
+    return m_isInviting;
+}
+
+void CallManager::setMuted(bool muted)
+{
+    if (!m_session) {
+        return;
+    }
+    m_session->setMuted(muted);
+    Q_EMIT mutedChanged();
+}
+
+bool CallManager::muted() const
+{
+    if (!m_session) {
+        return false;
+    }
+    return m_session->muted();
+}
+
+bool CallManager::init()
+{
+    qRegisterMetaType<Candidate>();
+    qRegisterMetaType<QVector<Candidate>>();
+    GError *error = nullptr;
+    if (!gst_init_check(nullptr, nullptr, &error)) {
+        QString strError;
+        if (error) {
+            strError += error->message;
+            g_error_free(error);
+        }
+        qCCritical(voip) << "Failed to initialize GStreamer:" << strError;
+        return false;
+    }
+
+    gchar *version = gst_version_string();
+    qCDebug(voip) << "Initialised GStreamer: Version" << version;
+    g_free(version);
+
+    // Required to register the qml types
+    auto _sink = gst_element_factory_make("qmlglsink", nullptr);
+    Q_ASSERT(_sink);
+    gst_object_unref(_sink);
+    return true;
+}
+
+void CallManager::setLifetime(int lifetime)
+{
+    m_lifetime = lifetime;
+    Q_EMIT lifetimeChanged();
+}
+
+void CallManager::setRoom(NeoChatRoom *room)
+{
+    m_room = room;
+    Q_EMIT roomChanged();
+}
+
+void CallManager::setRemoteUser(NeoChatUser *user)
+{
+    m_remoteUser = user;
+    Q_EMIT remoteUserChanged();
+}
+
+NeoChatUser *CallManager::otherUser(NeoChatRoom *room)
+{
+    return dynamic_cast<NeoChatUser *>(room->users()[0]->id() == room->localUser()->id() ? room->users()[1] : room->users()[0]);
room->users()[1] : room->users()[0]); +} + +QJsonArray CallManager::candidatesToJson(const QVector &candidates) const +{ + QJsonArray candidatesJson; + for (const auto &candidate : candidates) { + candidatesJson += QJsonObject{{"candidate", candidate.candidate}, {"sdpMid", candidate.sdpMid}, {"sdpMLineIndex", candidate.sdpMLineIndex}}; + } + return candidatesJson; +} + +void CallManager::setGlobalState(GlobalState globalState) +{ + if (m_globalState == globalState) { + return; + } + m_globalState = globalState; + Q_EMIT globalStateChanged(); +} + +CallManager::GlobalState CallManager::globalState() const +{ + return m_globalState; +} diff --git a/src/call/callmanager.h b/src/call/callmanager.h new file mode 100644 index 0000000000000000000000000000000000000000..826071923a4b55ad3a7fc6093192096861d7ace6 --- /dev/null +++ b/src/call/callmanager.h @@ -0,0 +1,134 @@ +// SPDX-FileCopyrightText: 2020-2021 Nheko Authors +// SPDX-FileCopyrightText: 2021 Tobias Fella +// SPDX-License-Identifier: GPL-3.0-or-later + +#pragma once + +#include "neochatroom.h" +#include "neochatuser.h" +#include +#include +#include + +#include "callsession.h" + +#include "events/callanswerevent.h" +#include "events/callcandidatesevent.h" +#include "events/callhangupevent.h" +#include "events/callinviteevent.h" + +#include + +class CallSession; +class QQuickItem; + +using namespace Quotient; +class CallManager : public QObject +{ + Q_OBJECT + +public: + enum GlobalState { + IDLE, + INCOMING, + OUTGOING, + ACTIVE, + }; + Q_ENUM(GlobalState); + + Q_PROPERTY(GlobalState globalState READ globalState NOTIFY globalStateChanged) + Q_PROPERTY(NeoChatUser *remoteUser READ remoteUser NOTIFY remoteUserChanged) + Q_PROPERTY(QString callId READ callId NOTIFY callIdChanged) + Q_PROPERTY(NeoChatRoom *room READ room NOTIFY roomChanged) + Q_PROPERTY(int lifetime READ lifetime NOTIFY lifetimeChanged) // TODO integrate with 'hasInvite' + Q_PROPERTY(bool muted READ muted WRITE setMuted NOTIFY mutedChanged) + Q_PROPERTY(QQuickItem *item MEMBER m_item) // TODO allow for different devices for each session + Q_PROPERTY(CallSession::State state READ state NOTIFY stateChanged) + + static CallManager &instance() + { + static CallManager _instance; + return _instance; + } + + QString callId() const; + + CallSession::State state() const; + + NeoChatUser *remoteUser() const; + NeoChatRoom *room() const; + bool hasInvite() const; + bool isInviting() const; + + int lifetime() const; + + bool muted() const; + void setMuted(bool muted); + + CallManager::GlobalState globalState() const; + + void handleCallEvent(NeoChatRoom *room, const RoomEvent *event); + + Q_INVOKABLE void startCall(NeoChatRoom *room, bool camera); + Q_INVOKABLE void acceptCall(); + Q_INVOKABLE void hangupCall(); + Q_INVOKABLE void ignoreCall(); + + QCoro::Task updateTurnServers(); + + QQuickItem *m_item = nullptr; + +Q_SIGNALS: + void currentCallIdChanged(); + void incomingCall(NeoChatUser *user, NeoChatRoom *room, int timeout, const QString &callId); + void callEnded(); + void remoteUserChanged(); + void callIdChanged(); + void roomChanged(); + void hasInviteChanged(); + void stateChanged(); + void lifetimeChanged(); + void isInvitingChanged(); + void mutedChanged(); + void globalStateChanged(); + +private: + CallManager(); + QString m_callId; + + QVector m_incomingCandidates; + QString m_incomingSDP; + + void checkPlugins(bool isVideo, QString &errorMessage); + + QStringList m_cachedTurnUris; + QDateTime m_cachedTurnUrisValidUntil = QDateTime::fromSecsSinceEpoch(0); + + NeoChatUser 
+    NeoChatRoom *m_room = nullptr;
+    int m_lifetime = 0;
+
+    bool m_hasInvite = false;
+    bool m_isInviting = false;
+    GlobalState m_globalState = IDLE;
+
+    void handleInvite(NeoChatRoom *room, const CallInviteEvent *event);
+    void handleHangup(NeoChatRoom *room, const CallHangupEvent *event);
+    void handleCandidates(NeoChatRoom *room, const CallCandidatesEvent *event);
+    void handleAnswer(NeoChatRoom *room, const CallAnswerEvent *event);
+
+    QString generateCallId();
+    bool init();
+
+    bool m_initialised = false;
+    CallSession *m_session = nullptr;
+
+    void setLifetime(int lifetime);
+    void setRoom(NeoChatRoom *room);
+    void setRemoteUser(NeoChatUser *user);
+    void setCallId(const QString &callid);
+    void setGlobalState(GlobalState state);
+
+    NeoChatUser *otherUser(NeoChatRoom *room);
+    QJsonArray candidatesToJson(const QVector<Candidate> &candidates) const;
+};
diff --git a/src/call/callsession.cpp b/src/call/callsession.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..c3ea31df8e96cc9e5ce72cbd91e2c3cf0a1f151a
--- /dev/null
+++ b/src/call/callsession.cpp
@@ -0,0 +1,849 @@
+// SPDX-FileCopyrightText: 2021 Nheko Contributors
+// SPDX-FileCopyrightText: 2021 Carl Schwan
+// SPDX-FileCopyrightText: 2021-2022 Tobias Fella
+//
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "callsession.h"
+
+#include "calldevices.h"
+
+#include <QDebug>
+#include <QThread>
+
+#include <gst/gst.h>
+
+#define GST_USE_UNSTABLE_API
+#include <gst/webrtc/webrtc.h>
+
+#include "callmanager.h"
+
+#include "voiplogging.h"
+
+#include "audiosources.h"
+#include "videosources.h"
+#include <QQuickItem>
+
+#define STUN_SERVER "stun://turn.matrix.org:3478" // TODO make STUN server configurable
+
+struct KeyFrameRequestData {
+    GstElement *pipe = nullptr;
+    GstElement *decodebin = nullptr;
+    gint packetsLost = 0;
+    guint timerid = 0;
+    QString statsField;
+} _keyFrameRequestData;
+
+QPair<int, int> getResolution(GstPad *pad)
+{
+    QPair<int, int> ret;
+    GstCaps *caps = gst_pad_get_current_caps(pad);
+    const GstStructure *s = gst_caps_get_structure(caps, 0);
+    gst_structure_get_int(s, "width", &ret.first);
+    gst_structure_get_int(s, "height", &ret.second);
+    gst_caps_unref(caps);
+    return ret;
+}
+
+QPair<int, int> getResolution(GstElement *pipe, const gchar *elementName, const gchar *padName)
+{
+    GstElement *element = gst_bin_get_by_name(GST_BIN(pipe), elementName);
+    GstPad *pad = gst_element_get_static_pad(element, padName);
+    auto ret = getResolution(pad);
+    gst_object_unref(pad);
+    gst_object_unref(element);
+    return ret;
+}
+
+void setLocalDescription(GstPromise *promise, gpointer user_data)
+{
+    qDebug() << "Session: Setting local description";
+    auto instance = static_cast<CallSession *>(user_data);
+    const GstStructure *reply = gst_promise_get_reply(promise);
+    gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer"));
+    GstWebRTCSessionDescription *gstsdp = nullptr;
+    gst_structure_get(reply, isAnswer ? "answer" : "offer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &gstsdp, nullptr);
"answer" : "offer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &gstsdp, nullptr); + gst_promise_unref(promise); + g_signal_emit_by_name(instance->m_webrtc, "set-local-description", gstsdp, nullptr); + gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp); + instance->m_localSdp = QString(sdp); + g_free(sdp); + gst_webrtc_session_description_free(gstsdp); + qCDebug(voip) << "Session: local description set:" << isAnswer << instance->m_localSdp; +} + +bool contains(std::string_view str1, std::string_view str2) +{ + return std::search(str1.cbegin(), + str1.cend(), + str2.cbegin(), + str2.cend(), + [](unsigned char c1, unsigned char c2) { + return std::tolower(c1) == std::tolower(c2); + }) + != str1.cend(); +} + +void createOffer(GstElement *webrtc, CallSession *session) +{ + qDebug() << "Session: Creating Offer"; + auto promise = gst_promise_new_with_change_func(setLocalDescription, session, nullptr); + g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise); +} + +void createAnswer(GstPromise *promise, gpointer user_data) +{ + qDebug() << "Session: Creating Answer"; + auto instance = static_cast(user_data); + gst_promise_unref(promise); + promise = gst_promise_new_with_change_func(setLocalDescription, instance, nullptr); + g_signal_emit_by_name(instance->m_webrtc, "create-answer", nullptr, promise); +} + +bool getMediaAttributes(const GstSDPMessage *sdp, const char *mediaType, const char *encoding, int &payloadType, bool &receiveOnly, bool &sendOnly) +{ + payloadType = -1; + receiveOnly = false; + sendOnly = false; + for (guint mlineIndex = 0; mlineIndex < gst_sdp_message_medias_len(sdp); mlineIndex++) { + const GstSDPMedia *media = gst_sdp_message_get_media(sdp, mlineIndex); + if (!strcmp(gst_sdp_media_get_media(media), mediaType)) { + receiveOnly = gst_sdp_media_get_attribute_val(media, "recvonly") != nullptr; + sendOnly = gst_sdp_media_get_attribute_val(media, "sendonly") != nullptr; + const gchar *rtpval = nullptr; + for (guint n = 0; n == 0 || rtpval; n++) { + rtpval = gst_sdp_media_get_attribute_val_n(media, "rtpmap", n); + if (rtpval && contains(rtpval, encoding)) { + payloadType = atoi(rtpval); + break; + } + } + return true; + } + } + return false; +} + +GstWebRTCSessionDescription *parseSDP(const QString &sdp, GstWebRTCSDPType type) +{ + GstSDPMessage *message; + gst_sdp_message_new(&message); + if (gst_sdp_message_parse_buffer((guint8 *)sdp.toLatin1().data(), sdp.size(), message) == GST_SDP_OK) { + return gst_webrtc_session_description_new(type, message); + } else { + qCCritical(voip) << "Failed to parse remote SDP"; + gst_sdp_message_free(message); + return nullptr; + } +} + +void addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, guint mlineIndex, gchar *candidate, gpointer user_data) +{ + qCDebug(voip) << "Adding local ICE Candidates"; + auto instance = static_cast(user_data); + Q_ASSERT(instance); + instance->m_localCandidates += Candidate{candidate, static_cast(mlineIndex), QString()}; +} + +void iceConnectionStateChanged(GstElement *webrtc, GParamSpec *pspec G_GNUC_UNUSED, gpointer user_data) +{ + auto instance = static_cast(user_data); + Q_ASSERT(instance); + GstWebRTCICEConnectionState newState; + g_object_get(webrtc, "ice-connection-state", &newState, nullptr); + switch (newState) { + case GST_WEBRTC_ICE_CONNECTION_STATE_NEW: + qCDebug(voip) << "GstWebRTCICEConnectionState -> New"; + instance->setState(CallSession::CONNECTING); + break; + case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING: + qCDebug(voip) << "GstWebRTCICEConnectionState -> Checking"; + 
instance->setState(CallSession::CONNECTING); + break; + case GST_WEBRTC_ICE_CONNECTION_STATE_CONNECTED: + qCDebug(voip) << "GstWebRTCICEConnectionState -> Connected"; + break; + case GST_WEBRTC_ICE_CONNECTION_STATE_COMPLETED: + qCDebug(voip) << "GstWebRTCICEConnectionState -> Completed"; + break; + case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED: + qCDebug(voip) << "GstWebRTCICEConnectionState -> Failed"; + instance->setState(CallSession::ICEFAILED); + break; + case GST_WEBRTC_ICE_CONNECTION_STATE_DISCONNECTED: + qCDebug(voip) << "GstWebRTCICEConnectionState -> Disconnected"; + break; + case GST_WEBRTC_ICE_CONNECTION_STATE_CLOSED: + qCDebug(voip) << "GstWebRTCICEConnectionState -> Closed"; + break; + default: + break; + } +} + +GstElement *newAudioSinkChain(GstElement *pipe) +{ + qCDebug(voip) << "New Audio Sink Chain"; + GstElement *queue = gst_element_factory_make("queue", nullptr); + GstElement *convert = gst_element_factory_make("audioconvert", nullptr); + GstElement *resample = gst_element_factory_make("audioresample", nullptr); + GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr); + gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr); + gst_element_link_many(queue, convert, resample, sink, nullptr); + gst_element_sync_state_with_parent(queue); + gst_element_sync_state_with_parent(convert); + gst_element_sync_state_with_parent(resample); + gst_element_sync_state_with_parent(sink); + return queue; +} + +void sendKeyFrameRequest() +{ + GstPad *sinkpad = gst_element_get_static_pad(_keyFrameRequestData.decodebin, "sink"); + if (!gst_pad_push_event(sinkpad, gst_event_new_custom(GST_EVENT_CUSTOM_UPSTREAM, gst_structure_new_empty("GstForceKeyUnit")))) { + qCWarning(voip) << "Keyframe request failed"; + } + gst_object_unref(sinkpad); +} + +void _testPacketLoss(GstPromise *promise, gpointer G_GNUC_UNUSED) +{ + const GstStructure *reply = gst_promise_get_reply(promise); + gint packetsLost = 0; + GstStructure *rtpStats; + if (!gst_structure_get(reply, _keyFrameRequestData.statsField.toLatin1().data(), GST_TYPE_STRUCTURE, &rtpStats, nullptr)) { + qCDebug(voip) << "get-stats: no field:" << _keyFrameRequestData.statsField; + gst_promise_unref(promise); + return; + } + gst_structure_get_int(rtpStats, "packets-lost", &packetsLost); + gst_structure_free(rtpStats); + gst_promise_unref(promise); + if (packetsLost > _keyFrameRequestData.packetsLost) { + qCDebug(voip) << "inbound video lost packet count:" << packetsLost; + _keyFrameRequestData.packetsLost = packetsLost; + sendKeyFrameRequest(); + } +} + +gboolean testPacketLoss(gpointer G_GNUC_UNUSED) +{ + if (_keyFrameRequestData.pipe) { + GstElement *webrtc = gst_bin_get_by_name(GST_BIN(_keyFrameRequestData.pipe), "webrtcbin"); + GstPromise *promise = gst_promise_new_with_change_func(_testPacketLoss, nullptr, nullptr); + g_signal_emit_by_name(webrtc, "get-stats", nullptr, promise); + gst_object_unref(webrtc); + return true; + } + return false; +} + +GstElement *newVideoSinkChain(GstElement *pipe, QQuickItem *quickItem) +{ + Q_ASSERT(quickItem); + qCDebug(voip) << "Creating Video Sink Chain"; + // use compositor for now; acceleration needs investigation + GstElement *queue = gst_element_factory_make("queue", nullptr); + GstElement *compositor = gst_element_factory_make("compositor", nullptr); + GstElement *glupload = gst_element_factory_make("glupload", nullptr); + GstElement *glcolorconvert = gst_element_factory_make("glcolorconvert", nullptr); + GstElement *qmlglsink = gst_element_factory_make("qmlglsink", nullptr); 
+ GstElement *glsinkbin = gst_element_factory_make("glsinkbin", nullptr); + g_object_set(qmlglsink, "widget", quickItem, nullptr); + g_object_set(glsinkbin, "sink", qmlglsink, nullptr); + gst_bin_add_many(GST_BIN(pipe), queue, compositor, glupload, glcolorconvert, glsinkbin, nullptr); + gst_element_link_many(queue, compositor, glupload, glcolorconvert, glsinkbin, nullptr); + gst_element_sync_state_with_parent(queue); + gst_element_sync_state_with_parent(compositor); + gst_element_sync_state_with_parent(glupload); + gst_element_sync_state_with_parent(glcolorconvert); + gst_element_sync_state_with_parent(glsinkbin); + return queue; +} + +void linkNewPad(GstElement *decodebin, GstPad *newpad, gpointer user_data) +{ + qCDebug(voip) << "Linking New Pad"; + qCWarning(voip) << "Resolution:" << getResolution(newpad); + auto instance = static_cast(user_data); + Q_ASSERT(instance); + GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink"); + GstCaps *sinkcaps = gst_pad_get_current_caps(sinkpad); + const GstStructure *structure = gst_caps_get_structure(sinkcaps, 0); + + gchar *mediaType = nullptr; + guint ssrc = 0; + gst_structure_get(structure, "media", G_TYPE_STRING, &mediaType, "ssrc", G_TYPE_UINT, &ssrc, nullptr); + gst_caps_unref(sinkcaps); + gst_object_unref(sinkpad); + + GstElement *queue = nullptr; + if (!strcmp(mediaType, "audio")) { + qCDebug(voip) << "Receiving audio stream"; + //_haveAudioStream = true; + queue = newAudioSinkChain(instance->m_pipe); + } else if (!strcmp(mediaType, "video")) { + qCDebug(voip) << "Receiving video stream"; + instance->setIsReceivingVideo(true); + queue = newVideoSinkChain(instance->m_pipe, CallManager::instance().m_item); + _keyFrameRequestData.statsField = QStringLiteral("rtp-inbound-stream-stats_") + QString::number(ssrc); + } else { + g_free(mediaType); + qCWarning(voip) << "Unknown pad type:" << GST_PAD_NAME(newpad); + return; + } + Q_ASSERT(queue); + GstPad *queuepad = gst_element_get_static_pad(queue, "sink"); + if (queuepad) { + if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad))) { + qCCritical(voip) << "Unable to link new pad"; + // TODO: Error handling + } else { + // if (instance->calltype() != CallSession::VIDEO || (_haveAudioStream && (_haveVideoStream || session->isRemoteVideoReceiveOnly()))) { + instance->setState(CallSession::CONNECTED); + // if (_haveVideoStream) { + // _keyFrameRequestData.pipe = pipe; + // _keyFrameRequestData.decodebin = decodebin; + // _keyFrameRequestData.timerid = g_timeout_add_seconds(3, testPacketLoss, nullptr); + // } + // if (session->isRemoteVideoReceiveOnly()) { + // addLocalVideo(pipe); + // } + //} + } + gst_object_unref(queuepad); + } + g_free(mediaType); +} + +void setWaitForKeyFrame(GstBin *decodebin G_GNUC_UNUSED, GstElement *element, gpointer G_GNUC_UNUSED) +{ + if (!strcmp(gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(gst_element_get_factory(element))), "rtpvp8depay")) { + g_object_set(element, "wait-for-keyframe", TRUE, nullptr); + } +} + +void addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, gpointer user_data) +{ + if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC) { + return; + } + + auto instance = static_cast(user_data); + Q_ASSERT(instance); + + qCDebug(voip) << "Receiving incoming stream"; + GstElement *decodebin = gst_element_factory_make("decodebin", nullptr); + // Investigate hardware, see nheko source + g_object_set(decodebin, "force-sw-decoders", TRUE, nullptr); + g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), instance); + g_signal_connect(decodebin, 
"element-added", G_CALLBACK(setWaitForKeyFrame), nullptr); + gst_bin_add(GST_BIN(instance->m_pipe), decodebin); + gst_element_sync_state_with_parent(decodebin); + GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink"); + if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad))) { + // TODO: Error handling + qCWarning(voip) << "Session: Unable to link decodebin"; + } + gst_object_unref(sinkpad); +} + +void iceGatheringStateChanged(GstElement *webrtc, GParamSpec *pspec G_GNUC_UNUSED, gpointer user_data) +{ + auto instance = static_cast(user_data); + Q_ASSERT(instance); + + GstWebRTCICEGatheringState newState; + g_object_get(webrtc, "ice-gathering-state", &newState, nullptr); + if (newState == GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE) { + qCDebug(voip) << "GstWebRTCICEGatheringState -> Complete"; + if (instance->m_isOffering) { + Q_EMIT instance->offerCreated(instance->m_localSdp, instance->m_localCandidates); + instance->setState(CallSession::OFFERSENT); + } else { + Q_EMIT instance->answerCreated(instance->m_localSdp, instance->m_localCandidates); + instance->setState(CallSession::ANSWERSENT); + } + } +} + +gboolean newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data) +{ + CallSession *instance = static_cast(user_data); + Q_ASSERT(instance); + switch (GST_MESSAGE_TYPE(msg)) { + case GST_MESSAGE_EOS: + qCDebug(voip) << "End of stream"; + // TODO: Error handling + instance->end(); + break; + case GST_MESSAGE_ERROR: + GError *error; + gchar *debug; + gst_message_parse_error(msg, &error, &debug); + qCWarning(voip) << "Error from element:" << GST_OBJECT_NAME(msg->src) << error->message; + // TODO: Error handling + g_clear_error(&error); + g_free(debug); + instance->end(); + break; + default: + break; + } + return TRUE; +} + +CallSession::CallSession(QObject *parent) + : QObject(parent) +{ +} + +void CallSession::acceptAnswer(const QString &sdp, const QVector &candidates) +{ + qCDebug(voip) << "Accepting Answer"; + if (m_state != CallSession::OFFERSENT) { + return; + } + + GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER); + if (!answer) { + end(); + return; + } + + acceptCandidates(candidates); + + int unused; + // if (!getMediaAttributes(answer->sdp, "video", "vp8", unused, m_isRemoteVideoReceiveOnly, m_isRemoteVideoSendOnly)) { + // m_isRemoteVideoReceiveOnly = true; + // } // TODO + g_signal_emit_by_name(m_webrtc, "set-remote-description", answer, nullptr); + return; +} + +void CallSession::acceptOffer(bool sendVideo, const QString &sdp, const QVector remoteCandidates) +{ + Q_ASSERT(!sdp.isEmpty()); + Q_ASSERT(!remoteCandidates.isEmpty()); + qCDebug(voip) << "Session: Accepting offer"; + if (m_state != CallSession::DISCONNECTED) { + return; + } + m_isOffering = false; + + GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER); + if (!offer) { + qCritical() << "Session: Offer is not an offer"; + return; + } + + int opusPayloadType; + bool receiveOnly; + bool sendOnly; + if (getMediaAttributes(offer->sdp, "audio", "opus", opusPayloadType, receiveOnly, sendOnly)) { + if (opusPayloadType == -1) { + qCritical() << "Session: No OPUS in offer"; + gst_webrtc_session_description_free(offer); + return; + } + } else { + qCritical() << "Session: No audio in offer"; + gst_webrtc_session_description_free(offer); + return; + } + + int vp8PayloadType; + // bool isVideo = getMediaAttributes(offer->sdp, "video", "vp8", vp8PayloadType, m_isRemoteVideoReceiveOnly, m_isRemoteVideoSendOnly); // TODO + // if (isVideo && 
+void CallSession::acceptOffer(bool sendVideo, const QString &sdp, const QVector<Candidate> &remoteCandidates)
+{
+    Q_ASSERT(!sdp.isEmpty());
+    Q_ASSERT(!remoteCandidates.isEmpty());
+    qCDebug(voip) << "Session: Accepting offer";
+    if (m_state != CallSession::DISCONNECTED) {
+        return;
+    }
+    m_isOffering = false;
+
+    GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
+    if (!offer) {
+        qCCritical(voip) << "Session: Offer is not an offer";
+        return;
+    }
+
+    int opusPayloadType;
+    bool receiveOnly;
+    bool sendOnly;
+    if (getMediaAttributes(offer->sdp, "audio", "opus", opusPayloadType, receiveOnly, sendOnly)) {
+        if (opusPayloadType == -1) {
+            qCCritical(voip) << "Session: No OPUS in offer";
+            gst_webrtc_session_description_free(offer);
+            return;
+        }
+    } else {
+        qCCritical(voip) << "Session: No audio in offer";
+        gst_webrtc_session_description_free(offer);
+        return;
+    }
+
+    int vp8PayloadType;
+    // bool isVideo = getMediaAttributes(offer->sdp, "video", "vp8", vp8PayloadType, m_isRemoteVideoReceiveOnly, m_isRemoteVideoSendOnly); // TODO
+    // if (isVideo && vp8PayloadType == -1) {
+    //     qCCritical(voip) << "Session: No VP8 in offer";
+    //     gst_webrtc_session_description_free(offer);
+    //     return;
+    // }
+    if (!startPipeline(sendVideo)) {
+        gst_webrtc_session_description_free(offer);
+        qCCritical(voip) << "Failed to start pipeline";
+        return;
+    }
+    QThread::msleep(1000); // ?
+
+    acceptCandidates(remoteCandidates);
+
+    GstPromise *promise = gst_promise_new_with_change_func(createAnswer, this, nullptr);
+    g_signal_emit_by_name(m_webrtc, "set-remote-description", offer, promise);
+    gst_webrtc_session_description_free(offer);
+}
+
+void CallSession::createCall(bool sendVideo)
+{
+    qCDebug(voip) << "Creating call";
+    m_isOffering = true;
+    startPipeline(sendVideo);
+}
+
+bool CallSession::startPipeline(bool sendVideo)
+{
+    qCDebug(voip) << "Session: Starting Pipeline";
+    if (m_state != CallSession::DISCONNECTED) {
+        return false;
+    }
+    m_state = CallSession::INITIATING;
+    Q_EMIT stateChanged();
+
+    if (!createPipeline(sendVideo)) {
+        qCCritical(voip) << "Failed to create pipeline";
+        end();
+        return false;
+    }
+    m_webrtc = gst_bin_get_by_name(GST_BIN(m_pipe), "webrtcbin");
+    Q_ASSERT(m_webrtc);
+    if (false /*TODO: CHECK USE STUN*/) {
+        qCDebug(voip) << "Session: Setting STUN server:" << STUN_SERVER;
+        g_object_set(m_webrtc, "stun-server", STUN_SERVER, nullptr);
+    }
+
+    for (const auto &uri : m_turnServers) {
+        qCDebug(voip) << "Session: Setting turn server:" << uri;
+        gboolean ret;
+        g_signal_emit_by_name(m_webrtc, "add-turn-server", uri.toLatin1().data(), &ret);
+    }
+
+    if (m_turnServers.empty()) {
+        qCDebug(voip) << "Session: No TURN servers provided";
+    }
+
+    if (m_isOffering) {
+        qCDebug(voip) << "Session: Offering";
+        g_signal_connect(m_webrtc, "on-negotiation-needed", G_CALLBACK(::createOffer), this);
+    }
+
+    g_signal_connect(m_webrtc, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), this);
+    g_signal_connect(m_webrtc, "notify::ice-connection-state", G_CALLBACK(iceConnectionStateChanged), this);
+
+    gst_element_set_state(m_pipe, GST_STATE_READY);
+    g_signal_connect(m_webrtc, "pad-added", G_CALLBACK(addDecodeBin), this);
+
+    g_signal_connect(m_webrtc, "notify::ice-gathering-state", G_CALLBACK(iceGatheringStateChanged), this);
+    gst_object_unref(m_webrtc);
+
+    GstStateChangeReturn ret = gst_element_set_state(m_pipe, GST_STATE_PLAYING);
+    if (ret == GST_STATE_CHANGE_FAILURE) {
+        // TODO: Error handling - unable to start pipeline
+        qCCritical(voip) << "Unable to start pipeline";
+        end();
+        return false;
+    }
+
+    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipe));
+    m_busWatchId = gst_bus_add_watch(bus, newBusMessage, this);
+    gst_object_unref(bus);
+
+    m_state = CallSession::INITIATED;
+    Q_EMIT stateChanged();
+
+    return true;
+}
+
+void CallSession::end()
+{
+    qCDebug(voip) << "Ending Call";
+    if (m_pipe) {
+        gst_element_set_state(m_pipe, GST_STATE_NULL);
+        gst_object_unref(m_pipe);
+        m_pipe = nullptr;
+        if (m_busWatchId) {
+            g_source_remove(m_busWatchId);
+            m_busWatchId = 0;
+        }
+    }
+    if (m_state != CallSession::DISCONNECTED) {
+        m_state = CallSession::DISCONNECTED;
+        Q_EMIT stateChanged();
+    }
+}
gst_element_factory_make("audioresample", nullptr); + GstElement *queue1 = gst_element_factory_make("queue", nullptr); + GstElement *opusenc = gst_element_factory_make("opusenc", nullptr); + GstElement *rtp = gst_element_factory_make("rtpopuspay", nullptr); + GstElement *queue2 = gst_element_factory_make("queue", nullptr); + GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr); + Q_ASSERT(source); + Q_ASSERT(volume); + Q_ASSERT(convert); + Q_ASSERT(resample); + Q_ASSERT(queue1); + Q_ASSERT(opusenc); + Q_ASSERT(queue2); + Q_ASSERT(capsfilter); + + GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp", + "media", + G_TYPE_STRING, + "audio", + "encoding-name", + G_TYPE_STRING, + "OPUS", + "payload", + G_TYPE_INT, + OPUS_PAYLOAD_TYPE, + nullptr); + Q_ASSERT(rtpcaps); + g_object_set(capsfilter, "caps", rtpcaps, nullptr); + gst_caps_unref(rtpcaps); + + GstElement *webrtcbin = gst_element_factory_make("webrtcbin", "webrtcbin"); + Q_ASSERT(webrtcbin); + g_object_set(webrtcbin, "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, nullptr); + + m_pipe = gst_pipeline_new(nullptr); + gst_bin_add_many(GST_BIN(m_pipe), source, volume, convert, resample, queue1, opusenc, rtp, queue2, capsfilter, webrtcbin, nullptr); + + if (!gst_element_link_many(source, volume, convert, resample, queue1, opusenc, rtp, queue2, capsfilter, webrtcbin, nullptr)) { + qCCritical(voip) << "Failed to link pipeline"; + // TODO propagate errors up and end call + return false; + } + + return sendVideo ? addVideoPipeline() : true; +} + +bool CallSession::addVideoPipeline() +{ + qWarning() << "Session: Adding Video Pipeline"; + GstElement *camerafilter = nullptr; + GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr); + GstElement *tee = gst_element_factory_make("tee", "videosrctee"); + gst_bin_add_many(GST_BIN(m_pipe), videoconvert, tee, nullptr); + QPair resolution; + QPair frameRate; + auto device = VideoSources::instance().currentDevice(); + auto deviceCaps = device->caps[VideoSources::instance().capsIndex()]; + setIsSendingVideo(true); + int width = deviceCaps.width; + int height = deviceCaps.height; + int framerate = deviceCaps.framerates.back(); + if (!device) { + return false; + } + GstElement *camera = gst_device_create_element(device->device, nullptr); + GstCaps *caps = + gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, "framerate", GST_TYPE_FRACTION, framerate, 1, nullptr); + camerafilter = gst_element_factory_make("capsfilter", "camerafilter"); + g_object_set(camerafilter, "caps", caps, nullptr); + gst_caps_unref(caps); + + gst_bin_add_many(GST_BIN(m_pipe), camera, camerafilter, nullptr); + if (!gst_element_link_many(camera, videoconvert, camerafilter, nullptr)) { + qCWarning(voip) << "Failed to link camera elements"; + // TODO: Error handling + return false; + } + if (!gst_element_link(camerafilter, tee)) { + qCWarning(voip) << "Failed to link camerafilter -> tee"; + // TODO: Error handling + return false; + } + + GstElement *queue = gst_element_factory_make("queue", nullptr); + GstElement *vp8enc = gst_element_factory_make("vp8enc", nullptr); + g_object_set(vp8enc, "deadline", 1, nullptr); + g_object_set(vp8enc, "error-resilient", 1, nullptr); + GstElement *rtpvp8pay = gst_element_factory_make("rtpvp8pay", nullptr); + GstElement *rtpqueue = gst_element_factory_make("queue", nullptr); + GstElement *rtpcapsfilter = gst_element_factory_make("capsfilter", nullptr); + GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp", + 
"media", + G_TYPE_STRING, + "video", + "encoding-name", + G_TYPE_STRING, + "VP8", + "payload", + G_TYPE_INT, + VP8_PAYLOAD_TYPE, + nullptr); + g_object_set(rtpcapsfilter, "caps", rtpcaps, nullptr); + gst_caps_unref(rtpcaps); + + gst_bin_add_many(GST_BIN(m_pipe), queue, vp8enc, rtpvp8pay, rtpqueue, rtpcapsfilter, nullptr); + + GstElement *webrtcbin = gst_bin_get_by_name(GST_BIN(m_pipe), "webrtcbin"); + if (!gst_element_link_many(tee, queue, vp8enc, rtpvp8pay, rtpqueue, rtpcapsfilter, webrtcbin, nullptr)) { + qCCritical(voip) << "WebRTC: failed to link rtp video elements"; + gst_object_unref(webrtcbin); + return false; + } + + gst_object_unref(webrtcbin); + return true; +} + +void CallSession::setTurnServers(QStringList servers) +{ + qDebug() << "Session: Setting Turn Servers"; + m_turnServers = servers; +} + +QQuickItem *CallSession::getVideoItem() const +{ + return m_videoItem; +} + +void CallSession::acceptCandidates(const QVector &candidates) +{ + Q_ASSERT(!candidates.isEmpty()); + Q_ASSERT(m_webrtc); + qDebug() << "Session: Accepting ICE Candidates"; + for (const auto &c : candidates) { + qDebug() << "Remote candidate:" << c.candidate << c.sdpMLineIndex; + g_signal_emit_by_name(m_webrtc, "add-ice-candidate", c.sdpMLineIndex, c.candidate.toLatin1().data()); + } +} + +bool CallSession::havePlugins(bool video, QString &errorString) const +{ + GstRegistry *registry = gst_registry_get(); + if (video) { + const QVector videoPlugins = { + "compositor", + "opengl", + "qmlgl", + "rtp", + "videoconvert", + "vpx", + }; + bool hasPlugin = true; + QStringList missingPlugins; + for (auto i = 0; i < videoPlugins.size(); i++) { + auto plugin = gst_registry_find_plugin(registry, videoPlugins[i]); + if (!plugin) { + hasPlugin = false; + missingPlugins << QString(videoPlugins[i]); + } + gst_object_unref(plugin); + } + if (!hasPlugin) { + errorString = i18np("Missing GStreamer plugin: %2", "Missing GStreamer plugins: %2", missingPlugins.count(), missingPlugins.join(i18nc("List separator", ", "))); + return false; + } + } + + const QVector audioPlugins = { + "audioconvert", + "audioresample", + "autodetect", + "dtls", + "nice", + "opus", + "playback", + "rtpmanager", + "srtp", + "volume", + "webrtc", + }; + + bool hasPlugin = true; + QStringList missingPlugins; + for (auto i = 0; i < audioPlugins.size(); i++) { + auto plugin = gst_registry_find_plugin(registry, audioPlugins[i]); + if (!plugin) { + hasPlugin = false; + missingPlugins << QString(audioPlugins[i]); + } + gst_object_unref(plugin); + } + if (!hasPlugin) { + errorString = i18np("Missing GStreamer plugin: %2", "Missing GStreamer plugins: %2", missingPlugins.count(), missingPlugins.join(i18nc("List separator", ", "))); + return false; + } + + qCInfo(voip) << "GStreamer: All plugins installed"; + return true; +} + +void CallSession::setMuted(bool muted) +{ + const auto srclevel = gst_bin_get_by_name(GST_BIN(m_pipe), "srclevel"); + g_object_set(srclevel, "mute", muted, nullptr); + gst_object_unref(srclevel); + Q_EMIT mutedChanged(); +} + +bool CallSession::muted() const +{ + if (m_state < CallSession::CONNECTING) { + return false; + } + if (!m_pipe) { + return false; + } + const auto srclevel = gst_bin_get_by_name(GST_BIN(m_pipe), "srclevel"); + bool muted; + if (!srclevel) { + return false; + } + g_object_get(srclevel, "mute", &muted, nullptr); + // gst_object_unref(srclevel); //TODO why does this crash? 
+
+void CallSession::setMuted(bool muted)
+{
+    const auto srclevel = gst_bin_get_by_name(GST_BIN(m_pipe), "srclevel");
+    g_object_set(srclevel, "mute", muted, nullptr);
+    gst_object_unref(srclevel);
+    Q_EMIT mutedChanged();
+}
+
+bool CallSession::muted() const
+{
+    if (m_state < CallSession::CONNECTING) {
+        return false;
+    }
+    if (!m_pipe) {
+        return false;
+    }
+    const auto srclevel = gst_bin_get_by_name(GST_BIN(m_pipe), "srclevel");
+    if (!srclevel) {
+        return false;
+    }
+    gboolean muted = FALSE;
+    g_object_get(srclevel, "mute", &muted, nullptr);
+    // gst_object_unref(srclevel); // TODO: why does this crash?
+    return muted;
+}
+
+void CallSession::setIsSendingVideo(bool isSendingVideo)
+{
+    m_isSendingVideo = isSendingVideo;
+    Q_EMIT isSendingVideoChanged();
+}
+
+bool CallSession::isSendingVideo() const
+{
+    return m_isSendingVideo;
+}
+
+CallSession *CallSession::acceptCall(bool sendVideo, const QString &sdp, const QVector<Candidate> &candidates, const QStringList &turnUris, QObject *parent)
+{
+    auto instance = new CallSession(parent);
+    instance->setTurnServers(turnUris);
+    instance->acceptOffer(sendVideo, sdp, candidates);
+    return instance;
+}
+
+CallSession *CallSession::startCall(bool sendVideo, const QStringList &turnUris, QObject *parent)
+{
+    auto instance = new CallSession(parent);
+    instance->setTurnServers(turnUris);
+    instance->createCall(sendVideo);
+    return instance;
+}
+
+CallSession::State CallSession::state() const
+{
+    return m_state;
+}
+
+void CallSession::setState(CallSession::State state)
+{
+    qCDebug(voip) << "Setting state" << state;
+    m_state = state;
+    Q_EMIT stateChanged();
+}
+
+bool CallSession::isReceivingVideo() const
+{
+    return m_isReceivingVideo;
+}
+
+void CallSession::setIsReceivingVideo(bool isReceivingVideo)
+{
+    m_isReceivingVideo = isReceivingVideo;
+    Q_EMIT isReceivingVideoChanged();
+}
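For manual testing outside NeoChat, the audio send chain assembled element-by-element in `createPipeline()` corresponds closely to a one-line launch description. A minimal, hypothetical harness (assumptions: `autoaudiosrc` replaces the configured device source, `fakesink` replaces `webrtcbin`, and the payload value 111 mirrors `OPUS_PAYLOAD_TYPE`):

```cpp
#include <gst/gst.h>

// Hypothetical standalone sketch of the send-side audio chain from
// createPipeline(); not part of this patch.
int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);
    GError *error = nullptr;
    GstElement *pipe = gst_parse_launch(
        "autoaudiosrc ! volume name=srclevel ! audioconvert ! audioresample ! queue "
        "! opusenc ! rtpopuspay ! queue "
        "! capsfilter caps=\"application/x-rtp,media=(string)audio,encoding-name=(string)OPUS,payload=(int)111\" "
        "! fakesink",
        &error);
    if (!pipe) {
        g_printerr("Failed to build pipeline: %s\n", error->message);
        g_clear_error(&error);
        return 1;
    }
    gst_element_set_state(pipe, GST_STATE_PLAYING);
    GMainLoop *loop = g_main_loop_new(nullptr, FALSE);
    g_main_loop_run(loop);
    return 0;
}
```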
diff --git a/src/call/callsession.h b/src/call/callsession.h new file mode 100644 index 0000000000000000000000000000000000000000..6fa18b876cf5e0f6d497c73d7f7764f028472598 --- /dev/null +++ b/src/call/callsession.h
@@ -0,0 +1,113 @@
+// SPDX-FileCopyrightText: 2021 Nheko Contributors
+// SPDX-FileCopyrightText: 2021 Carl Schwan
+// SPDX-FileCopyrightText: 2021-2022 Tobias Fella
+//
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#pragma once
+
+#include <QObject>
+#include <QQuickItem>
+#include <QString>
+#include <QStringList>
+#include <QVector>
+
+#include <gst/gst.h>
+
+#define OPUS_PAYLOAD_TYPE 111
+#define VP8_PAYLOAD_TYPE 96
+
+class CallDevices;
+
+struct Candidate {
+    QString candidate;
+    int sdpMLineIndex;
+    QString sdpMid;
+};
+Q_DECLARE_METATYPE(Candidate);
+Q_DECLARE_METATYPE(QVector<Candidate>);
+
+class CallSession : public QObject
+{
+    Q_OBJECT
+
+public:
+    enum State {
+        DISCONNECTED,
+        ICEFAILED,
+        INITIATING,
+        INITIATED,
+        OFFERSENT,
+        ANSWERSENT,
+        CONNECTING,
+        CONNECTED,
+    };
+    Q_ENUM(State);
+
+    Q_PROPERTY(CallSession::State state READ state NOTIFY stateChanged)
+    Q_PROPERTY(bool isSendingVideo READ isSendingVideo NOTIFY isSendingVideoChanged)
+    Q_PROPERTY(bool isReceivingVideo READ isReceivingVideo NOTIFY isReceivingVideoChanged)
+    Q_PROPERTY(bool muted READ muted WRITE setMuted NOTIFY mutedChanged)
+
+    static CallSession *startCall(bool sendVideo, const QStringList &turnUris, QObject *parent = nullptr);
+    static CallSession *
+    acceptCall(bool sendVideo, const QString &sdp, const QVector<Candidate> &candidates, const QStringList &turnUris, QObject *parent = nullptr);
+
+    void acceptAnswer(const QString &sdp, const QVector<Candidate> &candidates);
+
+    void end();
+
+    void setTurnServers(QStringList servers);
+    QQuickItem *getVideoItem() const;
+
+    bool havePlugins(bool video, QString &errorString) const;
+
+    CallSession::State state() const;
+    void setState(CallSession::State state);
+
+    QVector<Candidate> m_localCandidates;
+    bool m_haveAudioStream = false;
+    bool m_haveVideoStream = false;
+    QString m_localSdp;
+
+    void setMuted(bool muted);
+    bool muted() const;
+    GstElement *m_pipe = nullptr;
+
+    void setIsSendingVideo(bool video);
+    bool isSendingVideo() const;
+
+    void setIsReceivingVideo(bool isReceivingVideo);
+    bool isReceivingVideo() const;
+
+    GstElement *m_webrtc = nullptr;
+    bool m_isOffering = false;
+
+Q_SIGNALS:
+    void stateChanged();
+    void offerCreated(const QString &sdp, const QVector<Candidate> &candidates);
+    void answerCreated(const QString &sdp, const QVector<Candidate> &candidates);
+
+    void isSendingVideoChanged();
+    void isReceivingVideoChanged();
+
+    void mutedChanged();
+
+private:
+    void acceptOffer(bool sendVideo, const QString &sdp, const QVector<Candidate> &remoteCandidates);
+    void createCall(bool sendVideo);
+
+    void acceptCandidates(const QVector<Candidate> &candidates);
+
+    CallSession::State m_state = CallSession::DISCONNECTED;
+    QQuickItem *m_videoItem = nullptr;
+    unsigned int m_busWatchId = 0;
+    bool m_isSendingVideo = false;
+    bool m_isReceivingVideo = false;
+    QStringList m_turnServers;
+
+    bool startPipeline(bool sendVideo);
+    bool createPipeline(bool sendVideo);
+    bool addVideoPipeline();
+    CallSession(QObject *parent = nullptr);
+};
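A hypothetical consumer of this API, to illustrate the intended flow. In the real patch, CallManager owns the session and wires these signals into Matrix call events; `startDemoCall` and its `turnUris` argument are illustrative only:

```cpp
#include <QDebug>

#include "call/callsession.h"

// Hypothetical sketch, not NeoChat's actual CallManager logic.
void startDemoCall(const QStringList &turnUris)
{
    CallSession *session = CallSession::startCall(/*sendVideo=*/false, turnUris);
    QObject::connect(session, &CallSession::offerCreated, [](const QString &sdp, const QVector<Candidate> &candidates) {
        // In NeoChat this is where the m.call.invite event would be sent.
        qDebug() << "local SDP ready with" << candidates.size() << "candidates";
    });
    QObject::connect(session, &CallSession::stateChanged, [session]() {
        if (session->state() == CallSession::CONNECTED) {
            qDebug() << "call media flowing";
        }
    });
}
```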
diff --git a/src/call/devicemonitor.cpp b/src/call/devicemonitor.cpp new file mode 100644 index 0000000000000000000000000000000000000000..07a2d8962e929dd9bcfbfcae2b013bb4915d70e1 --- /dev/null +++ b/src/call/devicemonitor.cpp
@@ -0,0 +1,146 @@
+// SPDX-FileCopyrightText: 2021 Tobias Fella
+// SPDX-License-Identifier: LGPL-2.0-or-later
+
+#include "devicemonitor.h"
+
+#include <QDebug>
+#include <QTimer>
+
+#include <algorithm>
+#include <cstring>
+
+static gboolean deviceCallback(GstBus *bus, GstMessage *message, gpointer user_data)
+{
+    Q_UNUSED(bus);
+    auto monitor = static_cast<DeviceMonitor *>(user_data);
+    return monitor->callback(message);
+}
+
+DeviceMonitor::DeviceMonitor()
+    : QObject()
+{
+    QTimer::singleShot(0, this, &DeviceMonitor::init);
+}
+
+void DeviceMonitor::init()
+{
+    if (m_monitor) {
+        return;
+    }
+    m_monitor = gst_device_monitor_new();
+    GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
+    gst_device_monitor_add_filter(m_monitor, "Audio/Source", caps);
+    gst_device_monitor_add_filter(m_monitor, "Audio/Duplex", caps);
+
+    gst_caps_unref(caps);
+    caps = gst_caps_new_empty_simple("video/x-raw");
+    gst_device_monitor_add_filter(m_monitor, "Video/Source", caps);
+    gst_device_monitor_add_filter(m_monitor, "Video/Duplex", caps);
+    gst_caps_unref(caps);
+
+    GstBus *bus = gst_device_monitor_get_bus(m_monitor);
+    gst_bus_add_watch(bus, deviceCallback, this);
+    gst_object_unref(bus);
+
+    if (!gst_device_monitor_start(m_monitor)) {
+        qWarning() << "Failed to start device monitor";
+    }
+}
+
+// Returned by reference so callers may safely keep pointers into the lists.
+const QVector<AudioSource> &DeviceMonitor::audioSources() const
+{
+    return m_audioSources;
+}
+
+const QVector<VideoSource> &DeviceMonitor::videoSources() const
+{
+    return m_videoSources;
+}
+
+void DeviceMonitor::handleVideoSource(GstDevice *device)
+{
+    VideoSource source;
+    auto title = gst_device_get_display_name(device);
+    source.title = QString(title);
+    g_free(title);
+    source.device = device;
+
+    auto caps = gst_device_get_caps(device);
+    auto size = gst_caps_get_size(caps);
+    for (size_t i = 0; i < size; i++) {
+        VideoCap videoCap;
+        GstStructure *cap = gst_caps_get_structure(caps, i);
+        const gchar *name = gst_structure_get_name(cap);
+        if (strcmp(name, "video/x-raw")) {
+            // name is owned by the structure; it must not be freed
+            continue;
+        }
+        gst_structure_get(cap, "width", G_TYPE_INT, &videoCap.width, "height", G_TYPE_INT, &videoCap.height, nullptr);
+        const auto framerate = gst_structure_get_value(cap, "framerate");
+        if (GST_VALUE_HOLDS_FRACTION(framerate)) {
+            auto numerator = gst_value_get_fraction_numerator(framerate);
+            auto denominator = gst_value_get_fraction_denominator(framerate);
+            videoCap.framerates += (float)numerator / denominator;
+        }
+        // cap is borrowed from caps; no unref needed
+        source.caps += videoCap;
+    }
+    gst_caps_unref(caps);
+    m_videoSources += source;
+    Q_EMIT videoSourceAdded();
+}
+
+void DeviceMonitor::handleAudioSource(GstDevice *device)
+{
+    AudioSource source;
+    auto title = gst_device_get_display_name(device);
+    source.title = QString(title);
+    g_free(title);
+    source.device = device;
+    m_audioSources += source;
+    Q_EMIT audioSourceAdded();
+}
+
+bool DeviceMonitor::callback(GstMessage *message)
+{
+    GstDevice *device;
+    switch (GST_MESSAGE_TYPE(message)) {
+    case GST_MESSAGE_DEVICE_ADDED: {
+        gst_message_parse_device_added(message, &device);
+        auto name = gst_device_get_display_name(device);
+        auto deviceClassName = gst_device_get_device_class(device);
+        const QString deviceClass = QString(deviceClassName);
+        g_free(deviceClassName);
+        if (deviceClass == QStringLiteral("Video/Source")) {
+            handleVideoSource(device);
+        } else if (deviceClass == QStringLiteral("Audio/Source")) {
+            handleAudioSource(device);
+        }
+        g_free(name);
+        gst_object_unref(device);
+        break;
+    }
+    case GST_MESSAGE_DEVICE_REMOVED: {
+        gst_message_parse_device_removed(message, &device);
+        auto name = gst_device_get_display_name(device);
+        auto deviceClassName = gst_device_get_device_class(device);
+        const QString deviceClass = QString(deviceClassName);
+        g_free(deviceClassName);
+        if (deviceClass == QStringLiteral("Video/Source")) {
+            m_videoSources.erase(std::remove_if(m_videoSources.begin(),
+                                                m_videoSources.end(),
+                                                [name](const VideoSource &d) {
+                                                    return d.title == QString(name);
+                                                }),
+                                 m_videoSources.end());
+            Q_EMIT videoSourceRemoved();
+        } else if (deviceClass == QStringLiteral("Audio/Source")) {
+            m_audioSources.erase(std::remove_if(m_audioSources.begin(),
+                                                m_audioSources.end(),
+                                                [name](const AudioSource &d) {
+                                                    return d.title == QString(name);
+                                                }),
+                                 m_audioSources.end());
+            Q_EMIT audioSourceRemoved();
+        }
+        g_free(name);
+        gst_object_unref(device);
+        break;
+    }
+    default:
+        break;
+    }
+    return G_SOURCE_CONTINUE;
+}
diff --git a/src/call/devicemonitor.h b/src/call/devicemonitor.h new file mode 100644 index 0000000000000000000000000000000000000000..661f2257dcf4c6cada1794fd46b3e8de8251bf77 --- /dev/null +++ b/src/call/devicemonitor.h
@@ -0,0 +1,58 @@
+// SPDX-FileCopyrightText: 2021 Tobias Fella
+// SPDX-License-Identifier: LGPL-2.0-or-later
+
+#pragma once
+
+#include <QObject>
+#include <QString>
+#include <QVector>
+
+#include <gst/gst.h>
+
+struct AudioSource {
+    QString title;
+    GstDevice *device;
+};
+struct VideoCap {
+    int width;
+    int height;
+    QVector<float> framerates;
+};
+
+struct VideoSource {
+    QString title;
+    GstDevice *device;
+    QVector<VideoCap> caps;
+};
+
+class DeviceMonitor : public QObject
+{
+    Q_OBJECT
+
+public:
+    static DeviceMonitor &instance()
+    {
+        static DeviceMonitor _instance;
+        return _instance;
+    }
+
+    const QVector<AudioSource> &audioSources() const;
+    const QVector<VideoSource> &videoSources() const;
+    bool callback(GstMessage *message);
+    void init();
+
+Q_SIGNALS:
+    void videoSourceAdded();
+    void audioSourceAdded();
+
+    void videoSourceRemoved();
+    void audioSourceRemoved();
+
+private:
+    DeviceMonitor();
+    GstDeviceMonitor *m_monitor = nullptr;
+    QVector<AudioSource> m_audioSources;
+    QVector<VideoSource> m_videoSources;
+    void handleVideoSource(GstDevice *device);
+    void handleAudioSource(GstDevice *device);
+};
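Since DeviceMonitor is a singleton emitting plain signals, consumers can track hot-plugged hardware without touching GStreamer directly. A minimal, hypothetical listener (the models in this patch react the same way by resetting themselves):

```cpp
#include <QDebug>
#include <QObject>

#include "call/devicemonitor.h"

// Hypothetical sketch: log camera hot-plug events.
void watchCameras()
{
    QObject::connect(&DeviceMonitor::instance(), &DeviceMonitor::videoSourceAdded, []() {
        qDebug() << "cameras available:" << DeviceMonitor::instance().videoSources().size();
    });
    QObject::connect(&DeviceMonitor::instance(), &DeviceMonitor::videoSourceRemoved, []() {
        qDebug() << "camera removed;" << DeviceMonitor::instance().videoSources().size() << "left";
    });
}
```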
diff --git a/src/call/videodevicesmodel.cpp b/src/call/videodevicesmodel.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ecc754dccbcbace28ad138cd5a1ad46adad68bd5 --- /dev/null +++ b/src/call/videodevicesmodel.cpp
@@ -0,0 +1,197 @@
+// SPDX-FileCopyrightText: 2021 Nheko Contributors
+// SPDX-FileCopyrightText: 2021 Tobias Fella
+// SPDX-FileCopyrightText: 2021 Carl Schwan
+//
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "videodevicesmodel.h"
+
+#include "neochatconfig.h"
+
+#ifdef GSTREAMER_AVAILABLE
+extern "C" {
+#include "gst/gst.h"
+}
+#endif
+
+namespace
+{
+
+using Framerate = std::pair<int, int>;
+
+std::optional<Framerate> getFramerate(const GValue *value)
+{
+    if (GST_VALUE_HOLDS_FRACTION(value)) {
+        gint num = gst_value_get_fraction_numerator(value);
+        gint den = gst_value_get_fraction_denominator(value);
+        return Framerate{num, den};
+    }
+    return std::nullopt;
+}
+
+void addFramerate(QStringList &rates, const Framerate &rate)
+{
+    constexpr double minimumFramerate = 1.0;
+    if (static_cast<double>(rate.first) / rate.second >= minimumFramerate) {
+        rates.push_back(QString::number(rate.first) + QLatin1Char('/') + QString::number(rate.second));
+    }
+}
+
+QPair<int, int> tokenise(QStringView str, QChar delim)
+{
+    QPair<int, int> ret;
+    auto pos = str.indexOf(delim);
+    ret.first = str.left(pos).toInt();
+    ret.second = str.mid(pos + 1).toInt();
+    return ret;
+}
+}
+
+VideoDevicesModel::VideoDevicesModel(QObject *parent)
+    : QAbstractListModel(parent)
+{
+}
+
+QVariant VideoDevicesModel::data(const QModelIndex &index, int role) const
+{
+    if (!index.isValid()) {
+        return {};
+    }
+    const auto row = index.row();
+    switch (role) {
+    case Qt::DisplayRole:
+        return m_videoSources[row].name;
+    case DeviceRole:
+        return QVariant::fromValue(m_videoSources[row]);
+    }
+    return {};
+}
+
+int VideoDevicesModel::rowCount(const QModelIndex &) const
+{
+    return m_videoSources.size();
+}
+
+QHash<int, QByteArray> VideoDevicesModel::roleNames() const
+{
+    QHash<int, QByteArray> roles = QAbstractItemModel::roleNames();
+    roles[DeviceRole] = QByteArrayLiteral("device");
+    return roles;
+}
+
+void VideoDevicesModel::addDevice(GstDevice *device)
+{
+    gchar *_name = gst_device_get_display_name(device);
+    QString name(_name);
+    g_free(_name);
+
+    GstCaps *gstcaps = gst_device_get_caps(device);
+
+    qDebug() << "CallDevices: Video device added:" << name;
+
+    if (!gstcaps) {
+        qDebug() << "Unable to get caps for" << name;
+        return;
+    }
+
+    VideoSource videoSource{name, device, {}};
+    for (size_t i = 0; i < gst_caps_get_size(gstcaps); i++) {
+        GstStructure *structure = gst_caps_get_structure(gstcaps, i);
+        const gchar *_capName = gst_structure_get_name(structure);
+        if (!strcmp(_capName, "video/x-raw")) {
+            gint width, height;
+            if (gst_structure_get(structure, "width", G_TYPE_INT, &width, "height", G_TYPE_INT, &height, nullptr)) {
+                VideoSource::Caps caps;
+                caps.resolution = QString::number(width) + QStringLiteral("x") + QString::number(height);
+                QStringList framerates;
+                const GValue *_framerate = gst_structure_get_value(structure, "framerate");
+                if (GST_VALUE_HOLDS_FRACTION(_framerate)) {
+                    addFramerate(framerates, *getFramerate(_framerate));
+                } else if (GST_VALUE_HOLDS_FRACTION_RANGE(_framerate)) {
+                    addFramerate(framerates, *getFramerate(gst_value_get_fraction_range_min(_framerate)));
+                    addFramerate(framerates, *getFramerate(gst_value_get_fraction_range_max(_framerate)));
+                } else if (GST_VALUE_HOLDS_LIST(_framerate)) {
+                    guint nRates = gst_value_list_get_size(_framerate);
+                    for (guint j = 0; j < nRates; j++) {
+                        const GValue *rate = gst_value_list_get_value(_framerate, j);
+                        if (GST_VALUE_HOLDS_FRACTION(rate)) {
+                            addFramerate(framerates, *getFramerate(rate));
+                        }
+                    }
+                }
+                caps.framerates = framerates;
+                videoSource.caps += caps;
+            }
+        }
+    }
+    gst_caps_unref(gstcaps);
+
+    beginInsertRows({}, m_videoSources.size(), m_videoSources.size());
+    m_videoSources.append(videoSource);
+    endInsertRows();
+}
+
+bool VideoDevicesModel::removeDevice(GstDevice *device, bool changed)
+{
+    for (int i = 0; i < m_videoSources.size(); i++) {
+        if (m_videoSources[i].device == device) {
+            beginRemoveRows(QModelIndex(), i, i);
+            m_videoSources.removeAt(i);
+            endRemoveRows();
+            return true;
+        }
+    }
+    return false;
+}
+
+GstDevice *VideoDevicesModel::currentDevice(QPair<int, int> &resolution, QPair<int, int> &framerate) const
+{
+    const auto config = NeoChatConfig::self();
+    if (auto s = getVideoSource(config->camera()); s) {
+        qDebug() << "WebRTC: camera:" << config->camera();
+        resolution = tokenise(config->cameraResolution(), 'x');
+        framerate = tokenise(config->cameraFramerate(), '/');
+        qDebug() << "WebRTC: camera resolution:" << resolution.first << 'x' << resolution.second;
+        qDebug() << "WebRTC: camera frame rate:" << framerate.first << '/' << framerate.second;
+        return s->device;
+    } else {
+        qCritical() << "WebRTC: unknown camera:" << config->camera();
+        return nullptr;
+    }
+}
+
+void VideoDevicesModel::setDefaultDevice() const
+{
+    if (NeoChatConfig::camera().isEmpty() && !m_videoSources.isEmpty()) {
+        const VideoSource &camera = m_videoSources.front();
+        NeoChatConfig::setCamera(camera.name);
+        NeoChatConfig::setCameraResolution(camera.caps.front().resolution);
+        NeoChatConfig::setCameraFramerate(camera.caps.front().framerates.front());
+    }
+}
+
+std::optional<VideoSource> VideoDevicesModel::getVideoSource(const QString &cameraName) const
+{
+    for (const auto &videoSource : m_videoSources) {
+        if (videoSource.name == cameraName) {
+            return videoSource;
+        }
+    }
+    return std::nullopt;
+}
+
+QStringList VideoDevicesModel::resolutions(const QString &cameraName) const
+{
+    QStringList ret;
+    if (auto s = getVideoSource(cameraName); s) {
+        ret.reserve(s->caps.size());
+        for (const auto &c : s->caps) {
+            ret.push_back(c.resolution);
+        }
+    }
+    return ret;
+}
+
+bool VideoDevicesModel::hasCamera() const
+{
+    return !m_videoSources.isEmpty();
+}
diff --git a/src/call/videodevicesmodel.h b/src/call/videodevicesmodel.h new file mode 100644 index 0000000000000000000000000000000000000000..89e63d015d2f6529bde971b1fe34f6415ebc006a --- /dev/null +++ b/src/call/videodevicesmodel.h
@@ -0,0 +1,54 @@
+// SPDX-FileCopyrightText: 2021 Nheko Contributors
+// SPDX-FileCopyrightText: 2021 Tobias Fella
+// SPDX-FileCopyrightText: 2021 Carl Schwan
+//
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#pragma once
+
+#include <QAbstractListModel>
+#include <QStringList>
+#include <optional>
+
+typedef struct _GstDevice GstDevice;
+
+struct VideoSource {
+    struct Caps {
+        QString resolution;
+        QStringList framerates;
+    };
+
+    QString name;
+    GstDevice *device;
+    QList<Caps> caps;
+};
+Q_DECLARE_METATYPE(VideoSource);
+
+class VideoDevicesModel : public QAbstractListModel
+{
+    Q_OBJECT
+
+public:
+    enum RoleNames {
+        DeviceRole = Qt::UserRole + 1,
+    };
+
+    VideoDevicesModel(QObject *parent = nullptr);
+
+    int rowCount(const QModelIndex &parent = QModelIndex()) const override;
+    QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override;
+    QHash<int, QByteArray> roleNames() const override;
+
+    void addDevice(GstDevice *device);
+    bool removeDevice(GstDevice *device, bool changed);
+
+    std::optional<VideoSource> getVideoSource(const QString &cameraName) const;
+    QStringList resolutions(const QString &cameraName) const;
+    void setDefaultDevice() const;
+
+    bool hasCamera() const;
+    GstDevice *currentDevice(QPair<int, int> &resolution, QPair<int, int> &framerate) const;
+
+private:
+    QList<VideoSource> m_videoSources;
+};
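The `tokenise()` helper above splits the persisted "1920x1080" / "30/1" strings back into integer pairs; the original `right()`-based length arithmetic read one character too many, which the `mid()`-based version fixes. A quick standalone check (a mirror copy of the fixed helper, assuming well-formed input):

```cpp
#include <QDebug>
#include <QPair>
#include <QStringView>

// Mirror of the fixed tokenise(); assumes the delimiter is present.
static QPair<int, int> tokenise(QStringView str, QChar delim)
{
    QPair<int, int> ret;
    auto pos = str.indexOf(delim);
    ret.first = str.left(pos).toInt();
    ret.second = str.mid(pos + 1).toInt();
    return ret;
}

int main()
{
    qDebug() << tokenise(u"1920x1080", QLatin1Char('x')); // QPair(1920,1080)
    qDebug() << tokenise(u"30/1", QLatin1Char('/'));      // QPair(30,1)
    return 0;
}
```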
+#include "videosources.h" + +#include + +//#include "pipelinemanager.h" +#include +#include + +#include "devicemonitor.h" +#include "neochatconfig.h" + +int VideoSources::rowCount(const QModelIndex &parent) const +{ + Q_UNUSED(parent); + return DeviceMonitor::instance().videoSources().size(); +} + +QVariant VideoSources::data(const QModelIndex &index, int role) const +{ + if (index.row() >= DeviceMonitor::instance().videoSources().size()) { + return QVariant(QStringLiteral("DEADBEEF")); + } + if (role == TitleRole) { + return DeviceMonitor::instance().videoSources()[index.row()].title; + } + return QVariant(); +} + +QHash VideoSources::roleNames() const +{ + return { + {TitleRole, "title"}, + }; +} + +VideoSources::VideoSources() + : QAbstractListModel() +{ + connect(&DeviceMonitor::instance(), &DeviceMonitor::videoSourceAdded, this, [this]() { + beginResetModel(); + endResetModel(); + Q_EMIT currentIndexChanged(); + }); + connect(&DeviceMonitor::instance(), &DeviceMonitor::videoSourceRemoved, this, [this]() { + beginResetModel(); + endResetModel(); + Q_EMIT currentIndexChanged(); + }); +} + +void VideoSources::foo(int index) +{ + auto device = DeviceMonitor::instance().videoSources()[index].device; + + auto bin = gst_bin_new(nullptr); + + GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr); + // GstElement *videorate = gst_element_factory_make("videorate", nullptr); + + GstElement *filter = gst_element_factory_make("capsfilter", nullptr); + GstCaps *caps = gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, 1920, "height", G_TYPE_INT, 1080, "framerate", GST_TYPE_FRACTION, 5, 1, nullptr); + g_object_set(filter, "caps", caps, nullptr); + gst_caps_unref(caps); + GstElement *deviceElement = gst_device_create_element(device, nullptr); + + gst_bin_add_many(GST_BIN(bin), deviceElement, videoconvert, filter, nullptr); + gst_element_link_many(deviceElement, videoconvert, filter, nullptr); + + // GstPad *pad = gst_element_get_static_pad(filter, "src"); + GstPad *pad = gst_element_get_static_pad(filter, "src"); + auto ghostpad = gst_ghost_pad_new("src", pad); + gst_element_add_pad(bin, ghostpad); + gst_object_unref(pad); + // PipelineManager::instance().add(bin); +} + +const VideoSource *VideoSources::currentDevice() const +{ + const auto config = NeoChatConfig::self(); + const QString name = config->camera(); + for (const auto &videoSource : DeviceMonitor::instance().videoSources()) { + if (videoSource.title == name) { + qDebug() << "WebRTC: camera:" << name; + return &videoSource; + } + } + if (DeviceMonitor::instance().videoSources().length() == 0) { + return nullptr; + } + return &DeviceMonitor::instance().videoSources()[0]; +} + +void VideoSources::setCurrentIndex(int index) +{ + if (DeviceMonitor::instance().videoSources().size() == 0) { + return; + } + NeoChatConfig::setCamera(DeviceMonitor::instance().videoSources()[index].title); + NeoChatConfig::self()->save(); + + setCapsIndex(0); +} + +int VideoSources::currentIndex() const +{ + const auto config = NeoChatConfig::self(); + const QString name = config->camera(); + for (auto i = 0; i < DeviceMonitor::instance().videoSources().size(); i++) { + if (DeviceMonitor::instance().videoSources()[i].title == name) { + return i; + } + } + return 0; +} + +QStringList VideoSources::caps(int index) const +{ + if (index >= DeviceMonitor::instance().videoSources().size()) { + return QStringList(); + } + const auto &caps = DeviceMonitor::instance().videoSources()[index].caps; + QStringList strings; + for (const auto &cap : 
+
+QStringList VideoSources::caps(int index) const
+{
+    if (index >= DeviceMonitor::instance().videoSources().size()) {
+        return QStringList();
+    }
+    const auto &caps = DeviceMonitor::instance().videoSources()[index].caps;
+    QStringList strings;
+    for (const auto &cap : caps) {
+        strings += QStringLiteral("%1x%2, %3 FPS").arg(cap.width).arg(cap.height).arg(cap.framerates.back());
+    }
+    return strings;
+}
+
+void VideoSources::setCapsIndex(int index)
+{
+    NeoChatConfig::self()->setCameraCaps(index);
+    NeoChatConfig::self()->save();
+    Q_EMIT capsIndexChanged();
+}
+
+int VideoSources::capsIndex() const
+{
+    return NeoChatConfig::self()->cameraCaps();
+}
diff --git a/src/call/videosources.h b/src/call/videosources.h new file mode 100644 index 0000000000000000000000000000000000000000..00a6bac050fdb1c40d06fb03cc2827ee3b684ee5 --- /dev/null +++ b/src/call/videosources.h
@@ -0,0 +1,51 @@
+// SPDX-FileCopyrightText: 2021 Tobias Fella
+// SPDX-License-Identifier: LGPL-2.0-or-later
+
+#pragma once
+
+#include <QAbstractListModel>
+
+#include <QObject>
+
+#include "devicemonitor.h"
+
+class VideoSources : public QAbstractListModel
+{
+    Q_OBJECT
+    Q_PROPERTY(int currentIndex READ currentIndex WRITE setCurrentIndex NOTIFY currentIndexChanged)
+    Q_PROPERTY(int capsIndex READ capsIndex WRITE setCapsIndex NOTIFY capsIndexChanged)
+public:
+    enum Roles {
+        TitleRole = Qt::UserRole + 1,
+        DeviceRole,
+    };
+
+    static VideoSources &instance()
+    {
+        static VideoSources _instance;
+        return _instance;
+    }
+
+    int rowCount(const QModelIndex &parent = QModelIndex()) const override;
+    QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override;
+    QHash<int, QByteArray> roleNames() const override;
+
+    Q_INVOKABLE void foo(int index);
+
+    const VideoSource *currentDevice() const;
+
+    void setCurrentIndex(int index);
+    int currentIndex() const;
+
+    void setCapsIndex(int index);
+    int capsIndex() const;
+
+    Q_INVOKABLE QStringList caps(int index) const;
+
+Q_SIGNALS:
+    void currentIndexChanged();
+    void capsIndexChanged();
+
+private:
+    VideoSources();
+};
diff --git a/src/controller.cpp b/src/controller.cpp index 92defd35d1fa93ebd66e3d0af1344934fd5178bf..56ec963623bf0e75069f631ecef2a4182485747f 100644 --- a/src/controller.cpp +++ b/src/controller.cpp
@@ -701,3 +701,12 @@ bool Controller::hasWindowSystem() const
     return false;
 #endif
 }
+
+bool Controller::callsSupported() const
+{
+#ifdef GSTREAMER_AVAILABLE
+    return true;
+#else
+    return false;
+#endif
+}
diff --git a/src/controller.h b/src/controller.h index eef1342fd88730089eda7eb5118f5e6e6f7a07fb..8249085e9d308e0573f1dc3e0f185c9a4aed3474 100644 --- a/src/controller.h +++ b/src/controller.h
@@ -40,6 +40,7 @@ class Controller : public QObject
     Q_PROPERTY(bool supportSystemTray READ supportSystemTray CONSTANT)
     Q_PROPERTY(bool hasWindowSystem READ hasWindowSystem CONSTANT)
     Q_PROPERTY(bool isOnline READ isOnline NOTIFY isOnlineChanged)
+    Q_PROPERTY(bool callsSupported READ callsSupported CONSTANT)
 
 public:
     static Controller &instance();
@@ -95,6 +96,8 @@ public:
     Q_INVOKABLE void setBlur(QQuickItem *item, bool blur);
     Q_INVOKABLE void raiseWindow(QWindow *window);
 
+    bool callsSupported() const;
+
 private:
     explicit Controller(QObject *parent = nullptr);
     ~Controller() override;
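`Controller.callsSupported` lets both QML and C++ degrade gracefully when NeoChat was built without GStreamer. A hypothetical C++ call site (the surrounding function is illustrative, not part of the patch):

```cpp
#include "controller.h"

// Hypothetical guard before any call-related action.
void maybeStartCall()
{
    if (!Controller::instance().callsSupported()) {
        // Built without GSTREAMER_AVAILABLE; hide or disable the call UI.
        return;
    }
    // ...proceed to CallManager::startCall(...) here...
}
```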
1, 0, "RoomMessageEvent", "ENUM"); qmlRegisterUncreatableType("org.kde.neochat", 1, 0, "NeoChatRoomType", "ENUM"); qmlRegisterUncreatableType("org.kde.neochat", 1, 0, "UserType", "ENUM"); - qRegisterMetaType("User*"); qRegisterMetaType("const User*"); qRegisterMetaType("const Quotient::User*"); @@ -223,6 +229,13 @@ int main(int argc, char *argv[]) qRegisterMetaType("GetRoomEventsJob*"); qRegisterMetaType("QMimeType"); +#ifdef GSTREAMER_AVAILABLE + qmlRegisterSingletonInstance("org.kde.neochat", 1, 0, "AudioSources", &AudioSources::instance()); + qmlRegisterSingletonInstance("org.kde.neochat", 1, 0, "VideoSources", &VideoSources::instance()); + qmlRegisterSingletonInstance("org.kde.neochat", 1, 0, "CallManager", &CallManager::instance()); + qmlRegisterUncreatableType("org.kde.neochat", 1, 0, "CallSession", "ENUM"); +#endif + #ifdef HAVE_WINDOWSYSTEM qmlRegisterSingletonType("org.kde.kwindowsystem.private", 1, 0, "KWindowSystem", [](QQmlEngine *, QJSEngine *) -> QObject * { return KWindowSystem::self(); diff --git a/src/messageeventmodel.cpp b/src/messageeventmodel.cpp index c8b707b95c0d53e31527e64e8b320a1a3fd68939..876c9cd6b635d6c8a1ba488a6d794427e6f5cda0 100644 --- a/src/messageeventmodel.cpp +++ b/src/messageeventmodel.cpp @@ -478,6 +478,9 @@ QVariant MessageEventModel::data(const QModelIndex &idx, int role) const return "message"; } + if (evt.matrixType() == "m.call.invite") { + return "call-invite"; + } if (is(evt)) { return "sticker"; } diff --git a/src/neochatconfig.kcfg b/src/neochatconfig.kcfg index 1b0fe52a2e8a56a49d2cedda5c31100b7924ceb7..57724e0210ee3d30acd0dc66d3f8619d85d8b593 100644 --- a/src/neochatconfig.kcfg +++ b/src/neochatconfig.kcfg @@ -91,5 +91,19 @@ true + + + + + + + + + + + + + + diff --git a/src/neochatroom.cpp b/src/neochatroom.cpp index 08fcd5e53488810551f1a5378a80a4753352b440..79219f1ff6c1f70041597a98e35fc60a1227f78d 100644 --- a/src/neochatroom.cpp +++ b/src/neochatroom.cpp @@ -15,6 +15,9 @@ #include #include +#ifdef GSTREAMER_AVAILABLE +#include "call/callmanager.h" +#endif #include "connection.h" #include "csapi/account-data.h" #include "csapi/content-repo.h" @@ -73,6 +76,11 @@ NeoChatRoom::NeoChatRoom(Connection *connection, QString roomId, JoinState joinS } NotificationsManager::instance().postInviteNotification(this, htmlSafeDisplayName(), htmlSafeMemberName(senderId), avatar_image); }); +#ifdef GSTREAMER_AVAILABLE + connect(this, &Room::callEvent, this, [=](Room *room, const RoomEvent *event) { + CallManager::instance().handleCallEvent(static_cast(room), event); + }); +#endif } void NeoChatRoom::uploadFile(const QUrl &url, const QString &body) diff --git a/src/notificationsmanager.cpp b/src/notificationsmanager.cpp index 382a802529e6718a8d94c2b334320f6a6f609529..ed6cd6b6f26c37fdd78b3d4c8a4e0879a8cc3e51 100644 --- a/src/notificationsmanager.cpp +++ b/src/notificationsmanager.cpp @@ -16,6 +16,10 @@ #endif #include +#ifdef GSTREAMER_AVAILABLE +#include "call/callmanager.h" +#endif + #include "controller.h" #include "neochatconfig.h" #include "roommanager.h" @@ -124,3 +128,39 @@ void NotificationsManager::clearInvitationNotification(const QString &roomId) m_invitations[roomId]->close(); } } + +#ifdef GSTREAMER_AVAILABLE +void NotificationsManager::postCallInviteNotification(NeoChatRoom *room, const QString &roomName, const QString &sender, const QImage &icon, bool video) +{ + if (!NeoChatConfig::self()->showNotifications()) { + return; + } + + QPixmap img; + img.convertFromImage(icon); + KNotification *notification = new KNotification("message"); + + if 
+    if (sender == roomName) {
+        notification->setTitle(sender);
+    } else {
+        notification->setTitle(i18n("%1 (%2)", sender, roomName));
+    }
+
+    notification->setText(video ? i18n("%1 is inviting you to a video call", sender) : i18n("%1 is inviting you to a voice call", sender));
+    notification->setPixmap(img);
+    notification->setDefaultAction(i18n("Open NeoChat in this room"));
+    connect(notification, &KNotification::defaultActivated, this, [=]() {
+        RoomManager::instance().enterRoom(room);
+        Q_EMIT Controller::instance().showWindow();
+    });
+    notification->setActions({i18n("Accept"), i18n("Decline")});
+    connect(notification, &KNotification::action1Activated, this, [=]() {
+        CallManager::instance().acceptCall();
+    });
+    connect(notification, &KNotification::action2Activated, this, [=]() {
+        CallManager::instance().hangupCall();
+    });
+    notification->sendEvent();
+    m_notifications.insert(room->id(), notification);
+}
+#endif
diff --git a/src/notificationsmanager.h b/src/notificationsmanager.h index d3c8611081b01030266a3895272a0b9087827a7e..7ec9fb6925de20b1d8bd7ad97b27b20bf636edce 100644 --- a/src/notificationsmanager.h +++ b/src/notificationsmanager.h
@@ -22,6 +22,7 @@ public:
     Q_INVOKABLE void postNotification(NeoChatRoom *room, const QString &sender, const QString &text, const QImage &icon, const QString &replyEventId, bool canReply);
     void postInviteNotification(NeoChatRoom *room, const QString &title, const QString &sender, const QImage &icon);
+    void postCallInviteNotification(NeoChatRoom *room, const QString &roomName, const QString &sender, const QImage &icon, bool video);
 
     void clearInvitationNotification(const QString &roomId);
diff --git a/src/roomlistmodel.cpp b/src/roomlistmodel.cpp index cf8108daca483e6831ed1c7d9ff05dfc917b738a..e5838a46cce16b1afaed5aa06dee4dfa9ce03bbd 100644 --- a/src/roomlistmodel.cpp +++ b/src/roomlistmodel.cpp
@@ -235,12 +235,23 @@ void RoomListModel::handleNotifications()
             } else {
                 avatar_image = room->avatar(128);
             }
-            NotificationsManager::instance().postNotification(dynamic_cast<NeoChatRoom *>(room),
-                                                              sender->displayname(room),
-                                                              notification["event"].toObject()["content"].toObject()["body"].toString(),
-                                                              avatar_image,
-                                                              notification["event"].toObject()["event_id"].toString(),
-                                                              true);
+            if (notification["event"]["type"].toString() == QStringLiteral("m.call.invite")) {
+#ifdef GSTREAMER_AVAILABLE
+                NotificationsManager::instance().postCallInviteNotification(
+                    dynamic_cast<NeoChatRoom *>(room),
+                    room->displayName(),
+                    sender->displayname(room),
+                    avatar_image,
+                    notification["event"]["content"]["offer"]["sdp"].toString().contains(QStringLiteral("video")));
+#endif
+            } else {
+                NotificationsManager::instance().postNotification(dynamic_cast<NeoChatRoom *>(room),
+                                                                  sender->displayname(room),
+                                                                  notification["event"].toObject()["content"].toObject()["body"].toString(),
+                                                                  avatar_image,
+                                                                  notification["event"].toObject()["event_id"].toString(),
+                                                                  true);
+            }
         }
     }
 });
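One caveat in the notification path above: whether the invite is a video call is inferred by substring-matching "video" anywhere in the SDP blob, which can false-positive (the word can appear in attribute lines of an audio-only offer). A stricter, hypothetical helper would look for an `m=video` media-section line instead:

```cpp
#include <QString>
#include <QStringList>

// Hypothetical alternative check: SDP media sections start with "m=<media> ...".
static bool sdpOffersVideo(const QString &sdp)
{
    const QStringList lines = sdp.split(QLatin1Char('\n'));
    for (const QString &line : lines) {
        if (line.trimmed().startsWith(QLatin1String("m=video"))) {
            return true;
        }
    }
    return false;
}
```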