Nicolas Werner 1 year ago
parent c25d0c6b2f
commit de8522a185
No known key found for this signature in database
GPG Key ID: C8D75E610773F2D9
  1. src/GridImagePackModel.cpp (2)
  2. src/JdenticonProvider.h (4)
  3. src/MainWindow.cpp (5)
  4. src/MatrixClient.cpp (2)
  5. src/MxcImageProvider.h (4)
  6. src/SingleImagePackModel.cpp (2)
  7. src/main.cpp (14)
  8. src/timeline/CommunitiesModel.cpp (1)
  9. src/timeline/EventStore.cpp (4)
  10. src/timeline/EventStore.h (6)
  11. src/timeline/InputBar.cpp (109)
  12. src/timeline/RoomlistModel.cpp (1)
  13. src/timeline/TimelineModel.cpp (1)
  14. src/ui/MxcMediaProxy.cpp (22)
  15. src/ui/MxcMediaProxy.h (4)
  16. src/ui/NhekoGlobalObject.cpp (21)
  17. src/ui/Theme.cpp (1)
  18. src/voip/CallManager.cpp (10)
  19. src/voip/ScreenCastPortal.cpp (1)
  20. src/voip/WebRTCSession.cpp (1)

@@ -13,7 +13,6 @@
#include "Cache_p.h"
#include "emoji/Provider.h"
QString
emoji::categoryToName(emoji::Emoji::Category cat)
{
@@ -69,7 +68,6 @@ GridImagePackModel::GridImagePackModel(const std::string &roomId, bool stickers,
, room_id(roomId)
, columns(stickers ? 3 : 7)
{
if (!stickers) {
for (const auto &category : {
emoji::Emoji::Category::People,

@@ -52,9 +52,7 @@ public:
QImage m_pixmap;
};
class JdenticonProvider
:
public QQuickAsyncImageProvider
class JdenticonProvider : public QQuickAsyncImageProvider
{
Q_OBJECT

@@ -5,9 +5,9 @@
#include <QApplication>
#include <QMessageBox>
#include <mtx/events/collections.hpp>
#include <mtx/requests.hpp>
#include <mtx/responses/login.hpp>
#include <mtx/events/collections.hpp>
#include "AliasEditModel.h"
#include "BlurhashProvider.h"
@@ -132,8 +132,6 @@ MainWindow::MainWindow(QWindow *parent)
void
MainWindow::registerQmlTypes()
{
qmlRegisterUncreatableMetaObject(qml_mtx_events::staticMetaObject,
"im.nheko",
1,
@@ -253,7 +251,6 @@ MainWindow::registerQmlTypes()
qmlRegisterSingletonInstance("im.nheko", 1, 0, "Settings", userSettings_.data());
qmlRegisterUncreatableType<FilteredCommunitiesModel>(
"im.nheko",
1,

@@ -15,8 +15,6 @@
#include "nlohmann/json.hpp"
#include <mtx/responses.hpp>
namespace http {
mtx::http::Client *

@@ -70,9 +70,7 @@ public:
QImage m_image;
};
class MxcImageProvider
:
public QQuickAsyncImageProvider
class MxcImageProvider : public QQuickAsyncImageProvider
{
Q_OBJECT
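
(Illustrative sketch, not part of this commit: both JdenticonProvider and MxcImageProvider above derive from QQuickAsyncImageProvider; the general shape of that pattern, with made-up class names, looks roughly like this.)

#include <QImage>
#include <QQuickImageProvider> // declares QQuickAsyncImageProvider, QQuickImageResponse, QQuickTextureFactory

class ExampleImageResponse : public QQuickImageResponse
{
public:
    explicit ExampleImageResponse(const QSize &requestedSize)
    {
        // A real provider renders off the GUI thread; this sketch just fills a
        // placeholder and signals completion on the next event-loop iteration,
        // after the engine has connected to finished().
        image_ = QImage(requestedSize.isValid() ? requestedSize : QSize(64, 64),
                        QImage::Format_ARGB32);
        image_.fill(Qt::gray);
        QMetaObject::invokeMethod(
          this, [this] { emit finished(); }, Qt::QueuedConnection);
    }

    QQuickTextureFactory *textureFactory() const override
    {
        return QQuickTextureFactory::textureFactoryForImage(image_);
    }

private:
    QImage image_;
};

class ExampleImageProvider : public QQuickAsyncImageProvider
{
public:
    QQuickImageResponse *requestImageResponse(const QString & /*id*/,
                                              const QSize &requestedSize) override
    {
        return new ExampleImageResponse(requestedSize);
    }
};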

@@ -20,7 +20,6 @@
#include "timeline/Permissions.h"
#include "timeline/TimelineModel.h"
SingleImagePackModel::SingleImagePackModel(ImagePackInfo pack_, QObject *parent)
: QAbstractListModel(parent)
, roomid_(std::move(pack_.source_room))
@@ -29,7 +28,6 @@ SingleImagePackModel::SingleImagePackModel(ImagePackInfo pack_, QObject *parent)
, pack(std::move(pack_.pack))
, fromSpace_(pack_.from_space)
{
if (!pack.pack)
pack.pack = mtx::events::msc2545::ImagePack::PackDescription{};

@@ -330,15 +330,17 @@ main(int argc, char *argv[])
QLocale::setDefault(QLocale(QLocale::English, QLocale::UnitedKingdom));
QTranslator qtTranslator;
if(qtTranslator.load(QLocale(),
QStringLiteral("qt"),
QStringLiteral("_"),
QLibraryInfo::path(QLibraryInfo::TranslationsPath)))
if (qtTranslator.load(QLocale(),
QStringLiteral("qt"),
QStringLiteral("_"),
QLibraryInfo::path(QLibraryInfo::TranslationsPath)))
app.installTranslator(&qtTranslator);
QTranslator appTranslator;
if(appTranslator.load(
QLocale(), QStringLiteral("nheko"), QStringLiteral("_"), QStringLiteral(":/translations")))
if (appTranslator.load(QLocale(),
QStringLiteral("nheko"),
QStringLiteral("_"),
QStringLiteral(":/translations")))
app.installTranslator(&appTranslator);
MainWindow w;
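
(Illustrative sketch, not part of this commit: the Qt 6 translator setup that the hunk above reformats, reduced to a standalone main(). QLibraryInfo::path() is the Qt 6 replacement for the Qt 5 location() call; the "qt"/"nheko" base names and the ":/translations" resource path are taken from the hunk.)

#include <QApplication>
#include <QLibraryInfo>
#include <QLocale>
#include <QTranslator>

int
main(int argc, char *argv[])
{
    QApplication app(argc, argv);

    // Qt's own translations, shipped with the Qt installation.
    QTranslator qtTranslator;
    if (qtTranslator.load(QLocale(),
                          QStringLiteral("qt"),
                          QStringLiteral("_"),
                          QLibraryInfo::path(QLibraryInfo::TranslationsPath)))
        app.installTranslator(&qtTranslator);

    // Application translations bundled as Qt resources.
    QTranslator appTranslator;
    if (appTranslator.load(QLocale(),
                           QStringLiteral("nheko"),
                           QStringLiteral("_"),
                           QStringLiteral(":/translations")))
        app.installTranslator(&appTranslator);

    return app.exec();
}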

@@ -17,7 +17,6 @@
#include "Utils.h"
#include "timeline/TimelineModel.h"
CommunitiesModel::CommunitiesModel(QObject *parent)
: QAbstractListModel(parent)
, hiddenTagIds_{UserSettings::instance()->hiddenTags()}

@@ -18,7 +18,6 @@
#include "UserSettingsPage.h"
#include "Utils.h"
QCache<EventStore::IdIndex, olm::DecryptionResult> EventStore::decryptedEvents_{1000};
QCache<EventStore::IdIndex, mtx::events::collections::TimelineEvents> EventStore::events_by_id_{
1000};
@@ -27,7 +26,6 @@ QCache<EventStore::Index, mtx::events::collections::TimelineEvents> EventStore::
EventStore::EventStore(std::string room_id, QObject *)
: room_id_(std::move(room_id))
{
auto range = cache::client()->getTimelineRange(room_id_);
if (range) {
@@ -289,7 +287,7 @@ EventStore::EventStore(std::string room_id, QObject *)
}
void
EventStore::addPending(const mtx::events::collections::TimelineEvents& event)
EventStore::addPending(const mtx::events::collections::TimelineEvents &event)
{
if (this->thread() != QThread::currentThread())
nhlog::db()->warn("{} called from a different thread!", __func__);

@@ -11,10 +11,10 @@
#include <QObject>
#include <QVariant>
#include <mtx/common.hpp>
#include <mtx/events/collections.hpp>
#include <mtx/responses/messages.hpp>
#include <mtx/responses/sync.hpp>
#include <mtx/common.hpp>
#include "Reaction.h"
#include "encryption/Olm.h"
@@ -107,7 +107,7 @@ signals:
void newEncryptedImage(mtx::crypto::EncryptedFile encryptionInfo);
void eventFetched(std::string id,
std::string relatedTo,
const mtx::events::collections::TimelineEvents& timeline);
const mtx::events::collections::TimelineEvents &timeline);
void oldMessagesRetrieved(const mtx::responses::Messages &);
void fetchedMore();
@@ -119,7 +119,7 @@ signals:
void updateFlowEventId(std::string event_id);
public slots:
void addPending(const mtx::events::collections::TimelineEvents& event);
void addPending(const mtx::events::collections::TimelineEvents &event);
void receivedSessionKey(const std::string &session_id);
void clearTimeline();
void enableKeyRequests(bool suppressKeyRequests_);

@@ -989,56 +989,58 @@ MediaUpload::MediaUpload(std::unique_ptr<QIODevice> source_,
blurhash_ =
QString::fromStdString(blurhash::encode(data_.data(), img.width(), img.height(), 4, 3));
} else if (mimeClass_ == u"video" || mimeClass_ == u"audio") {
auto mediaPlayer = new QMediaPlayer( this);
auto mediaPlayer = new QMediaPlayer(this);
mediaPlayer->setAudioOutput(nullptr);
if (mimeClass_ == u"video") {
auto newSurface = new QVideoSink(this);
connect(
newSurface, &QVideoSink::videoFrameChanged, this, [this, mediaPlayer](const QVideoFrame& frame) {
QImage img = frame.toImage();
if (img.size().isEmpty())
return;
connect(newSurface,
&QVideoSink::videoFrameChanged,
this,
[this, mediaPlayer](const QVideoFrame &frame) {
QImage img = frame.toImage();
if (img.size().isEmpty())
return;
mediaPlayer->stop();
mediaPlayer->stop();
auto orientation = mediaPlayer->metaData().value(QMediaMetaData::Orientation).toInt();
if (orientation == 90 || orientation == 270 || orientation == 180) {
img =
img.transformed(QTransform().rotate(orientation), Qt::SmoothTransformation);
}
auto orientation =
mediaPlayer->metaData().value(QMediaMetaData::Orientation).toInt();
if (orientation == 90 || orientation == 270 || orientation == 180) {
img = img.transformed(QTransform().rotate(orientation),
Qt::SmoothTransformation);
}
nhlog::ui()->debug("Got image {}x{}", img.width(), img.height());
nhlog::ui()->debug("Got image {}x{}", img.width(), img.height());
this->setThumbnail(img);
this->setThumbnail(img);
if (!dimensions_.isValid())
this->dimensions_ = img.size();
if (!dimensions_.isValid())
this->dimensions_ = img.size();
if (img.height() > 200 && img.width() > 360)
img = img.scaled(360, 200, Qt::KeepAspectRatioByExpanding);
std::vector<unsigned char> data_;
for (int y = 0; y < img.height(); y++) {
for (int x = 0; x < img.width(); x++) {
auto p = img.pixel(x, y);
data_.push_back(static_cast<unsigned char>(qRed(p)));
data_.push_back(static_cast<unsigned char>(qGreen(p)));
data_.push_back(static_cast<unsigned char>(qBlue(p)));
}
}
blurhash_ = QString::fromStdString(
blurhash::encode(data_.data(), img.width(), img.height(), 4, 3));
});
if (img.height() > 200 && img.width() > 360)
img = img.scaled(360, 200, Qt::KeepAspectRatioByExpanding);
std::vector<unsigned char> data_;
for (int y = 0; y < img.height(); y++) {
for (int x = 0; x < img.width(); x++) {
auto p = img.pixel(x, y);
data_.push_back(static_cast<unsigned char>(qRed(p)));
data_.push_back(static_cast<unsigned char>(qGreen(p)));
data_.push_back(static_cast<unsigned char>(qBlue(p)));
}
}
blurhash_ = QString::fromStdString(
blurhash::encode(data_.data(), img.width(), img.height(), 4, 3));
});
mediaPlayer->setVideoOutput(newSurface);
}
connect(mediaPlayer,
&QMediaPlayer::errorOccurred,
&QMediaPlayer::errorOccurred,
this,
[](QMediaPlayer::Error error, QString errorString) {
nhlog::ui()->debug("Media player error {} and errorStr {}",
error,
errorString.toStdString());
nhlog::ui()->debug(
"Media player error {} and errorStr {}", error, errorString.toStdString());
});
connect(mediaPlayer,
&QMediaPlayer::mediaStatusChanged,
@@ -1046,25 +1048,22 @@ MediaUpload::MediaUpload(std::unique_ptr<QIODevice> source_,
nhlog::ui()->debug(
"Media player status {} and error {}", status, mediaPlayer->error());
});
connect(mediaPlayer,
&QMediaPlayer::metaDataChanged,
this,
[this, mediaPlayer]() {
nhlog::ui()->debug("Got metadata {}");
if (mediaPlayer->duration() > 0)
this->duration_ = mediaPlayer->duration();
auto dimensions = mediaPlayer->metaData().value(QMediaMetaData::Resolution).toSize();
if (!dimensions.isEmpty()) {
dimensions_ = dimensions;
auto orientation =
mediaPlayer->metaData().value(QMediaMetaData::Orientation).toInt();
if (orientation == 90 || orientation == 270) {
dimensions_.transpose();
}
}
});
connect(mediaPlayer, &QMediaPlayer::metaDataChanged, this, [this, mediaPlayer]() {
nhlog::ui()->debug("Got metadata {}");
if (mediaPlayer->duration() > 0)
this->duration_ = mediaPlayer->duration();
auto dimensions = mediaPlayer->metaData().value(QMediaMetaData::Resolution).toSize();
if (!dimensions.isEmpty()) {
dimensions_ = dimensions;
auto orientation =
mediaPlayer->metaData().value(QMediaMetaData::Orientation).toInt();
if (orientation == 90 || orientation == 270) {
dimensions_.transpose();
}
}
});
connect(
mediaPlayer, &QMediaPlayer::durationChanged, this, [this, mediaPlayer](qint64 duration) {
if (duration > 0) {
@@ -1077,8 +1076,8 @@ MediaUpload::MediaUpload(std::unique_ptr<QIODevice> source_,
auto originalFile = qobject_cast<QFile *>(source.get());
mediaPlayer->setSourceDevice(source.get(),
QUrl(originalFile ? originalFile->fileName() : originalFilename_));
mediaPlayer->setSourceDevice(
source.get(), QUrl(originalFile ? originalFile->fileName() : originalFilename_));
mediaPlayer->play();
}
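
(Illustrative sketch, not part of this commit: the blurhash computation from the video-thumbnail lambda above, pulled out into a standalone helper. It assumes nheko's bundled blurhash::encode(data, width, height, xComponents, yComponents) helper and its header name.)

#include <QColor>
#include <QImage>
#include <QString>

#include <cstddef>
#include <vector>

#include "blurhash.hpp" // assumed header name for the bundled blurhash encoder

static QString
thumbnailBlurhash(QImage img)
{
    // Cap the working size, matching the 360x200 bound used in the lambda above.
    if (img.height() > 200 && img.width() > 360)
        img = img.scaled(360, 200, Qt::KeepAspectRatioByExpanding);

    // blurhash::encode expects tightly packed 8-bit RGB triples.
    std::vector<unsigned char> rgb;
    rgb.reserve(static_cast<std::size_t>(img.width()) * img.height() * 3);
    for (int y = 0; y < img.height(); y++) {
        for (int x = 0; x < img.width(); x++) {
            auto p = img.pixel(x, y);
            rgb.push_back(static_cast<unsigned char>(qRed(p)));
            rgb.push_back(static_cast<unsigned char>(qGreen(p)));
            rgb.push_back(static_cast<unsigned char>(qBlue(p)));
        }
    }

    // 4x3 components, as in the hunk above.
    return QString::fromStdString(
      blurhash::encode(rgb.data(), img.width(), img.height(), 4, 3));
}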

@@ -28,7 +28,6 @@ RoomlistModel::RoomlistModel(TimelineViewManager *parent)
: QAbstractListModel(parent)
, manager(parent)
{
connect(ChatPage::instance(), &ChatPage::decryptSidebarChanged, this, [this]() {
auto decrypt = ChatPage::instance()->userSettings()->decryptSidebar();
QHash<QString, QSharedPointer<TimelineModel>>::iterator i;

@@ -31,7 +31,6 @@
#include "Utils.h"
#include "encryption/Olm.h"
namespace std {
inline uint // clazy:exclude=qhash-namespace
qHash(const std::string &key, uint seed = 0)

@@ -25,22 +25,15 @@ MxcMediaProxy::MxcMediaProxy(QObject *parent)
{
connect(this, &MxcMediaProxy::eventIdChanged, &MxcMediaProxy::startDownload);
connect(this, &MxcMediaProxy::roomChanged, &MxcMediaProxy::startDownload);
connect(this,
&QMediaPlayer::errorOccurred,
this,
[](QMediaPlayer::Error error, QString errorString) {
nhlog::ui()->debug("Media player error {} and errorStr {}",
error,
errorString.toStdString());
});
connect(
this, &QMediaPlayer::errorOccurred, this, [](QMediaPlayer::Error error, QString errorString) {
nhlog::ui()->debug(
"Media player error {} and errorStr {}", error, errorString.toStdString());
});
connect(this, &MxcMediaProxy::mediaStatusChanged, [this](QMediaPlayer::MediaStatus status) {
nhlog::ui()->info("Media player status {} and error {}", status, this->error());
});
connect(this,
&MxcMediaProxy::metaDataChanged,
[this]() {
emit orientationChanged();
});
connect(this, &MxcMediaProxy::metaDataChanged, [this]() { emit orientationChanged(); });
connect(ChatPage::instance()->timelineManager()->rooms(),
&RoomlistModel::currentRoomChanged,
@@ -51,7 +44,8 @@ MxcMediaProxy::MxcMediaProxy(QObject *parent)
int
MxcMediaProxy::orientation() const
{
//nhlog::ui()->debug("metadata: {}", availableMetaData().join(QStringLiteral(",")).toStdString());
// nhlog::ui()->debug("metadata: {}",
// availableMetaData().join(QStringLiteral(",")).toStdString());
auto orientation = metaData().value(QMediaMetaData::Orientation).toInt();
nhlog::ui()->debug("Video orientation: {}", orientation);
return orientation;
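
(Illustrative sketch, not part of this commit: reading the same Qt 6 rotation metadata that orientation() above uses, against a plain QMediaPlayer.)

#include <QMediaMetaData>
#include <QMediaPlayer>

int
videoOrientation(const QMediaPlayer &player)
{
    // Clockwise rotation in degrees (0, 90, 180 or 270); 0 when the key is missing.
    return player.metaData().value(QMediaMetaData::Orientation).toInt();
}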

@@ -4,13 +4,13 @@
#pragma once
#include <QVideoSink>
#include <QBuffer>
#include <QUrl>
#include <QMediaPlayer>
#include <QObject>
#include <QPointer>
#include <QString>
#include <QUrl>
#include <QVideoSink>
#include "Logging.h"

@@ -5,8 +5,8 @@
#include "NhekoGlobalObject.h"
#include <QApplication>
#include <QGuiApplication>
#include <QDesktopServices>
#include <QGuiApplication>
#include <QStyle>
#include <QUrl>
#include <QWindow>
@@ -184,9 +184,11 @@ Nheko::createRoom(bool space,
void
Nheko::setWindowRole([[maybe_unused]] QWindow *win, [[maybe_unused]] QString newRole) const
{
const QNativeInterface::QX11Application *x11Interface = qGuiApp->nativeInterface<QNativeInterface::QX11Application>();
const QNativeInterface::QX11Application *x11Interface =
qGuiApp->nativeInterface<QNativeInterface::QX11Application>();
if (!x11Interface) return;
if (!x11Interface)
return;
auto connection = x11Interface->connection();
@@ -195,10 +197,15 @@ Nheko::setWindowRole([[maybe_unused]] QWindow *win, [[maybe_unused]] QString new
char WM_WINDOW_ROLE[] = "WM_WINDOW_ROLE";
auto cookie = xcb_intern_atom(connection, false, std::size(WM_WINDOW_ROLE) - 1, WM_WINDOW_ROLE);
xcb_intern_atom_reply_t *reply = xcb_intern_atom_reply(connection, cookie, nullptr);
auto atom = reply ->atom;
auto atom = reply->atom;
free(reply);
xcb_change_property(connection, XCB_PROP_MODE_REPLACE, win->winId(),
atom, XCB_ATOM_STRING, 8,
role.size(), role.data());
xcb_change_property(connection,
XCB_PROP_MODE_REPLACE,
win->winId(),
atom,
XCB_ATOM_STRING,
8,
role.size(),
role.data());
}
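
(Illustrative sketch, not part of this commit: the same WM_WINDOW_ROLE property write as a free function. The helper name is made up; error handling stays minimal, as in the hunk above.)

#include <xcb/xcb.h>

#include <QGuiApplication>
#include <QWindow>

#include <cstdlib>
#include <string>

static void
setX11WindowRole(QWindow *win, const std::string &role)
{
    const auto *x11 = qGuiApp->nativeInterface<QNativeInterface::QX11Application>();
    if (!x11 || !win)
        return; // not running on X11

    xcb_connection_t *connection = x11->connection();

    // Resolve (or create) the WM_WINDOW_ROLE atom.
    char name[] = "WM_WINDOW_ROLE";
    auto cookie = xcb_intern_atom(connection, false, sizeof(name) - 1, name);
    xcb_intern_atom_reply_t *reply = xcb_intern_atom_reply(connection, cookie, nullptr);
    if (!reply)
        return;
    xcb_atom_t atom = reply->atom;
    free(reply);

    // Replace the property with the new role string (8-bit STRING format).
    xcb_change_property(connection,
                        XCB_PROP_MODE_REPLACE,
                        static_cast<xcb_window_t>(win->winId()),
                        atom,
                        XCB_ATOM_STRING,
                        8,
                        static_cast<uint32_t>(role.size()),
                        role.data());
}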

@@ -4,7 +4,6 @@
#include "Theme.h"
QPalette
Theme::paletteFromTheme(QStringView theme)
{

@@ -41,7 +41,6 @@ extern "C"
}
#endif
using namespace mtx::events;
using namespace mtx::events::voip;
@@ -60,7 +59,6 @@ CallManager::CallManager(QObject *parent)
, session_(WebRTCSession::instance())
, turnServerTimer_(this)
{
#ifdef GSTREAMER_AVAILABLE
std::string errorMessage;
if (session_.havePlugins(true, true, ScreenShareType::XDP, &errorMessage)) {
@@ -186,7 +184,8 @@ CallManager::CallManager(QObject *parent)
nhlog::ui()->error("WebRTC: access to ringtone file denied");
break;
default:
nhlog::ui()->error("WebRTC: unable to play ringtone, {}", errorString.toStdString());
nhlog::ui()->error("WebRTC: unable to play ringtone, {}",
errorString.toStdString());
break;
}
});
@@ -820,10 +819,9 @@ CallManager::retrieveTurnServer()
void
CallManager::playRingtone(const QUrl &ringtone, bool repeat)
{
player_.setLoops(repeat ? QMediaPlayer::Infinite :
1);
player_.setLoops(repeat ? QMediaPlayer::Infinite : 1);
player_.setSource(ringtone);
//player_.audioOutput()->setVolume(100);
// player_.audioOutput()->setVolume(100);
player_.play();
}
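
(Illustrative sketch, not part of this commit: the ringtone playback above as a free function. It assumes the player already has an audio output attached; QMediaPlayer::Infinite is the Qt 6 loop constant used in the hunk.)

#include <QMediaPlayer>
#include <QUrl>

void
playRingtone(QMediaPlayer &player, const QUrl &ringtone, bool repeat)
{
    player.setLoops(repeat ? QMediaPlayer::Infinite : 1);
    player.setSource(ringtone);
    player.play();
}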

@@ -438,7 +438,6 @@ struct PipeWireStream
QVariantMap map;
};
const QDBusArgument &
operator>>(const QDBusArgument &argument, PipeWireStream &stream)
{
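
(Illustrative sketch, not part of this commit: the usual shape of a QDBusArgument extraction operator for the portal's stream entries. Only the QVariantMap map member appears in the hunk above; the nodeId field and the struct name here are assumptions.)

#include <QDBusArgument>
#include <QVariantMap>

struct PipeWireStreamSketch
{
    quint32 nodeId = 0; // assumed field; not visible in the hunk
    QVariantMap map;
};

const QDBusArgument &
operator>>(const QDBusArgument &argument, PipeWireStreamSketch &stream)
{
    // D-Bus structs are unpacked field by field between begin/endStructure().
    argument.beginStructure();
    argument >> stream.nodeId >> stream.map;
    argument.endStructure();
    return argument;
}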

@@ -41,7 +41,6 @@ extern "C"
// https://github.com/vector-im/riot-web/issues/10173
#define STUN_SERVER "stun://turn.matrix.org:3478"
using webrtc::CallType;
using webrtc::ScreenShareType;
using webrtc::State;
