Updated TDesktop sources to 2.3.2+d34eabd
.github/workflows/linux.yml

@@ -61,6 +61,7 @@ jobs:
defines:
- ""
- "DESKTOP_APP_DISABLE_DBUS_INTEGRATION"
- "TDESKTOP_DISABLE_GTK_INTEGRATION"

env:
GIT: "https://github.com"

@@ -102,7 +103,8 @@ jobs:
libgtk2.0-dev libice-dev libsm-dev libicu-dev libdrm-dev dh-autoreconf \
autoconf automake build-essential libxml2-dev libass-dev libfreetype6-dev \
libgpac-dev libsdl1.2-dev libtheora-dev libtool libva-dev libvdpau-dev \
libvorbis-dev libxcb1-dev libxcb-image0-dev libxcb-shm0-dev libxcb-screensaver0-dev \
libvorbis-dev libxcb1-dev libxcb-image0-dev libxcb-shm0-dev \
libxcb-screensaver0-dev libjpeg-dev ninja-build \
libxcb-xfixes0-dev libxcb-keysyms1-dev libxcb-icccm4-dev libatspi2.0-dev \
libxcb-render-util0-dev libxcb-util0-dev libxcb-xkb-dev libxrender-dev \
libasound-dev libpulse-dev libxcb-sync0-dev libxcb-randr0-dev libegl1-mesa-dev \

@@ -380,7 +382,7 @@ jobs:
make -j$(nproc)
sudo make DESTDIR="$LibrariesPath/openssl-cache" install_sw
cd ..
rm -rf $opensslDir
# rm -rf $opensslDir # Keep this folder for WebRTC.
- name: OpenSSL install.
run: |
cd $LibrariesPath

@@ -430,8 +432,8 @@ jobs:

git clone -b v5.12.8 --depth=1 git://code.qt.io/qt/qt5.git qt_${QT}
cd qt_${QT}
perl init-repository --module-subset=qtbase,qtwayland,qtimageformats,qtsvg,qtx11extras
git submodule update qtbase qtwayland qtimageformats qtsvg qtx11extras
perl init-repository --module-subset=qtbase,qtwayland,qtimageformats,qtsvg
git submodule update qtbase qtwayland qtimageformats qtsvg
cd qtbase
find ../../patches/qtbase_${QT} -type f -print0 | sort -z | xargs -r0 git apply
cd ..

@@ -519,7 +521,40 @@ jobs:
mkdir -p breakpad/out/Default/
cp breakpad-cache/dump_syms breakpad/out/Default/dump_syms

- name: Kotatogram Desktop build.
- name: WebRTC cache.
id: cache-webrtc
uses: actions/cache@v2
with:
path: ${{ env.LibrariesPath }}/tg_owt
key: ${{ runner.OS }}-webrtc-${{ env.CACHE_KEY }}
- name: WebRTC.
if: steps.cache-webrtc.outputs.cache-hit != 'true'
run: |
cd $LibrariesPath

git clone $GIT/desktop-app/tg_owt.git
mkdir -p tg_owt/out/Debug
cd tg_owt/out/Debug
cmake -G Ninja \
-DCMAKE_BUILD_TYPE=Debug \
-DTG_OWT_SPECIAL_TARGET=linux \
-DTG_OWT_LIBJPEG_INCLUDE_PATH=`pwd`/../../../qt_$QT/qtbase/src/3rdparty/libjpeg \
-DTG_OWT_OPENSSL_INCLUDE_PATH=$OPENSSL_PREFIX/include \
-DTG_OWT_OPUS_INCLUDE_PATH=/usr/local/include/opus \
-DTG_OWT_FFMPEG_INCLUDE_PATH=/usr/local/include \
../..
ninja

# Cleanup.
cd $LibrariesPath/tg_owt
mv out/Debug/libtg_owt.a libtg_owt.a
rm -rf out
mkdir -p out/Debug
mv libtg_owt.a out/Debug/libtg_owt.a

rm -rf $LibrariesPath/openssl_${OPENSSL_VER}

- name: Telegram Desktop build.
if: env.ONLY_CACHE == 'false'
run: |
cd $REPO_NAME/Telegram

@@ -537,6 +572,7 @@ jobs:
-D CMAKE_CXX_FLAGS="-s" \
-D TDESKTOP_API_TEST=ON \
-D DESKTOP_APP_USE_PACKAGED=OFF \
-D DESKTOP_APP_DISABLE_CRASH_REPORTS=OFF \
$DEFINE

cd ../out/Debug

.github/workflows/mac.yml

@@ -86,6 +86,7 @@ jobs:

- name: First set up.
run: |
sudo chown -R `whoami`:admin /usr/local/share
brew install automake fdk-aac lame libass libtool libvorbis libvpx \
ninja opus sdl shtool texi2html theora x264 xvid yasm pkg-config

@@ -160,7 +161,7 @@ jobs:
$MIN_MAC
make build_libs -j$(nproc)

SSL_DIR=$LibrariesPath/openssl_${{ env.OPENSSL_VER }}
SSL_DIR=$LibrariesPath/openssl_$OPENSSL_VER
mkdir -p $SSL_DIR/include
copyLib() {
cp $1.a $SSL_DIR/$1.a

@@ -425,8 +426,8 @@ jobs:
run: |
cd $LibrariesPath

git clone git://code.qt.io/qt/qt5.git qt$QT
cd qt$QT
git clone git://code.qt.io/qt/qt5.git qt_$QT
cd qt_$QT
perl init-repository --module-subset=qtbase,qtimageformats
git checkout v5.12.8
git submodule update qtbase

@@ -455,6 +456,36 @@ jobs:
make clean
cp -r $QT_PREFIX $LibrariesPath/qt-cache

- name: WebRTC cache.
id: cache-webrtc
uses: actions/cache@v2
with:
path: ${{ env.LibrariesPath }}/tg_owt
key: ${{ runner.OS }}-webrtc-${{ env.CACHE_KEY }}
- name: WebRTC.
if: steps.cache-webrtc.outputs.cache-hit != 'true'
run: |
cd $LibrariesPath

git clone $GIT/desktop-app/tg_owt.git
mkdir -p tg_owt/out/Debug
cd tg_owt/out/Debug
cmake -G Ninja -DCMAKE_BUILD_TYPE=Debug \
-DTG_OWT_SPECIAL_TARGET=mac \
-DTG_OWT_LIBJPEG_INCLUDE_PATH=`pwd`/../../../qt_$QT/qtbase/src/3rdparty/libjpeg \
-DTG_OWT_OPENSSL_INCLUDE_PATH=`pwd`/../../../openssl_$OPENSSL_VER/include \
-DTG_OWT_OPUS_INCLUDE_PATH=$PREFIX/include/opus \
-DTG_OWT_FFMPEG_INCLUDE_PATH=/usr/local/include \
../..
ninja

# Cleanup.
cd $LibrariesPath/tg_owt
mv out/Debug/libtg_owt.a libtg_owt.a
rm -rf out
mkdir -p out/Debug
mv libtg_owt.a out/Debug/libtg_owt.a

- name: Kotatogram Desktop build.
if: env.ONLY_CACHE == 'false'
run: |

@@ -469,7 +500,11 @@ jobs:
echo ::set-env name=ARTIFACT_NAME::Kotatogram
fi

./configure.sh -D TDESKTOP_API_TEST=ON -D DESKTOP_APP_USE_PACKAGED=OFF $DEFINE
./configure.sh \
-D TDESKTOP_API_TEST=ON \
-D DESKTOP_APP_USE_PACKAGED=OFF \
-D DESKTOP_APP_DISABLE_CRASH_REPORTS=OFF \
$DEFINE

cd ../out

.github/workflows/snap.yml

@@ -13,6 +13,7 @@ on:
- '!.github/workflows/snap.yml'
- 'Telegram/build/**'
- 'Telegram/Patches/**'
- '!Telegram/Patches/ffmpeg.diff'
- 'Telegram/Resources/uwp/**'
- 'Telegram/Resources/winrc/**'
- 'Telegram/SourceFiles/platform/win/**'

@@ -32,6 +33,7 @@ on:
- '!.github/workflows/snap.yml'
- 'Telegram/build/**'
- 'Telegram/Patches/**'
- '!Telegram/Patches/ffmpeg.diff'
- 'Telegram/Resources/uwp/**'
- 'Telegram/Resources/winrc/**'
- 'Telegram/SourceFiles/platform/win/**'

@@ -62,18 +64,19 @@ jobs:

- name: First set up.
run: |
# Workaround for permanent problems with third-party repository keys
sudo rm -rf /etc/apt/sources.list.d/*

sudo apt-get update
sudo snap install --classic snapcraft

# Workaround for snapcraft
# See https://forum.snapcraft.io/t/13258
sudo chown root:root /

sudo usermod -aG lxd $USER

sudo snap run lxd init --auto
sudo snap run lxd waitready

- name: Kotatogram Desktop snap build.
run: sudo snap run snapcraft --destructive-mode
run: sg lxd -c 'snap run snapcraft --use-lxd'

- name: Move artifact.
if: env.UPLOAD_ARTIFACT == 'true'

.github/workflows/win.yml

@@ -67,6 +67,7 @@ jobs:
VC: "call vcvars32.bat && cd Libraries"
GIT: "https://github.com"
QT: "5_12_8"
QT_VER: "5.12.8"
OPENSSL_VER: "1_1_1"
UPLOAD_ARTIFACT: "true"
ONLY_CACHE: "false"

@@ -128,6 +129,20 @@ jobs:
cd Patches
eval $checkoutCommit

- name: Find any version of Python 2.
shell: cmd
run: |
echo Find any version of Python 2.
for /D %%a in (C:\hostedtoolcache\windows\Python\2.*) do (
SET PY2=%%a\x64
)
if [%PY2%] == [] (
echo Python 2 is not found.
exit 1
)
echo Found %PY2%.
echo ::set-env name=PY2::%PY2%

- name: LZMA.
shell: cmd
run: |

@@ -221,16 +236,6 @@ jobs:
run: |
cd %LibrariesPath%

echo Find any version of Python 2.
for /D %%a in (C:\hostedtoolcache\windows\Python\2.*) do (
SET PY2=%%a\x64
)
IF [%PY2%] == [] (
echo Python 2 is not found.
exit 1
)
echo Found %PY2%.

git clone %GIT%/telegramdesktop/gyp.git
cd gyp
SET PATH=%PY2%;%cd%;%PATH%

@@ -300,7 +305,7 @@ jobs:
id: cache-qt
uses: actions/cache@v2
with:
path: ${{ env.LibrariesPath }}/Qt-5.12.8
path: ${{ env.LibrariesPath }}/Qt-${{ env.QT_VER }}
key: ${{ runner.OS }}-qt-${{ env.CACHE_KEY }}-${{ hashFiles('**/qtbase_5_12_8/*') }}
- name: Configure Qt 5.12.8.
if: steps.cache-qt.outputs.cache-hit != 'true'

@@ -311,18 +316,18 @@ jobs:
git clone git://code.qt.io/qt/qt5.git qt_%QT%
cd qt_%QT%
perl init-repository --module-subset=qtbase,qtimageformats
git checkout v5.12.8
git checkout v%QT_VER%
git submodule update qtbase
git submodule update qtimageformats
cd qtbase
for /r %%i in (..\..\patches\qtbase_%QT%\*) do git apply %%i
cd ..

SET SSL=%LibrariesPath%\openssl_1_1_1
SET SSL=%LibrariesPath%\openssl_%OPENSSL_VER%
SET LIBS=libcrypto.lib Ws2_32.lib Gdi32.lib Advapi32.lib Crypt32.lib User32.lib

configure ^
-prefix "%LibrariesPath%\Qt-5.12.8" ^
-prefix "%LibrariesPath%\Qt-%QT_VER%" ^
-debug ^
-force-debug-info ^
-opensource ^

@@ -350,6 +355,57 @@ jobs:
cd ..
rmdir /S /Q qt_%QT%

- name: WebRTC cache.
id: cache-webrtc
uses: actions/cache@v2
with:
path: ${{ env.LibrariesPath }}/tg_owt
key: ${{ runner.OS }}-webrtc-${{ env.CACHE_KEY }}
- name: WebRTC.
if: steps.cache-webrtc.outputs.cache-hit != 'true'
shell: cmd
run: |
%VC%

:: Qt libjpeg.
mkdir qt_%QT%
cd qt_%QT%
git clone -b %QT_VER% https://github.com/qt/qtbase

move qtbase\src\3rdparty\libjpeg ..
cd ..
dir
rmdir /S /Q qt_%QT%
mkdir qt_%QT%\qtbase\src\3rdparty\
move libjpeg qt_%QT%\qtbase\src\3rdparty\

:: WebRTC.
cd %LibrariesPath%

git clone %GIT%/desktop-app/tg_owt.git
mkdir tg_owt\out\Debug
cd tg_owt\out\Debug
cmake -G Ninja ^
-DCMAKE_BUILD_TYPE=Debug ^
-DTG_OWT_SPECIAL_TARGET=win ^
-DTG_OWT_LIBJPEG_INCLUDE_PATH=%cd%/../../../qt_%QT%/qtbase/src/3rdparty/libjpeg ^
-DTG_OWT_OPENSSL_INCLUDE_PATH=%cd%/../../../openssl_%OPENSSL_VER%/include ^
-DTG_OWT_OPUS_INCLUDE_PATH=%cd%/../../../opus/include ^
-DTG_OWT_FFMPEG_INCLUDE_PATH=%cd%/../../../ffmpeg ^
../..

ninja

:: Cleanup.
cd %LibrariesPath%\tg_owt
move out\Debug\tg_owt.lib tg_owt.lib
rmdir /S /Q out
mkdir out\Debug
move tg_owt.lib out\Debug\tg_owt.lib

cd %LibrariesPath%
rmdir /S /Q qt_%QT%

- name: Read defines.
shell: bash
run: |

.gitmodules

@@ -94,3 +94,9 @@
[submodule "Telegram/ThirdParty/fcitx5-qt"]
path = Telegram/ThirdParty/fcitx5-qt
url = https://github.com/fcitx/fcitx5-qt.git
[submodule "Telegram/lib_webrtc"]
path = Telegram/lib_webrtc
url = https://github.com/desktop-app/lib_webrtc.git
[submodule "Telegram/ThirdParty/tgcalls"]
path = Telegram/ThirdParty/tgcalls
url = https://github.com/TelegramMessenger/tgcalls.git

@@ -18,6 +18,7 @@ endif()
add_subdirectory(lib_storage)
add_subdirectory(lib_lottie)
add_subdirectory(lib_qr)
add_subdirectory(lib_webrtc)
add_subdirectory(codegen)

include(CheckCXXSourceCompiles)

@@ -34,6 +35,7 @@ include(cmake/lib_ffmpeg.cmake)
include(cmake/lib_mtproto.cmake)
include(cmake/lib_scheme.cmake)
include(cmake/lib_tgvoip.cmake)
include(cmake/lib_tgcalls.cmake)

set(style_files
boxes/boxes.style

@@ -67,6 +69,35 @@ generate_numbers(Telegram ${res_loc}/numbers.txt)

set_target_properties(Telegram PROPERTIES AUTOMOC ON AUTORCC ON)

target_link_libraries(Telegram
PRIVATE
tdesktop::lib_tgcalls_legacy
tdesktop::lib_tgcalls
tdesktop::lib_tgvoip
tdesktop::lib_mtproto
tdesktop::lib_scheme
tdesktop::lib_export
desktop-app::lib_webrtc
desktop-app::lib_base
desktop-app::lib_crl
desktop-app::lib_ui
desktop-app::lib_tl
desktop-app::lib_storage
desktop-app::lib_lottie
desktop-app::lib_qr
desktop-app::lib_ffmpeg
desktop-app::external_lz4
desktop-app::external_rlottie
desktop-app::external_zlib
desktop-app::external_minizip
desktop-app::external_qt
desktop-app::external_qr_code_generator
desktop-app::external_crash_reports
desktop-app::external_auto_updates
desktop-app::external_openssl
desktop-app::external_openal
)

if (LINUX)
target_link_libraries(Telegram
PRIVATE
@@ -93,39 +124,8 @@ if (LINUX)
desktop-app::external_hime_qt
)
endif()
endif()

if (add_hunspell_library)
target_link_libraries(Telegram PRIVATE desktop-app::external_hunspell)
endif()

target_link_libraries(Telegram
PRIVATE
tdesktop::lib_mtproto
tdesktop::lib_scheme
tdesktop::lib_export
tdesktop::lib_tgvoip
desktop-app::lib_base
desktop-app::lib_crl
desktop-app::lib_ui
desktop-app::lib_tl
desktop-app::lib_storage
desktop-app::lib_lottie
desktop-app::lib_qr
desktop-app::lib_ffmpeg
desktop-app::external_lz4
desktop-app::external_rlottie
desktop-app::external_zlib
desktop-app::external_minizip
desktop-app::external_qt
desktop-app::external_qr_code_generator
desktop-app::external_crash_reports
desktop-app::external_auto_updates
desktop-app::external_openssl
desktop-app::external_openal
)

if (LINUX AND DESKTOP_APP_USE_PACKAGED AND Qt5WaylandClient_VERSION VERSION_LESS 5.13.0)
if (DESKTOP_APP_USE_PACKAGED AND Qt5WaylandClient_VERSION VERSION_LESS 5.13.0)
find_package(PkgConfig REQUIRED)
pkg_check_modules(WAYLAND_CLIENT REQUIRED wayland-client)
@@ -133,9 +133,8 @@ if (LINUX AND DESKTOP_APP_USE_PACKAGED AND Qt5WaylandClient_VERSION VERSION_LESS
PRIVATE
${WAYLAND_CLIENT_INCLUDE_DIRS}
)
endif()
endif()

if (LINUX)
if (DESKTOP_APP_USE_PACKAGED)
find_package(PkgConfig REQUIRED)
pkg_check_modules(XCB_SCREENSAVER REQUIRED IMPORTED_TARGET xcb-screensaver)

@@ -150,34 +149,39 @@ if (LINUX)
target_link_static_libraries(Telegram PRIVATE xcb-screensaver)
target_link_libraries(Telegram PRIVATE xcb)
endif()
endif()

if (LINUX AND NOT TDESKTOP_DISABLE_GTK_INTEGRATION)
find_package(PkgConfig REQUIRED)
target_compile_options(Telegram PRIVATE -Wno-register)

if (DESKTOP_APP_USE_PACKAGED AND NOT DESKTOP_APP_USE_PACKAGED_LAZY)
pkg_check_modules(X11 REQUIRED IMPORTED_TARGET x11)
pkg_check_modules(GOBJECT2 REQUIRED IMPORTED_TARGET gobject-2.0)
pkg_check_modules(GLIB2 REQUIRED IMPORTED_TARGET glib-2.0)
pkg_check_modules(GTK3 REQUIRED IMPORTED_TARGET gtk+-3.0)
pkg_check_modules(GOBJECT REQUIRED IMPORTED_TARGET gobject-2.0)
pkg_check_modules(GIO REQUIRED IMPORTED_TARGET gio-2.0)

target_link_libraries(Telegram
PRIVATE
PkgConfig::X11
PkgConfig::GOBJECT2
PkgConfig::GLIB2
PkgConfig::GOBJECT
PkgConfig::GIO
)

target_compile_definitions(Telegram PRIVATE G_LOG_DOMAIN="Telegram")
target_compile_options(Telegram PRIVATE -Wno-register)

if (NOT TDESKTOP_DISABLE_GTK_INTEGRATION)
find_package(PkgConfig REQUIRED)

if (DESKTOP_APP_USE_PACKAGED AND NOT DESKTOP_APP_USE_PACKAGED_LAZY)
pkg_check_modules(GTK3 REQUIRED IMPORTED_TARGET gtk+-3.0)
pkg_check_modules(X11 REQUIRED IMPORTED_TARGET x11)

target_link_libraries(Telegram
PRIVATE
PkgConfig::GTK3
PkgConfig::X11
)
else()
pkg_search_module(GTK REQUIRED gtk+-2.0 gtk+-3.0)
target_link_libraries(Telegram
PRIVATE
X11
gobject-2.0
glib-2.0
)
target_include_directories(Telegram PRIVATE ${GTK_INCLUDE_DIRS})
target_link_libraries(Telegram PRIVATE X11)
endif()
endif()
endif()
@@ -213,6 +217,8 @@ PRIVATE
api/api_single_message_search.h
api/api_text_entities.cpp
api/api_text_entities.h
api/api_toggling_media.cpp
api/api_toggling_media.h
api/api_updates.cpp
api/api_updates.h
boxes/filters/edit_filter_box.cpp

@@ -313,17 +319,20 @@ PRIVATE
calls/calls_box_controller.h
calls/calls_call.cpp
calls/calls_call.h
calls/calls_controller.cpp
calls/calls_controller.h
calls/calls_controller_tgvoip.h
calls/calls_emoji_fingerprint.cpp
calls/calls_emoji_fingerprint.h
calls/calls_instance.cpp
calls/calls_instance.h
calls/calls_panel.cpp
calls/calls_panel.h
calls/calls_signal_bars.cpp
calls/calls_signal_bars.h
calls/calls_top_bar.cpp
calls/calls_top_bar.h
calls/calls_userpic.cpp
calls/calls_userpic.h
calls/calls_video_bubble.cpp
calls/calls_video_bubble.h
chat_helpers/bot_keyboard.cpp
chat_helpers/bot_keyboard.h
chat_helpers/emoji_keywords.cpp

@@ -340,6 +349,8 @@ PRIVATE
chat_helpers/gifs_list_widget.h
chat_helpers/message_field.cpp
chat_helpers/message_field.h
chat_helpers/send_context_menu.cpp
chat_helpers/send_context_menu.h
chat_helpers/spellchecker_common.cpp
chat_helpers/spellchecker_common.h
chat_helpers/stickers_emoji_image_loader.cpp

@@ -13,6 +13,8 @@ pacman --noconfirm -S pkg-config
PKG_CONFIG_PATH="/mingw64/lib/pkgconfig:$PKG_CONFIG_PATH"

./configure --toolchain=msvc \
--extra-cflags="-DCONFIG_SAFE_BITSTREAM_READER=1" \
--extra-cxxflags="-DCONFIG_SAFE_BITSTREAM_READER=1" \
--extra-ldflags="-libpath:$FullExecPath/../opus/win32/VS2015/Win32/Release" \
--disable-programs \
--disable-doc \

Telegram/Patches/ffmpeg.diff (new file)

@@ -0,0 +1,225 @@
diff --git a/libavcodec/aarch64/Makefile b/libavcodec/aarch64/Makefile
index 00f93bf59f..52da7036f3 100644
--- a/libavcodec/aarch64/Makefile
+++ b/libavcodec/aarch64/Makefile
@@ -6,6 +6,7 @@ OBJS-$(CONFIG_H264DSP) += aarch64/h264dsp_init_aarch64.o
OBJS-$(CONFIG_H264PRED) += aarch64/h264pred_init.o
OBJS-$(CONFIG_H264QPEL) += aarch64/h264qpel_init_aarch64.o
OBJS-$(CONFIG_HPELDSP) += aarch64/hpeldsp_init_aarch64.o
+OBJS-$(CONFIG_IDCTDSP) += aarch64/idctdsp_init_aarch64.o
OBJS-$(CONFIG_MPEGAUDIODSP) += aarch64/mpegaudiodsp_init.o
OBJS-$(CONFIG_NEON_CLOBBER_TEST) += aarch64/neontest.o
OBJS-$(CONFIG_VIDEODSP) += aarch64/videodsp_init.o
@@ -21,6 +22,7 @@ OBJS-$(CONFIG_VC1DSP) += aarch64/vc1dsp_init_aarch64.o
OBJS-$(CONFIG_VORBIS_DECODER) += aarch64/vorbisdsp_init.o
OBJS-$(CONFIG_VP9_DECODER) += aarch64/vp9dsp_init_10bpp_aarch64.o \
aarch64/vp9dsp_init_12bpp_aarch64.o \
+ aarch64/vp9mc_aarch64.o \
aarch64/vp9dsp_init_aarch64.o

# ARMv8 optimizations
@@ -41,8 +43,7 @@ NEON-OBJS-$(CONFIG_H264PRED) += aarch64/h264pred_neon.o
NEON-OBJS-$(CONFIG_H264QPEL) += aarch64/h264qpel_neon.o \
aarch64/hpeldsp_neon.o
NEON-OBJS-$(CONFIG_HPELDSP) += aarch64/hpeldsp_neon.o
-NEON-OBJS-$(CONFIG_IDCTDSP) += aarch64/idctdsp_init_aarch64.o \
- aarch64/simple_idct_neon.o
+NEON-OBJS-$(CONFIG_IDCTDSP) += aarch64/simple_idct_neon.o
NEON-OBJS-$(CONFIG_MDCT) += aarch64/mdct_neon.o
NEON-OBJS-$(CONFIG_MPEGAUDIODSP) += aarch64/mpegaudiodsp_neon.o
NEON-OBJS-$(CONFIG_VP8DSP) += aarch64/vp8dsp_neon.o
diff --git a/libavcodec/aarch64/idctdsp_init_aarch64.c b/libavcodec/aarch64/idctdsp_init_aarch64.c
index 0406e60830..742a3372e3 100644
--- a/libavcodec/aarch64/idctdsp_init_aarch64.c
+++ b/libavcodec/aarch64/idctdsp_init_aarch64.c
@@ -21,6 +21,8 @@
*/

#include "libavutil/attributes.h"
+#include "libavutil/cpu.h"
+#include "libavutil/arm/cpu.h"
#include "libavcodec/avcodec.h"
#include "libavcodec/idctdsp.h"
#include "idct.h"
@@ -28,7 +30,9 @@
av_cold void ff_idctdsp_init_aarch64(IDCTDSPContext *c, AVCodecContext *avctx,
unsigned high_bit_depth)
{
- if (!avctx->lowres && !high_bit_depth) {
+ int cpu_flags = av_get_cpu_flags();
+
+ if (have_neon(cpu_flags) && !avctx->lowres && !high_bit_depth) {
if (avctx->idct_algo == FF_IDCT_AUTO ||
avctx->idct_algo == FF_IDCT_SIMPLEAUTO ||
avctx->idct_algo == FF_IDCT_SIMPLENEON) {
diff --git a/libavcodec/aarch64/vp9mc_16bpp_neon.S b/libavcodec/aarch64/vp9mc_16bpp_neon.S
index cac6428709..53b372c262 100644
--- a/libavcodec/aarch64/vp9mc_16bpp_neon.S
+++ b/libavcodec/aarch64/vp9mc_16bpp_neon.S
@@ -25,31 +25,6 @@
// const uint8_t *ref, ptrdiff_t ref_stride,
// int h, int mx, int my);

-function ff_vp9_copy128_aarch64, export=1
-1:
- ldp x5, x6, [x2]
- ldp x7, x8, [x2, #16]
- stp x5, x6, [x0]
- ldp x9, x10, [x2, #32]
- stp x7, x8, [x0, #16]
- subs w4, w4, #1
- ldp x11, x12, [x2, #48]
- stp x9, x10, [x0, #32]
- stp x11, x12, [x0, #48]
- ldp x5, x6, [x2, #64]
- ldp x7, x8, [x2, #80]
- stp x5, x6, [x0, #64]
- ldp x9, x10, [x2, #96]
- stp x7, x8, [x0, #80]
- ldp x11, x12, [x2, #112]
- stp x9, x10, [x0, #96]
- stp x11, x12, [x0, #112]
- add x2, x2, x3
- add x0, x0, x1
- b.ne 1b
- ret
-endfunc
-
function ff_vp9_avg64_16_neon, export=1
mov x5, x0
sub x1, x1, #64
diff --git a/libavcodec/aarch64/vp9mc_aarch64.S b/libavcodec/aarch64/vp9mc_aarch64.S
new file mode 100644
index 0000000000..f17a8cf04a
--- /dev/null
+++ b/libavcodec/aarch64/vp9mc_aarch64.S
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2016 Google Inc.
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavutil/aarch64/asm.S"
+
+// All public functions in this file have the following signature:
+// typedef void (*vp9_mc_func)(uint8_t *dst, ptrdiff_t dst_stride,
+//                             const uint8_t *ref, ptrdiff_t ref_stride,
+//                             int h, int mx, int my);
+
+function ff_vp9_copy128_aarch64, export=1
+1:
+ ldp x5, x6, [x2]
+ ldp x7, x8, [x2, #16]
+ stp x5, x6, [x0]
+ ldp x9, x10, [x2, #32]
+ stp x7, x8, [x0, #16]
+ subs w4, w4, #1
+ ldp x11, x12, [x2, #48]
+ stp x9, x10, [x0, #32]
+ stp x11, x12, [x0, #48]
+ ldp x5, x6, [x2, #64]
+ ldp x7, x8, [x2, #80]
+ stp x5, x6, [x0, #64]
+ ldp x9, x10, [x2, #96]
+ stp x7, x8, [x0, #80]
+ ldp x11, x12, [x2, #112]
+ stp x9, x10, [x0, #96]
+ stp x11, x12, [x0, #112]
+ add x2, x2, x3
+ add x0, x0, x1
+ b.ne 1b
+ ret
+endfunc
+
+function ff_vp9_copy64_aarch64, export=1
+1:
+ ldp x5, x6, [x2]
+ ldp x7, x8, [x2, #16]
+ stp x5, x6, [x0]
+ ldp x9, x10, [x2, #32]
+ stp x7, x8, [x0, #16]
+ subs w4, w4, #1
+ ldp x11, x12, [x2, #48]
+ stp x9, x10, [x0, #32]
+ stp x11, x12, [x0, #48]
+ add x2, x2, x3
+ add x0, x0, x1
+ b.ne 1b
+ ret
+endfunc
+
+function ff_vp9_copy32_aarch64, export=1
+1:
+ ldp x5, x6, [x2]
+ ldp x7, x8, [x2, #16]
+ stp x5, x6, [x0]
+ subs w4, w4, #1
+ stp x7, x8, [x0, #16]
+ add x2, x2, x3
+ add x0, x0, x1
+ b.ne 1b
+ ret
+endfunc
diff --git a/libavcodec/aarch64/vp9mc_neon.S b/libavcodec/aarch64/vp9mc_neon.S
index f67624ca04..abf2bae9db 100644
--- a/libavcodec/aarch64/vp9mc_neon.S
+++ b/libavcodec/aarch64/vp9mc_neon.S
@@ -25,23 +25,6 @@
// const uint8_t *ref, ptrdiff_t ref_stride,
// int h, int mx, int my);

-function ff_vp9_copy64_aarch64, export=1
-1:
- ldp x5, x6, [x2]
- ldp x7, x8, [x2, #16]
- stp x5, x6, [x0]
- ldp x9, x10, [x2, #32]
- stp x7, x8, [x0, #16]
- subs w4, w4, #1
- ldp x11, x12, [x2, #48]
- stp x9, x10, [x0, #32]
- stp x11, x12, [x0, #48]
- add x2, x2, x3
- add x0, x0, x1
- b.ne 1b
- ret
-endfunc
-
function ff_vp9_avg64_neon, export=1
mov x5, x0
1:
@@ -64,19 +47,6 @@ function ff_vp9_avg64_neon, export=1
ret
endfunc

-function ff_vp9_copy32_aarch64, export=1
-1:
- ldp x5, x6, [x2]
- ldp x7, x8, [x2, #16]
- stp x5, x6, [x0]
- subs w4, w4, #1
- stp x7, x8, [x0, #16]
- add x2, x2, x3
- add x0, x0, x1
- b.ne 1b
- ret
-endfunc
-
function ff_vp9_avg32_neon, export=1
1:
ld1 {v2.16b, v3.16b}, [x2], x3

[Binary icon changes: several existing call icons were updated, and new files were added under Telegram/Resources/icons/: call_camera_active.png, call_camera_muted.png, call_cancel.png, calls_close_main.png, calls_close_shadow.png, calls_maximize_main.png, calls_maximize_shadow.png, calls_minimize_main.png, calls_minimize_shadow.png, calls_mute_tooltip.png, calls_mute_userpic.png, calls_restore_main.png, calls_restore_shadow.png, calls_shadow_controls.png, each with @2x and @3x variants.]

@@ -370,6 +370,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_settings_adaptive_wide" = "Adaptive layout for wide screens";

"lng_settings_section_call_settings" = "Calls Settings";
"lng_settings_call_camera" = "Camera";
"lng_settings_call_section_output" = "Speakers and headphones";
"lng_settings_call_section_input" = "Microphone";
"lng_settings_call_input_device" = "Input device";

@@ -380,7 +381,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_settings_call_stop_mic_test" = "Stop test";
"lng_settings_call_section_other" = "Other settings";
"lng_settings_call_open_system_prefs" = "Open system sound preferences";
"lng_settings_call_device_default" = "Default";
"lng_settings_call_device_default" = "Same as the System";
"lng_settings_call_audio_ducking" = "Mute other sounds during calls";

"lng_settings_language" = "Language";

@@ -1219,6 +1220,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_recent_stickers" = "Frequently used";
"lng_faved_stickers_add" = "Add to Favorites";
"lng_faved_stickers_remove" = "Remove from Favorites";
"lng_recent_stickers_remove" = "Remove from Recent";
"lng_group_stickers" = "Group stickers";
"lng_group_stickers_description" = "You can choose a sticker set which will be available for every member while in the group chat.";
"lng_group_stickers_add" = "Choose sticker set";

@@ -1468,6 +1470,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_context_copy_text" = "Copy Text";
"lng_context_open_gif" = "Open GIF";
"lng_context_save_gif" = "Save GIF";
"lng_context_delete_gif" = "Delete GIF";
"lng_context_attached_stickers" = "Attached Stickers";
"lng_context_to_msg" = "Go To Message";
"lng_context_reply_msg" = "Reply";

@@ -1723,6 +1726,9 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL

"lng_call_error_not_available" = "Sorry, {user} doesn't accept calls.";
"lng_call_error_outdated" = "{user}'s app does not support calls. They need to update their app before you can call them.";
"lng_call_error_no_camera" = "No camera could be found. Please make sure that your camera is connected to the computer.";
"lng_call_error_camera_not_started" = "You can switch to video call once you're connected.";
"lng_call_error_camera_outdated" = "{user}'s app does not support video calls. They need to update their app.";
"lng_call_error_audio_io" = "There seems to be a problem with audio playback on your computer. Please make sure that your computer's speakers and microphone are working and try again.";

"lng_call_bar_hangup" = "End call";

@@ -1735,16 +1741,33 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_call_box_status_group" = "({amount}) {status}";

"lng_call_outgoing" = "Outgoing call";
"lng_call_video_outgoing" = "Outgoing video call";
"lng_call_incoming" = "Incoming call";
"lng_call_video_incoming" = "Incoming video call";
"lng_call_missed" = "Missed call";
"lng_call_video_missed" = "Missed video call";
"lng_call_cancelled" = "Cancelled call";
"lng_call_video_cancelled" = "Cancelled video call";
"lng_call_declined" = "Declined call";
"lng_call_video_declined" = "Declined video call";
"lng_call_duration_info" = "{time}, {duration}";
"lng_call_type_and_duration" = "{type} ({duration})";

"lng_call_rate_label" = "Please rate the quality of your call";
"lng_call_rate_comment" = "Comment (optional)";

"lng_call_start_video" = "Start Video";
"lng_call_stop_video" = "Stop Video";
"lng_call_end_call" = "End Call";
"lng_call_mute_audio" = "Mute";
"lng_call_unmute_audio" = "Unmute";
"lng_call_accept" = "Accept";
"lng_call_decline" = "Decline";
"lng_call_redial" = "Redial";
"lng_call_cancel" = "Cancel";

"lng_call_microphone_off" = "{user}'s microphone is off";

"lng_no_mic_permission" = "Telegram needs access to your microphone so that you can make calls and record voice messages.";

"lng_player_message_today" = "Today at {time}";

@@ -206,7 +206,7 @@
53;CU;Cuba;53 XXXX XXXX;10;
52;MX;Mexico;
51;PE;Peru;51 XXX XXX XXX;11;
49;DE;Germany;49 XXX XXXXXXXX;13;
49;DE;Germany;
48;PL;Poland;48 XXX XXX XXX;11;
47;NO;Norway;47 XXXX XXXX;10;
46;SE;Sweden;46 XX XXX XXXX;11;

@@ -109,7 +109,7 @@ storage.fileMp4#b3cea0e4 = storage.FileType;
storage.fileWebp#1081464c = storage.FileType;

userEmpty#200250ba id:int = User;
user#938458c1 flags:# self:flags.10?true contact:flags.11?true mutual_contact:flags.12?true deleted:flags.13?true bot:flags.14?true bot_chat_history:flags.15?true bot_nochats:flags.16?true verified:flags.17?true restricted:flags.18?true min:flags.20?true bot_inline_geo:flags.21?true support:flags.23?true scam:flags.24?true id:int access_hash:flags.0?long first_name:flags.1?string last_name:flags.2?string username:flags.3?string phone:flags.4?string photo:flags.5?UserProfilePhoto status:flags.6?UserStatus bot_info_version:flags.14?int restriction_reason:flags.18?Vector<RestrictionReason> bot_inline_placeholder:flags.19?string lang_code:flags.22?string = User;
user#938458c1 flags:# self:flags.10?true contact:flags.11?true mutual_contact:flags.12?true deleted:flags.13?true bot:flags.14?true bot_chat_history:flags.15?true bot_nochats:flags.16?true verified:flags.17?true restricted:flags.18?true min:flags.20?true bot_inline_geo:flags.21?true support:flags.23?true scam:flags.24?true apply_min_photo:flags.25?true id:int access_hash:flags.0?long first_name:flags.1?string last_name:flags.2?string username:flags.3?string phone:flags.4?string photo:flags.5?UserProfilePhoto status:flags.6?UserStatus bot_info_version:flags.14?int restriction_reason:flags.18?Vector<RestrictionReason> bot_inline_placeholder:flags.19?string lang_code:flags.22?string = User;

userProfilePhotoEmpty#4f11bae1 = UserProfilePhoto;
userProfilePhoto#69d3ab26 flags:# has_video:flags.0?true photo_id:long photo_small:FileLocation photo_big:FileLocation dc_id:int = UserProfilePhoto;

@@ -128,7 +128,7 @@ channel#d31a961e flags:# creator:flags.0?true left:flags.2?true broadcast:flags.
channelForbidden#289da732 flags:# broadcast:flags.5?true megagroup:flags.8?true id:int access_hash:long title:string until_date:flags.16?int = Chat;

chatFull#1b7c9db3 flags:# can_set_username:flags.7?true has_scheduled:flags.8?true id:int about:string participants:ChatParticipants chat_photo:flags.2?Photo notify_settings:PeerNotifySettings exported_invite:ExportedChatInvite bot_info:flags.3?Vector<BotInfo> pinned_msg_id:flags.6?int folder_id:flags.11?int = ChatFull;
channelFull#f0e6672a flags:# can_view_participants:flags.3?true can_set_username:flags.6?true can_set_stickers:flags.7?true hidden_prehistory:flags.10?true can_view_stats:flags.12?true can_set_location:flags.16?true has_scheduled:flags.19?true id:int about:string participants_count:flags.0?int admins_count:flags.1?int kicked_count:flags.2?int banned_count:flags.2?int online_count:flags.13?int read_inbox_max_id:int read_outbox_max_id:int unread_count:int chat_photo:Photo notify_settings:PeerNotifySettings exported_invite:ExportedChatInvite bot_info:Vector<BotInfo> migrated_from_chat_id:flags.4?int migrated_from_max_id:flags.4?int pinned_msg_id:flags.5?int stickerset:flags.8?StickerSet available_min_id:flags.9?int folder_id:flags.11?int linked_chat_id:flags.14?int location:flags.15?ChannelLocation slowmode_seconds:flags.17?int slowmode_next_send_date:flags.18?int stats_dc:flags.12?int pts:int = ChatFull;
channelFull#f0e6672a flags:# can_view_participants:flags.3?true can_set_username:flags.6?true can_set_stickers:flags.7?true hidden_prehistory:flags.10?true can_set_location:flags.16?true has_scheduled:flags.19?true can_view_stats:flags.20?true id:int about:string participants_count:flags.0?int admins_count:flags.1?int kicked_count:flags.2?int banned_count:flags.2?int online_count:flags.13?int read_inbox_max_id:int read_outbox_max_id:int unread_count:int chat_photo:Photo notify_settings:PeerNotifySettings exported_invite:ExportedChatInvite bot_info:Vector<BotInfo> migrated_from_chat_id:flags.4?int migrated_from_max_id:flags.4?int pinned_msg_id:flags.5?int stickerset:flags.8?StickerSet available_min_id:flags.9?int folder_id:flags.11?int linked_chat_id:flags.14?int location:flags.15?ChannelLocation slowmode_seconds:flags.17?int slowmode_next_send_date:flags.18?int stats_dc:flags.12?int pts:int = ChatFull;

chatParticipant#c8d7493e user_id:int inviter_id:int date:int = ChatParticipant;
chatParticipantCreator#da13538a user_id:int = ChatParticipant;

@@ -225,7 +225,7 @@ inputReportReasonOther#e1746d0a text:string = ReportReason;
inputReportReasonCopyright#9b89f93a = ReportReason;
inputReportReasonGeoIrrelevant#dbd4feed = ReportReason;

userFull#edf17c12 flags:# blocked:flags.0?true phone_calls_available:flags.4?true phone_calls_private:flags.5?true can_pin_message:flags.7?true has_scheduled:flags.12?true user:User about:flags.1?string settings:PeerSettings profile_photo:flags.2?Photo notify_settings:PeerNotifySettings bot_info:flags.3?BotInfo pinned_msg_id:flags.6?int common_chats_count:int folder_id:flags.11?int = UserFull;
userFull#edf17c12 flags:# blocked:flags.0?true phone_calls_available:flags.4?true phone_calls_private:flags.5?true can_pin_message:flags.7?true has_scheduled:flags.12?true video_calls_available:flags.13?true user:User about:flags.1?string settings:PeerSettings profile_photo:flags.2?Photo notify_settings:PeerNotifySettings bot_info:flags.3?BotInfo pinned_msg_id:flags.6?int common_chats_count:int folder_id:flags.11?int = UserFull;

contact#f911c994 user_id:int mutual:Bool = Contact;

@@ -818,13 +818,14 @@ inputStickerSetItem#ffa0a496 flags:# document:InputDocument emoji:string mask_co
inputPhoneCall#1e36fded id:long access_hash:long = InputPhoneCall;

phoneCallEmpty#5366c915 id:long = PhoneCall;
phoneCallWaiting#1b8f4ad1 flags:# video:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int protocol:PhoneCallProtocol receive_date:flags.0?int = PhoneCall;
phoneCallRequested#87eabb53 flags:# video:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_hash:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCallAccepted#997c454a flags:# video:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int g_b:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCall#8742ae7f flags:# p2p_allowed:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_or_b:bytes key_fingerprint:long protocol:PhoneCallProtocol connections:Vector<PhoneConnection> start_date:int = PhoneCall;
phoneCallDiscarded#50ca4de1 flags:# need_rating:flags.2?true need_debug:flags.3?true video:flags.5?true id:long reason:flags.0?PhoneCallDiscardReason duration:flags.1?int = PhoneCall;
phoneCallWaiting#1b8f4ad1 flags:# video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int protocol:PhoneCallProtocol receive_date:flags.0?int = PhoneCall;
phoneCallRequested#87eabb53 flags:# video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_hash:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCallAccepted#997c454a flags:# video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int g_b:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCall#8742ae7f flags:# p2p_allowed:flags.5?true video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_or_b:bytes key_fingerprint:long protocol:PhoneCallProtocol connections:Vector<PhoneConnection> start_date:int = PhoneCall;
phoneCallDiscarded#50ca4de1 flags:# need_rating:flags.2?true need_debug:flags.3?true video:flags.6?true id:long reason:flags.0?PhoneCallDiscardReason duration:flags.1?int = PhoneCall;

phoneConnection#9d4c17c0 id:long ip:string ipv6:string port:int peer_tag:bytes = PhoneConnection;
phoneConnectionWebrtc#635fe375 flags:# turn:flags.0?true stun:flags.1?true id:long ip:string ipv6:string port:int username:string password:string = PhoneConnection;

phoneCallProtocol#fc878fc8 flags:# udp_p2p:flags.0?true udp_reflector:flags.1?true min_layer:int max_layer:int library_versions:Vector<string> = PhoneCallProtocol;

@@ -1398,7 +1399,7 @@ updates.getState#edd4882a = updates.State;
updates.getDifference#25939651 flags:# pts:int pts_total_limit:flags.0?int date:int qts:int = updates.Difference;
updates.getChannelDifference#3173d78 flags:# force:flags.0?true channel:InputChannel filter:ChannelMessagesFilter pts:int limit:int = updates.ChannelDifference;

photos.updateProfilePhoto#f0bb5152 id:InputPhoto = UserProfilePhoto;
photos.updateProfilePhoto#72d4742c id:InputPhoto = photos.Photo;
photos.uploadProfilePhoto#89f30f69 flags:# file:flags.0?InputFile video:flags.1?InputFile video_start_ts:flags.2?double = photos.Photo;
photos.deletePhotos#87cf7f2f id:Vector<InputPhoto> = Vector<long>;
photos.getUserPhotos#91cd32a8 user_id:InputUser offset:int max_id:long limit:int = photos.Photos;

@@ -1511,4 +1512,4 @@ stats.getBroadcastStats#ab42441a flags:# dark:flags.0?true channel:InputChannel
stats.loadAsyncGraph#621d5fa0 flags:# token:string x:flags.0?long = StatsGraph;
stats.getMegagroupStats#dcdf8607 flags:# dark:flags.0?true channel:InputChannel = stats.MegagroupStats;

// LAYER 116
// LAYER 117

@@ -9,7 +9,7 @@
<Identity Name="TelegramMessengerLLP.TelegramDesktop"
ProcessorArchitecture="ARCHITECTURE"
Publisher="CN=536BC709-8EE1-4478-AF22-F0F0F26FF64A"
Version="2.2.0.0" />
Version="2.3.2.0" />
<Properties>
<DisplayName>Telegram Desktop</DisplayName>
<PublisherDisplayName>Telegram FZ-LLC</PublisherDisplayName>

@@ -44,8 +44,8 @@ IDI_ICON1 ICON "..\\art\\icon256.ico"
//

VS_VERSION_INFO VERSIONINFO
FILEVERSION 2,2,0,0
PRODUCTVERSION 2,2,0,0
FILEVERSION 2,3,2,0
PRODUCTVERSION 2,3,2,0
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L

@@ -62,10 +62,10 @@ BEGIN
BEGIN
VALUE "CompanyName", "Telegram FZ-LLC"
VALUE "FileDescription", "Telegram Desktop"
VALUE "FileVersion", "2.2.0.0"
VALUE "FileVersion", "2.3.2.0"
VALUE "LegalCopyright", "Copyright (C) 2014-2020"
VALUE "ProductName", "Telegram Desktop"
VALUE "ProductVersion", "2.2.0.0"
VALUE "ProductVersion", "2.3.2.0"
END
END
BLOCK "VarFileInfo"

@@ -35,8 +35,8 @@ LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
//

VS_VERSION_INFO VERSIONINFO
FILEVERSION 2,2,0,0
PRODUCTVERSION 2,2,0,0
FILEVERSION 2,3,2,0
PRODUCTVERSION 2,3,2,0
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L

@@ -53,10 +53,10 @@ BEGIN
BEGIN
VALUE "CompanyName", "Telegram FZ-LLC"
VALUE "FileDescription", "Telegram Desktop Updater"
VALUE "FileVersion", "2.2.0.0"
VALUE "FileVersion", "2.3.2.0"
VALUE "LegalCopyright", "Copyright (C) 2014-2020"
VALUE "ProductName", "Telegram Desktop"
VALUE "ProductVersion", "2.2.0.0"
VALUE "ProductVersion", "2.3.2.0"
END
END
BLOCK "VarFileInfo"

Telegram/SourceFiles/api/api_toggling_media.cpp (new file)

@@ -0,0 +1,117 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.

For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "api/api_toggling_media.h"

#include "apiwrap.h"
#include "data/data_document.h"
#include "data/data_file_origin.h"
#include "data/data_session.h"
#include "data/stickers/data_stickers.h"
#include "main/main_session.h"

namespace Api {
namespace {

template <typename ToggleRequest, typename DoneCallback>
void ToggleExistingMedia(
not_null<DocumentData*> document,
Data::FileOrigin origin,
ToggleRequest toggleRequest,
DoneCallback &&done) {
const auto api = &document->owner().session().api();

auto performRequest = [=](const auto &repeatRequest) -> void {
const auto usedFileReference = document->fileReference();
api->request(std::move(
toggleRequest
)).done([=](const MTPBool &result) {
if (mtpIsTrue(result)) {
done();
}
}).fail([=](const RPCError &error) {
if (error.code() == 400
&& error.type().startsWith(u"FILE_REFERENCE_"_q)) {
auto refreshed = [=](const Data::UpdatedFileReferences &d) {
if (document->fileReference() != usedFileReference) {
repeatRequest(repeatRequest);
}
};
api->refreshFileReference(origin, std::move(refreshed));
}
}).send();
};
performRequest(performRequest);
}

} // namespace

void ToggleFavedSticker(
not_null<DocumentData*> document,
Data::FileOrigin origin) {
ToggleFavedSticker(
document,
std::move(origin),
!document->owner().stickers().isFaved(document));
}

void ToggleFavedSticker(
not_null<DocumentData*> document,
Data::FileOrigin origin,
bool faved) {
if (faved && !document->sticker()) {
return;
}
ToggleExistingMedia(
document,
std::move(origin),
MTPmessages_FaveSticker(document->mtpInput(), MTP_bool(!faved)),
[=] { document->owner().stickers().setFaved(document, faved); });
}

void ToggleRecentSticker(
not_null<DocumentData*> document,
Data::FileOrigin origin,
bool saved) {
if (!document->sticker()) {
return;
}
auto done = [=] {
if (!saved) {
document->owner().stickers().removeFromRecentSet(document);
}
};
ToggleExistingMedia(
document,
std::move(origin),
MTPmessages_SaveRecentSticker(
MTP_flags(MTPmessages_SaveRecentSticker::Flag(0)),
document->mtpInput(),
MTP_bool(!saved)),
std::move(done));
}

void ToggleSavedGif(
not_null<DocumentData*> document,
Data::FileOrigin origin,
bool saved) {
if (saved && !document->isGifv()) {
return;
}
auto done = [=] {
if (saved) {
document->owner().stickers().addSavedGif(document);
}
};
ToggleExistingMedia(
document,
std::move(origin),
MTPmessages_SaveGif(document->mtpInput(), MTP_bool(!saved)),
std::move(done));
}

} // namespace Api

Telegram/SourceFiles/api/api_toggling_media.h (new file)

@@ -0,0 +1,31 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.

For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once

namespace Api {

void ToggleFavedSticker(
not_null<DocumentData*> document,
Data::FileOrigin origin);

void ToggleFavedSticker(
not_null<DocumentData*> document,
Data::FileOrigin origin,
bool faved);

void ToggleRecentSticker(
not_null<DocumentData*> document,
Data::FileOrigin origin,
bool saved);

void ToggleSavedGif(
not_null<DocumentData*> document,
Data::FileOrigin origin,
bool saved);

} // namespace Api

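The two new files above replace the ApiWrap::toggleFavedSticker and ApiWrap::toggleSavedGif member functions that are removed further down. A minimal, hypothetical call-site sketch follows; the wrapper function and variable names are illustrative only, while the Api:: signatures are taken from the header above:

// Hypothetical call site for the new Api:: helpers (sketch, not part of this change).
#include "api/api_toggling_media.h"
#include "data/data_document.h"

void markStickerFaved(
	not_null<DocumentData*> document,
	Data::FileOrigin origin) {
	// The two-argument overload reads the current faved state from the
	// stickers store and flips it; the three-argument overload (and the
	// ToggleRecentSticker / ToggleSavedGif helpers) set the state explicitly.
	Api::ToggleFavedSticker(document, origin);
}
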
@@ -1726,8 +1726,9 @@ void Updates::feedUpdate(const MTPUpdate &update) {
auto &d = update.c_updateEncryptedMessagesRead();
} break;

case mtpc_updatePhoneCall: {
Core::App().calls().handleUpdate(&session(), update.c_updatePhoneCall());
case mtpc_updatePhoneCall:
case mtpc_updatePhoneCallSignalingData: {
Core::App().calls().handleUpdate(&session(), update);
} break;

case mtpc_updateUserBlocked: {

@ -2936,72 +2936,6 @@ std::vector<not_null<DocumentData*>> *ApiWrap::stickersByEmoji(
|
|||
return nullptr;
|
||||
}
|
||||
|
||||
void ApiWrap::toggleFavedSticker(
|
||||
not_null<DocumentData*> document,
|
||||
Data::FileOrigin origin,
|
||||
bool faved) {
|
||||
if (faved && !document->sticker()) {
|
||||
return;
|
||||
}
|
||||
|
||||
auto performRequest = [=](const auto &repeatRequest) -> void {
|
||||
const auto usedFileReference = document->fileReference();
|
||||
request(MTPmessages_FaveSticker(
|
||||
document->mtpInput(),
|
||||
MTP_bool(!faved)
|
||||
)).done([=](const MTPBool &result) {
|
||||
if (mtpIsTrue(result)) {
|
||||
_session->data().stickers().setFaved(document, faved);
|
||||
}
|
||||
}).fail([=](const RPCError &error) {
|
||||
if (error.code() == 400
|
||||
&& error.type().startsWith(qstr("FILE_REFERENCE_"))) {
|
||||
auto refreshed = [=](const UpdatedFileReferences &data) {
|
||||
if (document->fileReference() != usedFileReference) {
|
||||
repeatRequest(repeatRequest);
|
||||
}
|
||||
};
|
||||
refreshFileReference(origin, std::move(refreshed));
|
||||
}
|
||||
}).send();
|
||||
};
|
||||
performRequest(performRequest);
|
||||
}
|
||||
|
||||
void ApiWrap::toggleSavedGif(
|
||||
not_null<DocumentData*> document,
|
||||
Data::FileOrigin origin,
|
||||
bool saved) {
|
||||
if (saved && !document->isGifv()) {
|
||||
return;
|
||||
}
|
||||
|
||||
auto performRequest = [=](const auto &repeatRequest) -> void {
|
||||
const auto usedFileReference = document->fileReference();
|
||||
request(MTPmessages_SaveGif(
|
||||
document->mtpInput(),
|
||||
MTP_bool(!saved)
|
||||
)).done([=](const MTPBool &result) {
|
||||
if (mtpIsTrue(result)) {
|
||||
if (saved) {
|
||||
_session->data().stickers().addSavedGif(document);
|
||||
}
|
||||
}
|
||||
}).fail([=](const RPCError &error) {
|
||||
if (error.code() == 400
|
||||
&& error.type().startsWith(qstr("FILE_REFERENCE_"))) {
|
||||
auto refreshed = [=](const UpdatedFileReferences &data) {
|
||||
if (document->fileReference() != usedFileReference) {
|
||||
repeatRequest(repeatRequest);
|
||||
}
|
||||
};
|
||||
refreshFileReference(origin, std::move(refreshed));
|
||||
}
|
||||
}).send();
|
||||
};
|
||||
performRequest(performRequest);
|
||||
}
|
||||
|
||||
void ApiWrap::requestStickers(TimeId now) {
|
||||
if (!_session->data().stickers().updateNeeded(now)
|
||||
|| _stickersUpdateRequest) {
|
||||
|
|
@ -4918,8 +4852,8 @@ void ApiWrap::clearPeerPhoto(not_null<PhotoData*> photo) {
|
|||
if (self->userpicPhotoId() == photo->id) {
|
||||
request(MTPphotos_UpdateProfilePhoto(
|
||||
MTP_inputPhotoEmpty()
|
||||
)).done([=](const MTPUserProfilePhoto &result) {
|
||||
self->setPhoto(result);
|
||||
)).done([=](const MTPphotos_Photo &result) {
|
||||
self->setPhoto(MTP_userProfilePhotoEmpty());
|
||||
}).send();
|
||||
} else if (photo->peer && photo->peer->userpicPhotoId() == photo->id) {
|
||||
const auto applier = [=](const MTPUpdates &result) {
|
||||
@ -275,14 +275,6 @@ public:
|
|||
const MTPInputStickerSet &set);
|
||||
std::vector<not_null<DocumentData*>> *stickersByEmoji(
|
||||
not_null<EmojiPtr> emoji);
|
||||
void toggleFavedSticker(
|
||||
not_null<DocumentData*> document,
|
||||
Data::FileOrigin origin,
|
||||
bool faved);
|
||||
void toggleSavedGif(
|
||||
not_null<DocumentData*> document,
|
||||
Data::FileOrigin origin,
|
||||
bool saved);
|
||||
|
||||
void joinChannel(not_null<ChannelData*> channel);
|
||||
void leaveChannel(not_null<ChannelData*> channel);
|
||||
@ -24,6 +24,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
#include "core/core_settings.h"
|
||||
#include "chat_helpers/emoji_suggestions_widget.h"
|
||||
#include "chat_helpers/message_field.h"
|
||||
#include "chat_helpers/send_context_menu.h"
|
||||
#include "history/view/history_view_schedule_box.h"
|
||||
#include "settings/settings_common.h"
|
||||
#include "base/unique_qptr.h"
|
||||
|
|
@ -1058,19 +1059,19 @@ object_ptr<Ui::RpWidget> CreatePollBox::setupContent() {
|
|||
*error &= ~Error::Solution;
|
||||
}
|
||||
};
|
||||
const auto showError = [=](const QString &text) {
|
||||
Ui::Toast::Show(text);
|
||||
const auto showError = [](tr::phrase<> text) {
|
||||
Ui::Toast::Show(text(tr::now));
|
||||
};
|
||||
const auto send = [=](Api::SendOptions sendOptions) {
|
||||
collectError();
|
||||
if (*error & Error::Question) {
|
||||
showError(tr::lng_polls_choose_question(tr::now));
|
||||
showError(tr::lng_polls_choose_question);
|
||||
question->setFocus();
|
||||
} else if (*error & Error::Options) {
|
||||
showError(tr::lng_polls_choose_answers(tr::now));
|
||||
showError(tr::lng_polls_choose_answers);
|
||||
options->focusFirst();
|
||||
} else if (*error & Error::Correct) {
|
||||
showError(tr::lng_polls_choose_correct(tr::now));
|
||||
showError(tr::lng_polls_choose_correct);
|
||||
} else if (*error & Error::Solution) {
|
||||
solution->showError();
|
||||
} else if (!*error) {
|
||||
|
|
@ -1078,15 +1079,13 @@ object_ptr<Ui::RpWidget> CreatePollBox::setupContent() {
|
|||
}
|
||||
};
|
||||
const auto sendSilent = [=] {
|
||||
auto options = Api::SendOptions();
|
||||
options.silent = true;
|
||||
send(options);
|
||||
send({ .silent = true });
|
||||
};
|
||||
const auto sendScheduled = [=] {
|
||||
Ui::show(
|
||||
HistoryView::PrepareScheduleBox(
|
||||
this,
|
||||
SendMenuType::Scheduled,
|
||||
SendMenu::Type::Scheduled,
|
||||
send),
|
||||
Ui::LayerOption::KeepOther);
|
||||
};
|
||||
|
|
@ -1101,15 +1100,22 @@ object_ptr<Ui::RpWidget> CreatePollBox::setupContent() {
|
|||
FocusAtEnd(question);
|
||||
}, lifetime());
|
||||
|
||||
const auto isNormal = (_sendType == Api::SendType::Normal);
|
||||
const auto isScheduled = (_sendType == Api::SendType::Scheduled);
|
||||
|
||||
const auto submit = addButton(
|
||||
tr::lng_polls_create_button(),
|
||||
[=] { send({}); });
|
||||
if (_sendType == Api::SendType::Normal) {
|
||||
isNormal
|
||||
? tr::lng_polls_create_button()
|
||||
: tr::lng_schedule_button(),
|
||||
[=] { isNormal ? send({}) : sendScheduled(); });
|
||||
if (isNormal || isScheduled) {
|
||||
const auto sendMenuType = [=] {
|
||||
collectError();
|
||||
return *error ? SendMenuType::Disabled : SendMenuType::Scheduled;
|
||||
return (*error || isScheduled)
|
||||
? SendMenu::Type::Disabled
|
||||
: SendMenu::Type::Scheduled;
|
||||
};
|
||||
SetupSendMenuAndShortcuts(
|
||||
SendMenu::SetupMenuAndShortcuts(
|
||||
submit.data(),
|
||||
sendMenuType,
|
||||
sendSilent,
|
||||
@ -91,25 +91,6 @@ auto ListFromMimeData(not_null<const QMimeData*> data) {
|
|||
return result;
|
||||
}
|
||||
|
||||
auto CheckMimeData(not_null<const QMimeData*> data, bool isAlbum) {
|
||||
if (data->urls().size() > 1) {
|
||||
return false;
|
||||
} else if (data->hasImage()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (isAlbum && data->hasUrls()) {
|
||||
const auto url = data->urls().front();
|
||||
if (url.isLocalFile()) {
|
||||
using namespace Core;
|
||||
const auto info = QFileInfo(Platform::File::UrlToLocal(url));
|
||||
return IsMimeAcceptedForAlbum(MimeTypeForFile(info).name());
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
EditCaptionBox::EditCaptionBox(
|
||||
|
|
@ -663,7 +644,7 @@ void EditCaptionBox::prepare() {
|
|||
if (action == Ui::InputField::MimeAction::Check) {
|
||||
if (!data->hasText() && !_isAllowedEditMedia) {
|
||||
return false;
|
||||
} else if (CheckMimeData(data, _isAlbum)) {
|
||||
} else if (Storage::ValidateDragData(data, _isAlbum)) {
|
||||
return true;
|
||||
}
|
||||
return data->hasText();
|
||||
|
|
@ -766,7 +747,9 @@ void EditCaptionBox::setupEmojiPanel() {
|
|||
|
||||
void EditCaptionBox::setupDragArea() {
|
||||
auto enterFilter = [=](not_null<const QMimeData*> data) {
|
||||
return !_isAllowedEditMedia ? false : CheckMimeData(data, _isAlbum);
|
||||
return !_isAllowedEditMedia
|
||||
? false
|
||||
: Storage::ValidateDragData(data, _isAlbum);
|
||||
};
|
||||
// Avoid both drag areas appearing at one time.
|
||||
auto computeState = [=](const QMimeData *data) {
|
||||
@ -15,6 +15,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
#include "main/main_session.h"
|
||||
#include "mtproto/mtproto_config.h"
|
||||
#include "chat_helpers/message_field.h"
|
||||
#include "chat_helpers/send_context_menu.h"
|
||||
#include "chat_helpers/emoji_suggestions_widget.h"
|
||||
#include "chat_helpers/tabbed_panel.h"
|
||||
#include "chat_helpers/tabbed_selector.h"
|
||||
|
|
@ -1666,7 +1667,7 @@ SendFilesBox::SendFilesBox(
|
|||
CompressConfirm compressed,
|
||||
SendLimit limit,
|
||||
Api::SendType sendType,
|
||||
SendMenuType sendMenuType)
|
||||
SendMenu::Type sendMenuType)
|
||||
: _controller(controller)
|
||||
, _sendType(sendType)
|
||||
, _list(std::move(list))
|
||||
|
|
@ -1836,7 +1837,7 @@ void SendFilesBox::setupShadows(
|
|||
void SendFilesBox::prepare() {
|
||||
_send = addButton(tr::lng_send_button(), [=] { send({}); });
|
||||
if (_sendType == Api::SendType::Normal) {
|
||||
SetupSendMenuAndShortcuts(
|
||||
SendMenu::SetupMenuAndShortcuts(
|
||||
_send,
|
||||
[=] { return _sendMenuType; },
|
||||
[=] { sendSilent(); },
|
||||
|
|
@ -1865,12 +1866,13 @@ void SendFilesBox::prepare() {
|
|||
void SendFilesBox::setupDragArea() {
|
||||
// Avoid both drag areas appearing at one time.
|
||||
auto computeState = [=](const QMimeData *data) {
|
||||
using DragState = Storage::MimeDataState;
|
||||
const auto state = Storage::ComputeMimeDataState(data);
|
||||
return (state == Storage::MimeDataState::PhotoFiles)
|
||||
? Storage::MimeDataState::Image
|
||||
: (state == Storage::MimeDataState::Files)
|
||||
// Temporary enable drag'n'drop only for images. TODO.
|
||||
? Storage::MimeDataState::None
|
||||
return (state == DragState::PhotoFiles)
|
||||
? DragState::Image
|
||||
: (state == DragState::Files
|
||||
&& !Storage::ValidateDragData(data, true))
|
||||
? DragState::None
|
||||
: state;
|
||||
};
|
||||
const auto areas = DragArea::SetupDragAreaToContainer(
|
||||
|
|
@ -2416,7 +2418,7 @@ void SendFilesBox::sendSilent() {
|
|||
|
||||
void SendFilesBox::sendScheduled() {
|
||||
const auto type = (_sendType == Api::SendType::ScheduledToUser)
|
||||
? SendMenuType::ScheduledToUser
|
||||
? SendMenu::Type::ScheduledToUser
|
||||
: _sendMenuType;
|
||||
const auto callback = [=](Api::SendOptions options) { send(options); };
|
||||
Ui::show(
|
||||
@ -40,7 +40,9 @@ namespace Window {
|
|||
class SessionController;
|
||||
} // namespace Window
|
||||
|
||||
enum class SendMenuType;
|
||||
namespace SendMenu {
|
||||
enum class Type;
|
||||
} // namespace SendMenu
|
||||
|
||||
enum class SendFilesWay {
|
||||
Album,
|
||||
|
|
@ -62,7 +64,7 @@ public:
|
|||
CompressConfirm compressed,
|
||||
SendLimit limit,
|
||||
Api::SendType sendType,
|
||||
SendMenuType sendMenuType);
|
||||
SendMenu::Type sendMenuType);
|
||||
|
||||
void setConfirmedCallback(
|
||||
Fn<void(
|
||||
|
|
@ -142,7 +144,7 @@ private:
|
|||
CompressConfirm _compressConfirmInitial = CompressConfirm::None;
|
||||
CompressConfirm _compressConfirm = CompressConfirm::None;
|
||||
SendLimit _sendLimit = SendLimit::Many;
|
||||
SendMenuType _sendMenuType = SendMenuType();
|
||||
SendMenu::Type _sendMenuType = SendMenu::Type();
|
||||
|
||||
Fn<void(
|
||||
Storage::PreparedList &&list,
|
||||
@ -23,6 +23,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
#include "ui/wrap/slide_wrap.h"
|
||||
#include "ui/text_options.h"
|
||||
#include "chat_helpers/message_field.h"
|
||||
#include "chat_helpers/send_context_menu.h"
|
||||
#include "history/history.h"
|
||||
#include "history/history_message.h"
|
||||
#include "history/view/history_view_schedule_box.h"
|
||||
|
|
@ -426,13 +427,13 @@ void ShareBox::keyPressEvent(QKeyEvent *e) {
|
|||
}
|
||||
}
|
||||
|
||||
SendMenuType ShareBox::sendMenuType() const {
|
||||
SendMenu::Type ShareBox::sendMenuType() const {
|
||||
const auto selected = _inner->selected();
|
||||
return ranges::all_of(selected, HistoryView::CanScheduleUntilOnline)
|
||||
? SendMenuType::ScheduledToUser
|
||||
? SendMenu::Type::ScheduledToUser
|
||||
: (selected.size() == 1 && selected.front()->isSelf())
|
||||
? SendMenuType::Reminder
|
||||
: SendMenuType::Scheduled;
|
||||
? SendMenu::Type::Reminder
|
||||
: SendMenu::Type::Scheduled;
|
||||
}
|
||||
|
||||
void ShareBox::createButtons() {
|
||||
|
|
@ -446,7 +447,7 @@ void ShareBox::createButtons() {
|
|||
const auto send = addButton(tr::lng_share_confirm(), [=] {
|
||||
submit({});
|
||||
});
|
||||
SetupSendMenuAndShortcuts(
|
||||
SendMenu::SetupMenuAndShortcuts(
|
||||
send,
|
||||
[=] { return sendMenuType(); },
|
||||
[=] { submitSilent(); },
|
||||
@ -14,7 +14,9 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
#include "ui/effects/round_checkbox.h"
|
||||
#include "mtproto/sender.h"
|
||||
|
||||
enum class SendMenuType;
|
||||
namespace SendMenu {
|
||||
enum class Type;
|
||||
} // namespace SendMenu
|
||||
|
||||
namespace Window {
|
||||
class SessionNavigation;
|
||||
|
|
@ -85,7 +87,7 @@ private:
|
|||
void goToChat(not_null<PeerData*> peer);
|
||||
bool searchByUsername(bool useCache = false);
|
||||
|
||||
SendMenuType sendMenuType() const;
|
||||
SendMenu::Type sendMenuType() const;
|
||||
|
||||
void scrollTo(Ui::ScrollToRequest request);
|
||||
void needSearchByUsername();
|
||||
@ -14,12 +14,12 @@ CallSignalBars {
|
|||
width: pixels;
|
||||
radius: pixels;
|
||||
skip: pixels;
|
||||
min: pixels;
|
||||
max: pixels;
|
||||
color: color;
|
||||
inactiveOpacity: double;
|
||||
}
|
||||
|
||||
callWidth: 300px;
|
||||
callHeight: 470px;
|
||||
callRadius: 6px;
|
||||
callShadow: Shadow {
|
||||
left: icon {{ "call_shadow_left", windowShadowFg }};
|
||||
|
|
@ -34,16 +34,83 @@ callShadow: Shadow {
|
|||
fallback: windowShadowFgFallback;
|
||||
}
|
||||
|
||||
callButton: IconButton {
|
||||
width: 72px;
|
||||
height: 72px;
|
||||
callWidthMin: 300px;
|
||||
callHeightMin: 440px;
|
||||
callWidth: 720px;
|
||||
callHeight: 540px;
|
||||
|
||||
iconPosition: point(-1px, -1px);
|
||||
callBottomControlsHeight: 87px;
|
||||
|
||||
CallBodyLayout {
|
||||
height: pixels;
|
||||
photoTop: pixels;
|
||||
photoSize: pixels;
|
||||
nameTop: pixels;
|
||||
statusTop: pixels;
|
||||
muteStroke: pixels;
|
||||
muteSize: pixels;
|
||||
mutePosition: point;
|
||||
}
|
||||
|
||||
callBodyLayout: CallBodyLayout {
|
||||
height: 284px;
|
||||
photoTop: 21px;
|
||||
photoSize: 160px;
|
||||
nameTop: 221px;
|
||||
statusTop: 254px;
|
||||
muteStroke: 3px;
|
||||
muteSize: 36px;
|
||||
mutePosition: point(142px, 135px);
|
||||
}
|
||||
callBodyWithPreview: CallBodyLayout {
|
||||
height: 185px;
|
||||
photoTop: 21px;
|
||||
photoSize: 100px;
|
||||
nameTop: 132px;
|
||||
statusTop: 163px;
|
||||
muteStroke: 3px;
|
||||
muteSize: 0px;
|
||||
mutePosition: point(90px, 84px);
|
||||
}
|
||||
callMutedPeerIcon: icon {{ "calls_mute_userpic", callIconFg }};
|
||||
|
||||
callOutgoingPreviewMin: size(360px, 120px);
|
||||
callOutgoingPreview: size(540px, 180px); // default, for height == callHeight.
|
||||
callOutgoingPreviewMax: size(1620px, 540px);
|
||||
callOutgoingDefaultSize: size(160px, 110px);
|
||||
|
||||
callInnerPadding: 12px;
|
||||
|
||||
callFingerprintPadding: margins(10px, 4px, 8px, 5px);
|
||||
callFingerprintSkip: 4px;
|
||||
callFingerprintSignalBarsSkip: 2px;
|
||||
callSignalBarsPadding: margins(8px, 9px, 11px, 5px);
|
||||
|
||||
callFingerprintTop: 8px;
|
||||
callFingerprintBottom: -16px;
|
||||
|
||||
callTooltipMutedIcon: icon{{ "calls_mute_tooltip", videoPlayIconFg }};
|
||||
callTooltipMutedIconPosition: point(10px, 5px);
|
||||
callTooltipPadding: margins(41px, 7px, 15px, 8px);
|
||||
|
||||
callButton: IconButton {
|
||||
width: 68px;
|
||||
height: 79px;
|
||||
|
||||
iconPosition: point(-1px, 16px);
|
||||
|
||||
rippleAreaPosition: point(12px, 12px);
|
||||
rippleAreaSize: 48px;
|
||||
rippleAreaSize: 44px;
|
||||
ripple: defaultRippleAnimation;
|
||||
}
|
||||
callButtonLabel: FlatLabel(defaultFlatLabel) {
|
||||
textFg: callNameFg;
|
||||
style: TextStyle(defaultTextStyle) {
|
||||
font: font(11px);
|
||||
linkFont: font(11px);
|
||||
linkFontOver: font(11px underline);
|
||||
}
|
||||
}
|
||||
|
||||
callAnswer: CallButton {
|
||||
button: IconButton(callButton) {
|
||||
|
|
@ -56,6 +123,7 @@ callAnswer: CallButton {
|
|||
angle: 135.;
|
||||
outerRadius: 12px;
|
||||
outerBg: callAnswerBgOuter;
|
||||
label: callButtonLabel;
|
||||
}
|
||||
callHangup: CallButton {
|
||||
button: IconButton(callButton) {
|
||||
|
|
@ -66,30 +134,57 @@ callHangup: CallButton {
|
|||
}
|
||||
bg: callHangupBg;
|
||||
outerBg: callHangupBg;
|
||||
label: callButtonLabel;
|
||||
}
|
||||
callCancel: CallButton {
|
||||
button: IconButton(callButton) {
|
||||
icon: icon {{ "box_button_close", callCancelFg }};
|
||||
icon: icon {{ "call_cancel", callIconFgActive }};
|
||||
ripple: RippleAnimation(defaultRippleAnimation) {
|
||||
color: callCancelRipple;
|
||||
color: callIconActiveRipple;
|
||||
}
|
||||
}
|
||||
bg: callCancelBg;
|
||||
outerBg: callCancelBg;
|
||||
bg: callIconBgActive;
|
||||
outerBg: callIconBgActive;
|
||||
label: callButtonLabel;
|
||||
}
|
||||
callMuteToggle: IconButton(callButton) {
|
||||
callMicrophoneMute: CallButton {
|
||||
button: IconButton(callButton) {
|
||||
icon: icon {{ "call_record_active", callIconFg }};
|
||||
ripple: RippleAnimation(defaultRippleAnimation) {
|
||||
color: callMuteRipple;
|
||||
}
|
||||
}
|
||||
bg: callIconBg;
|
||||
outerBg: callMuteRipple;
|
||||
label: callButtonLabel;
|
||||
}
|
||||
callUnmuteIcon: icon {{ "call_record_muted", callIconFg }};
|
||||
callMicrophoneUnmute: CallButton(callMicrophoneMute) {
|
||||
button: IconButton(callButton) {
|
||||
icon: icon {{ "call_record_muted", callIconFgActive }};
|
||||
ripple: RippleAnimation(defaultRippleAnimation) {
|
||||
color: callIconActiveRipple;
|
||||
}
|
||||
}
|
||||
bg: callIconBgActive;
|
||||
}
|
||||
callCameraMute: CallButton(callMicrophoneMute) {
|
||||
button: IconButton(callButton) {
|
||||
icon: icon {{ "call_camera_active", callIconFg }};
|
||||
ripple: RippleAnimation(defaultRippleAnimation) {
|
||||
color: callMuteRipple;
|
||||
}
|
||||
}
|
||||
}
|
||||
callCameraUnmute: CallButton(callMicrophoneUnmute) {
|
||||
button: IconButton(callButton) {
|
||||
icon: icon {{ "call_camera_muted", callIconFgActive }};
|
||||
ripple: RippleAnimation(defaultRippleAnimation) {
|
||||
color: callIconActiveRipple;
|
||||
}
|
||||
}
|
||||
}
|
||||
callBottomShadowSize: 124px;
|
||||
|
||||
callControlsTop: 80px;
|
||||
callControlsSkip: 0px;
|
||||
callMuteRight: 8px;
|
||||
|
||||
callNameTop: 15px;
|
||||
callName: FlatLabel(defaultFlatLabel) {
|
||||
minWidth: 260px;
|
||||
maxHeight: 30px;
|
||||
|
|
@ -101,7 +196,6 @@ callName: FlatLabel(defaultFlatLabel) {
|
|||
linkFontOver: font(21px semibold underline);
|
||||
}
|
||||
}
|
||||
callStatusTop: 46px;
|
||||
callStatus: FlatLabel(defaultFlatLabel) {
|
||||
minWidth: 260px;
|
||||
maxHeight: 20px;
|
||||
|
|
@ -113,10 +207,16 @@ callStatus: FlatLabel(defaultFlatLabel) {
|
|||
linkFontOver: font(14px underline);
|
||||
}
|
||||
}
|
||||
|
||||
callFingerprintPadding: margins(9px, 4px, 9px, 5px);
|
||||
callFingerprintSkip: 3px;
|
||||
callFingerprintBottom: 8px;
|
||||
callRemoteAudioMute: FlatLabel(callStatus) {
|
||||
minWidth: 0px;
|
||||
textFg: videoPlayIconFg;
|
||||
style: TextStyle(defaultTextStyle) {
|
||||
font: font(12px);
|
||||
linkFont: font(12px);
|
||||
linkFontOver: font(12px underline);
|
||||
}
|
||||
}
|
||||
callRemoteAudioMuteSkip: 12px;
|
||||
|
||||
callBarHeight: 38px;
|
||||
callBarMuteToggle: IconButton {
|
||||
|
|
@ -124,7 +224,7 @@ callBarMuteToggle: IconButton {
|
|||
height: 38px;
|
||||
|
||||
icon: icon {{ "call_record_active", callBarFg }};
|
||||
iconPosition: point(9px, 8px);
|
||||
iconPosition: point(3px, 2px);
|
||||
|
||||
ripple: RippleAnimation(defaultRippleAnimation) {
|
||||
color: callBarMuteRipple;
|
||||
|
|
@ -137,7 +237,7 @@ callBarRightSkip: 12px;
|
|||
callBarSkip: 10px;
|
||||
callBarHangup: IconButton(callBarMuteToggle) {
|
||||
icon: icon {{ "call_discard", callBarFg }};
|
||||
iconPosition: point(9px, 11px);
|
||||
iconPosition: point(3px, 1px);
|
||||
}
|
||||
callBarLabel: LabelSimple(defaultLabelSimple) {
|
||||
font: semiboldFont;
|
||||
|
|
@ -162,8 +262,8 @@ callReDial: IconButton {
|
|||
width: 40px;
|
||||
height: 56px;
|
||||
|
||||
icon: mainMenuCalls;
|
||||
iconOver: mainMenuCallsOver;
|
||||
icon: icon {{ "call_answer", menuIconFg }};
|
||||
iconOver: icon {{ "call_answer", menuIconFgOver }};
|
||||
iconPosition: point(-1px, -1px);
|
||||
|
||||
ripple: defaultRippleAnimation;
|
||||
|
|
@ -171,6 +271,11 @@ callReDial: IconButton {
|
|||
rippleAreaSize: 40px;
|
||||
}
|
||||
|
||||
callCameraReDial: IconButton(callReDial) {
|
||||
icon: icon {{ "call_camera_active", menuIconFg }};
|
||||
iconOver: icon {{ "call_camera_active", menuIconFgOver }};
|
||||
}
|
||||
|
||||
callRatingPadding: margins(24px, 12px, 24px, 0px);
|
||||
callRatingStar: IconButton {
|
||||
width: 36px;
|
||||
|
|
@ -200,14 +305,97 @@ callDebugLabel: FlatLabel(defaultFlatLabel) {
|
|||
callPanelDuration: 150;
|
||||
|
||||
callPanelSignalBars: CallSignalBars {
|
||||
width: 3px;
|
||||
width: 2px;
|
||||
radius: 1px;
|
||||
skip: 1px;
|
||||
skip: 2px;
|
||||
min: 4px;
|
||||
max: 10px;
|
||||
color: callNameFg;
|
||||
inactiveOpacity: 0.5;
|
||||
}
|
||||
callBarSignalBars: CallSignalBars(callPanelSignalBars) {
|
||||
width: 3px;
|
||||
skip: 1px;
|
||||
min: 3px;
|
||||
max: 12px;
|
||||
color: callBarFg;
|
||||
}
|
||||
callSignalMargin: 8px;
|
||||
callSignalPadding: 4px;
|
||||
|
||||
callTitleButton: IconButton {
|
||||
width: 34px;
|
||||
height: 30px;
|
||||
iconPosition: point(0px, 0px);
|
||||
}
|
||||
callTitleMinimizeIcon: icon {
|
||||
{ "calls_minimize_shadow", windowShadowFg },
|
||||
{ "calls_minimize_main", callNameFg },
|
||||
};
|
||||
callTitleMinimizeIconOver: icon {
|
||||
{ size(34px, 30px), callBgButton },
|
||||
{ size(34px, 30px), callMuteRipple },
|
||||
{ "calls_minimize_shadow", windowShadowFg },
|
||||
{ "calls_minimize_main", callNameFg },
|
||||
};
|
||||
callTitleMaximizeIcon: icon {
|
||||
{ "calls_maximize_shadow", windowShadowFg },
|
||||
{ "calls_maximize_main", callNameFg },
|
||||
};
|
||||
callTitleMaximizeIconOver: icon {
|
||||
{ size(34px, 30px), callBgButton },
|
||||
{ size(34px, 30px), callMuteRipple },
|
||||
{ "calls_maximize_shadow", windowShadowFg },
|
||||
{ "calls_maximize_main", callNameFg },
|
||||
};
|
||||
callTitleRestoreIcon: icon {
|
||||
{ "calls_restore_shadow", windowShadowFg },
|
||||
{ "calls_restore_main", callNameFg },
|
||||
};
|
||||
callTitleRestoreIconOver: icon {
|
||||
{ size(34px, 30px), callBgButton },
|
||||
{ size(34px, 30px), callMuteRipple },
|
||||
{ "calls_restore_shadow", windowShadowFg },
|
||||
{ "calls_restore_main", callNameFg },
|
||||
};
|
||||
callTitleCloseIcon: icon {
|
||||
{ "calls_close_shadow", windowShadowFg },
|
||||
{ "calls_close_main", callNameFg },
|
||||
};
|
||||
callTitleCloseIconOver: icon {
|
||||
{ size(34px, 30px), titleButtonCloseBgOver },
|
||||
{ "calls_close_shadow", windowShadowFg },
|
||||
{ "calls_close_main", titleButtonCloseFgOver },
|
||||
};
|
||||
callTitle: WindowTitle(defaultWindowTitle) {
|
||||
height: 0px;
|
||||
bg: callBgOpaque;
|
||||
bgActive: callBgOpaque;
|
||||
fg: transparent;
|
||||
fgActive: transparent;
|
||||
minimize: IconButton(callTitleButton) {
|
||||
icon: callTitleMinimizeIcon;
|
||||
iconOver: callTitleMinimizeIconOver;
|
||||
}
|
||||
minimizeIconActive: callTitleMinimizeIcon;
|
||||
minimizeIconActiveOver: callTitleMinimizeIconOver;
|
||||
maximize: IconButton(callTitleButton) {
|
||||
icon: callTitleMaximizeIcon;
|
||||
iconOver: callTitleMaximizeIconOver;
|
||||
}
|
||||
maximizeIconActive: callTitleMaximizeIcon;
|
||||
maximizeIconActiveOver: callTitleMaximizeIconOver;
|
||||
restoreIcon: callTitleRestoreIcon;
|
||||
restoreIconOver: callTitleRestoreIconOver;
|
||||
restoreIconActive: callTitleRestoreIcon;
|
||||
restoreIconActiveOver: callTitleRestoreIconOver;
|
||||
close: IconButton(callTitleButton) {
|
||||
icon: callTitleCloseIcon;
|
||||
iconOver: callTitleCloseIconOver;
|
||||
}
|
||||
closeIconActive: callTitleCloseIcon;
|
||||
closeIconActiveOver: callTitleCloseIconOver;
|
||||
}
|
||||
callTitleShadow: icon {{ "calls_shadow_controls", windowShadowFg }};
|
||||
|
||||
callErrorToast: Toast(defaultToast) {
|
||||
minWidth: 240px;
|
||||
}
|
||||
@ -43,6 +43,11 @@ public:
|
|||
Missed,
|
||||
};
|
||||
|
||||
enum class CallType {
|
||||
Voice,
|
||||
Video,
|
||||
};
|
||||
|
||||
bool canAddItem(not_null<const HistoryItem*> item) const {
|
||||
return (ComputeType(item) == _type)
|
||||
&& (ItemDateTime(item).date() == _date);
|
||||
|
|
@ -91,7 +96,7 @@ public:
|
|||
return 0;
|
||||
}
|
||||
QSize actionSize() const override {
|
||||
return peer()->isUser() ? QSize(st::callReDial.width, st::callReDial.height) : QSize();
|
||||
return peer()->isUser() ? QSize(_st->width, _st->height) : QSize();
|
||||
}
|
||||
QMargins actionMargins() const override {
|
||||
return QMargins(
|
||||
|
|
@ -111,10 +116,12 @@ public:
|
|||
private:
|
||||
void refreshStatus() override;
|
||||
static Type ComputeType(not_null<const HistoryItem*> item);
|
||||
static CallType ComputeCallType(not_null<const HistoryItem*> item);
|
||||
|
||||
std::vector<not_null<HistoryItem*>> _items;
|
||||
QDate _date;
|
||||
Type _type;
|
||||
not_null<const style::IconButton*> _st;
|
||||
|
||||
std::unique_ptr<Ui::RippleAnimation> _actionRipple;
|
||||
|
||||
|
|
@ -124,7 +131,10 @@ BoxController::Row::Row(not_null<HistoryItem*> item)
|
|||
: PeerListRow(item->history()->peer, item->id)
|
||||
, _items(1, item)
|
||||
, _date(ItemDateTime(item).date())
|
||||
, _type(ComputeType(item)) {
|
||||
, _type(ComputeType(item))
|
||||
, _st(ComputeCallType(item) == CallType::Voice
|
||||
? &st::callReDial
|
||||
: &st::callCameraReDial) {
|
||||
refreshStatus();
|
||||
}
|
||||
|
||||
|
|
@ -154,12 +164,18 @@ void BoxController::Row::paintAction(
|
|||
bool actionSelected) {
|
||||
auto size = actionSize();
|
||||
if (_actionRipple) {
|
||||
_actionRipple->paint(p, x + st::callReDial.rippleAreaPosition.x(), y + st::callReDial.rippleAreaPosition.y(), outerWidth);
|
||||
_actionRipple->paint(
|
||||
p,
|
||||
x + _st->rippleAreaPosition.x(),
|
||||
y + _st->rippleAreaPosition.y(),
|
||||
outerWidth);
|
||||
if (_actionRipple->empty()) {
|
||||
_actionRipple.reset();
|
||||
}
|
||||
}
|
||||
st::callReDial.icon.paintInCenter(p, style::rtlrect(x, y, size.width(), size.height(), outerWidth));
|
||||
_st->icon.paintInCenter(
|
||||
p,
|
||||
style::rtlrect(x, y, size.width(), size.height(), outerWidth));
|
||||
}
|
||||
|
||||
void BoxController::Row::refreshStatus() {
|
||||
|
|
@ -202,12 +218,28 @@ BoxController::Row::Type BoxController::Row::ComputeType(
|
|||
return Type::In;
|
||||
}
|
||||
|
||||
BoxController::Row::CallType BoxController::Row::ComputeCallType(
|
||||
not_null<const HistoryItem*> item) {
|
||||
if (auto media = item->media()) {
|
||||
if (const auto call = media->call()) {
|
||||
if (call->video) {
|
||||
return CallType::Video;
|
||||
}
|
||||
}
|
||||
}
|
||||
return CallType::Voice;
|
||||
}
|
||||
|
||||
void BoxController::Row::addActionRipple(QPoint point, Fn<void()> updateCallback) {
|
||||
if (!_actionRipple) {
|
||||
auto mask = Ui::RippleAnimation::ellipseMask(QSize(st::callReDial.rippleAreaSize, st::callReDial.rippleAreaSize));
|
||||
_actionRipple = std::make_unique<Ui::RippleAnimation>(st::callReDial.ripple, std::move(mask), std::move(updateCallback));
|
||||
auto mask = Ui::RippleAnimation::ellipseMask(
|
||||
QSize(_st->rippleAreaSize, _st->rippleAreaSize));
|
||||
_actionRipple = std::make_unique<Ui::RippleAnimation>(
|
||||
_st->ripple,
|
||||
std::move(mask),
|
||||
std::move(updateCallback));
|
||||
}
|
||||
_actionRipple->add(point - st::callReDial.rippleAreaPosition);
|
||||
_actionRipple->add(point - _st->rippleAreaPosition);
|
||||
}
|
||||
|
||||
void BoxController::Row::stopLastActionRipple() {
|
||||
|
|
@ -321,10 +353,10 @@ void BoxController::rowActionClicked(not_null<PeerListRow*> row) {
|
|||
if (cConfirmBeforeCall()) {
|
||||
Ui::show(Box<ConfirmBox>(tr::ktg_call_sure(tr::now), tr::ktg_call_button(tr::now), [=] {
|
||||
Ui::hideLayer();
|
||||
Core::App().calls().startOutgoingCall(user);
|
||||
Core::App().calls().startOutgoingCall(user, false);
|
||||
}));
|
||||
} else {
|
||||
Core::App().calls().startOutgoingCall(user);
|
||||
Core::App().calls().startOutgoingCall(user, false);
|
||||
}
|
||||
}
|
||||
|
||||
@ -8,6 +8,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
#include "calls/calls_call.h"
|
||||
|
||||
#include "main/main_session.h"
|
||||
#include "main/main_account.h"
|
||||
#include "main/main_app_config.h"
|
||||
#include "apiwrap.h"
|
||||
#include "lang/lang_keys.h"
|
||||
#include "boxes/confirm_box.h"
|
||||
|
|
@ -21,50 +23,97 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
#include "media/audio/media_audio_track.h"
|
||||
#include "base/platform/base_platform_info.h"
|
||||
#include "calls/calls_panel.h"
|
||||
#include "calls/calls_controller.h"
|
||||
#include "webrtc/webrtc_video_track.h"
|
||||
#include "webrtc/webrtc_media_devices.h"
|
||||
#include "data/data_user.h"
|
||||
#include "data/data_session.h"
|
||||
#include "facades.h"
|
||||
|
||||
#include <tgcalls/Instance.h>
|
||||
#include <tgcalls/VideoCaptureInterface.h>
|
||||
|
||||
namespace tgcalls {
|
||||
class InstanceImpl;
|
||||
class InstanceImplLegacy;
|
||||
class InstanceImplReference;
|
||||
void SetLegacyGlobalServerConfig(const std::string &serverConfig);
|
||||
} // namespace tgcalls
|
||||
|
||||
namespace Calls {
|
||||
namespace {
|
||||
|
||||
constexpr auto kMinLayer = 65;
|
||||
constexpr auto kHangupTimeoutMs = 5000;
|
||||
constexpr auto kSha256Size = 32;
|
||||
const auto kDefaultVersion = "2.4.4"_q;
|
||||
|
||||
#ifndef DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
|
||||
const auto RegisterTag = tgcalls::Register<tgcalls::InstanceImpl>();
|
||||
//const auto RegisterTagReference = tgcalls::Register<tgcalls::InstanceImplReference>();
|
||||
#endif // DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
|
||||
const auto RegisterTagLegacy = tgcalls::Register<tgcalls::InstanceImplLegacy>();
|
||||
|
||||
void AppendEndpoint(
|
||||
std::vector<TgVoipEndpoint> &list,
|
||||
std::vector<tgcalls::Endpoint> &list,
|
||||
const MTPPhoneConnection &connection) {
|
||||
connection.match([&](const MTPDphoneConnection &data) {
|
||||
if (data.vpeer_tag().v.length() != 16) {
|
||||
return;
|
||||
}
|
||||
#ifdef Q_OS_WIN
|
||||
auto endpoint = TgVoipEndpoint();
|
||||
endpoint.endpointId = (int64_t)data.vid().v;
|
||||
|
||||
endpoint.host = TgVoipEdpointHost();
|
||||
endpoint.host.ipv4 = data.vip().v.toStdString();
|
||||
endpoint.host.ipv6 = data.vipv6().v.toStdString();
|
||||
|
||||
endpoint.port = (uint16_t)data.vport().v;
|
||||
endpoint.type = TgVoipEndpointType::UdpRelay;
|
||||
#else
|
||||
auto endpoint = TgVoipEndpoint{
|
||||
tgcalls::Endpoint endpoint = {
|
||||
.endpointId = (int64_t)data.vid().v,
|
||||
.host = TgVoipEdpointHost{
|
||||
.host = tgcalls::EndpointHost{
|
||||
.ipv4 = data.vip().v.toStdString(),
|
||||
.ipv6 = data.vipv6().v.toStdString() },
|
||||
.port = (uint16_t)data.vport().v,
|
||||
.type = TgVoipEndpointType::UdpRelay
|
||||
.type = tgcalls::EndpointType::UdpRelay,
|
||||
};
|
||||
#endif
|
||||
const auto tag = data.vpeer_tag().v;
|
||||
if (tag.size() >= 16) {
|
||||
memcpy(endpoint.peerTag, tag.data(), 16);
|
||||
}
|
||||
list.push_back(std::move(endpoint));
|
||||
}, [&](const MTPDphoneConnectionWebrtc &data) {
|
||||
});
|
||||
}
|
||||
|
||||
void AppendServer(
|
||||
std::vector<tgcalls::RtcServer> &list,
|
||||
const MTPPhoneConnection &connection) {
|
||||
connection.match([&](const MTPDphoneConnection &data) {
|
||||
}, [&](const MTPDphoneConnectionWebrtc &data) {
|
||||
const auto host = qs(data.vip());
|
||||
const auto hostv6 = qs(data.vipv6());
|
||||
const auto port = uint16_t(data.vport().v);
|
||||
if (data.is_stun()) {
|
||||
const auto pushStun = [&](const QString &host) {
|
||||
if (host.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
list.push_back(tgcalls::RtcServer{
|
||||
.host = host.toStdString(),
|
||||
.port = port,
|
||||
.isTurn = false
|
||||
});
|
||||
};
|
||||
pushStun(host);
|
||||
pushStun(hostv6);
|
||||
}
|
||||
const auto username = qs(data.vusername());
|
||||
const auto password = qs(data.vpassword());
|
||||
if (data.is_turn() && !username.isEmpty() && !password.isEmpty()) {
|
||||
const auto pushTurn = [&](const QString &host) {
|
||||
list.push_back(tgcalls::RtcServer{
|
||||
.host = host.toStdString(),
|
||||
.port = port,
|
||||
.login = username.toStdString(),
|
||||
.password = password.toStdString(),
|
||||
.isTurn = true,
|
||||
});
|
||||
};
|
||||
pushTurn(host);
|
||||
pushTurn(hostv6);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -83,10 +132,6 @@ uint64 ComputeFingerprint(bytes::const_span authKey) {
|
|||
| (gsl::to_integer<uint64>(hash[12]));
|
||||
}
|
||||
|
||||
[[nodiscard]] std::vector<std::string> CollectVersions() {
|
||||
return { TgVoip::getVersion() };
|
||||
}
|
||||
|
||||
[[nodiscard]] QVector<MTPstring> WrapVersions(
|
||||
const std::vector<std::string> &data) {
|
||||
auto result = QVector<MTPstring>();
|
||||
|
|
@ -98,28 +143,35 @@ uint64 ComputeFingerprint(bytes::const_span authKey) {
|
|||
}
|
||||
|
||||
[[nodiscard]] QVector<MTPstring> CollectVersionsForApi() {
|
||||
return WrapVersions(CollectVersions());
|
||||
return WrapVersions(tgcalls::Meta::Versions() | ranges::action::reverse);
|
||||
}
|
||||
|
||||
[[nodiscard]] Webrtc::VideoState StartVideoState(bool enabled) {
|
||||
using State = Webrtc::VideoState;
|
||||
return enabled ? State::Active : State::Inactive;
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
Call::Delegate::~Delegate() = default;
|
||||
|
||||
Call::Call(
|
||||
not_null<Delegate*> delegate,
|
||||
not_null<UserData*> user,
|
||||
Type type)
|
||||
Type type,
|
||||
bool video)
|
||||
: _delegate(delegate)
|
||||
, _user(user)
|
||||
, _api(&_user->session().mtp())
|
||||
, _type(type) {
|
||||
_discardByTimeoutTimer.setCallback([this] { hangup(); });
|
||||
, _type(type)
|
||||
, _videoIncoming(std::make_unique<Webrtc::VideoTrack>(StartVideoState(video)))
|
||||
, _videoOutgoing(std::make_unique<Webrtc::VideoTrack>(StartVideoState(video))) {
|
||||
_discardByTimeoutTimer.setCallback([=] { hangup(); });
|
||||
|
||||
if (_type == Type::Outgoing) {
|
||||
setState(State::Requesting);
|
||||
} else {
|
||||
startWaitingTrack();
|
||||
}
|
||||
setupOutgoingVideo();
|
||||
}
|
||||
|
||||
void Call::generateModExpFirst(bytes::const_span randomSeed) {
|
||||
|
|
@ -173,8 +225,11 @@ void Call::startOutgoing() {
|
|||
Expects(_state.current() == State::Requesting);
|
||||
Expects(_gaHash.size() == kSha256Size);
|
||||
|
||||
const auto flags = _videoCapture
|
||||
? MTPphone_RequestCall::Flag::f_video
|
||||
: MTPphone_RequestCall::Flag(0);
|
||||
_api.request(MTPphone_RequestCall(
|
||||
MTP_flags(0),
|
||||
MTP_flags(flags),
|
||||
_user->inputUser,
|
||||
MTP_int(rand_value<int32>()),
|
||||
MTP_bytes(_gaHash),
|
||||
|
|
@ -182,7 +237,7 @@ void Call::startOutgoing() {
|
|||
MTP_flags(MTPDphoneCallProtocol::Flag::f_udp_p2p
|
||||
| MTPDphoneCallProtocol::Flag::f_udp_reflector),
|
||||
MTP_int(kMinLayer),
|
||||
MTP_int(TgVoip::getConnectionMaxLayer()),
|
||||
MTP_int(tgcalls::Meta::MaxLayer()),
|
||||
MTP_vector(CollectVersionsForApi()))
|
||||
)).done([=](const MTPphone_PhoneCall &result) {
|
||||
Expects(result.type() == mtpc_phone_phoneCall);
|
||||
|
|
@ -234,7 +289,7 @@ void Call::startIncoming() {
|
|||
}
|
||||
|
||||
void Call::answer() {
|
||||
_delegate->requestMicrophonePermissionOrFail(crl::guard(this, [=] {
|
||||
_delegate->requestPermissionsOrFail(crl::guard(this, [=] {
|
||||
actuallyAnswer();
|
||||
}));
|
||||
}
|
||||
|
|
@ -263,10 +318,11 @@ void Call::actuallyAnswer() {
|
|||
MTP_flags(MTPDphoneCallProtocol::Flag::f_udp_p2p
|
||||
| MTPDphoneCallProtocol::Flag::f_udp_reflector),
|
||||
MTP_int(kMinLayer),
|
||||
MTP_int(TgVoip::getConnectionMaxLayer()),
|
||||
MTP_int(tgcalls::Meta::MaxLayer()),
|
||||
MTP_vector(CollectVersionsForApi()))
|
||||
)).done([=](const MTPphone_PhoneCall &result) {
|
||||
Expects(result.type() == mtpc_phone_phoneCall);
|
||||
|
||||
auto &call = result.c_phone_phoneCall();
|
||||
_user->session().data().processUsers(call.vusers());
|
||||
if (call.vphone_call().type() != mtpc_phoneCallWaiting) {
|
||||
|
|
@ -282,12 +338,61 @@ void Call::actuallyAnswer() {
|
|||
}).send();
|
||||
}
|
||||
|
||||
void Call::setMute(bool mute) {
|
||||
_mute = mute;
|
||||
if (_controller) {
|
||||
_controller->setMuteMicrophone(_mute);
|
||||
void Call::setMuted(bool mute) {
|
||||
_muted = mute;
|
||||
if (_instance) {
|
||||
_instance->setMuteMicrophone(mute);
|
||||
}
|
||||
_muteChanged.notify(_mute);
|
||||
}
|
||||
|
||||
void Call::setupOutgoingVideo() {
|
||||
static const auto hasDevices = [] {
|
||||
return !Webrtc::GetVideoInputList().empty();
|
||||
};
|
||||
const auto started = _videoOutgoing->state();
|
||||
if (!hasDevices()) {
|
||||
_videoOutgoing->setState(Webrtc::VideoState::Inactive);
|
||||
}
|
||||
_videoOutgoing->stateValue(
|
||||
) | rpl::start_with_next([=](Webrtc::VideoState state) {
|
||||
if (state != Webrtc::VideoState::Inactive && !hasDevices()) {
|
||||
_errors.fire({ ErrorType::NoCamera });
|
||||
_videoOutgoing->setState(Webrtc::VideoState::Inactive);
|
||||
} else if (_state.current() != State::Established
|
||||
&& state != started
|
||||
&& !_videoCapture) {
|
||||
_errors.fire({ ErrorType::NotStartedCall });
|
||||
_videoOutgoing->setState(started);
|
||||
} else if (state != Webrtc::VideoState::Inactive
|
||||
&& _instance
|
||||
&& !_instance->supportsVideo()) {
|
||||
_errors.fire({ ErrorType::NotVideoCall });
|
||||
_videoOutgoing->setState(Webrtc::VideoState::Inactive);
|
||||
} else if (state != Webrtc::VideoState::Inactive) {
|
||||
// Paused not supported right now.
|
||||
#ifndef DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
|
||||
Assert(state == Webrtc::VideoState::Active);
|
||||
if (!_videoCapture) {
|
||||
_videoCapture = _delegate->getVideoCapture();
|
||||
_videoCapture->setOutput(_videoOutgoing->sink());
|
||||
}
|
||||
if (_instance) {
|
||||
_instance->setVideoCapture(_videoCapture);
|
||||
}
|
||||
_videoCapture->setState(tgcalls::VideoState::Active);
|
||||
#endif // DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
|
||||
} else if (_videoCapture) {
|
||||
_videoCapture->setState(tgcalls::VideoState::Inactive);
|
||||
}
|
||||
}, _lifetime);
|
||||
}
|
||||
|
||||
not_null<Webrtc::VideoTrack*> Call::videoIncoming() const {
|
||||
return _videoIncoming.get();
|
||||
}
|
||||
|
||||
not_null<Webrtc::VideoTrack*> Call::videoOutgoing() const {
|
||||
return _videoOutgoing.get();
|
||||
}
|
||||
|
||||
crl::time Call::getDurationMs() const {
|
||||
|
|
@ -311,7 +416,7 @@ void Call::redial() {
|
|||
if (_state.current() != State::Busy) {
|
||||
return;
|
||||
}
|
||||
Assert(_controller == nullptr);
|
||||
Assert(_instance == nullptr);
|
||||
_type = Type::Outgoing;
|
||||
setState(State::Requesting);
|
||||
_answerAfterDhConfigReceived = false;
|
||||
|
|
@ -320,7 +425,9 @@ void Call::redial() {
|
|||
}
|
||||
|
||||
QString Call::getDebugLog() const {
|
||||
return QString::fromStdString(_controller->getDebugInfo());
|
||||
return _instance
|
||||
? QString::fromStdString(_instance->getDebugInfo())
|
||||
: QString();
|
||||
}
|
||||
|
||||
void Call::startWaitingTrack() {
|
||||
|
|
@ -334,6 +441,21 @@ void Call::startWaitingTrack() {
|
|||
_waitingTrack->playInLoop();
|
||||
}
|
||||
|
||||
void Call::sendSignalingData(const QByteArray &data) {
|
||||
_api.request(MTPphone_SendSignalingData(
|
||||
MTP_inputPhoneCall(
|
||||
MTP_long(_id),
|
||||
MTP_long(_accessHash)),
|
||||
MTP_bytes(data)
|
||||
)).done([=](const MTPBool &result) {
|
||||
if (!mtpIsTrue(result)) {
|
||||
finish(FinishType::Failed);
|
||||
}
|
||||
}).fail([=](const RPCError &error) {
|
||||
handleRequestError(error);
|
||||
}).send();
|
||||
}
|
||||
|
||||
float64 Call::getWaitingSoundPeakValue() const {
|
||||
if (_waitingTrack) {
|
||||
auto when = crl::now() + kSoundSampleMs / 4;
|
||||
|
|
@ -372,6 +494,7 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
|
|||
finish(FinishType::Failed);
|
||||
return true;
|
||||
}
|
||||
|
||||
_id = data.vid().v;
|
||||
_accessHash = data.vaccess_hash().v;
|
||||
auto gaHashBytes = bytes::make_span(data.vg_a_hash().v);
|
||||
|
|
@ -416,7 +539,7 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
|
|||
}
|
||||
if (_type == Type::Incoming
|
||||
&& _state.current() == State::ExchangingKeys
|
||||
&& !_controller) {
|
||||
&& !_instance) {
|
||||
startConfirmedCall(data);
|
||||
}
|
||||
} return true;
|
||||
|
|
@ -427,8 +550,8 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
|
|||
return false;
|
||||
}
|
||||
if (data.is_need_debug()) {
|
||||
auto debugLog = _controller
|
||||
? _controller->getDebugInfo()
|
||||
auto debugLog = _instance
|
||||
? _instance->getDebugInfo()
|
||||
: std::string();
|
||||
if (!debugLog.empty()) {
|
||||
user()->session().api().request(MTPphone_SaveCallDebug(
|
||||
|
|
@ -474,11 +597,49 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
|
|||
Unexpected("phoneCall type inside an existing call handleUpdate()");
|
||||
}
|
||||
|
||||
void Call::updateRemoteMediaState(
|
||||
tgcalls::AudioState audio,
|
||||
tgcalls::VideoState video) {
|
||||
_remoteAudioState = [&] {
|
||||
using From = tgcalls::AudioState;
|
||||
using To = RemoteAudioState;
|
||||
switch (audio) {
|
||||
case From::Active: return To::Active;
|
||||
case From::Muted: return To::Muted;
|
||||
}
|
||||
Unexpected("Audio state in remoteMediaStateUpdated.");
|
||||
}();
|
||||
_videoIncoming->setState([&] {
|
||||
using From = tgcalls::VideoState;
|
||||
using To = Webrtc::VideoState;
|
||||
switch (video) {
|
||||
case From::Inactive: return To::Inactive;
|
||||
case From::Paused: return To::Paused;
|
||||
case From::Active: return To::Active;
|
||||
}
|
||||
Unexpected("Video state in remoteMediaStateUpdated.");
|
||||
}());
|
||||
}
|
||||
|
||||
bool Call::handleSignalingData(
|
||||
const MTPDupdatePhoneCallSignalingData &data) {
|
||||
if (data.vphone_call_id().v != _id || !_instance) {
|
||||
return false;
|
||||
}
|
||||
auto prepared = ranges::view::all(
|
||||
data.vdata().v
|
||||
) | ranges::view::transform([](char byte) {
|
||||
return static_cast<uint8_t>(byte);
|
||||
}) | ranges::to_vector;
|
||||
_instance->receiveSignalingData(std::move(prepared));
|
||||
return true;
|
||||
}
|
||||
|
||||
void Call::confirmAcceptedCall(const MTPDphoneCallAccepted &call) {
|
||||
Expects(_type == Type::Outgoing);
|
||||
|
||||
if (_state.current() == State::ExchangingKeys
|
||||
|| _controller) {
|
||||
|| _instance) {
|
||||
LOG(("Call Warning: Unexpected confirmAcceptedCall."));
|
||||
return;
|
||||
}
|
||||
|
|
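Both signaling paths above convert between Qt byte containers and the std::vector<uint8_t> that tgcalls expects: handleSignalingData() turns the incoming MTP bytes into a uint8_t vector through a ranges pipeline, and signalingDataEmitted (further below) packs the vector back into a QByteArray before MTP_bytes. A compact sketch of the same two conversions in isolation, plain Qt/STL with no tdesktop types; it is equivalent to, not identical with, the ranges pipeline used in the diff:

#include <QByteArray>
#include <cstdint>
#include <vector>

// QByteArray -> std::vector<uint8_t>, the direction used when feeding
// received signaling data into the tgcalls instance.
std::vector<uint8_t> ToBytesVector(const QByteArray &data) {
	const auto begin = reinterpret_cast<const uint8_t*>(data.constData());
	return std::vector<uint8_t>(begin, begin + data.size());
}

// std::vector<uint8_t> -> QByteArray, the direction used when sending
// emitted signaling data back through MTPphone_SendSignalingData.
QByteArray FromBytesVector(const std::vector<uint8_t> &data) {
	return QByteArray(
		reinterpret_cast<const char*>(data.data()),
		int(data.size()));
}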
@ -506,9 +667,9 @@ void Call::confirmAcceptedCall(const MTPDphoneCallAccepted &call) {
|
|||
MTP_flags(MTPDphoneCallProtocol::Flag::f_udp_p2p
|
||||
| MTPDphoneCallProtocol::Flag::f_udp_reflector),
|
||||
MTP_int(kMinLayer),
|
||||
MTP_int(TgVoip::getConnectionMaxLayer()),
|
||||
MTP_int(tgcalls::Meta::MaxLayer()),
|
||||
MTP_vector(CollectVersionsForApi()))
|
||||
)).done([this](const MTPphone_PhoneCall &result) {
|
||||
)).done([=](const MTPphone_PhoneCall &result) {
|
||||
Expects(result.type() == mtpc_phone_phoneCall);
|
||||
|
||||
auto &call = result.c_phone_phoneCall();
|
||||
|
|
@ -520,7 +681,7 @@ void Call::confirmAcceptedCall(const MTPDphoneCallAccepted &call) {
|
|||
}
|
||||
|
||||
createAndStartController(call.vphone_call().c_phoneCall());
|
||||
}).fail([this](const RPCError &error) {
|
||||
}).fail([=](const RPCError &error) {
|
||||
handleRequestError(error);
|
||||
}).send();
|
||||
}
|
||||
|
|
@ -551,119 +712,150 @@ void Call::startConfirmedCall(const MTPDphoneCall &call) {
|
|||
|
||||
void Call::createAndStartController(const MTPDphoneCall &call) {
|
||||
_discardByTimeoutTimer.cancel();
|
||||
if (!checkCallFields(call)) {
|
||||
if (!checkCallFields(call) || _authKey.size() != 256) {
|
||||
return;
|
||||
}
|
||||
|
||||
const auto &protocol = call.vprotocol().c_phoneCallProtocol();
|
||||
const auto &serverConfig = _user->session().serverConfig();
|
||||
|
||||
TgVoipConfig config;
|
||||
config.dataSaving = TgVoipDataSaving::Never;
|
||||
config.enableAEC = !Platform::IsMac10_7OrGreater();
|
||||
config.enableNS = true;
|
||||
config.enableAGC = true;
|
||||
config.enableVolumeControl = true;
|
||||
config.initializationTimeout = serverConfig.callConnectTimeoutMs / 1000.;
|
||||
config.receiveTimeout = serverConfig.callPacketTimeoutMs / 1000.;
|
||||
config.enableP2P = call.is_p2p_allowed();
|
||||
config.maxApiLayer = protocol.vmax_layer().v;
|
||||
auto encryptionKeyValue = std::make_shared<std::array<uint8_t, 256>>();
|
||||
memcpy(encryptionKeyValue->data(), _authKey.data(), 256);
|
||||
|
||||
const auto &settings = Core::App().settings();
|
||||
|
||||
const auto weak = base::make_weak(this);
|
||||
tgcalls::Descriptor descriptor = {
|
||||
.config = tgcalls::Config{
|
||||
.initializationTimeout = serverConfig.callConnectTimeoutMs / 1000.,
|
||||
.receiveTimeout = serverConfig.callPacketTimeoutMs / 1000.,
|
||||
.dataSaving = tgcalls::DataSaving::Never,
|
||||
.enableP2P = call.is_p2p_allowed(),
|
||||
.enableAEC = !Platform::IsMac10_7OrGreater(),
|
||||
.enableNS = true,
|
||||
.enableAGC = true,
|
||||
.enableVolumeControl = true,
|
||||
.maxApiLayer = protocol.vmax_layer().v,
|
||||
},
|
||||
.encryptionKey = tgcalls::EncryptionKey(
|
||||
std::move(encryptionKeyValue),
|
||||
(_type == Type::Outgoing)),
|
||||
.mediaDevicesConfig = tgcalls::MediaDevicesConfig{
|
||||
.audioInputId = settings.callInputDeviceId().toStdString(),
|
||||
.audioOutputId = settings.callOutputDeviceId().toStdString(),
|
||||
.inputVolume = 1.f,//settings.callInputVolume() / 100.f,
|
||||
.outputVolume = 1.f,//settings.callOutputVolume() / 100.f,
|
||||
},
|
||||
.videoCapture = _videoCapture,
|
||||
.stateUpdated = [=](tgcalls::State state) {
|
||||
crl::on_main(weak, [=] {
|
||||
handleControllerStateChange(state);
|
||||
});
|
||||
},
|
||||
.signalBarsUpdated = [=](int count) {
|
||||
crl::on_main(weak, [=] {
|
||||
handleControllerBarCountChange(count);
|
||||
});
|
||||
},
|
||||
.remoteMediaStateUpdated = [=](tgcalls::AudioState audio, tgcalls::VideoState video) {
|
||||
crl::on_main(weak, [=] {
|
||||
updateRemoteMediaState(audio, video);
|
||||
});
|
||||
},
|
||||
.signalingDataEmitted = [=](const std::vector<uint8_t> &data) {
|
||||
const auto bytes = QByteArray(
|
||||
reinterpret_cast<const char*>(data.data()),
|
||||
data.size());
|
||||
crl::on_main(weak, [=] {
|
||||
sendSignalingData(bytes);
|
||||
});
|
||||
},
|
||||
};
|
||||
if (Logs::DebugEnabled()) {
|
||||
auto callLogFolder = cWorkingDir() + qsl("DebugLogs");
|
||||
auto callLogPath = callLogFolder + qsl("/last_call_log.txt");
|
||||
auto callLogNative = QDir::toNativeSeparators(callLogPath);
|
||||
#ifdef Q_OS_WIN
|
||||
config.logPath = callLogNative.toStdWString();
|
||||
descriptor.config.logPath = callLogNative.toStdWString();
|
||||
#else // Q_OS_WIN
|
||||
const auto callLogUtf = QFile::encodeName(callLogNative);
|
||||
config.logPath.resize(callLogUtf.size());
|
||||
ranges::copy(callLogUtf, config.logPath.begin());
|
||||
descriptor.config.logPath.resize(callLogUtf.size());
|
||||
ranges::copy(callLogUtf, descriptor.config.logPath.begin());
|
||||
#endif // Q_OS_WIN
|
||||
QFile(callLogPath).remove();
|
||||
QDir().mkpath(callLogFolder);
|
||||
}
|
||||
|
||||
auto endpoints = std::vector<TgVoipEndpoint>();
|
||||
for (const auto &connection : call.vconnections().v) {
|
||||
AppendEndpoint(endpoints, connection);
|
||||
AppendEndpoint(descriptor.endpoints, connection);
|
||||
}
|
||||
for (const auto &connection : call.vconnections().v) {
|
||||
AppendServer(descriptor.rtcServers, connection);
|
||||
}
|
||||
|
||||
auto proxy = TgVoipProxy();
|
||||
if (Global::UseProxyForCalls()
|
||||
&& (Global::ProxySettings() == MTP::ProxyData::Settings::Enabled)) {
|
||||
const auto &selected = Global::SelectedProxy();
|
||||
if (selected.supportsCalls()) {
|
||||
if (selected.supportsCalls() && !selected.host.isEmpty()) {
|
||||
Assert(selected.type == MTP::ProxyData::Type::Socks5);
|
||||
proxy.host = selected.host.toStdString();
|
||||
proxy.port = selected.port;
|
||||
proxy.login = selected.user.toStdString();
|
||||
proxy.password = selected.password.toStdString();
|
||||
descriptor.proxy = std::make_unique<tgcalls::Proxy>();
|
||||
descriptor.proxy->host = selected.host.toStdString();
|
||||
descriptor.proxy->port = selected.port;
|
||||
descriptor.proxy->login = selected.user.toStdString();
|
||||
descriptor.proxy->password = selected.password.toStdString();
|
||||
}
|
||||
}
|
||||
|
||||
auto encryptionKey = TgVoipEncryptionKey();
|
||||
encryptionKey.isOutgoing = (_type == Type::Outgoing);
|
||||
encryptionKey.value = ranges::view::all(
|
||||
_authKey
|
||||
) | ranges::view::transform([](bytes::type byte) {
|
||||
return static_cast<uint8_t>(byte);
|
||||
}) | ranges::to_vector;
|
||||
const auto version = call.vprotocol().match([&](
|
||||
const MTPDphoneCallProtocol &data) {
|
||||
return data.vlibrary_versions().v;
|
||||
}).value(0, MTP_bytes(kDefaultVersion)).v;
|
||||
|
||||
_controller = MakeController(
|
||||
"2.4.4",
|
||||
config,
|
||||
TgVoipPersistentState(),
|
||||
endpoints,
|
||||
proxy.host.empty() ? nullptr : &proxy,
|
||||
TgVoipNetworkType::Unknown,
|
||||
encryptionKey);
|
||||
|
||||
const auto raw = _controller.get();
|
||||
raw->setOnStateUpdated([=](TgVoipState state) {
|
||||
handleControllerStateChange(raw, state);
|
||||
});
|
||||
raw->setOnSignalBarsUpdated([=](int count) {
|
||||
handleControllerBarCountChange(count);
|
||||
});
|
||||
if (_mute) {
|
||||
raw->setMuteMicrophone(_mute);
|
||||
LOG(("Call Info: Creating instance with version '%1', allowP2P: %2"
|
||||
).arg(QString::fromUtf8(version)
|
||||
).arg(Logs::b(descriptor.config.enableP2P)));
|
||||
_instance = tgcalls::Meta::Create(
|
||||
version.toStdString(),
|
||||
std::move(descriptor));
|
||||
if (!_instance) {
|
||||
LOG(("Call Error: Wrong library version: %1."
|
||||
).arg(QString::fromUtf8(version)));
|
||||
finish(FinishType::Failed);
|
||||
return;
|
||||
}
|
||||
const auto &settings = Core::App().settings();
|
||||
raw->setAudioOutputDevice(
|
||||
settings.callOutputDeviceID().toStdString());
|
||||
raw->setAudioInputDevice(
|
||||
settings.callInputDeviceID().toStdString());
|
||||
raw->setOutputVolume(settings.callOutputVolume() / 100.0f);
|
||||
raw->setInputVolume(settings.callInputVolume() / 100.0f);
|
||||
|
||||
const auto raw = _instance.get();
|
||||
if (_muted.current()) {
|
||||
raw->setMuteMicrophone(_muted.current());
|
||||
}
|
||||
|
||||
raw->setIncomingVideoOutput(_videoIncoming->sink());
|
||||
raw->setAudioOutputDuckingEnabled(settings.callAudioDuckingEnabled());
|
||||
}
|
||||
|
||||
void Call::handleControllerStateChange(
|
||||
not_null<Controller*> controller,
|
||||
TgVoipState state) {
|
||||
// NB! Can be called from an arbitrary thread!
|
||||
// This can be called from ~VoIPController()!
|
||||
|
||||
void Call::handleControllerStateChange(tgcalls::State state) {
|
||||
switch (state) {
|
||||
case TgVoipState::WaitInit: {
|
||||
case tgcalls::State::WaitInit: {
|
||||
DEBUG_LOG(("Call Info: State changed to WaitingInit."));
|
||||
setStateQueued(State::WaitingInit);
|
||||
setState(State::WaitingInit);
|
||||
} break;
|
||||
|
||||
case TgVoipState::WaitInitAck: {
|
||||
case tgcalls::State::WaitInitAck: {
|
||||
DEBUG_LOG(("Call Info: State changed to WaitingInitAck."));
|
||||
setStateQueued(State::WaitingInitAck);
|
||||
setState(State::WaitingInitAck);
|
||||
} break;
|
||||
|
||||
case TgVoipState::Established: {
|
||||
case tgcalls::State::Established: {
|
||||
DEBUG_LOG(("Call Info: State changed to Established."));
|
||||
setStateQueued(State::Established);
|
||||
setState(State::Established);
|
||||
} break;
|
||||
|
||||
case TgVoipState::Failed: {
|
||||
auto error = QString::fromStdString(controller->getLastError());
|
||||
case tgcalls::State::Failed: {
|
||||
auto error = _instance
|
||||
? QString::fromStdString(_instance->getLastError())
|
||||
: QString();
|
||||
LOG(("Call Info: State changed to Failed, error: %1.").arg(error));
|
||||
setFailedQueued(error);
|
||||
handleControllerError(error);
|
||||
} break;
|
||||
|
||||
default: LOG(("Call Error: Unexpected state in handleStateChange: %1"
|
||||
|
|
@ -672,19 +864,11 @@ void Call::handleControllerStateChange(
|
|||
}
|
||||
|
||||
void Call::handleControllerBarCountChange(int count) {
|
||||
// NB! Can be called from an arbitrary thread!
|
||||
// This can be called from ~VoIPController()!
|
||||
|
||||
crl::on_main(this, [=] {
|
||||
setSignalBarCount(count);
|
||||
});
|
||||
}
|
||||
|
||||
void Call::setSignalBarCount(int count) {
|
||||
if (_signalBarCount != count) {
|
||||
_signalBarCount = count;
|
||||
_signalBarCountChanged.notify(count);
|
||||
}
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
|
|
@ -777,29 +961,36 @@ void Call::setState(State state) {
|
|||
}
|
||||
}
|
||||
|
||||
void Call::setCurrentAudioDevice(bool input, std::string deviceID) {
|
||||
if (_controller) {
|
||||
void Call::setCurrentAudioDevice(bool input, const QString &deviceId) {
|
||||
if (_instance) {
|
||||
const auto id = deviceId.toStdString();
|
||||
if (input) {
|
||||
_controller->setAudioInputDevice(deviceID);
|
||||
_instance->setAudioInputDevice(id);
|
||||
} else {
|
||||
_controller->setAudioOutputDevice(deviceID);
|
||||
_instance->setAudioOutputDevice(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Call::setCurrentVideoDevice(const QString &deviceId) {
|
||||
if (_videoCapture) {
|
||||
_videoCapture->switchToDevice(deviceId.toStdString());
|
||||
}
|
||||
}
|
||||
|
||||
void Call::setAudioVolume(bool input, float level) {
|
||||
if (_controller) {
|
||||
if (_instance) {
|
||||
if (input) {
|
||||
_controller->setInputVolume(level);
|
||||
_instance->setInputVolume(level);
|
||||
} else {
|
||||
_controller->setOutputVolume(level);
|
||||
_instance->setOutputVolume(level);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Call::setAudioDuckingEnabled(bool enabled) {
|
||||
if (_controller) {
|
||||
_controller->setAudioOutputDuckingEnabled(enabled);
|
||||
if (_instance) {
|
||||
_instance->setAudioOutputDuckingEnabled(enabled);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -830,10 +1021,14 @@ void Call::finish(FinishType type, const MTPPhoneCallDiscardReason &reason) {
|
|||
|
||||
setState(hangupState);
|
||||
auto duration = getDurationMs() / 1000;
|
||||
auto connectionId = _controller ? _controller->getPreferredRelayId() : 0;
|
||||
auto connectionId = _instance ? _instance->getPreferredRelayId() : 0;
|
||||
_finishByTimeoutTimer.call(kHangupTimeoutMs, [this, finalState] { setState(finalState); });
|
||||
const auto flags = ((_videoIncoming->state() != Webrtc::VideoState::Inactive)
|
||||
|| (_videoOutgoing->state() != Webrtc::VideoState::Inactive))
|
||||
? MTPphone_DiscardCall::Flag::f_video
|
||||
: MTPphone_DiscardCall::Flag(0);
|
||||
_api.request(MTPphone_DiscardCall(
|
||||
MTP_flags(0),
|
||||
MTP_flags(flags),
|
||||
MTP_inputPhoneCall(
|
||||
MTP_long(_id),
|
||||
MTP_long(_accessHash)),
|
||||
|
|
@ -886,9 +1081,11 @@ void Call::handleControllerError(const QString &error) {
|
|||
}
|
||||
|
||||
void Call::destroyController() {
|
||||
if (_controller) {
|
||||
if (_instance) {
|
||||
const auto state = _instance->stop();
|
||||
|
||||
DEBUG_LOG(("Call Info: Destroying call controller.."));
|
||||
_controller.reset();
|
||||
_instance.reset();
|
||||
DEBUG_LOG(("Call Info: Call controller destroyed."));
|
||||
}
|
||||
setSignalBarCount(kSignalBarFinished);
|
||||
|
|
@ -899,7 +1096,7 @@ Call::~Call() {
|
|||
}
|
||||
|
||||
void UpdateConfig(const std::string &data) {
|
||||
TgVoip::setGlobalServerConfig(data);
|
||||
tgcalls::SetLegacyGlobalServerConfig(data);
|
||||
}
|
||||
|
||||
} // namespace Calls
|
||||
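The tgcalls callbacks wired up in the descriptor above (stateUpdated, signalBarsUpdated, remoteMediaStateUpdated, signalingDataEmitted) are invoked from library threads, so each one only posts a task that runs on the main thread and only while the Call is still alive; crl::on_main together with the weak guard does that in the real code. A minimal standalone sketch of that shape, with postToMain standing in for the crl machinery and the owner type reduced to a stub:

#include <functional>
#include <memory>

// Illustrative only: guarded main-thread marshalling for a library callback.
struct CallOwner {
	int signalBarCount = 0;
};

std::function<void(int)> MakeGuardedBarCountCallback(
		std::weak_ptr<CallOwner> weak,
		std::function<void(std::function<void()>)> postToMain) {
	return [=](int count) {
		// Called from an arbitrary library thread: do nothing but post.
		postToMain([=] {
			// Runs on the main thread; skip the work if the owner is gone.
			if (const auto strong = weak.lock()) {
				strong->signalBarCount = count;
			}
		});
	};
}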
@ -19,18 +19,40 @@ class Track;
|
|||
} // namespace Audio
|
||||
} // namespace Media
|
||||
|
||||
enum class TgVoipState;
|
||||
namespace tgcalls {
|
||||
class Instance;
|
||||
class VideoCaptureInterface;
|
||||
enum class State;
|
||||
enum class VideoState;
|
||||
enum class AudioState;
|
||||
} // namespace tgcalls
|
||||
|
||||
namespace Webrtc {
|
||||
enum class VideoState;
|
||||
class VideoTrack;
|
||||
} // namespace Webrtc
|
||||
|
||||
namespace Calls {
|
||||
|
||||
class Controller;
|
||||
|
||||
struct DhConfig {
|
||||
int32 version = 0;
|
||||
int32 g = 0;
|
||||
bytes::vector p;
|
||||
};
|
||||
|
||||
enum class ErrorType {
|
||||
NoCamera,
|
||||
NoMicrophone,
|
||||
NotStartedCall,
|
||||
NotVideoCall,
|
||||
Unknown,
|
||||
};
|
||||
|
||||
struct Error {
|
||||
ErrorType type = ErrorType::Unknown;
|
||||
QString details;
|
||||
};
|
||||
|
||||
class Call : public base::has_weak_ptr {
|
||||
public:
|
||||
class Delegate {
|
||||
|
|
@ -46,9 +68,11 @@ public:
|
|||
Ended,
|
||||
};
|
||||
virtual void playSound(Sound sound) = 0;
|
||||
virtual void requestMicrophonePermissionOrFail(Fn<void()> result) = 0;
|
||||
virtual void requestPermissionsOrFail(Fn<void()> onSuccess) = 0;
|
||||
virtual auto getVideoCapture()
|
||||
-> std::shared_ptr<tgcalls::VideoCaptureInterface> = 0;
|
||||
|
||||
virtual ~Delegate();
|
||||
virtual ~Delegate() = default;
|
||||
|
||||
};
|
||||
|
||||
|
|
@ -58,7 +82,7 @@ public:
|
|||
Incoming,
|
||||
Outgoing,
|
||||
};
|
||||
Call(not_null<Delegate*> delegate, not_null<UserData*> user, Type type);
|
||||
Call(not_null<Delegate*> delegate, not_null<UserData*> user, Type type, bool video);
|
||||
|
||||
[[nodiscard]] Type type() const {
|
||||
return _type;
|
||||
|
|
@ -70,6 +94,7 @@ public:
|
|||
|
||||
void start(bytes::const_span random);
|
||||
bool handleUpdate(const MTPPhoneCall &call);
|
||||
bool handleSignalingData(const MTPDupdatePhoneCallSignalingData &data);
|
||||
|
||||
enum State {
|
||||
Starting,
|
||||
|
|
@ -88,28 +113,55 @@ public:
|
|||
Ringing,
|
||||
Busy,
|
||||
};
|
||||
State state() const {
|
||||
[[nodiscard]] State state() const {
|
||||
return _state.current();
|
||||
}
|
||||
rpl::producer<State> stateValue() const {
|
||||
[[nodiscard]] rpl::producer<State> stateValue() const {
|
||||
return _state.value();
|
||||
}
|
||||
|
||||
[[nodiscard]] rpl::producer<Error> errors() const {
|
||||
return _errors.events();
|
||||
}
|
||||
|
||||
enum class RemoteAudioState {
|
||||
Muted,
|
||||
Active,
|
||||
};
|
||||
[[nodiscard]] RemoteAudioState remoteAudioState() const {
|
||||
return _remoteAudioState.current();
|
||||
}
|
||||
[[nodiscard]] auto remoteAudioStateValue() const
|
||||
-> rpl::producer<RemoteAudioState> {
|
||||
return _remoteAudioState.value();
|
||||
}
|
||||
|
||||
[[nodiscard]] Webrtc::VideoState remoteVideoState() const {
|
||||
return _remoteVideoState.current();
|
||||
}
|
||||
[[nodiscard]] auto remoteVideoStateValue() const
|
||||
-> rpl::producer<Webrtc::VideoState> {
|
||||
return _remoteVideoState.value();
|
||||
}
|
||||
|
||||
static constexpr auto kSignalBarStarting = -1;
|
||||
static constexpr auto kSignalBarFinished = -2;
|
||||
static constexpr auto kSignalBarCount = 4;
|
||||
base::Observable<int> &signalBarCountChanged() {
|
||||
return _signalBarCountChanged;
|
||||
[[nodiscard]] rpl::producer<int> signalBarCountValue() const {
|
||||
return _signalBarCount.value();
|
||||
}
|
||||
|
||||
void setMute(bool mute);
|
||||
bool isMute() const {
|
||||
return _mute;
|
||||
void setMuted(bool mute);
|
||||
[[nodiscard]] bool muted() const {
|
||||
return _muted.current();
|
||||
}
|
||||
base::Observable<bool> &muteChanged() {
|
||||
return _muteChanged;
|
||||
[[nodiscard]] rpl::producer<bool> mutedValue() const {
|
||||
return _muted.value();
|
||||
}
|
||||
|
||||
[[nodiscard]] not_null<Webrtc::VideoTrack*> videoIncoming() const;
|
||||
[[nodiscard]] not_null<Webrtc::VideoTrack*> videoOutgoing() const;
|
||||
|
||||
crl::time getDurationMs() const;
|
||||
float64 getWaitingSoundPeakValue() const;
|
||||
|
||||
|
|
@ -122,7 +174,8 @@ public:
|
|||
|
||||
QString getDebugLog() const;
|
||||
|
||||
void setCurrentAudioDevice(bool input, std::string deviceID);
|
||||
void setCurrentAudioDevice(bool input, const QString &deviceId);
|
||||
void setCurrentVideoDevice(const QString &deviceId);
|
||||
void setAudioVolume(bool input, float level);
|
||||
void setAudioDuckingEnabled(bool enabled);
|
||||
|
||||
|
|
@ -140,15 +193,17 @@ private:
|
|||
};
|
||||
void handleRequestError(const RPCError &error);
|
||||
void handleControllerError(const QString &error);
|
||||
void finish(FinishType type, const MTPPhoneCallDiscardReason &reason = MTP_phoneCallDiscardReasonDisconnect());
|
||||
void finish(
|
||||
FinishType type,
|
||||
const MTPPhoneCallDiscardReason &reason
|
||||
= MTP_phoneCallDiscardReasonDisconnect());
|
||||
void startOutgoing();
|
||||
void startIncoming();
|
||||
void startWaitingTrack();
|
||||
void sendSignalingData(const QByteArray &data);
|
||||
|
||||
void generateModExpFirst(bytes::const_span randomSeed);
|
||||
void handleControllerStateChange(
|
||||
not_null<Controller*> controller,
|
||||
TgVoipState state);
|
||||
void handleControllerStateChange(tgcalls::State state);
|
||||
void handleControllerBarCountChange(int count);
|
||||
void createAndStartController(const MTPDphoneCall &call);
|
||||
|
||||
|
|
@ -166,21 +221,27 @@ private:
|
|||
void setSignalBarCount(int count);
|
||||
void destroyController();
|
||||
|
||||
not_null<Delegate*> _delegate;
|
||||
not_null<UserData*> _user;
|
||||
void setupOutgoingVideo();
|
||||
void updateRemoteMediaState(
|
||||
tgcalls::AudioState audio,
|
||||
tgcalls::VideoState video);
|
||||
|
||||
const not_null<Delegate*> _delegate;
|
||||
const not_null<UserData*> _user;
|
||||
MTP::Sender _api;
|
||||
Type _type = Type::Outgoing;
|
||||
rpl::variable<State> _state = State::Starting;
|
||||
rpl::variable<RemoteAudioState> _remoteAudioState = RemoteAudioState::Active;
|
||||
rpl::variable<Webrtc::VideoState> _remoteVideoState;
|
||||
rpl::event_stream<Error> _errors;
|
||||
FinishType _finishAfterRequestingCall = FinishType::None;
|
||||
bool _answerAfterDhConfigReceived = false;
|
||||
int _signalBarCount = kSignalBarStarting;
|
||||
base::Observable<int> _signalBarCountChanged;
|
||||
rpl::variable<int> _signalBarCount = kSignalBarStarting;
|
||||
crl::time _startTime = 0;
|
||||
base::DelayedCallTimer _finishByTimeoutTimer;
|
||||
base::Timer _discardByTimeoutTimer;
|
||||
|
||||
bool _mute = false;
|
||||
base::Observable<bool> _muteChanged;
|
||||
rpl::variable<bool> _muted = false;
|
||||
|
||||
DhConfig _dhConfig;
|
||||
bytes::vector _ga;
|
||||
|
|
@ -194,7 +255,10 @@ private:
|
|||
uint64 _accessHash = 0;
|
||||
uint64 _keyFingerprint = 0;
|
||||
|
||||
std::unique_ptr<Controller> _controller;
|
||||
std::unique_ptr<tgcalls::Instance> _instance;
|
||||
std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
|
||||
const std::unique_ptr<Webrtc::VideoTrack> _videoIncoming;
|
||||
const std::unique_ptr<Webrtc::VideoTrack> _videoOutgoing;
|
||||
|
||||
std::unique_ptr<Media::Audio::Track> _waitingTrack;
|
||||
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
#include "calls/calls_controller.h"
|
||||
|
||||
#include "calls/calls_controller_tgvoip.h"
|
||||
#include "calls/calls_controller_webrtc.h"
|
||||
|
||||
namespace Calls {
|
||||
|
||||
|
|
@ -18,7 +19,20 @@ namespace Calls {
|
|||
const std::vector<TgVoipEndpoint> &endpoints,
|
||||
const TgVoipProxy *proxy,
|
||||
TgVoipNetworkType initialNetworkType,
|
||||
const TgVoipEncryptionKey &encryptionKey) {
|
||||
const TgVoipEncryptionKey &encryptionKey,
|
||||
Fn<void(QByteArray)> sendSignalingData,
|
||||
Fn<void(QImage)> displayNextFrame) {
|
||||
if (version == WebrtcController::Version()) {
|
||||
return std::make_unique<WebrtcController>(
|
||||
config,
|
||||
persistentState,
|
||||
endpoints,
|
||||
proxy,
|
||||
initialNetworkType,
|
||||
encryptionKey,
|
||||
std::move(sendSignalingData),
|
||||
std::move(displayNextFrame));
|
||||
}
|
||||
return std::make_unique<TgVoipController>(
|
||||
config,
|
||||
persistentState,
|
||||
|
|
@ -28,4 +42,12 @@ namespace Calls {
|
|||
encryptionKey);
|
||||
}
|
||||
|
||||
std::vector<std::string> CollectControllerVersions() {
|
||||
return { WebrtcController::Version(), TgVoipController::Version() };
|
||||
}
|
||||
|
||||
int ControllerMaxLayer() {
|
||||
return TgVoip::getConnectionMaxLayer();
|
||||
}
|
||||
|
||||
} // namespace Calls
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ public:
|
|||
virtual void setInputVolume(float level) = 0;
|
||||
virtual void setOutputVolume(float level) = 0;
|
||||
virtual void setAudioOutputDuckingEnabled(bool enabled) = 0;
|
||||
virtual bool receiveSignalingData(const QByteArray &data) = 0;
|
||||
|
||||
virtual std::string getLastError() = 0;
|
||||
virtual std::string getDebugInfo() = 0;
|
||||
|
|
@ -48,6 +49,11 @@ public:
|
|||
const std::vector<TgVoipEndpoint> &endpoints,
|
||||
const TgVoipProxy *proxy,
|
||||
TgVoipNetworkType initialNetworkType,
|
||||
const TgVoipEncryptionKey &encryptionKey);
|
||||
const TgVoipEncryptionKey &encryptionKey,
|
||||
Fn<void(QByteArray)> sendSignalingData,
|
||||
Fn<void(QImage)> displayNextFrame);
|
||||
|
||||
[[nodiscard]] std::vector<std::string> CollectControllerVersions();
|
||||
[[nodiscard]] int ControllerMaxLayer();
|
||||
|
||||
} // namespace Calls
|
||||
|
|
|
|||
|
|
@ -33,7 +33,7 @@ public:
|
|||
return TgVoip::getVersion();
|
||||
}
|
||||
|
||||
[[nodiscard]] std::string version() override {
|
||||
std::string version() override {
|
||||
return Version();
|
||||
}
|
||||
void setNetworkType(TgVoipNetworkType networkType) override {
|
||||
|
|
@ -63,6 +63,9 @@ public:
|
|||
void setAudioOutputDuckingEnabled(bool enabled) override {
|
||||
_impl->setAudioOutputDuckingEnabled(enabled);
|
||||
}
|
||||
bool receiveSignalingData(const QByteArray &data) override {
|
||||
return false;
|
||||
}
|
||||
std::string getLastError() override {
|
||||
return _impl->getLastError();
|
||||
}
|
||||
|
|
@ -81,8 +84,7 @@ public:
|
|||
void setOnStateUpdated(Fn<void(TgVoipState)> onStateUpdated) override {
|
||||
_impl->setOnStateUpdated(std::move(onStateUpdated));
|
||||
}
|
||||
void setOnSignalBarsUpdated(
|
||||
Fn<void(int)> onSignalBarsUpdated) override {
|
||||
void setOnSignalBarsUpdated(Fn<void(int)> onSignalBarsUpdated) override {
|
||||
_impl->setOnSignalBarsUpdated(std::move(onSignalBarsUpdated));
|
||||
}
|
||||
TgVoipFinalState stop() override {
|
||||
|
|
|
|||
175
Telegram/SourceFiles/calls/calls_controller_webrtc.cpp
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
/*
|
||||
This file is part of Telegram Desktop,
|
||||
the official desktop application for the Telegram messaging service.
|
||||
|
||||
For license and copyright information please follow this link:
|
||||
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
||||
*/
|
||||
#include "calls/calls_controller_webrtc.h"
|
||||
|
||||
#include "webrtc/webrtc_call_context.h"
|
||||
|
||||
namespace Calls {
|
||||
namespace {
|
||||
|
||||
using namespace Webrtc;
|
||||
|
||||
[[nodiscard]] CallConnectionDescription ConvertEndpoint(const TgVoipEndpoint &data) {
|
||||
return CallConnectionDescription{
|
||||
.ip = QString::fromStdString(data.host.ipv4),
|
||||
.ipv6 = QString::fromStdString(data.host.ipv6),
|
||||
.peerTag = QByteArray(
|
||||
reinterpret_cast<const char*>(data.peerTag),
|
||||
base::array_size(data.peerTag)),
|
||||
.connectionId = data.endpointId,
|
||||
.port = data.port,
|
||||
};
|
||||
}
|
||||
|
||||
[[nodiscard]] CallContext::Config MakeContextConfig(
|
||||
const TgVoipConfig &config,
|
||||
const TgVoipPersistentState &persistentState,
|
||||
const std::vector<TgVoipEndpoint> &endpoints,
|
||||
const TgVoipProxy *proxy,
|
||||
TgVoipNetworkType initialNetworkType,
|
||||
const TgVoipEncryptionKey &encryptionKey,
|
||||
Fn<void(QByteArray)> sendSignalingData,
|
||||
Fn<void(QImage)> displayNextFrame) {
|
||||
Expects(!endpoints.empty());
|
||||
|
||||
auto result = CallContext::Config{
|
||||
.proxy = (proxy
|
||||
? ProxyServer{
|
||||
.host = QString::fromStdString(proxy->host),
|
||||
.username = QString::fromStdString(proxy->login),
|
||||
.password = QString::fromStdString(proxy->password),
|
||||
.port = proxy->port }
|
||||
: ProxyServer()),
|
||||
.dataSaving = (config.dataSaving != TgVoipDataSaving::Never),
|
||||
.key = QByteArray(
|
||||
reinterpret_cast<const char*>(encryptionKey.value.data()),
|
||||
encryptionKey.value.size()),
|
||||
.outgoing = encryptionKey.isOutgoing,
|
||||
.primary = ConvertEndpoint(endpoints.front()),
|
||||
.alternatives = endpoints | ranges::view::drop(
|
||||
1
|
||||
) | ranges::view::transform(ConvertEndpoint) | ranges::to_vector,
|
||||
.maxLayer = config.maxApiLayer,
|
||||
.allowP2P = config.enableP2P,
|
||||
.sendSignalingData = std::move(sendSignalingData),
|
||||
.displayNextFrame = std::move(displayNextFrame),
|
||||
};
|
||||
return result;
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
WebrtcController::WebrtcController(
|
||||
const TgVoipConfig &config,
|
||||
const TgVoipPersistentState &persistentState,
|
||||
const std::vector<TgVoipEndpoint> &endpoints,
|
||||
const TgVoipProxy *proxy,
|
||||
TgVoipNetworkType initialNetworkType,
|
||||
const TgVoipEncryptionKey &encryptionKey,
|
||||
Fn<void(QByteArray)> sendSignalingData,
|
||||
Fn<void(QImage)> displayNextFrame)
|
||||
: _impl(std::make_unique<CallContext>(MakeContextConfig(
|
||||
config,
|
||||
persistentState,
|
||||
endpoints,
|
||||
proxy,
|
||||
initialNetworkType,
|
||||
encryptionKey,
|
||||
std::move(sendSignalingData),
|
||||
std::move(displayNextFrame)))) {
|
||||
}
|
||||
|
||||
WebrtcController::~WebrtcController() = default;
|
||||
|
||||
std::string WebrtcController::Version() {
|
||||
return CallContext::Version().toStdString();
|
||||
}
|
||||
|
||||
std::string WebrtcController::version() {
|
||||
return Version();
|
||||
}
|
||||
|
||||
void WebrtcController::setNetworkType(TgVoipNetworkType networkType) {
|
||||
}
|
||||
|
||||
void WebrtcController::setMuteMicrophone(bool muteMicrophone) {
|
||||
_impl->setIsMuted(muteMicrophone);
|
||||
}
|
||||
|
||||
void WebrtcController::setAudioOutputGainControlEnabled(bool enabled) {
|
||||
}
|
||||
|
||||
void WebrtcController::setEchoCancellationStrength(int strength) {
|
||||
}
|
||||
|
||||
void WebrtcController::setAudioInputDevice(std::string id) {
|
||||
}
|
||||
|
||||
void WebrtcController::setAudioOutputDevice(std::string id) {
|
||||
}
|
||||
|
||||
void WebrtcController::setInputVolume(float level) {
|
||||
}
|
||||
|
||||
void WebrtcController::setOutputVolume(float level) {
|
||||
}
|
||||
|
||||
void WebrtcController::setAudioOutputDuckingEnabled(bool enabled) {
|
||||
}
|
||||
|
||||
bool WebrtcController::receiveSignalingData(const QByteArray &data) {
|
||||
return _impl->receiveSignalingData(data);
|
||||
}
|
||||
|
||||
std::string WebrtcController::getLastError() {
|
||||
return {};
|
||||
}
|
||||
|
||||
std::string WebrtcController::getDebugInfo() {
|
||||
return _impl->getDebugInfo().toStdString();
|
||||
}
|
||||
|
||||
int64_t WebrtcController::getPreferredRelayId() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
TgVoipTrafficStats WebrtcController::getTrafficStats() {
|
||||
return {};
|
||||
}
|
||||
|
||||
TgVoipPersistentState WebrtcController::getPersistentState() {
|
||||
return TgVoipPersistentState{};
|
||||
}
|
||||
|
||||
void WebrtcController::setOnStateUpdated(
|
||||
Fn<void(TgVoipState)> onStateUpdated) {
|
||||
_stateUpdatedLifetime.destroy();
|
||||
_impl->state().changes(
|
||||
) | rpl::start_with_next([=](CallState state) {
|
||||
onStateUpdated([&] {
|
||||
switch (state) {
|
||||
case CallState::Initializing: return TgVoipState::WaitInit;
|
||||
case CallState::Reconnecting: return TgVoipState::Reconnecting;
|
||||
case CallState::Connected: return TgVoipState::Established;
|
||||
case CallState::Failed: return TgVoipState::Failed;
|
||||
}
|
||||
Unexpected("State value in Webrtc::CallContext::state.");
|
||||
}());
|
||||
}, _stateUpdatedLifetime);
|
||||
}
|
||||
|
||||
void WebrtcController::setOnSignalBarsUpdated(
|
||||
Fn<void(int)> onSignalBarsUpdated) {
|
||||
}
|
||||
|
||||
TgVoipFinalState WebrtcController::stop() {
|
||||
_impl->stop();
|
||||
return TgVoipFinalState();
|
||||
}
|
||||
|
||||
} // namespace Calls
|
||||
60
Telegram/SourceFiles/calls/calls_controller_webrtc.h
Normal file
|
|
@ -0,0 +1,60 @@
|
|||
/*
|
||||
This file is part of Telegram Desktop,
|
||||
the official desktop application for the Telegram messaging service.
|
||||
|
||||
For license and copyright information please follow this link:
|
||||
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
||||
*/
|
||||
#pragma once
|
||||
|
||||
#include "calls/calls_controller.h"
|
||||
|
||||
namespace Webrtc {
|
||||
class CallContext;
|
||||
} // namespace Webrtc
|
||||
|
||||
namespace Calls {
|
||||
|
||||
class WebrtcController final : public Controller {
|
||||
public:
|
||||
WebrtcController(
|
||||
const TgVoipConfig &config,
|
||||
const TgVoipPersistentState &persistentState,
|
||||
const std::vector<TgVoipEndpoint> &endpoints,
|
||||
const TgVoipProxy *proxy,
|
||||
TgVoipNetworkType initialNetworkType,
|
||||
const TgVoipEncryptionKey &encryptionKey,
|
||||
Fn<void(QByteArray)> sendSignalingData,
|
||||
Fn<void(QImage)> displayNextFrame);
|
||||
~WebrtcController();
|
||||
|
||||
[[nodiscard]] static std::string Version();
|
||||
|
||||
std::string version() override;
|
||||
void setNetworkType(TgVoipNetworkType networkType) override;
|
||||
void setMuteMicrophone(bool muteMicrophone) override;
|
||||
void setAudioOutputGainControlEnabled(bool enabled) override;
|
||||
void setEchoCancellationStrength(int strength) override;
|
||||
void setAudioInputDevice(std::string id) override;
|
||||
void setAudioOutputDevice(std::string id) override;
|
||||
void setInputVolume(float level) override;
|
||||
void setOutputVolume(float level) override;
|
||||
void setAudioOutputDuckingEnabled(bool enabled) override;
|
||||
bool receiveSignalingData(const QByteArray &data) override;
|
||||
std::string getLastError() override;
|
||||
std::string getDebugInfo() override;
|
||||
int64_t getPreferredRelayId() override;
|
||||
TgVoipTrafficStats getTrafficStats() override;
|
||||
TgVoipPersistentState getPersistentState() override;
|
||||
void setOnStateUpdated(Fn<void(TgVoipState)> onStateUpdated) override;
|
||||
void setOnSignalBarsUpdated(Fn<void(int)> onSignalBarsUpdated) override;
|
||||
TgVoipFinalState stop() override;
|
||||
|
||||
private:
|
||||
const std::unique_ptr<Webrtc::CallContext> _impl;
|
||||
|
||||
rpl::lifetime _stateUpdatedLifetime;
|
||||
|
||||
};
|
||||
|
||||
} // namespace Calls
|
||||
|
|
@ -8,11 +8,19 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
#include "calls/calls_emoji_fingerprint.h"
|
||||
|
||||
#include "calls/calls_call.h"
|
||||
#include "calls/calls_signal_bars.h"
|
||||
#include "lang/lang_keys.h"
|
||||
#include "data/data_user.h"
|
||||
#include "ui/widgets/tooltip.h"
|
||||
#include "ui/emoji_config.h"
|
||||
#include "ui/rp_widget.h"
|
||||
#include "styles/style_calls.h"
|
||||
|
||||
namespace Calls {
|
||||
namespace {
|
||||
|
||||
constexpr auto kTooltipShowTimeoutMs = 1000;
|
||||
|
||||
const ushort Data[] = {
|
||||
0xd83d, 0xde09, 0xd83d, 0xde0d, 0xd83d, 0xde1b, 0xd83d, 0xde2d, 0xd83d, 0xde31, 0xd83d, 0xde21,
|
||||
0xd83d, 0xde0e, 0xd83d, 0xde34, 0xd83d, 0xde35, 0xd83d, 0xde08, 0xd83d, 0xde2c, 0xd83d, 0xde07,
|
||||
|
|
@ -143,7 +151,147 @@ std::vector<EmojiPtr> ComputeEmojiFingerprint(not_null<Call*> call) {
|
|||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
object_ptr<Ui::RpWidget> CreateFingerprintAndSignalBars(
|
||||
not_null<QWidget*> parent,
|
||||
not_null<Call*> call) {
|
||||
class EmojiTooltipShower final : public Ui::AbstractTooltipShower {
|
||||
public:
|
||||
EmojiTooltipShower(not_null<QWidget*> window, const QString &text)
|
||||
: _window(window)
|
||||
, _text(text) {
|
||||
}
|
||||
|
||||
QString tooltipText() const override {
|
||||
return _text;
|
||||
}
|
||||
QPoint tooltipPos() const override {
|
||||
return QCursor::pos();
|
||||
}
|
||||
bool tooltipWindowActive() const override {
|
||||
return _window->isActiveWindow();
|
||||
}
|
||||
|
||||
private:
|
||||
const not_null<QWidget*> _window;
|
||||
const QString _text;
|
||||
|
||||
};
|
||||
|
||||
auto result = object_ptr<Ui::RpWidget>(parent);
|
||||
const auto raw = result.data();
|
||||
|
||||
// Emoji tooltip.
|
||||
const auto shower = raw->lifetime().make_state<EmojiTooltipShower>(
|
||||
parent->window(),
|
||||
tr::lng_call_fingerprint_tooltip(
|
||||
tr::now,
|
||||
lt_user,
|
||||
call->user()->name));
|
||||
raw->setMouseTracking(true);
|
||||
raw->events(
|
||||
) | rpl::start_with_next([=](not_null<QEvent*> e) {
|
||||
if (e->type() == QEvent::MouseMove) {
|
||||
Ui::Tooltip::Show(kTooltipShowTimeoutMs, shower);
|
||||
} else if (e->type() == QEvent::Leave) {
|
||||
Ui::Tooltip::Hide();
|
||||
}
|
||||
}, raw->lifetime());
|
||||
|
||||
// Signal bars.
|
||||
const auto bars = Ui::CreateChild<SignalBars>(
|
||||
raw,
|
||||
call,
|
||||
st::callPanelSignalBars);
|
||||
bars->setAttribute(Qt::WA_TransparentForMouseEvents);
|
||||
|
||||
// Geometry.
|
||||
const auto print = ComputeEmojiFingerprint(call);
|
||||
auto realSize = Ui::Emoji::GetSizeNormal();
|
||||
auto size = realSize / cIntRetinaFactor();
|
||||
auto count = print.size();
|
||||
const auto printSize = QSize(
|
||||
count * size + (count - 1) * st::callFingerprintSkip,
|
||||
size);
|
||||
const auto fullPrintSize = QRect(
|
||||
QPoint(),
|
||||
printSize
|
||||
).marginsAdded(st::callFingerprintPadding).size();
|
||||
const auto fullBarsSize = bars->rect().marginsAdded(
|
||||
st::callSignalBarsPadding
|
||||
).size();
|
||||
const auto fullSize = QSize(
|
||||
(fullPrintSize.width()
|
||||
+ st::callFingerprintSignalBarsSkip
|
||||
+ fullBarsSize.width()),
|
||||
fullPrintSize.height());
|
||||
raw->resize(fullSize);
|
||||
bars->moveToRight(
|
||||
st::callSignalBarsPadding.right(),
|
||||
st::callSignalBarsPadding.top());
|
||||
|
||||
// Paint.
|
||||
const auto background = raw->lifetime().make_state<QImage>(
|
||||
fullSize * cIntRetinaFactor(),
|
||||
QImage::Format_ARGB32_Premultiplied);
|
||||
background->setDevicePixelRatio(cRetinaFactor());
|
||||
rpl::merge(
|
||||
rpl::single(rpl::empty_value()),
|
||||
Ui::Emoji::Updated(),
|
||||
style::PaletteChanged()
|
||||
) | rpl::start_with_next([=] {
|
||||
background->fill(Qt::transparent);
|
||||
|
||||
// Prepare.
|
||||
auto p = QPainter(background);
|
||||
const auto height = fullSize.height();
|
||||
const auto fullPrintRect = QRect(QPoint(), fullPrintSize);
|
||||
const auto fullBarsRect = QRect(
|
||||
fullSize.width() - fullBarsSize.width(),
|
||||
0,
|
||||
fullBarsSize.width(),
|
||||
height);
|
||||
const auto bigRadius = height / 2;
|
||||
const auto smallRadius = st::buttonRadius;
|
||||
const auto hq = PainterHighQualityEnabler(p);
|
||||
p.setPen(Qt::NoPen);
|
||||
p.setBrush(st::callBgButton);
|
||||
|
||||
// Fingerprint part.
|
||||
p.setClipRect(0, 0, fullPrintSize.width() / 2, height);
|
||||
p.drawRoundedRect(fullPrintRect, bigRadius, bigRadius);
|
||||
p.setClipRect(fullPrintSize.width() / 2, 0, fullSize.width(), height);
|
||||
p.drawRoundedRect(fullPrintRect, smallRadius, smallRadius);
|
||||
|
||||
// Signal bars part.
|
||||
const auto middle = fullBarsRect.center().x();
|
||||
p.setClipRect(0, 0, middle, height);
|
||||
p.drawRoundedRect(fullBarsRect, smallRadius, smallRadius);
|
||||
p.setClipRect(middle, 0, fullBarsRect.width(), height);
|
||||
p.drawRoundedRect(fullBarsRect, bigRadius, bigRadius);
|
||||
|
||||
// Emoji.
|
||||
const auto realSize = Ui::Emoji::GetSizeNormal();
|
||||
const auto size = realSize / cIntRetinaFactor();
|
||||
auto left = st::callFingerprintPadding.left();
|
||||
const auto top = st::callFingerprintPadding.top();
|
||||
p.setClipping(false);
|
||||
for (const auto emoji : print) {
|
||||
Ui::Emoji::Draw(p, emoji, realSize, left, top);
|
||||
left += st::callFingerprintSkip + size;
|
||||
}
|
||||
|
||||
raw->update();
|
||||
}, raw->lifetime());
|
||||
|
||||
raw->paintRequest(
|
||||
) | rpl::start_with_next([=](QRect clip) {
|
||||
QPainter(raw).drawImage(raw->rect(), *background);
|
||||
}, raw->lifetime());
|
||||
|
||||
raw->show();
|
||||
return result;
|
||||
}
|
||||
|
||||
} // namespace Calls
|
||||
|
|
|
|||
|
|
@ -7,10 +7,21 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
*/
|
||||
#pragma once
|
||||
|
||||
#include "base/object_ptr.h"
|
||||
|
||||
namespace Ui {
|
||||
class RpWidget;
|
||||
} // namespace Ui
|
||||
|
||||
namespace Calls {
|
||||
|
||||
class Call;
|
||||
|
||||
std::vector<EmojiPtr> ComputeEmojiFingerprint(not_null<Call*> call);
|
||||
[[nodiscard]] std::vector<EmojiPtr> ComputeEmojiFingerprint(
|
||||
not_null<Call*> call);
|
||||
|
||||
[[nodiscard]] object_ptr<Ui::RpWidget> CreateFingerprintAndSignalBars(
|
||||
not_null<QWidget*> parent,
|
||||
not_null<Call*> call);
|
||||
|
||||
} // namespace Calls
|
||||
|
|
|
|||
|
|
@ -24,6 +24,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
#include "mainwidget.h"
|
||||
#include "mtproto/mtproto_config.h"
|
||||
#include "boxes/rate_call_box.h"
|
||||
#include "tgcalls/VideoCaptureInterface.h"
|
||||
#include "app.h"
|
||||
|
||||
namespace Calls {
|
||||
|
|
@ -35,15 +36,9 @@ constexpr auto kServerConfigUpdateTimeoutMs = 24 * 3600 * crl::time(1000);
|
|||
|
||||
Instance::Instance() = default;
|
||||
|
||||
Instance::~Instance() {
|
||||
for (const auto panel : _pendingPanels) {
|
||||
if (panel) {
|
||||
delete panel;
|
||||
}
|
||||
}
|
||||
}
|
||||
Instance::~Instance() = default;
|
||||
|
||||
void Instance::startOutgoingCall(not_null<UserData*> user) {
|
||||
void Instance::startOutgoingCall(not_null<UserData*> user, bool video) {
|
||||
if (alreadyInCall()) { // Already in a call.
|
||||
_currentCallPanel->showAndActivate();
|
||||
return;
|
||||
|
|
@ -55,17 +50,21 @@ void Instance::startOutgoingCall(not_null<UserData*> user) {
|
|||
tr::lng_call_error_not_available(tr::now, lt_user, user->name)));
|
||||
return;
|
||||
}
|
||||
requestMicrophonePermissionOrFail(crl::guard(this, [=] {
|
||||
createCall(user, Call::Type::Outgoing);
|
||||
requestPermissionsOrFail(crl::guard(this, [=] {
|
||||
createCall(user, Call::Type::Outgoing, video);
|
||||
}));
|
||||
}
|
||||
|
||||
void Instance::callFinished(not_null<Call*> call) {
|
||||
crl::on_main(call, [=] {
|
||||
destroyCall(call);
|
||||
});
|
||||
}
|
||||
|
||||
void Instance::callFailed(not_null<Call*> call) {
|
||||
crl::on_main(call, [=] {
|
||||
destroyCall(call);
|
||||
});
|
||||
}
|
||||
|
||||
void Instance::callRedial(not_null<Call*> call) {
|
||||
|
|
@ -107,7 +106,9 @@ void Instance::playSound(Sound sound) {
|
|||
|
||||
void Instance::destroyCall(not_null<Call*> call) {
|
||||
if (_currentCall.get() == call) {
|
||||
destroyCurrentPanel();
|
||||
_currentCallPanel->closeBeforeDestroy();
|
||||
_currentCallPanel = nullptr;
|
||||
|
||||
auto taken = base::take(_currentCall);
|
||||
_currentCallChanges.fire(nullptr);
|
||||
taken.reset();
|
||||
|
|
@ -119,19 +120,8 @@ void Instance::destroyCall(not_null<Call*> call) {
|
|||
}
|
||||
}
|
||||
|
||||
void Instance::destroyCurrentPanel() {
|
||||
_pendingPanels.erase(
|
||||
std::remove_if(
|
||||
_pendingPanels.begin(),
|
||||
_pendingPanels.end(),
|
||||
[](auto &&panel) { return !panel; }),
|
||||
_pendingPanels.end());
|
||||
_pendingPanels.emplace_back(_currentCallPanel.release());
|
||||
_pendingPanels.back()->hideAndDestroy(); // Always queues the destruction.
|
||||
}
|
||||
|
||||
void Instance::createCall(not_null<UserData*> user, Call::Type type) {
|
||||
auto call = std::make_unique<Call>(getCallDelegate(), user, type);
|
||||
void Instance::createCall(not_null<UserData*> user, Call::Type type, bool video) {
|
||||
auto call = std::make_unique<Call>(getCallDelegate(), user, type, video);
|
||||
const auto raw = call.get();
|
||||
|
||||
user->session().account().sessionChanges(
|
||||
|
|
@ -237,8 +227,14 @@ void Instance::refreshServerConfig(not_null<Main::Session*> session) {
|
|||
|
||||
void Instance::handleUpdate(
|
||||
not_null<Main::Session*> session,
|
||||
const MTPDupdatePhoneCall& update) {
|
||||
handleCallUpdate(session, update.vphone_call());
|
||||
const MTPUpdate &update) {
|
||||
update.match([&](const MTPDupdatePhoneCall &data) {
|
||||
handleCallUpdate(session, data.vphone_call());
|
||||
}, [&](const MTPDupdatePhoneCallSignalingData &data) {
|
||||
handleSignalingData(data);
|
||||
}, [](const auto &) {
|
||||
Unexpected("Update type in Calls::Instance::handleUpdate.");
|
||||
});
|
||||
}
|
||||
|
||||
void Instance::showInfoPanel(not_null<Call*> call) {
|
||||
|
|
@ -272,8 +268,11 @@ void Instance::handleCallUpdate(
|
|||
}
|
||||
const auto &config = session->serverConfig();
|
||||
if (alreadyInCall() || !user || user->isSelf()) {
|
||||
const auto flags = phoneCall.is_video()
|
||||
? MTPphone_DiscardCall::Flag::f_video
|
||||
: MTPphone_DiscardCall::Flag(0);
|
||||
session->api().request(MTPphone_DiscardCall(
|
||||
MTP_flags(0),
|
||||
MTP_flags(flags),
|
||||
MTP_inputPhoneCall(phoneCall.vid(), phoneCall.vaccess_hash()),
|
||||
MTP_int(0),
|
||||
MTP_phoneCallDiscardReasonBusy(),
|
||||
|
|
@ -283,7 +282,7 @@ void Instance::handleCallUpdate(
|
|||
< base::unixtime::now()) {
|
||||
LOG(("Ignoring too old call."));
|
||||
} else {
|
||||
createCall(user, Call::Type::Incoming);
|
||||
createCall(user, Call::Type::Incoming, phoneCall.is_video());
|
||||
_currentCall->handleUpdate(call);
|
||||
}
|
||||
} else if (!_currentCall || !_currentCall->handleUpdate(call)) {
|
||||
|
|
@ -291,6 +290,14 @@ void Instance::handleCallUpdate(
|
|||
}
|
||||
}
|
||||
|
||||
void Instance::handleSignalingData(
|
||||
const MTPDupdatePhoneCallSignalingData &data) {
|
||||
if (!_currentCall || !_currentCall->handleSignalingData(data)) {
|
||||
DEBUG_LOG(("API Warning: unexpected call signaling data %1"
|
||||
).arg(data.vphone_call_id().v));
|
||||
}
|
||||
}
|
||||
|
||||
bool Instance::alreadyInCall() {
|
||||
return (_currentCall && _currentCall->state() != Call::State::Busy);
|
||||
}
|
||||
|
|
@ -303,13 +310,23 @@ rpl::producer<Call*> Instance::currentCallValue() const {
|
|||
return _currentCallChanges.events_starting_with(currentCall());
|
||||
}
|
||||
|
||||
void Instance::requestMicrophonePermissionOrFail(Fn<void()> onSuccess) {
|
||||
Platform::PermissionStatus status=Platform::GetPermissionStatus(Platform::PermissionType::Microphone);
|
||||
if (status==Platform::PermissionStatus::Granted) {
|
||||
void Instance::requestPermissionsOrFail(Fn<void()> onSuccess) {
|
||||
using Type = Platform::PermissionType;
|
||||
requestPermissionOrFail(Type::Microphone, [=] {
|
||||
requestPermissionOrFail(Type::Camera, [=] {
|
||||
crl::on_main(onSuccess);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
void Instance::requestPermissionOrFail(Platform::PermissionType type, Fn<void()> onSuccess) {
|
||||
using Status = Platform::PermissionStatus;
|
||||
const auto status = Platform::GetPermissionStatus(type);
|
||||
if (status == Status::Granted) {
|
||||
onSuccess();
|
||||
} else if(status==Platform::PermissionStatus::CanRequest) {
|
||||
Platform::RequestPermission(Platform::PermissionType::Microphone, crl::guard(this, [=](Platform::PermissionStatus status) {
|
||||
if (status==Platform::PermissionStatus::Granted) {
|
||||
} else if (status == Status::CanRequest) {
|
||||
Platform::RequestPermission(type, crl::guard(this, [=](Status status) {
|
||||
if (status == Status::Granted) {
|
||||
crl::on_main(onSuccess);
|
||||
} else {
|
||||
if (_currentCall) {
|
||||
|
|
@ -321,11 +338,22 @@ void Instance::requestMicrophonePermissionOrFail(Fn<void()> onSuccess) {
|
|||
if (alreadyInCall()) {
|
||||
_currentCall->hangup();
|
||||
}
|
||||
Ui::show(Box<ConfirmBox>(tr::ktg_no_mic_permission(tr::now), tr::lng_menu_settings(tr::now), crl::guard(this, [] {
|
||||
Platform::OpenSystemSettingsForPermission(Platform::PermissionType::Microphone);
|
||||
Ui::show(Box<ConfirmBox>(tr::ktg_no_mic_permission(tr::now), tr::lng_menu_settings(tr::now), crl::guard(this, [=] {
|
||||
Platform::OpenSystemSettingsForPermission(type);
|
||||
Ui::hideLayer();
|
||||
})));
|
||||
}
|
||||
}
|
||||
|
||||
std::shared_ptr<tgcalls::VideoCaptureInterface> Instance::getVideoCapture() {
|
||||
if (auto result = _videoCapture.lock()) {
|
||||
return result;
|
||||
}
|
||||
auto result = std::shared_ptr<tgcalls::VideoCaptureInterface>(
|
||||
tgcalls::VideoCaptureInterface::Create(
|
||||
Core::App().settings().callVideoInputDeviceId().toStdString()));
|
||||
_videoCapture = result;
|
||||
return result;
|
||||
}
|
||||
|
||||
} // namespace Calls
|
||||
|
|
|
|||
|
|
@ -10,6 +10,10 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
#include "mtproto/sender.h"
|
||||
#include "calls/calls_call.h"
|
||||
|
||||
namespace Platform {
|
||||
enum class PermissionType;
|
||||
} // namespace Platform
|
||||
|
||||
namespace Media {
|
||||
namespace Audio {
|
||||
class Track;
|
||||
|
|
@ -32,13 +36,14 @@ public:
|
|||
Instance();
|
||||
~Instance();
|
||||
|
||||
void startOutgoingCall(not_null<UserData*> user);
|
||||
void startOutgoingCall(not_null<UserData*> user, bool video);
|
||||
void handleUpdate(
|
||||
not_null<Main::Session*> session,
|
||||
const MTPDupdatePhoneCall &update);
|
||||
const MTPUpdate &update);
|
||||
void showInfoPanel(not_null<Call*> call);
|
||||
[[nodiscard]] Call *currentCall() const;
|
||||
[[nodiscard]] rpl::producer<Call*> currentCallValue() const;
|
||||
std::shared_ptr<tgcalls::VideoCaptureInterface> getVideoCapture() override;
|
||||
|
||||
[[nodiscard]] bool isQuitPrevent();
|
||||
|
||||
|
|
@ -54,10 +59,12 @@ private:
|
|||
void callRedial(not_null<Call*> call) override;
|
||||
using Sound = Call::Delegate::Sound;
|
||||
void playSound(Sound sound) override;
|
||||
void createCall(not_null<UserData*> user, Call::Type type);
|
||||
void createCall(not_null<UserData*> user, Call::Type type, bool video);
|
||||
void destroyCall(not_null<Call*> call);
|
||||
void destroyCurrentPanel();
|
||||
void requestMicrophonePermissionOrFail(Fn<void()> onSuccess) override;
|
||||
void requestPermissionsOrFail(Fn<void()> onSuccess) override;
|
||||
void requestPermissionOrFail(Platform::PermissionType type, Fn<void()> onSuccess);
|
||||
|
||||
void handleSignalingData(const MTPDupdatePhoneCallSignalingData &data);
|
||||
|
||||
void refreshDhConfig();
|
||||
void refreshServerConfig(not_null<Main::Session*> session);
|
||||
|
|
@ -72,13 +79,13 @@ private:
|
|||
|
||||
crl::time _lastServerConfigUpdateTime = 0;
|
||||
base::weak_ptr<Main::Session> _serverConfigRequestSession;
|
||||
std::weak_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
|
||||
|
||||
std::unique_ptr<Call> _currentCall;
|
||||
rpl::event_stream<Call*> _currentCallChanges;
|
||||
std::unique_ptr<Panel> _currentCallPanel;
|
||||
base::Observable<Call*> _currentCallChanged;
|
||||
base::Observable<FullMsgId> _newServiceMessage;
|
||||
std::vector<QPointer<Panel>> _pendingPanels;
|
||||
|
||||
std::unique_ptr<Media::Audio::Track> _callConnectingTrack;
|
||||
std::unique_ptr<Media::Audio::Track> _callEndedTrack;
|
||||
|
|
|
|||
|
|
@ -9,8 +9,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
|
||||
#include "base/weak_ptr.h"
|
||||
#include "base/timer.h"
|
||||
#include "base/object_ptr.h"
|
||||
#include "calls/calls_call.h"
|
||||
#include "ui/widgets/tooltip.h"
|
||||
#include "ui/effects/animations.h"
|
||||
#include "ui/rp_widget.h"
|
||||
|
||||
|
|
@ -26,138 +26,117 @@ class IconButton;
|
|||
class FlatLabel;
|
||||
template <typename Widget>
|
||||
class FadeWrap;
|
||||
template <typename Widget>
|
||||
class PaddingWrap;
|
||||
class Window;
|
||||
namespace Platform {
|
||||
class TitleControls;
|
||||
} // namespace Platform
|
||||
} // namespace Ui
|
||||
|
||||
namespace style {
|
||||
struct CallSignalBars;
|
||||
struct CallBodyLayout;
|
||||
} // namespace style
|
||||
|
||||
namespace Calls {
|
||||
|
||||
class SignalBars : public Ui::RpWidget, private base::Subscriber {
|
||||
public:
|
||||
SignalBars(
|
||||
QWidget *parent,
|
||||
not_null<Call*> call,
|
||||
const style::CallSignalBars &st,
|
||||
Fn<void()> displayedChangedCallback = nullptr);
|
||||
|
||||
bool isDisplayed() const;
|
||||
|
||||
protected:
|
||||
void paintEvent(QPaintEvent *e) override;
|
||||
|
||||
private:
|
||||
void changed(int count);
|
||||
|
||||
const style::CallSignalBars &_st;
|
||||
int _count = Call::kSignalBarStarting;
|
||||
Fn<void()> _displayedChangedCallback;
|
||||
|
||||
};
|
||||
|
||||
class Panel
|
||||
: public Ui::RpWidget
|
||||
, private base::Subscriber
|
||||
, private Ui::AbstractTooltipShower {
|
||||
class Userpic;
|
||||
class SignalBars;
|
||||
class VideoBubble;
|
||||
|
||||
class Panel final {
|
||||
public:
|
||||
Panel(not_null<Call*> call);
|
||||
~Panel();
|
||||
|
||||
void showAndActivate();
|
||||
void replaceCall(not_null<Call*> call);
|
||||
void hideAndDestroy();
|
||||
|
||||
protected:
|
||||
void paintEvent(QPaintEvent *e) override;
|
||||
void closeEvent(QCloseEvent *e) override;
|
||||
void resizeEvent(QResizeEvent *e) override;
|
||||
void mousePressEvent(QMouseEvent *e) override;
|
||||
void mouseReleaseEvent(QMouseEvent *e) override;
|
||||
void mouseMoveEvent(QMouseEvent *e) override;
|
||||
void leaveEventHook(QEvent *e) override;
|
||||
void leaveToChildEvent(QEvent *e, QWidget *child) override;
|
||||
bool eventHook(QEvent *e) override;
|
||||
void closeBeforeDestroy();
|
||||
|
||||
private:
|
||||
class Incoming;
|
||||
class Button;
|
||||
using State = Call::State;
|
||||
using Type = Call::Type;
|
||||
enum class AnswerHangupRedialState : uchar {
|
||||
Answer,
|
||||
Hangup,
|
||||
Redial,
|
||||
};
|
||||
|
||||
// AbstractTooltipShower interface
|
||||
QString tooltipText() const override;
|
||||
QPoint tooltipPos() const override;
|
||||
bool tooltipWindowActive() const override;
|
||||
[[nodiscard]] not_null<Ui::RpWidget*> widget() const;
|
||||
|
||||
void paint(QRect clip);
|
||||
|
||||
void initWindow();
|
||||
void initWidget();
|
||||
void initControls();
|
||||
void reinitControls();
|
||||
void reinitWithCall(Call *call);
|
||||
void initLayout();
|
||||
void initGeometry();
|
||||
void hideDeactivated();
|
||||
void createBottomImage();
|
||||
void createDefaultCacheImage();
|
||||
void refreshCacheImageUserPhoto();
|
||||
|
||||
void processUserPhoto();
|
||||
void refreshUserPhoto();
|
||||
bool isGoodUserPhoto(PhotoData *photo);
|
||||
void createUserpicCache(Image *image);
|
||||
void handleClose();
|
||||
|
||||
QRect signalBarsRect() const;
|
||||
void paintSignalBarsBg(Painter &p);
|
||||
|
||||
void updateControlsGeometry();
|
||||
void updateHangupGeometry();
|
||||
void updateStatusGeometry();
|
||||
void updateOutgoingVideoBubbleGeometry();
|
||||
void stateChanged(State state);
|
||||
void showControls();
|
||||
void updateStatusText(State state);
|
||||
void startDurationUpdateTimer(crl::time currentDuration);
|
||||
void fillFingerprint();
|
||||
void toggleOpacityAnimation(bool visible);
|
||||
void finishAnimating();
|
||||
void destroyDelayed();
|
||||
void setIncomingSize(QSize size);
|
||||
void refreshIncomingGeometry();
|
||||
|
||||
void refreshOutgoingPreviewInBody(State state);
|
||||
void toggleFullScreen(bool fullscreen);
|
||||
void createRemoteAudioMute();
|
||||
void refreshAnswerHangupRedialLabel();
|
||||
|
||||
[[nodiscard]] QRect incomingFrameGeometry() const;
|
||||
[[nodiscard]] QRect outgoingFrameGeometry() const;
|
||||
|
||||
Call *_call = nullptr;
|
||||
not_null<UserData*> _user;
|
||||
std::shared_ptr<Data::CloudImageView> _userpic;
|
||||
std::shared_ptr<Data::PhotoMedia> _photo;
|
||||
|
||||
bool _useTransparency = true;
|
||||
style::margins _padding;
|
||||
int _contentTop = 0;
|
||||
const std::unique_ptr<Ui::Window> _window;
|
||||
std::unique_ptr<Incoming> _incoming;
|
||||
|
||||
bool _dragging = false;
|
||||
QPoint _dragStartMousePosition;
|
||||
QPoint _dragStartMyPosition;
|
||||
#ifdef Q_OS_WIN
|
||||
std::unique_ptr<Ui::Platform::TitleControls> _controls;
|
||||
#endif // Q_OS_WIN
|
||||
|
||||
rpl::lifetime _stateLifetime;
|
||||
QSize _incomingFrameSize;
|
||||
|
||||
class Button;
|
||||
rpl::lifetime _callLifetime;
|
||||
|
||||
not_null<const style::CallBodyLayout*> _bodySt;
|
||||
object_ptr<Button> _answerHangupRedial;
|
||||
object_ptr<Ui::FadeWrap<Button>> _decline;
|
||||
object_ptr<Ui::FadeWrap<Button>> _cancel;
|
||||
bool _hangupShown = false;
|
||||
bool _outgoingPreviewInBody = false;
|
||||
std::optional<AnswerHangupRedialState> _answerHangupRedialState;
|
||||
Ui::Animations::Simple _hangupShownProgress;
|
||||
object_ptr<Ui::IconButton> _mute;
|
||||
object_ptr<Button> _camera;
|
||||
object_ptr<Button> _mute;
|
||||
object_ptr<Ui::FlatLabel> _name;
|
||||
object_ptr<Ui::FlatLabel> _status;
|
||||
object_ptr<SignalBars> _signalBars;
|
||||
std::vector<EmojiPtr> _fingerprint;
|
||||
QRect _fingerprintArea;
|
||||
object_ptr<Ui::RpWidget> _fingerprint = { nullptr };
|
||||
object_ptr<Ui::PaddingWrap<Ui::FlatLabel>> _remoteAudioMute = { nullptr };
|
||||
std::unique_ptr<Userpic> _userpic;
|
||||
std::unique_ptr<VideoBubble> _outgoingVideoBubble;
|
||||
QPixmap _bottomShadow;
|
||||
int _bodyTop = 0;
|
||||
int _buttonsTop = 0;
|
||||
|
||||
base::Timer _updateDurationTimer;
|
||||
base::Timer _updateOuterRippleTimer;
|
||||
|
||||
bool _visible = false;
|
||||
QPixmap _userPhoto;
|
||||
PhotoId _userPhotoId = 0;
|
||||
bool _userPhotoFull = false;
|
||||
|
||||
Ui::Animations::Simple _opacityAnimation;
|
||||
QPixmap _animationCache;
|
||||
QPixmap _bottomCache;
|
||||
QPixmap _cache;
|
||||
|
||||
};
|
||||
|
||||
} // namespace Calls
|
||||
|
|
|
|||
64
Telegram/SourceFiles/calls/calls_signal_bars.cpp
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
/*
|
||||
This file is part of Telegram Desktop,
|
||||
the official desktop application for the Telegram messaging service.
|
||||
|
||||
For license and copyright information please follow this link:
|
||||
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
||||
*/
|
||||
#include "calls/calls_signal_bars.h"
|
||||
|
||||
#include "calls/calls_call.h"
|
||||
#include "styles/style_calls.h"
|
||||
|
||||
namespace Calls {
|
||||
|
||||
SignalBars::SignalBars(
|
||||
QWidget *parent,
|
||||
not_null<Call*> call,
|
||||
const style::CallSignalBars &st)
|
||||
: RpWidget(parent)
|
||||
, _st(st)
|
||||
, _count(Call::kSignalBarStarting) {
|
||||
resize(
|
||||
_st.width + (_st.width + _st.skip) * (Call::kSignalBarCount - 1),
|
||||
_st.max);
|
||||
call->signalBarCountValue(
|
||||
) | rpl::start_with_next([=](int count) {
|
||||
changed(count);
|
||||
}, lifetime());
|
||||
}
|
||||
|
||||
void SignalBars::paintEvent(QPaintEvent *e) {
|
||||
Painter p(this);
|
||||
|
||||
PainterHighQualityEnabler hq(p);
|
||||
p.setPen(Qt::NoPen);
|
||||
p.setBrush(_st.color);
|
||||
for (auto i = 0; i < Call::kSignalBarCount; ++i) {
|
||||
p.setOpacity((i < _count) ? 1. : _st.inactiveOpacity);
|
||||
const auto barHeight = _st.min
|
||||
+ (_st.max - _st.min) * (i / float64(Call::kSignalBarCount - 1));
|
||||
const auto barLeft = i * (_st.width + _st.skip);
|
||||
const auto barTop = height() - barHeight;
|
||||
p.drawRoundedRect(
|
||||
QRectF(
|
||||
barLeft,
|
||||
barTop,
|
||||
_st.width,
|
||||
barHeight),
|
||||
_st.radius,
|
||||
_st.radius);
|
||||
}
|
||||
p.setOpacity(1.);
|
||||
}
|
||||
|
||||
void SignalBars::changed(int count) {
|
||||
if (_count == Call::kSignalBarFinished) {
|
||||
return;
|
||||
} else if (_count != count) {
|
||||
_count = count;
|
||||
update();
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace Calls
|
||||
37
Telegram/SourceFiles/calls/calls_signal_bars.h
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
/*
|
||||
This file is part of Telegram Desktop,
|
||||
the official desktop application for the Telegram messaging service.
|
||||
|
||||
For license and copyright information please follow this link:
|
||||
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
||||
*/
|
||||
#pragma once
|
||||
|
||||
#include "ui/rp_widget.h"
|
||||
|
||||
namespace style {
|
||||
struct CallSignalBars;
|
||||
} // namespace style
|
||||
|
||||
namespace Calls {
|
||||
|
||||
class Call;
|
||||
|
||||
class SignalBars final : public Ui::RpWidget {
|
||||
public:
|
||||
SignalBars(
|
||||
QWidget *parent,
|
||||
not_null<Call*> call,
|
||||
const style::CallSignalBars &st);
|
||||
|
||||
private:
|
||||
void paintEvent(QPaintEvent *e) override;
|
||||
|
||||
void changed(int count);
|
||||
|
||||
const style::CallSignalBars &_st;
|
||||
int _count = 0;
|
||||
|
||||
};
|
||||
|
||||
} // namespace Calls
|
||||
|
|
@ -14,7 +14,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
|||
#include "core/application.h"
|
||||
#include "calls/calls_call.h"
|
||||
#include "calls/calls_instance.h"
|
||||
#include "calls/calls_panel.h"
|
||||
#include "calls/calls_signal_bars.h"
|
||||
#include "data/data_user.h"
|
||||
#include "data/data_changes.h"
|
||||
#include "main/main_session.h"
|
||||
|
|
@ -94,14 +94,14 @@ TopBar::TopBar(
|
|||
void TopBar::initControls() {
|
||||
_mute->setClickedCallback([=] {
|
||||
if (const auto call = _call.get()) {
|
||||
call->setMute(!call->isMute());
|
||||
call->setMuted(!call->muted());
|
||||
}
|
||||
});
|
||||
setMuted(_call->isMute());
|
||||
subscribe(_call->muteChanged(), [=](bool mute) {
|
||||
setMuted(mute);
|
||||
_call->mutedValue(
|
||||
) | rpl::start_with_next([=](bool muted) {
|
||||
setMuted(muted);
|
||||
update();
|
||||
});
|
||||
}, lifetime());
|
||||
|
||||
_call->user()->session().changes().peerUpdates(
|
||||
Data::PeerUpdate::Flag::Name
|
||||
|
|
|
|||
|
|
@ -28,7 +28,7 @@ namespace Calls {
|
|||
class Call;
|
||||
class SignalBars;
|
||||
|
||||
class TopBar : public Ui::RpWidget, private base::Subscriber {
|
||||
class TopBar : public Ui::RpWidget {
|
||||
public:
|
||||
TopBar(QWidget *parent, const base::weak_ptr<Call> &call);
|
||||
|
||||
|
|
|
|||
217
Telegram/SourceFiles/calls/calls_userpic.cpp
Normal file
|
|
@ -0,0 +1,217 @@
|
|||
/*
|
||||
This file is part of Telegram Desktop,
|
||||
the official desktop application for the Telegram messaging service.
|
||||
|
||||
For license and copyright information please follow this link:
|
||||
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
||||
*/
|
||||
#include "calls/calls_userpic.h"
|
||||
|
||||
#include "data/data_peer.h"
|
||||
#include "main/main_session.h"
|
||||
#include "data/data_changes.h"
|
||||
#include "data/data_peer.h"
|
||||
#include "data/data_session.h"
|
||||
#include "data/data_cloud_file.h"
|
||||
#include "data/data_photo_media.h"
|
||||
#include "data/data_file_origin.h"
|
||||
#include "ui/empty_userpic.h"
|
||||
#include "apiwrap.h" // requestFullPeer.
|
||||
#include "styles/style_calls.h"
|
||||
|
||||
namespace Calls {
|
||||
namespace {
|
||||
|
||||
} // namespace
|
||||
|
||||
Userpic::Userpic(
|
||||
not_null<QWidget*> parent,
|
||||
not_null<PeerData*> peer,
|
||||
rpl::producer<bool> muted)
|
||||
: _content(parent)
|
||||
, _peer(peer) {
|
||||
setGeometry(0, 0, 0);
|
||||
setup(std::move(muted));
|
||||
}
|
||||
|
||||
Userpic::~Userpic() = default;
|
||||
|
||||
void Userpic::setVisible(bool visible) {
|
||||
_content.setVisible(visible);
|
||||
}
|
||||
|
||||
void Userpic::setGeometry(int x, int y, int size) {
|
||||
if (this->size() != size) {
|
||||
_userPhoto = QPixmap();
|
||||
_userPhotoFull = false;
|
||||
}
|
||||
_content.setGeometry(x, y, size, size);
|
||||
_content.update();
|
||||
if (_userPhoto.isNull()) {
|
||||
refreshPhoto();
|
||||
}
|
||||
}
|
||||
|
||||
void Userpic::setup(rpl::producer<bool> muted) {
|
||||
_content.show();
|
||||
_content.setAttribute(Qt::WA_TransparentForMouseEvents);
|
||||
|
||||
_content.paintRequest(
|
||||
) | rpl::start_with_next([=] {
|
||||
paint();
|
||||
}, lifetime());
|
||||
|
||||
std::move(
|
||||
muted
|
||||
) | rpl::start_with_next([=](bool muted) {
|
||||
setMuted(muted);
|
||||
}, lifetime());
|
||||
|
||||
_peer->session().changes().peerFlagsValue(
|
||||
_peer,
|
||||
Data::PeerUpdate::Flag::Photo
|
||||
) | rpl::start_with_next([=] {
|
||||
processPhoto();
|
||||
}, lifetime());
|
||||
|
||||
_peer->session().downloaderTaskFinished(
|
||||
) | rpl::start_with_next([=] {
|
||||
refreshPhoto();
|
||||
}, lifetime());
|
||||
|
||||
_mutedAnimation.stop();
|
||||
}
|
||||
|
||||
void Userpic::setMuteLayout(QPoint position, int size, int stroke) {
|
||||
_mutePosition = position;
|
||||
_muteSize = size;
|
||||
_muteStroke = stroke;
|
||||
_content.update();
|
||||
}
|
||||
|
||||
void Userpic::paint() {
|
||||
Painter p(&_content);
|
||||
|
||||
p.drawPixmap(0, 0, _userPhoto);
|
||||
if (_muted && _muteSize > 0) {
|
||||
auto hq = PainterHighQualityEnabler(p);
|
||||
auto pen = st::callBgOpaque->p;
|
||||
pen.setWidth(_muteStroke);
|
||||
p.setPen(pen);
|
||||
p.setBrush(st::callHangupBg);
|
||||
const auto rect = QRect(
|
||||
_mutePosition.x() - _muteSize / 2,
|
||||
_mutePosition.y() - _muteSize / 2,
|
||||
_muteSize,
|
||||
_muteSize);
|
||||
p.drawEllipse(rect);
|
||||
st::callMutedPeerIcon.paintInCenter(p, rect);
|
||||
}
|
||||
}
|
||||
|
||||
void Userpic::setMuted(bool muted) {
|
||||
if (_muted == muted) {
|
||||
return;
|
||||
}
|
||||
_muted = muted;
|
||||
_content.update();
|
||||
//_mutedAnimation.start(
|
||||
// [=] { _content.update(); },
|
||||
// _muted ? 0. : 1.,
|
||||
// _muted ? 1. : 0.,
|
||||
// st::fadeWrapDuration);
|
||||
}
|
||||
|
||||
int Userpic::size() const {
|
||||
return _content.width();
|
||||
}
|
||||
|
||||
void Userpic::processPhoto() {
|
||||
_userpic = _peer->createUserpicView();
|
||||
_peer->loadUserpic();
|
||||
const auto photo = _peer->userpicPhotoId()
|
||||
? _peer->owner().photo(_peer->userpicPhotoId()).get()
|
||||
: nullptr;
|
||||
if (isGoodPhoto(photo)) {
|
||||
_photo = photo->createMediaView();
|
||||
_photo->wanted(Data::PhotoSize::Thumbnail, _peer->userpicPhotoOrigin());
|
||||
} else {
|
||||
_photo = nullptr;
|
||||
if (_peer->userpicPhotoUnknown() || (photo && !photo->date)) {
|
||||
_peer->session().api().requestFullPeer(_peer);
|
||||
}
|
||||
}
|
||||
refreshPhoto();
|
||||
}
|
||||
|
||||
void Userpic::refreshPhoto() {
|
||||
if (!size()) {
|
||||
return;
|
||||
}
|
||||
const auto isNewBigPhoto = [&] {
|
||||
return _photo
|
||||
&& (_photo->image(Data::PhotoSize::Thumbnail) != nullptr)
|
||||
&& (_photo->owner()->id != _userPhotoId || !_userPhotoFull);
|
||||
}();
|
||||
if (isNewBigPhoto) {
|
||||
_userPhotoId = _photo->owner()->id;
|
||||
_userPhotoFull = true;
|
||||
createCache(_photo->image(Data::PhotoSize::Thumbnail));
|
||||
} else if (_userPhoto.isNull()) {
|
||||
createCache(_userpic ? _userpic->image() : nullptr);
|
||||
}
|
||||
}
|
||||
|
||||
void Userpic::createCache(Image *image) {
|
||||
const auto size = this->size();
|
||||
const auto real = size * cIntRetinaFactor();
|
||||
auto options = Images::Option::Smooth | Images::Option::Circled;
|
||||
// _useTransparency ? (Images::Option::RoundedLarge | Images::Option::RoundedTopLeft | Images::Option::RoundedTopRight | Images::Option::Smooth) : Images::Option::None;
|
||||
if (image) {
|
||||
auto width = image->width();
|
||||
auto height = image->height();
|
||||
if (width > height) {
|
||||
width = qMax((width * real) / height, 1);
|
||||
height = real;
|
||||
} else {
|
||||
height = qMax((height * real) / width, 1);
|
||||
width = real;
|
||||
}
|
||||
_userPhoto = image->pixNoCache(
|
||||
width,
|
||||
height,
|
||||
options,
|
||||
size,
|
||||
size);
|
||||
_userPhoto.setDevicePixelRatio(cRetinaFactor());
|
||||
} else {
|
||||
auto filled = QImage(QSize(real, real), QImage::Format_ARGB32_Premultiplied);
|
||||
filled.setDevicePixelRatio(cRetinaFactor());
|
||||
filled.fill(Qt::transparent);
|
||||
{
|
||||
Painter p(&filled);
|
||||
Ui::EmptyUserpic(
|
||||
Data::PeerUserpicColor(_peer->id),
|
||||
_peer->name
|
||||
).paint(p, 0, 0, size, size);
|
||||
}
|
||||
//Images::prepareRound(filled, ImageRoundRadius::Large, RectPart::TopLeft | RectPart::TopRight);
|
||||
_userPhoto = Images::PixmapFast(std::move(filled));
|
||||
}
|
||||
|
||||
_content.update();
|
||||
}
|
||||
|
||||
bool Userpic::isGoodPhoto(PhotoData *photo) const {
|
||||
if (!photo || photo->isNull()) {
|
||||
return false;
|
||||
}
|
||||
const auto badAspect = [](int a, int b) {
|
||||
return a > 10 * b;
|
||||
};
|
||||
const auto width = photo->width();
|
||||
const auto height = photo->height();
|
||||
return !badAspect(width, height) && !badAspect(height, width);
|
||||
}
|
||||
|
||||
} // namespace Calls
|
||||
67
Telegram/SourceFiles/calls/calls_userpic.h
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
This file is part of Telegram Desktop,
|
||||
the official desktop application for the Telegram messaging service.
|
||||
|
||||
For license and copyright information please follow this link:
|
||||
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
|
||||
*/
|
||||
#pragma once
|
||||
|
||||
#include "ui/rp_widget.h"
|
||||
#include "ui/effects/animations.h"
|
||||
|
||||
class PeerData;
|
||||
class Image;
|
||||
|
||||
namespace Data {
|
||||
class CloudImageView;
|
||||
class PhotoMedia;
|
||||
} // namespace Data
|
||||
|
||||
namespace Calls {
|
||||
|
||||
class Userpic final {
|
||||
public:
|
||||
Userpic(
|
||||
not_null<QWidget*> parent,
|
||||
not_null<PeerData*> peer,
|
||||
rpl::producer<bool> muted);
|
||||
~Userpic();
|
||||
|
||||
void setVisible(bool visible);
|
||||
void setGeometry(int x, int y, int size);
|
||||
void setMuteLayout(QPoint position, int size, int stroke);
|
||||
|
||||
[[nodiscard]] rpl::lifetime &lifetime() {
|
||||
return _content.lifetime();
|
||||
}
|
||||
|
||||
private:
|
||||
void setup(rpl::producer<bool> muted);
|
||||
|
||||
void paint();
|
||||
void setMuted(bool muted);
|
||||
[[nodiscard]] int size() const;
|
||||
|
||||
void processPhoto();
|
||||
void refreshPhoto();
|
||||
[[nodiscard]] bool isGoodPhoto(PhotoData *photo) const;
|
||||
void createCache(Image *image);
|
||||
|
||||
Ui::RpWidget _content;
|
||||
|
||||
not_null<PeerData*> _peer;
|
||||
std::shared_ptr<Data::CloudImageView> _userpic;
|
||||
std::shared_ptr<Data::PhotoMedia> _photo;
|
||||
Ui::Animations::Simple _mutedAnimation;
|
||||
QPixmap _userPhoto;
|
||||
PhotoId _userPhotoId = 0;
|
||||
QPoint _mutePosition;
|
||||
int _muteSize = 0;
|
||||
int _muteStroke = 0;
|
||||
bool _userPhotoFull = false;
|
||||
bool _muted = false;
|
||||
|
||||
};
|
||||
|
||||
} // namespace Calls
|
||||