From 2e27f6d05aa1c2215bdb187f0b8222dc6c008585 Mon Sep 17 00:00:00 2001
From: Denis Grigorev
Date: Fri, 25 Jun 2021 14:20:28 +0300
Subject: [PATCH] [sailfishos][webrtc] Enable support for WebRTC video.
 JB#53982

---
 embedding/embedlite/embedding.js              |  11 +-
 old-configure.in                              |  11 +
 ...or-rects-when-rendering-to-an-offsc.patch} |   0
 ...c-Disable-enumeration-of-video-devic.patch |  45 -
 ...tc-Enable-GMP-for-encoding.-JB-53982.patch | 818 ++++++++++++++++++
 ...c-Implement-video-capture-module.-JB.patch | 779 +++++++++++++++++
 rpm/xulrunner-qt5.spec                        |   6 +-
 7 files changed, 1621 insertions(+), 49 deletions(-)
 rename rpm/{0070-Do-not-flip-scissor-rects-when-rendering-to-an-offsc.patch => 0069-Do-not-flip-scissor-rects-when-rendering-to-an-offsc.patch} (100%)
 delete mode 100644 rpm/0069-sailfishos-webrtc-Disable-enumeration-of-video-devic.patch
 create mode 100644 rpm/0070-sailfishos-webrtc-Enable-GMP-for-encoding.-JB-53982.patch
 create mode 100644 rpm/0071-sailfishos-webrtc-Implement-video-capture-module.-JB.patch

diff --git a/embedding/embedlite/embedding.js b/embedding/embedlite/embedding.js
index 3b577153e4be8..d0a1f66bc8ac1 100644
--- a/embedding/embedlite/embedding.js
+++ b/embedding/embedlite/embedding.js
@@ -436,5 +436,12 @@ pref("media.cubeb.backend", "pulse");
 // On ESR60 customelements is only enabled for nightly. Enable for us.
 pref("dom.webcomponents.customelements.enabled", true);
 
-// Disable WebRTC video until it is implemented
-pref("media.navigator.video.enabled", false);
+// No native handle support (yet) for video frames, so higher resolutions degrade performance
+pref("media.navigator.video.default_width", 320);
+pref("media.navigator.video.default_height", 240);
+
+// Many browsers prefer VP9 over H264. If sailfish-browser initiates the session, the remote
+// peer may override our preference and put VP9 ahead of H264. Due to a bug, Gecko ignores the
+// peer's preference and still creates an H264 decoder. As a workaround, disable VP9 until the
+// bug is fixed.
+pref("media.peerconnection.video.vp9_enabled", false); diff --git a/old-configure.in b/old-configure.in index 46e823b9f7415..7b4f9f7559b7a 100644 --- a/old-configure.in +++ b/old-configure.in @@ -2356,6 +2356,17 @@ if test -n "$MOZ_WEBRTC"; then if test -n "$MOZ_X11"; then MOZ_WEBRTC_X11_LIBS="-lXext -lXdamage -lXfixes -lXcomposite" fi + + MOZ_ENABLE_WEBRTC_AMBERCAMERA= + PKG_CHECK_MODULES(LIBAMBERCAMERA, ambercamera, + MOZ_ENABLE_WEBRTC_AMBERCAMERA=1, + MOZ_ENABLE_WEBRTC_AMBERCAMERA=) + if test "$MOZ_ENABLE_WEBRTC_AMBERCAMERA"; then + MOZ_ENABLE_WEBRTC_AMBERCAMERA=1 + AC_DEFINE(MOZ_ENABLE_WEBRTC_AMBERCAMERA) + AC_SUBST(MOZ_ENABLE_WEBRTC_AMBERCAMERA) + fi + fi dnl ======================================================== diff --git a/rpm/0070-Do-not-flip-scissor-rects-when-rendering-to-an-offsc.patch b/rpm/0069-Do-not-flip-scissor-rects-when-rendering-to-an-offsc.patch similarity index 100% rename from rpm/0070-Do-not-flip-scissor-rects-when-rendering-to-an-offsc.patch rename to rpm/0069-Do-not-flip-scissor-rects-when-rendering-to-an-offsc.patch diff --git a/rpm/0069-sailfishos-webrtc-Disable-enumeration-of-video-devic.patch b/rpm/0069-sailfishos-webrtc-Disable-enumeration-of-video-devic.patch deleted file mode 100644 index 80d95431629cb..0000000000000 --- a/rpm/0069-sailfishos-webrtc-Disable-enumeration-of-video-devic.patch +++ /dev/null @@ -1,45 +0,0 @@ -From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 -From: Denis Grigorev -Date: Fri, 16 Apr 2021 12:40:46 +0300 -Subject: [PATCH] [sailfishos][webrtc] Disable enumeration of video - devices. JB#53756 - -Video capture is not yet implemented on SFOS. Do not report about -/dev/video* devices as it will break audio calls on sites requesting -video devices. - -Signed-off-by: Denis Grigorev ---- - .../modules/video_capture/linux/device_info_linux.cc | 9 +++++++++ - 1 file changed, 9 insertions(+) - -diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc b/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc -index d1bcdf2d3c33..cb8bb6fe6e50 100644 ---- a/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc -+++ b/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc -@@ -241,6 +241,7 @@ uint32_t DeviceInfoLinux::NumberOfDevices() - WEBRTC_TRACE(webrtc::kTraceApiCall, - webrtc::kTraceVideoCapture, 0, "%s", __FUNCTION__); - -+#if !defined(MOZ_EMBEDLITE) - uint32_t count = 0; - char device[20]; - int fd = -1; -@@ -257,6 +258,14 @@ uint32_t DeviceInfoLinux::NumberOfDevices() - } - - return count; -+#else -+ /* -+ * Video capture is not yet implemented on SFOS. -+ * Do not report about /dev/video* devices as it will break audio calls -+ * on sites requesting video devices. -+ */ -+ return 0; -+#endif - } - - int32_t DeviceInfoLinux::GetDeviceName( --- -2.17.1 - diff --git a/rpm/0070-sailfishos-webrtc-Enable-GMP-for-encoding.-JB-53982.patch b/rpm/0070-sailfishos-webrtc-Enable-GMP-for-encoding.-JB-53982.patch new file mode 100644 index 0000000000000..f17ab6cfc4e95 --- /dev/null +++ b/rpm/0070-sailfishos-webrtc-Enable-GMP-for-encoding.-JB-53982.patch @@ -0,0 +1,818 @@ +From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 +From: Denis Grigorev +Date: Fri, 30 Apr 2021 14:59:18 +0300 +Subject: [PATCH] [sailfishos][webrtc] Enable GMP for encoding/decoding. 
+ JB#53982 + +--- + dom/media/gmp/GMPSharedMemManager.cpp | 34 +- + dom/media/gmp/GMPSharedMemManager.h | 8 +- + .../src/media-conduit/GmpVideoCodec.cpp | 8 +- + .../src/media-conduit/GmpVideoCodec.h | 4 +- + .../src/media-conduit/VideoConduit.cpp | 61 ++- + .../src/media-conduit/WebrtcGmpVideoCodec.cpp | 351 ++++++++++++------ + .../src/media-conduit/WebrtcGmpVideoCodec.h | 11 +- + .../src/peerconnection/PeerConnectionImpl.cpp | 8 +- + 8 files changed, 329 insertions(+), 156 deletions(-) + +diff --git a/dom/media/gmp/GMPSharedMemManager.cpp b/dom/media/gmp/GMPSharedMemManager.cpp +index a7f462ce7a7f..fa279bffb49e 100644 +--- a/dom/media/gmp/GMPSharedMemManager.cpp ++++ b/dom/media/gmp/GMPSharedMemManager.cpp +@@ -24,13 +24,16 @@ bool GMPSharedMemManager::MgrAllocShmem( + ipc::Shmem::SharedMemory::SharedMemoryType aType, ipc::Shmem* aMem) { + mData->CheckThread(); + +- // first look to see if we have a free buffer large enough +- for (uint32_t i = 0; i < GetGmpFreelist(aClass).Length(); i++) { +- MOZ_ASSERT(GetGmpFreelist(aClass)[i].IsWritable()); +- if (aSize <= GetGmpFreelist(aClass)[i].Size()) { +- *aMem = GetGmpFreelist(aClass)[i]; +- GetGmpFreelist(aClass).RemoveElementAt(i); +- return true; ++ { ++ // first look to see if we have a free buffer large enough ++ MutexAutoLock lock(mMutex); ++ for (uint32_t i = 0; i < GetGmpFreelist(aClass).Length(); i++) { ++ MOZ_ASSERT(GetGmpFreelist(aClass)[i].IsWritable()); ++ if (aSize <= GetGmpFreelist(aClass)[i].Size()) { ++ *aMem = GetGmpFreelist(aClass)[i]; ++ GetGmpFreelist(aClass).RemoveElementAt(i); ++ return true; ++ } + } + } + +@@ -39,6 +42,7 @@ bool GMPSharedMemManager::MgrAllocShmem( + aSize = (aSize + (pagesize - 1)) & ~(pagesize - 1); // round up to page size + bool retval = Alloc(aSize, aType, aMem); + if (retval) { ++ MutexAutoLock lock(mMutex); + // The allocator (or NeedsShmem call) should never return less than we ask + // for... + MOZ_ASSERT(aMem->Size() >= aSize); +@@ -68,11 +72,18 @@ bool GMPSharedMemManager::MgrDeallocShmem(GMPSharedMem::GMPMemoryClasses aClass, + // XXX This works; there are better pool algorithms. We need to avoid + // "falling off a cliff" with too low a number + if (GetGmpFreelist(aClass).Length() > 10) { +- Dealloc(GetGmpFreelist(aClass)[0]); +- GetGmpFreelist(aClass).RemoveElementAt(0); ++ ipc::Shmem element; ++ { ++ MutexAutoLock lock(mMutex); ++ element = GetGmpFreelist(aClass)[0]; ++ GetGmpFreelist(aClass).RemoveElementAt(0); ++ mData->mGmpAllocated[aClass]--; ++ } ++ Dealloc(element); + // The allocation numbers will be fubar on the Child! 
+- mData->mGmpAllocated[aClass]--; + } ++ ++ MutexAutoLock lock(mMutex); + for (uint32_t i = 0; i < GetGmpFreelist(aClass).Length(); i++) { + MOZ_ASSERT(GetGmpFreelist(aClass)[i].IsWritable()); + total += GetGmpFreelist(aClass)[i].Size(); +@@ -86,7 +97,8 @@ bool GMPSharedMemManager::MgrDeallocShmem(GMPSharedMem::GMPMemoryClasses aClass, + return true; + } + +-uint32_t GMPSharedMemManager::NumInUse(GMPSharedMem::GMPMemoryClasses aClass) { ++int32_t GMPSharedMemManager::NumInUse(GMPSharedMem::GMPMemoryClasses aClass) { ++ MutexAutoLock lock(mMutex); + return mData->mGmpAllocated[aClass] - GetGmpFreelist(aClass).Length(); + } + +diff --git a/dom/media/gmp/GMPSharedMemManager.h b/dom/media/gmp/GMPSharedMemManager.h +index 738a4d114c2b..fc0b34b71f71 100644 +--- a/dom/media/gmp/GMPSharedMemManager.h ++++ b/dom/media/gmp/GMPSharedMemManager.h +@@ -7,6 +7,7 @@ + #define GMPSharedMemManager_h_ + + #include "mozilla/ipc/Shmem.h" ++#include "mozilla/Mutex.h" + #include "nsTArray.h" + + namespace mozilla { +@@ -27,7 +28,7 @@ class GMPSharedMem { + // returned to the parent pool (which is not included). If more than + // this are needed, we presume the client has either crashed or hung + // (perhaps temporarily). +- static const uint32_t kGMPBufLimit = 20; ++ static const int32_t kGMPBufLimit = 32; + + GMPSharedMem() { + for (size_t i = 0; i < sizeof(mGmpAllocated) / sizeof(mGmpAllocated[0]); +@@ -49,7 +50,7 @@ class GMPSharedMem { + + class GMPSharedMemManager { + public: +- explicit GMPSharedMemManager(GMPSharedMem* aData) : mData(aData) {} ++ explicit GMPSharedMemManager(GMPSharedMem* aData) : mMutex("GMPSharedMemManager::mMutex"), mData(aData) {} + virtual ~GMPSharedMemManager() {} + + virtual bool MgrAllocShmem(GMPSharedMem::GMPMemoryClasses aClass, +@@ -61,7 +62,7 @@ class GMPSharedMemManager { + + // So we can know if data is "piling up" for the plugin - I.e. it's hung or + // crashed +- virtual uint32_t NumInUse(GMPSharedMem::GMPMemoryClasses aClass); ++ virtual int32_t NumInUse(GMPSharedMem::GMPMemoryClasses aClass); + + // These have to be implemented using the AllocShmem/etc provided by the + // IPDL-generated interfaces, so have the Parent/Child implement them. 
+@@ -75,6 +76,7 @@ class GMPSharedMemManager { + return mData->mGmpFreelist[aTypes]; + } + ++ Mutex mMutex; + GMPSharedMem* mData; + }; + +diff --git a/media/webrtc/signaling/src/media-conduit/GmpVideoCodec.cpp b/media/webrtc/signaling/src/media-conduit/GmpVideoCodec.cpp +index b5f39bc3b7d4..6a331c27a033 100644 +--- a/media/webrtc/signaling/src/media-conduit/GmpVideoCodec.cpp ++++ b/media/webrtc/signaling/src/media-conduit/GmpVideoCodec.cpp +@@ -7,12 +7,12 @@ + + namespace mozilla { + +-WebrtcVideoEncoder* GmpVideoCodec::CreateEncoder() { +- return new WebrtcVideoEncoderProxy(); ++WebrtcVideoEncoder* GmpVideoCodec::CreateEncoder(webrtc::VideoCodecType type) { ++ return new WebrtcVideoEncoderProxy(type); + } + +-WebrtcVideoDecoder* GmpVideoCodec::CreateDecoder() { +- return new WebrtcVideoDecoderProxy(); ++WebrtcVideoDecoder* GmpVideoCodec::CreateDecoder(webrtc::VideoCodecType type) { ++ return new WebrtcVideoDecoderProxy(type); + } + + } // namespace mozilla +diff --git a/media/webrtc/signaling/src/media-conduit/GmpVideoCodec.h b/media/webrtc/signaling/src/media-conduit/GmpVideoCodec.h +index 318a891d0678..369b9e698197 100644 +--- a/media/webrtc/signaling/src/media-conduit/GmpVideoCodec.h ++++ b/media/webrtc/signaling/src/media-conduit/GmpVideoCodec.h +@@ -10,8 +10,8 @@ + namespace mozilla { + class GmpVideoCodec { + public: +- static WebrtcVideoEncoder* CreateEncoder(); +- static WebrtcVideoDecoder* CreateDecoder(); ++ static WebrtcVideoEncoder* CreateEncoder(webrtc::VideoCodecType type); ++ static WebrtcVideoDecoder* CreateDecoder(webrtc::VideoCodecType type); + }; + + } // namespace mozilla +diff --git a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp +index c8dddccec26b..b4d4b6221e7a 100644 +--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp ++++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp +@@ -1501,15 +1501,37 @@ webrtc::VideoDecoder* WebrtcVideoConduit::CreateDecoder( + return decoder; + } + +- switch (aType) { ++ // Attempt to create a GMP decoder. ++ { ++ nsCString tag; ++ ++ switch (aType) { + case webrtc::VideoCodecType::kVideoCodecH264: +- // get an external decoder +- decoder = GmpVideoCodec::CreateDecoder(); ++ tag = NS_LITERAL_CSTRING("h264"); ++ break; ++ case webrtc::VideoCodecType::kVideoCodecVP8: ++ tag = NS_LITERAL_CSTRING("vp8"); ++ break; ++ case webrtc::VideoCodecType::kVideoCodecVP9: ++ tag = NS_LITERAL_CSTRING("vp9"); ++ break; ++ default: ++ return nullptr; ++ } ++ ++ if (HaveGMPFor(NS_LITERAL_CSTRING(GMP_API_VIDEO_DECODER), { tag })) { ++ decoder = GmpVideoCodec::CreateDecoder(aType); + if (decoder) { + mRecvCodecPlugin = static_cast(decoder); + } +- break; ++ return decoder; ++ } ++ } + ++ switch (aType) { ++ case webrtc::VideoCodecType::kVideoCodecH264: ++ // No support for software h264. 
++ return nullptr; + case webrtc::VideoCodecType::kVideoCodecVP8: + #ifdef MOZ_WEBRTC_MEDIACODEC + // attempt to get a decoder +@@ -1545,7 +1567,6 @@ webrtc::VideoDecoder* WebrtcVideoConduit::CreateDecoder( + MOZ_ASSERT(webrtc::VP9Decoder::IsSupported()); + decoder = webrtc::VP9Decoder::Create(); + break; +- + default: + break; + } +@@ -1559,15 +1580,33 @@ webrtc::VideoEncoder* WebrtcVideoConduit::CreateEncoder( + #ifdef MOZ_WEBRTC_MEDIACODEC + bool enabled = false; + #endif +- ++ nsCString tag; + switch (aType) { + case webrtc::VideoCodecType::kVideoCodecH264: +- // get an external encoder +- encoder = GmpVideoCodec::CreateEncoder(); +- if (encoder) { +- mSendCodecPlugin = static_cast(encoder); +- } ++ tag = NS_LITERAL_CSTRING("h264"); ++ break; ++ case webrtc::VideoCodecType::kVideoCodecVP8: ++ tag = NS_LITERAL_CSTRING("vp8"); ++ break; ++ case webrtc::VideoCodecType::kVideoCodecVP9: ++ tag = NS_LITERAL_CSTRING("vp9"); + break; ++ default: ++ return nullptr; ++ } ++ ++ if (HaveGMPFor(NS_LITERAL_CSTRING(GMP_API_VIDEO_ENCODER), { tag })) { ++ encoder = GmpVideoCodec::CreateEncoder(aType); ++ if (encoder) { ++ mSendCodecPlugin = static_cast(encoder); ++ } ++ return encoder; ++ } ++ ++ switch (aType) { ++ case webrtc::VideoCodecType::kVideoCodecH264: ++ // No support for software h264. ++ return nullptr; + + case webrtc::VideoCodecType::kVideoCodecVP8: + #ifdef MOZ_WEBRTC_MEDIACODEC +diff --git a/media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.cpp b/media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.cpp +index f7976636ec47..130662dd400a 100644 +--- a/media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.cpp ++++ b/media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.cpp +@@ -67,14 +67,15 @@ WebrtcGmpPCHandleSetter::~WebrtcGmpPCHandleSetter() { + std::string WebrtcGmpPCHandleSetter::sCurrentHandle = ""; + + // Encoder. +-WebrtcGmpVideoEncoder::WebrtcGmpVideoEncoder() ++WebrtcGmpVideoEncoder::WebrtcGmpVideoEncoder(webrtc::VideoCodecType aType) + : mGMP(nullptr), + mInitting(false), + mHost(nullptr), + mMaxPayloadSize(0), + mCallbackMutex("WebrtcGmpVideoEncoder encoded callback mutex"), + mCallback(nullptr), +- mCachedPluginId(0) { ++ mCachedPluginId(0), ++ mCodecType(aType) { + if (mPCHandle.empty()) { + mPCHandle = WebrtcGmpPCHandleSetter::GetCurrentHandle(); + } +@@ -153,15 +154,36 @@ int32_t WebrtcGmpVideoEncoder::InitEncode( + + memset(&mCodecSpecificInfo.codecSpecific, 0, + sizeof(mCodecSpecificInfo.codecSpecific)); +- mCodecSpecificInfo.codecType = webrtc::kVideoCodecH264; +- mCodecSpecificInfo.codecSpecific.H264.packetization_mode = +- aCodecSettings->H264().packetizationMode == 1 +- ? webrtc::H264PacketizationMode::NonInterleaved +- : webrtc::H264PacketizationMode::SingleNalUnit; ++ mCodecSpecificInfo.codecType = aCodecSettings->codecType; ++ ++ switch (aCodecSettings->codecType) { ++ case webrtc::VideoCodecType::kVideoCodecH264: { ++ codecParams.mCodecType = kGMPVideoCodecH264; + +- if (mCodecSpecificInfo.codecSpecific.H264.packetization_mode == +- webrtc::H264PacketizationMode::NonInterleaved) { +- mMaxPayloadSize = 0; // No limit, use FUAs ++ mCodecSpecificInfo.codecSpecific.H264.packetization_mode = ++ aCodecSettings->H264().packetizationMode == 1 ++ ? 
webrtc::H264PacketizationMode::NonInterleaved ++ : webrtc::H264PacketizationMode::SingleNalUnit; ++ ++ if (mCodecSpecificInfo.codecSpecific.H264.packetization_mode == ++ webrtc::H264PacketizationMode::NonInterleaved) { ++ mMaxPayloadSize = 0; // No limit, use FUAs ++ } ++ break; ++ } ++ case webrtc::VideoCodecType::kVideoCodecVP8: ++ codecParams.mCodecType = kGMPVideoCodecVP8; ++ mCodecSpecificInfo.codecSpecific.VP8.tl0PicIdx = -1; ++ mCodecSpecificInfo.codecSpecific.VP8.keyIdx = -1; ++ mCodecSpecificInfo.codecSpecific.VP8.temporalIdx = 1; ++ mCodecSpecificInfo.codecSpecific.VP8.simulcastIdx = 0; ++ break; ++ case webrtc::VideoCodecType::kVideoCodecVP9: ++ codecParams.mCodecType = kGMPVideoCodecVP9; ++ break; ++ default: ++ // The requested codec is not supported. ++ return WEBRTC_VIDEO_CODEC_ERROR; + } + + if (aCodecSettings->mode == webrtc::kScreensharing) { +@@ -193,7 +215,18 @@ void WebrtcGmpVideoEncoder::InitEncode_g( + const GMPVideoCodec& aCodecParams, int32_t aNumberOfCores, + uint32_t aMaxPayloadSize, const RefPtr& aInitDone) { + nsTArray tags; +- tags.AppendElement(NS_LITERAL_CSTRING("h264")); ++ switch (aCodecParams.mCodecType) { ++ case kGMPVideoCodecVP8: ++ tags.AppendElement(NS_LITERAL_CSTRING("vp8")); ++ break; ++ case kGMPVideoCodecVP9: ++ tags.AppendElement(NS_LITERAL_CSTRING("vp9")); ++ break; ++ case kGMPVideoCodecH264: ++ default: ++ tags.AppendElement(NS_LITERAL_CSTRING("h264")); ++ break; ++ } + UniquePtr callback( + new InitDoneCallback(aThis, aInitDone, aCodecParams, aMaxPayloadSize)); + aThis->mInitting = true; +@@ -312,7 +345,19 @@ void WebrtcGmpVideoEncoder::RegetEncoderForResolutionChange( + // re-init the plugin when the resolution changes + // XXX allow codec to indicate it doesn't need re-init! + nsTArray tags; +- tags.AppendElement(NS_LITERAL_CSTRING("h264")); ++ switch (mCodecType) { ++ case kGMPVideoCodecVP8: ++ tags.AppendElement(NS_LITERAL_CSTRING("vp8")); ++ break; ++ case kGMPVideoCodecVP9: ++ tags.AppendElement(NS_LITERAL_CSTRING("vp9")); ++ break; ++ case kGMPVideoCodecH264: ++ default: ++ tags.AppendElement(NS_LITERAL_CSTRING("h264")); ++ break; ++ } ++ + mInitting = true; + if (NS_WARN_IF(NS_FAILED(mMPS->GetGMPVideoEncoder( + nullptr, &tags, NS_LITERAL_CSTRING(""), Move(callback))))) { +@@ -383,7 +428,21 @@ void WebrtcGmpVideoEncoder::Encode_g( + // Bug XXXXXX: Set codecSpecific info + GMPCodecSpecificInfo info; + memset(&info, 0, sizeof(info)); +- info.mCodecType = kGMPVideoCodecH264; ++ switch (aEncoder->mCodecType) { ++ case webrtc::VideoCodecType::kVideoCodecH264: ++ info.mCodecType = kGMPVideoCodecH264; ++ break; ++ case webrtc::VideoCodecType::kVideoCodecVP8: ++ info.mCodecType = kGMPVideoCodecVP8; ++ break; ++ case webrtc::VideoCodecType::kVideoCodecVP9: ++ info.mCodecType = kGMPVideoCodecVP9; ++ break; ++ default: ++ info.mCodecType = kGMPVideoCodecInvalid; ++ break; ++ } ++ + nsTArray codecSpecificInfo; + codecSpecificInfo.AppendElements((uint8_t*)&info, + sizeof(GMPCodecSpecificInfo)); +@@ -494,127 +553,147 @@ void WebrtcGmpVideoEncoder::Encoded( + // XXX convert to FragmentationHeader format (array of offsets and sizes + // plus a buffer) in combination with H264 packetization changes in + // webrtc/trunk code +- uint8_t* buffer = aEncodedFrame->Buffer(); +- uint8_t* end = aEncodedFrame->Buffer() + aEncodedFrame->Size(); +- size_t size_bytes; +- switch (aEncodedFrame->BufferType()) { +- case GMP_BufferSingle: +- size_bytes = 0; +- break; +- case GMP_BufferLength8: +- size_bytes = 1; +- break; +- case GMP_BufferLength16: +- size_bytes = 
2; +- break; +- case GMP_BufferLength24: +- size_bytes = 3; +- break; +- case GMP_BufferLength32: +- size_bytes = 4; +- break; +- default: +- // Really that it's not in the enum +- LOG(LogLevel::Error, ("GMP plugin returned incorrect type (%d)", +- aEncodedFrame->BufferType())); +- // XXX Bug 1041232 - need a better API for interfacing to the +- // plugin so we can kill it here +- return; +- } +- +- struct nal_entry { +- uint32_t offset; +- uint32_t size; +- }; +- AutoTArray nals; +- uint32_t size = 0; +- // make sure we don't read past the end of the buffer getting the size +- while (buffer + size_bytes < end) { ++ if (mCodecType == webrtc::kVideoCodecH264) { ++ uint8_t* buffer = aEncodedFrame->Buffer(); ++ uint8_t* end = aEncodedFrame->Buffer() + aEncodedFrame->Size(); ++ size_t size_bytes; + switch (aEncodedFrame->BufferType()) { + case GMP_BufferSingle: +- size = aEncodedFrame->Size(); ++ size_bytes = 0; + break; + case GMP_BufferLength8: +- size = *buffer++; ++ size_bytes = 1; + break; + case GMP_BufferLength16: +- // presumes we can do unaligned loads +- size = *(reinterpret_cast(buffer)); +- buffer += 2; ++ size_bytes = 2; + break; + case GMP_BufferLength24: +- // 24-bits is a pain, since byte-order issues make things painful +- // I'm going to define 24-bit as little-endian always; big-endian must +- // convert +- size = ((uint32_t)*buffer) | (((uint32_t) * (buffer + 1)) << 8) | +- (((uint32_t) * (buffer + 2)) << 16); +- buffer += 3; ++ size_bytes = 3; + break; + case GMP_BufferLength32: +- // presumes we can do unaligned loads +- size = *(reinterpret_cast(buffer)); +- buffer += 4; ++ size_bytes = 4; + break; + default: +- MOZ_CRASH("GMP_BufferType already handled in switch above"); +- } +- MOZ_ASSERT(size != 0 && +- buffer + size <= +- end); // in non-debug code, don't crash in this case +- if (size == 0 || buffer + size > end) { +- // XXX see above - should we kill the plugin for returning extra bytes? 
+- // Probably +- LOG(LogLevel::Error, ("GMP plugin returned badly formatted encoded " +- "data: buffer=%p, size=%d, end=%p", +- buffer, size, end)); +- return; ++ // Really that it's not in the enum ++ LOG(LogLevel::Error, ("GMP plugin returned incorrect type (%d)", ++ aEncodedFrame->BufferType())); ++ // XXX Bug 1041232 - need a better API for interfacing to the ++ // plugin so we can kill it here ++ return; + } +- // XXX optimize by making buffer an offset +- nal_entry nal = {((uint32_t)(buffer - aEncodedFrame->Buffer())), +- (uint32_t)size}; +- nals.AppendElement(nal); +- buffer += size; +- // on last one, buffer == end normally +- } +- if (buffer != end) { +- // At most 3 bytes can be left over, depending on buffertype +- LOGD(("GMP plugin returned %td extra bytes", end - buffer)); +- } + +- size_t num_nals = nals.Length(); +- if (num_nals > 0) { +- webrtc::RTPFragmentationHeader fragmentation; +- fragmentation.VerifyAndAllocateFragmentationHeader(num_nals); +- for (size_t i = 0; i < num_nals; i++) { +- fragmentation.fragmentationOffset[i] = nals[i].offset; +- fragmentation.fragmentationLength[i] = nals[i].size; ++ struct nal_entry { ++ uint32_t offset; ++ uint32_t size; ++ }; ++ AutoTArray nals; ++ uint32_t size = 0; ++ // make sure we don't read past the end of the buffer getting the size ++ while (buffer + size_bytes < end) { ++ switch (aEncodedFrame->BufferType()) { ++ case GMP_BufferSingle: ++ size = aEncodedFrame->Size(); ++ break; ++ case GMP_BufferLength8: ++ size = *buffer++; ++ break; ++ case GMP_BufferLength16: ++ // presumes we can do unaligned loads ++ size = *(reinterpret_cast(buffer)); ++ buffer += 2; ++ break; ++ case GMP_BufferLength24: ++ // 24-bits is a pain, since byte-order issues make things painful ++ // I'm going to define 24-bit as little-endian always; big-endian must ++ // convert ++ size = ((uint32_t)*buffer) | (((uint32_t) * (buffer + 1)) << 8) | ++ (((uint32_t) * (buffer + 2)) << 16); ++ buffer += 3; ++ break; ++ case GMP_BufferLength32: ++ // presumes we can do unaligned loads ++ size = *(reinterpret_cast(buffer)); ++ buffer += 4; ++ break; ++ default: ++ MOZ_CRASH("GMP_BufferType already handled in switch above"); ++ } ++ MOZ_ASSERT(size != 0 && ++ buffer + size <= ++ end); // in non-debug code, don't crash in this case ++ if (size == 0 || buffer + size > end) { ++ // XXX see above - should we kill the plugin for returning extra bytes? 
++ // Probably ++ LOG(LogLevel::Error, ("GMP plugin returned badly formatted encoded " ++ "data: buffer=%p, size=%d, end=%p", ++ buffer, size, end)); ++ return; ++ } ++ // XXX optimize by making buffer an offset ++ nal_entry nal = {((uint32_t)(buffer - aEncodedFrame->Buffer())), ++ (uint32_t)size}; ++ nals.AppendElement(nal); ++ buffer += size; ++ // on last one, buffer == end normally ++ } ++ if (buffer != end) { ++ // At most 3 bytes can be left over, depending on buffertype ++ LOGD(("GMP plugin returned %td extra bytes", end - buffer)); + } + ++ size_t num_nals = nals.Length(); ++ if (num_nals > 0) { ++ webrtc::RTPFragmentationHeader fragmentation; ++ fragmentation.VerifyAndAllocateFragmentationHeader(num_nals); ++ for (size_t i = 0; i < num_nals; i++) { ++ fragmentation.fragmentationOffset[i] = nals[i].offset; ++ fragmentation.fragmentationLength[i] = nals[i].size; ++ } ++ ++ webrtc::EncodedImage unit(aEncodedFrame->Buffer(), size, size); ++ unit._frameType = ft; ++ unit._timeStamp = timestamp; ++ // Ensure we ignore this when calculating RTCP timestamps ++ unit.capture_time_ms_ = -1; ++ unit._completeFrame = true; ++ ++ // TODO: Currently the OpenH264 codec does not preserve any codec ++ // specific info passed into it and just returns default values. ++ // If this changes in the future, it would be nice to get rid of ++ // mCodecSpecificInfo. ++ mCallback->OnEncodedImage(unit, &mCodecSpecificInfo, &fragmentation); ++ } ++ } else { // VP8,VP9 ++ size_t size = aEncodedFrame->Size(); + webrtc::EncodedImage unit(aEncodedFrame->Buffer(), size, size); + unit._frameType = ft; + unit._timeStamp = timestamp; +- // Ensure we ignore this when calculating RTCP timestamps + unit.capture_time_ms_ = -1; + unit._completeFrame = true; ++ webrtc::CodecSpecificInfo info = mCodecSpecificInfo; ++ ++ mCodecSpecificInfo.codecSpecific.VP8.pictureId++; ++ ++ webrtc::RTPFragmentationHeader header; ++ header.VerifyAndAllocateFragmentationHeader(1); ++ header.fragmentationOffset[0] = 0; ++ header.fragmentationLength[0] = aEncodedFrame->Size(); + +- // TODO: Currently the OpenH264 codec does not preserve any codec +- // specific info passed into it and just returns default values. +- // If this changes in the future, it would be nice to get rid of +- // mCodecSpecificInfo. +- mCallback->OnEncodedImage(unit, &mCodecSpecificInfo, &fragmentation); ++ mCallback->OnEncodedImage(unit, &info, &header); + } + } + } + + // Decoder. 
+-WebrtcGmpVideoDecoder::WebrtcGmpVideoDecoder() ++WebrtcGmpVideoDecoder::WebrtcGmpVideoDecoder(webrtc::VideoCodecType aType) + : mGMP(nullptr), + mInitting(false), + mHost(nullptr), + mCallbackMutex("WebrtcGmpVideoDecoder decoded callback mutex"), + mCallback(nullptr), + mCachedPluginId(0), +- mDecoderStatus(GMPNoErr) { ++ mDecoderStatus(GMPNoErr), ++ mCodecType(aType) { + if (mPCHandle.empty()) { + mPCHandle = WebrtcGmpPCHandleSetter::GetCurrentHandle(); + } +@@ -653,7 +732,21 @@ int32_t WebrtcGmpVideoDecoder::InitDecode( + const webrtc::VideoCodec* aCodecSettings, int32_t aNumberOfCores, + const RefPtr& aInitDone) { + nsTArray tags; +- tags.AppendElement(NS_LITERAL_CSTRING("h264")); ++ switch (aCodecSettings->codecType) { ++ case webrtc::VideoCodecType::kVideoCodecVP8: ++ tags.AppendElement(NS_LITERAL_CSTRING("vp8")); ++ break; ++ case webrtc::VideoCodecType::kVideoCodecVP9: ++ tags.AppendElement(NS_LITERAL_CSTRING("vp9")); ++ break; ++ case webrtc::VideoCodecType::kVideoCodecH264: ++ default: ++ tags.AppendElement(NS_LITERAL_CSTRING("h264")); ++ break; ++ } ++ ++ memcpy(&aThis->mCodecSettings, aCodecSettings, sizeof(aThis->mCodecSettings)); ++ + UniquePtr callback( + new InitDoneCallback(aThis, aInitDone)); + aThis->mInitting = true; +@@ -695,11 +788,29 @@ int32_t WebrtcGmpVideoDecoder::GmpInitDone(GMPVideoDecoderProxy* aGMP, + GMPVideoCodec codec; + memset(&codec, 0, sizeof(codec)); + codec.mGMPApiVersion = 33; ++ codec.mWidth = mCodecSettings.width; ++ codec.mHeight = mCodecSettings.height; + +- // XXX this is currently a hack +- // GMPVideoCodecUnion codecSpecific; +- // memset(&codecSpecific, 0, sizeof(codecSpecific)); + nsTArray codecSpecific; ++ ++ switch (mCodecSettings.codecType) { ++ case webrtc::VideoCodecType::kVideoCodecH264: { ++ // Currently gmp-droid does not support codec-specific data ++ // TODO: Check OpenH264 requirements ++ codec.mCodecType = kGMPVideoCodecH264; ++ break; ++ } ++ case webrtc::VideoCodecType::kVideoCodecVP8: ++ codec.mCodecType = kGMPVideoCodecVP8; ++ break; ++ case webrtc::VideoCodecType::kVideoCodecVP9: ++ codec.mCodecType = kGMPVideoCodecVP9; ++ break; ++ default: ++ // The requested codec is not supported. ++ return WEBRTC_VIDEO_CODEC_ERROR; ++ } ++ + nsresult rv = mGMP->InitDecode(codec, codecSpecific, this, 1); + if (NS_FAILED(rv)) { + *aErrorOut = "GMP Decode: InitDecode failed"; +@@ -820,13 +931,10 @@ void WebrtcGmpVideoDecoder::Decode_g(const RefPtr& aThis, + return; + } + +- // XXX At this point, we only will get mode1 data (a single length and a +- // buffer) Session_info.cc/etc code needs to change to support mode 0. +- *(reinterpret_cast(frame->Buffer())) = frame->Size(); +- +- // XXX It'd be wonderful not to have to memcpy the encoded data! +- memcpy(frame->Buffer() + 4, aDecodeData->mImage._buffer + 4, +- frame->Size() - 4); ++ // Copy encoded data to shared memory. In case of h264: gmp-droid takes ++ // encoded frame as is, so do not perform conversion of NAL units here. ++ // TODO: OpenH264 may require some extra code here, need to check. 
++ memcpy(frame->Buffer(), aDecodeData->mImage._buffer, frame->Size()); + + frame->SetEncodedWidth(aDecodeData->mImage._encodedWidth); + frame->SetEncodedHeight(aDecodeData->mImage._encodedHeight); +@@ -845,11 +953,26 @@ void WebrtcGmpVideoDecoder::Decode_g(const RefPtr& aThis, + return; + } + ++ frame->SetFrameType(ft); ++ + // Bug XXXXXX: Set codecSpecific info + GMPCodecSpecificInfo info; + memset(&info, 0, sizeof(info)); +- info.mCodecType = kGMPVideoCodecH264; +- info.mCodecSpecific.mH264.mSimulcastIdx = 0; ++ switch (aThis->mCodecType) { ++ case webrtc::VideoCodecType::kVideoCodecH264: ++ info.mCodecType = kGMPVideoCodecH264; ++ info.mCodecSpecific.mH264.mSimulcastIdx = 0; ++ break; ++ case webrtc::VideoCodecType::kVideoCodecVP8: ++ info.mCodecType = kGMPVideoCodecVP8; ++ break; ++ case webrtc::VideoCodecType::kVideoCodecVP9: ++ info.mCodecType = kGMPVideoCodecVP9; ++ break; ++ default: ++ info.mCodecType = kGMPVideoCodecInvalid; ++ break; ++ } + nsTArray codecSpecificInfo; + codecSpecificInfo.AppendElements((uint8_t*)&info, + sizeof(GMPCodecSpecificInfo)); +diff --git a/media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.h b/media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.h +index 920f7460d1a3..af18ad43816b 100644 +--- a/media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.h ++++ b/media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.h +@@ -143,7 +143,7 @@ class GMPDecodeData { + + class WebrtcGmpVideoEncoder : public GMPVideoEncoderCallbackProxy { + public: +- WebrtcGmpVideoEncoder(); ++ WebrtcGmpVideoEncoder(webrtc::VideoCodecType aType); + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcGmpVideoEncoder); + + // Implement VideoEncoder interface, sort of. +@@ -274,6 +274,7 @@ class WebrtcGmpVideoEncoder : public GMPVideoEncoderCallbackProxy { + webrtc::EncodedImageCallback* mCallback; + uint64_t mCachedPluginId; + std::string mPCHandle; ++ webrtc::VideoCodecType mCodecType; + }; + + // Basically a strong ref to a WebrtcGmpVideoEncoder, that also translates +@@ -283,7 +284,7 @@ class WebrtcGmpVideoEncoder : public GMPVideoEncoderCallbackProxy { + // the "real" encoder. + class WebrtcVideoEncoderProxy : public WebrtcVideoEncoder { + public: +- WebrtcVideoEncoderProxy() : mEncoderImpl(new WebrtcGmpVideoEncoder) {} ++ WebrtcVideoEncoderProxy(webrtc::VideoCodecType aType) : mEncoderImpl(new WebrtcGmpVideoEncoder(aType)) {} + + virtual ~WebrtcVideoEncoderProxy() { + RegisterEncodeCompleteCallback(nullptr); +@@ -324,7 +325,7 @@ class WebrtcVideoEncoderProxy : public WebrtcVideoEncoder { + + class WebrtcGmpVideoDecoder : public GMPVideoDecoderCallbackProxy { + public: +- WebrtcGmpVideoDecoder(); ++ WebrtcGmpVideoDecoder(webrtc::VideoCodecType aType); + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcGmpVideoDecoder); + + // Implement VideoEncoder interface, sort of. +@@ -413,6 +414,8 @@ class WebrtcGmpVideoDecoder : public GMPVideoDecoderCallbackProxy { + Atomic mCachedPluginId; + Atomic mDecoderStatus; + std::string mPCHandle; ++ webrtc::VideoCodecType mCodecType; ++ webrtc::VideoCodec mCodecSettings; + }; + + // Basically a strong ref to a WebrtcGmpVideoDecoder, that also translates +@@ -422,7 +425,7 @@ class WebrtcGmpVideoDecoder : public GMPVideoDecoderCallbackProxy { + // the "real" encoder. 
+ class WebrtcVideoDecoderProxy : public WebrtcVideoDecoder { + public: +- WebrtcVideoDecoderProxy() : mDecoderImpl(new WebrtcGmpVideoDecoder) {} ++ WebrtcVideoDecoderProxy(webrtc::VideoCodecType aType) : mDecoderImpl(new WebrtcGmpVideoDecoder(aType)) {} + + virtual ~WebrtcVideoDecoderProxy() { + RegisterDecodeCompleteCallback(nullptr); +diff --git a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp +index 17ef17dd26d0..83cafc177361 100644 +--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp ++++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp +@@ -782,7 +782,7 @@ class ConfigureCodec { + mUseAudioFec(false), + mRedUlpfecEnabled(false), + mDtmfEnabled(false) { +- mSoftwareH264Enabled = PeerConnectionCtx::GetInstance()->gmpHasH264(); ++ mHardwareH264Supported = PeerConnectionCtx::GetInstance()->gmpHasH264(); + + mH264Enabled = mHardwareH264Supported || mSoftwareH264Enabled; + +@@ -851,12 +851,6 @@ class ConfigureCodec { + // Might disable it, but we set up other params anyway + videoCodec.mEnabled = mH264Enabled; + +- if (videoCodec.mPacketizationMode == 0 && !mSoftwareH264Enabled) { +- // We're assuming packetization mode 0 is unsupported by +- // hardware. +- videoCodec.mEnabled = false; +- } +- + if (mHardwareH264Supported) { + videoCodec.mStronglyPreferred = true; + } +-- +2.17.1 + diff --git a/rpm/0071-sailfishos-webrtc-Implement-video-capture-module.-JB.patch b/rpm/0071-sailfishos-webrtc-Implement-video-capture-module.-JB.patch new file mode 100644 index 0000000000000..cb7dc3c0bb769 --- /dev/null +++ b/rpm/0071-sailfishos-webrtc-Implement-video-capture-module.-JB.patch @@ -0,0 +1,779 @@ +From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 +From: Denis Grigorev +Date: Thu, 4 Feb 2021 21:14:28 +0300 +Subject: [PATCH] [sailfishos][webrtc] Implement video capture module. 
JB#53982 + +--- + dom/media/systemservices/VideoFrameUtils.cpp | 92 ++++--- + dom/media/systemservices/moz.build | 1 + + .../webrtc/modules/video_capture/BUILD.gn | 26 +- + .../video_capture/sfos/device_info_sfos.cc | 139 ++++++++++ + .../video_capture/sfos/device_info_sfos.h | 49 ++++ + .../video_capture/sfos/video_capture_sfos.cc | 244 ++++++++++++++++++ + .../video_capture/sfos/video_capture_sfos.h | 55 ++++ + .../video_capture/video_capture_impl.cc | 12 + + .../video_capture/video_capture_impl.h | 2 + + .../video_capture_internal_impl_gn/moz.build | 13 +- + 10 files changed, 579 insertions(+), 54 deletions(-) + create mode 100644 media/webrtc/trunk/webrtc/modules/video_capture/sfos/device_info_sfos.cc + create mode 100644 media/webrtc/trunk/webrtc/modules/video_capture/sfos/device_info_sfos.h + create mode 100644 media/webrtc/trunk/webrtc/modules/video_capture/sfos/video_capture_sfos.cc + create mode 100644 media/webrtc/trunk/webrtc/modules/video_capture/sfos/video_capture_sfos.h + +diff --git a/dom/media/systemservices/VideoFrameUtils.cpp b/dom/media/systemservices/VideoFrameUtils.cpp +index caeda9289dbd..7f713bb2e623 100644 +--- a/dom/media/systemservices/VideoFrameUtils.cpp ++++ b/dom/media/systemservices/VideoFrameUtils.cpp +@@ -7,6 +7,7 @@ + #include "VideoFrameUtils.h" + #include "webrtc/video_frame.h" + #include "mozilla/ShmemPool.h" ++#include "libyuv/rotate.h" + + namespace mozilla { + +@@ -29,59 +30,68 @@ void VideoFrameUtils::InitFrameBufferProperties( + aDestProps.ntpTimeMs() = aVideoFrame.ntp_time_ms(); + aDestProps.renderTimeMs() = aVideoFrame.render_time_ms(); + ++ // Rotation will be applied during CopyVideoFrameBuffers(). + aDestProps.rotation() = aVideoFrame.rotation(); +- + auto height = aVideoFrame.video_frame_buffer()->height(); +- aDestProps.yAllocatedSize() = +- height * aVideoFrame.video_frame_buffer()->StrideY(); +- aDestProps.uAllocatedSize() = +- ((height + 1) / 2) * aVideoFrame.video_frame_buffer()->StrideU(); +- aDestProps.vAllocatedSize() = +- ((height + 1) / 2) * aVideoFrame.video_frame_buffer()->StrideV(); +- +- aDestProps.width() = aVideoFrame.video_frame_buffer()->width(); ++ auto width = aVideoFrame.video_frame_buffer()->width(); ++ if (aVideoFrame.rotation() == webrtc::kVideoRotation_90 || ++ aVideoFrame.rotation() == webrtc::kVideoRotation_270) { ++ std::swap(width, height); ++ } + aDestProps.height() = height; ++ aDestProps.width() = width; + +- aDestProps.yStride() = aVideoFrame.video_frame_buffer()->StrideY(); +- aDestProps.uStride() = aVideoFrame.video_frame_buffer()->StrideU(); +- aDestProps.vStride() = aVideoFrame.video_frame_buffer()->StrideV(); ++ aDestProps.yStride() = width; ++ aDestProps.uStride() = (width + 1) / 2; ++ aDestProps.vStride() = (width + 1) / 2; ++ ++ aDestProps.yAllocatedSize() = height * aDestProps.yStride(); ++ aDestProps.uAllocatedSize() = ((height + 1) / 2) * aDestProps.uStride(); ++ aDestProps.vAllocatedSize() = ((height + 1) / 2) * aDestProps.vStride(); + } + ++// Performs copying to a shared memory or a temporary buffer. ++// Apply rotation here to avoid extra copying. 
+ void VideoFrameUtils::CopyVideoFrameBuffers(uint8_t* aDestBuffer, + const size_t aDestBufferSize, + const webrtc::VideoFrame& aFrame) { +- size_t aggregateSize = TotalRequiredBufferSize(aFrame); +- +- MOZ_ASSERT(aDestBufferSize >= aggregateSize); ++ const rtc::scoped_refptr src = aFrame.video_frame_buffer(); ++ libyuv::RotationMode rotationMode; ++ int width = src->width(); ++ int height = src->height(); + +- // If planes are ordered YUV and contiguous then do a single copy +- if ((aFrame.video_frame_buffer()->DataY() != nullptr) && +- // Check that the three planes are ordered +- (aFrame.video_frame_buffer()->DataY() < +- aFrame.video_frame_buffer()->DataU()) && +- (aFrame.video_frame_buffer()->DataU() < +- aFrame.video_frame_buffer()->DataV()) && +- // Check that the last plane ends at firstPlane[totalsize] +- (&aFrame.video_frame_buffer()->DataY()[aggregateSize] == +- &aFrame.video_frame_buffer() +- ->DataV()[((aFrame.video_frame_buffer()->height() + 1) / 2) * +- aFrame.video_frame_buffer()->StrideV()])) { +- memcpy(aDestBuffer, aFrame.video_frame_buffer()->DataY(), aggregateSize); +- return; ++ switch (aFrame.rotation()) { ++ case webrtc::kVideoRotation_90: ++ rotationMode = libyuv::kRotate90; ++ std::swap(width, height); ++ break; ++ case webrtc::kVideoRotation_270: ++ rotationMode = libyuv::kRotate270; ++ std::swap(width, height); ++ break; ++ case webrtc::kVideoRotation_180: ++ rotationMode = libyuv::kRotate180; ++ break; ++ case webrtc::kVideoRotation_0: ++ default: ++ rotationMode = libyuv::kRotate0; ++ break; + } + +- // Copy each plane +- size_t offset = 0; +- size_t size; +- auto height = aFrame.video_frame_buffer()->height(); +- size = height * aFrame.video_frame_buffer()->StrideY(); +- memcpy(&aDestBuffer[offset], aFrame.video_frame_buffer()->DataY(), size); +- offset += size; +- size = ((height + 1) / 2) * aFrame.video_frame_buffer()->StrideU(); +- memcpy(&aDestBuffer[offset], aFrame.video_frame_buffer()->DataU(), size); +- offset += size; +- size = ((height + 1) / 2) * aFrame.video_frame_buffer()->StrideV(); +- memcpy(&aDestBuffer[offset], aFrame.video_frame_buffer()->DataV(), size); ++ int strideY = width; ++ int strideUV = (width + 1) / 2; ++ off_t offsetY = 0; ++ off_t offsetU = height * strideY; ++ off_t offsetV = offsetU + ((height + 1) / 2) * strideUV; ++ ++ libyuv::I420Rotate(src->DataY(), src->StrideY(), ++ src->DataU(), src->StrideU(), ++ src->DataV(), src->StrideV(), ++ &aDestBuffer[offsetY], strideY, ++ &aDestBuffer[offsetU], strideUV, ++ &aDestBuffer[offsetV], strideUV, ++ src->width(), src->height(), ++ rotationMode); + } + + void VideoFrameUtils::CopyVideoFrameBuffers( +diff --git a/dom/media/systemservices/moz.build b/dom/media/systemservices/moz.build +index cafe81bf84d0..0fbe06b497ba 100644 +--- a/dom/media/systemservices/moz.build ++++ b/dom/media/systemservices/moz.build +@@ -20,6 +20,7 @@ if CONFIG['MOZ_WEBRTC']: + 'VideoFrameUtils.cpp' + ] + LOCAL_INCLUDES += [ ++ '/media/libyuv/libyuv/include/', + '/media/webrtc/signaling', + '/media/webrtc/trunk', + ] +diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/BUILD.gn b/media/webrtc/trunk/webrtc/modules/video_capture/BUILD.gn +index e4984238bac3..a2656ca22525 100644 +--- a/media/webrtc/trunk/webrtc/modules/video_capture/BUILD.gn ++++ b/media/webrtc/trunk/webrtc/modules/video_capture/BUILD.gn +@@ -98,13 +98,24 @@ if (!build_with_chromium) { + + if (is_linux || is_bsd) { + sources = [ +- "linux/device_info_linux.cc", +- "linux/device_info_linux.h", +- "linux/video_capture_linux.cc", +- 
"linux/video_capture_linux.h", ++ "sfos/device_info_sfos.cc", ++ "sfos/device_info_sfos.h", ++ "sfos/video_capture_sfos.cc", ++ "sfos/video_capture_sfos.h", + ] ++ + deps += [ "../..:webrtc_common" ] ++ ++ if (rtc_build_libyuv) { ++ deps += [ "$rtc_libyuv_dir" ] ++ public_deps = [ ++ "$rtc_libyuv_dir", ++ ] ++ } else { ++ include_dirs += [ "$rtc_libyuv_dir/include" ] ++ } + } ++ + if (is_win) { + sources = [ + "windows/device_info_ds.cc", +@@ -211,13 +222,6 @@ if (!build_with_chromium) { + "-lm", + ] + } +- if (is_linux) { +- ldflags += [ +- "-lrt", +- "-lXext", +- "-lX11", +- ] +- } + + deps = [ + ":video_capture_internal_impl", +diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/sfos/device_info_sfos.cc b/media/webrtc/trunk/webrtc/modules/video_capture/sfos/device_info_sfos.cc +new file mode 100644 +index 000000000000..dcf6f1302ee6 +--- /dev/null ++++ b/media/webrtc/trunk/webrtc/modules/video_capture/sfos/device_info_sfos.cc +@@ -0,0 +1,139 @@ ++/* This Source Code Form is subject to the terms of the Mozilla Public ++ * License, v. 2.0. If a copy of the MPL was not distributed with this file, ++ * You can obtain one at http://mozilla.org/MPL/2.0/. */ ++ ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++ ++#include "mozilla/Preferences.h" ++#include "webrtc/system_wrappers/include/logging.h" ++ ++#include "webrtc/modules/video_capture/sfos/device_info_sfos.h" ++ ++#define EXPECTED_CAPTURE_DELAY_PREF "media.getusermedia.camera.expected_capture_delay" ++ ++namespace webrtc ++{ ++namespace videocapturemodule ++{ ++VideoCaptureModule::DeviceInfo* ++VideoCaptureImpl::CreateDeviceInfo() ++{ ++ // The caller doesn't check for return value, so I can't Init() here ++ return new videocapturemodule::DeviceInfoSFOS(); ++} ++ ++DeviceInfoSFOS::DeviceInfoSFOS() ++ : DeviceInfoImpl() ++{ ++ Init(); ++} ++ ++int32_t DeviceInfoSFOS::Init() ++{ ++ cameraManager = amber_camera_manager(); ++ // Initialize parent class member ++ _lastUsedDeviceName = (char *)malloc(1); ++ _lastUsedDeviceName[0] = 0; ++ return 0; ++} ++ ++DeviceInfoSFOS::~DeviceInfoSFOS() ++{ ++} ++ ++uint32_t DeviceInfoSFOS::NumberOfDevices() ++{ ++ LOG_F(LS_VERBOSE); ++ ++ if (cameraManager) { ++ cameraList.clear(); ++ for (int i = 0; i < cameraManager->getNumberOfCameras(); i++) { ++ amber::media::CameraInfo info; ++ if (cameraManager->getCameraInfo(i, info)) { ++ cameraList.push_back(info); ++ } ++ } ++ // Put front cameras at the top of the list, they are most often used during video chat. 
++ std::sort(cameraList.begin(), cameraList.end(), ++ [](const amber::media::CameraInfo& c1, const amber::media::CameraInfo& c2) { ++ bool c1front = c1.facing == amber::media::AMBER_CAMERA_FACING_FRONT; ++ bool c2front = c2.facing == amber::media::AMBER_CAMERA_FACING_FRONT; ++ return c1front > c2front; ++ }); ++ return cameraList.size(); ++ } ++ return 0; ++} ++ ++int32_t DeviceInfoSFOS::GetDeviceName( ++ uint32_t deviceNumber, ++ char* deviceNameUTF8, ++ uint32_t deviceNameLength, ++ char* deviceUniqueIdUTF8, ++ uint32_t deviceUniqueIdUTF8Length, ++ char* productUniqueIdUTF8, ++ uint32_t productUniqueIdUTF8Length, ++ pid_t* /*pid*/) ++{ ++ if (deviceNumber < cameraList.size()) { ++ amber::media::CameraInfo info = cameraList.at(deviceNumber); ++ strncpy(deviceNameUTF8, info.name.c_str(), deviceNameLength); ++ strncpy(deviceUniqueIdUTF8, info.id.c_str(), deviceUniqueIdUTF8Length); ++ strncpy(productUniqueIdUTF8, info.provider.c_str(), productUniqueIdUTF8Length); ++ return 0; ++ } ++ ++ return -1; ++} ++ ++int32_t DeviceInfoSFOS::CreateCapabilityMap(const char* deviceUniqueIdUTF8) ++{ ++ const int32_t deviceUniqueIdUTF8Length = ++ (int32_t) strlen((char*) deviceUniqueIdUTF8); ++ if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength) { ++ LOG_F(LS_ERROR) << "Device name too long"; ++ return -1; ++ } ++ ++ FillCapabilities(deviceUniqueIdUTF8); ++ ++ // Store the new used device name. The parent class needs this for some reason ++ _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length; ++ _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName, ++ _lastUsedDeviceNameLength + 1); ++ memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, _lastUsedDeviceNameLength + 1); ++ ++ LOG_F(LS_INFO) << "Capability map for device " << deviceUniqueIdUTF8 ++ << " size " << _captureCapabilities.size(); ++ ++ return _captureCapabilities.size(); ++} ++ ++void DeviceInfoSFOS::FillCapabilities(const char *devName) ++{ ++ std::vector caps; ++ unsigned captureDelay = mozilla::Preferences::GetUint(EXPECTED_CAPTURE_DELAY_PREF, 500); ++ ++ _captureCapabilities.clear(); ++ if (cameraManager && cameraManager->queryCapabilities(devName, caps)) { ++ for (auto cap : caps) { ++ VideoCaptureCapability vcaps; ++ vcaps.width = cap.width; ++ vcaps.height = cap.height; ++ vcaps.maxFPS = cap.fps; ++ vcaps.expectedCaptureDelay = captureDelay; ++ vcaps.rawType = kVideoI420; ++ vcaps.codecType = kVideoCodecI420; ++ _captureCapabilities.push_back(vcaps); ++ } ++ } ++} ++ ++} // namespace videocapturemodule ++} // namespace webrtc +diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/sfos/device_info_sfos.h b/media/webrtc/trunk/webrtc/modules/video_capture/sfos/device_info_sfos.h +new file mode 100644 +index 000000000000..2d8e584493b6 +--- /dev/null ++++ b/media/webrtc/trunk/webrtc/modules/video_capture/sfos/device_info_sfos.h +@@ -0,0 +1,49 @@ ++/* This Source Code Form is subject to the terms of the Mozilla Public ++ * License, v. 2.0. If a copy of the MPL was not distributed with this file, ++ * You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ ++ ++#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_SFOS_DEVICE_INFO_SFOS_H_ ++#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_SFOS_DEVICE_INFO_SFOS_H_ ++ ++#include "webrtc/modules/video_capture/device_info_impl.h" ++#include "webrtc/modules/video_capture/video_capture_impl.h" ++#include "webrtc/base/platform_thread.h" ++ ++#include "ambercamera.h" ++ ++namespace webrtc ++{ ++namespace videocapturemodule ++{ ++class DeviceInfoSFOS: public DeviceInfoImpl ++{ ++public: ++ DeviceInfoSFOS(); ++ virtual ~DeviceInfoSFOS(); ++ virtual uint32_t NumberOfDevices(); ++ virtual int32_t GetDeviceName( ++ uint32_t deviceNumber, ++ char* deviceNameUTF8, ++ uint32_t deviceNameLength, ++ char* deviceUniqueIdUTF8, ++ uint32_t deviceUniqueIdUTF8Length, ++ char* productUniqueIdUTF8=0, ++ uint32_t productUniqueIdUTF8Length=0, ++ pid_t* pid=0); ++ virtual int32_t CreateCapabilityMap (const char* deviceUniqueIdUTF8); ++ virtual int32_t DisplayCaptureSettingsDialogBox( ++ const char* /*deviceUniqueIdUTF8*/, ++ const char* /*dialogTitleUTF8*/, ++ void* /*parentWindow*/, ++ uint32_t /*positionX*/, ++ uint32_t /*positionY*/) { return -1; } ++ void FillCapabilities(const char* devName); ++ int32_t Init(); ++ ++private: ++ amber::media::CameraManager *cameraManager; ++ std::vector cameraList; ++}; ++} // namespace videocapturemodule ++} // namespace webrtc ++#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_SFOS_DEVICE_INFO_SFOS_H_ +diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/sfos/video_capture_sfos.cc b/media/webrtc/trunk/webrtc/modules/video_capture/sfos/video_capture_sfos.cc +new file mode 100644 +index 000000000000..6f58637fa439 +--- /dev/null ++++ b/media/webrtc/trunk/webrtc/modules/video_capture/sfos/video_capture_sfos.cc +@@ -0,0 +1,244 @@ ++/* This Source Code Form is subject to the terms of the Mozilla Public ++ * License, v. 2.0. If a copy of the MPL was not distributed with this file, ++ * You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ ++ ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++ ++#include ++ ++#include "webrtc/system_wrappers/include/logging.h" ++#include "webrtc/system_wrappers/include/clock.h" ++#include "webrtc/api/video/i420_buffer.h" ++#include "webrtc/base/refcount.h" ++#include "webrtc/base/scoped_ref_ptr.h" ++ ++#include "libyuv/convert.h" ++ ++#include "webrtc/modules/video_capture/sfos/video_capture_sfos.h" ++ ++namespace webrtc { ++namespace videocapturemodule { ++ ++class AmberVideoBuffer : public VideoFrameBuffer { ++public: ++ static rtc::scoped_refptr Create(std::shared_ptr frame, VideoCaptureCapability& cap) { ++ return new rtc::RefCountedObject(frame, cap); ++ } ++ ++ AmberVideoBuffer(std::shared_ptr frame, VideoCaptureCapability& cap) ++ : ycbcr(frame), ++ _width(cap.width), ++ _height(cap.height) {} ++ ++ int width () const { return _width; }; ++ int height() const { return _height; }; ++ ++ const uint8_t* DataY() const { return (const uint8_t*)ycbcr->y; }; ++ const uint8_t* DataU() const { return (const uint8_t*)ycbcr->cb; }; ++ const uint8_t* DataV() const { return (const uint8_t*)ycbcr->cr; }; ++ ++ int StrideY() const { return ycbcr->yStride; }; ++ int StrideU() const { return ycbcr->cStride; }; ++ int StrideV() const { return ycbcr->cStride; }; ++ ++ void *native_handle() const { return (void *)&ycbcr; }; ++ ++ rtc::scoped_refptr NativeToI420Buffer() { ++ rtc::scoped_refptr buffer = I420Buffer::Create( ++ _width, _height); ++ ++ if (ycbcr->chromaStep == 1) { ++ libyuv::I420Copy( ++ (const uint8*)ycbcr->y, ycbcr->yStride, ++ (const uint8*)ycbcr->cb, ycbcr->cStride, ++ (const uint8*)ycbcr->cr, ycbcr->cStride, ++ buffer->MutableDataY(), buffer->StrideY(), ++ buffer->MutableDataU(), buffer->StrideU(), ++ buffer->MutableDataV(), buffer->StrideV(), ++ _width, _height); ++ } else if (ycbcr->chromaStep == 2) { ++ libyuv::NV12ToI420( ++ (const uint8*)ycbcr->y, ycbcr->yStride, ++ (const uint8*)ycbcr->cb, ycbcr->cStride, ++ buffer->MutableDataY(), buffer->StrideY(), ++ buffer->MutableDataU(), buffer->StrideU(), ++ buffer->MutableDataV(), buffer->StrideV(), ++ _width, _height); ++ } else { ++ // Unsupported format ++ I420Buffer::SetBlack(buffer.get()); ++ } ++ return buffer; ++ } ++ ++ bool isPlanar() { ++ return ycbcr->chromaStep == 1; ++ } ++ ++private: ++ std::shared_ptr ycbcr; ++ int _width; ++ int _height; ++}; ++ ++rtc::scoped_refptr VideoCaptureImpl::Create( ++ const char* deviceUniqueId) { ++ rtc::scoped_refptr implementation( ++ new rtc::RefCountedObject()); ++ ++ if (implementation->Init(deviceUniqueId) != 0) ++ return nullptr; ++ ++ return implementation; ++} ++ ++VideoCaptureModuleSFOS::VideoCaptureModuleSFOS() ++ : VideoCaptureImpl() ++{ ++ mozilla::hal::ScreenConfiguration screenConfig; ++ ++ mozilla::hal::RegisterScreenConfigurationObserver (this); ++ mozilla::hal::GetCurrentScreenConfiguration (&screenConfig); ++ _screenRotationAngle = ScreenOrientationToAngle (screenConfig.orientation()); ++} ++ ++int32_t VideoCaptureModuleSFOS::Init(const char* deviceUniqueIdUTF8) ++{ ++ /* Fill current device name for the parent class */ ++ int len = strlen(deviceUniqueIdUTF8); ++ _deviceUniqueId = new (std::nothrow) char[len + 1]; ++ if (_deviceUniqueId) { ++ memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1); ++ } else { ++ return -1; ++ } ++ ++ if (amber_camera_manager()->openCamera(deviceUniqueIdUTF8, _camera)) { ++ amber::media::CameraInfo info; ++ if (_camera->getInfo(info)) { ++ _rearFacingCamera = info.facing == 
amber::media::AMBER_CAMERA_FACING_REAR; ++ _sensorMountAngle = info.mountAngle; ++ _camera->setListener(this); ++ return 0; ++ } ++ _camera.reset(); ++ } ++ return -1; ++} ++ ++VideoCaptureModuleSFOS::~VideoCaptureModuleSFOS() ++{ ++ mozilla::hal::UnregisterScreenConfigurationObserver(this); ++} ++ ++int32_t VideoCaptureModuleSFOS::StartCapture( ++ const VideoCaptureCapability& capability) ++{ ++ _startNtpTimeMs = webrtc::Clock::GetRealTimeClock()->CurrentNtpInMilliseconds(); ++ UpdateCaptureRotation(); ++ ++ _requestedCapability = capability; ++ _requestedCapability.codecType = kVideoCodecUnknown; ++ _requestedCapability.rawType = kVideoI420; ++ ++ amber::media::CameraCapability cap; ++ cap.width = capability.width; ++ cap.height = capability.height; ++ cap.fps = capability.maxFPS; ++ return _camera->startCapture(cap) ? 0 : -1; ++} ++ ++int32_t VideoCaptureModuleSFOS::StopCapture() ++{ ++ return _camera->stopCapture() ? 0 : -1; ++} ++ ++bool VideoCaptureModuleSFOS::CaptureStarted() ++{ ++ return _camera->captureStarted(); ++} ++ ++void VideoCaptureModuleSFOS::Notify(const mozilla::hal::ScreenConfiguration& aConfiguration) ++{ ++ LOG_F(LS_INFO) << "VideoCaptureModuleSFOS::Notify ScreenConfiguration.orientation: " << aConfiguration.orientation(); ++ _screenRotationAngle = ScreenOrientationToAngle(aConfiguration.orientation()); ++ UpdateCaptureRotation(); ++} ++ ++int VideoCaptureModuleSFOS::ScreenOrientationToAngle(mozilla::dom::ScreenOrientationInternal orientation) ++{ ++ switch (orientation) { ++ // The default orientation is portrait for Sailfish OS. ++ case mozilla::dom::eScreenOrientation_Default: ++ case mozilla::dom::eScreenOrientation_PortraitPrimary: ++ return 0; ++ case mozilla::dom::eScreenOrientation_LandscapePrimary: ++ return 90; ++ case mozilla::dom::eScreenOrientation_PortraitSecondary: ++ return 180; ++ case mozilla::dom::eScreenOrientation_LandscapeSecondary: ++ return 270; ++ default: ++ return 0; ++ } ++} ++ ++void VideoCaptureModuleSFOS::onCameraFrame(std::shared_ptr frame) ++{ ++ LOG_F(LS_VERBOSE) << "frame ts=" << (frame->timestampMs + _startNtpTimeMs); ++ ++ auto buffer = AmberVideoBuffer::Create(frame, _requestedCapability); ++ if (buffer->isPlanar()) { ++ // Do not copy planar frame, use as is ++ IncomingVideoBuffer(buffer, frame->timestampMs + _startNtpTimeMs); ++ } else { ++ // Semi-planar frame is incompatible with current implementation, so convert it to I420 ++ IncomingVideoBuffer(buffer->NativeToI420Buffer(), frame->timestampMs + _startNtpTimeMs); ++ } ++} ++ ++void VideoCaptureModuleSFOS::onCameraError(std::string errorDescription) ++{ ++ LOG_F(LS_ERROR) << "Camera error " << errorDescription << "\n"; ++} ++ ++void VideoCaptureModuleSFOS::UpdateCaptureRotation() ++{ ++ VideoRotation rotation; ++ int rotateAngle = 360 + _sensorMountAngle + (_rearFacingCamera ? 
-_screenRotationAngle : _screenRotationAngle); ++ ++ switch (rotateAngle % 360) { ++ case 90: ++ rotation = kVideoRotation_90; ++ break; ++ case 180: ++ rotation = kVideoRotation_180; ++ break; ++ case 270: ++ rotation = kVideoRotation_270; ++ break; ++ default: ++ rotation = kVideoRotation_0; ++ break; ++ } ++ ++ LOG_F(LS_INFO) << "Sensor mount angle=" << _sensorMountAngle ++ << " Screen rotation=" << _screenRotationAngle ++ << " Capture rotation=" << rotateAngle; ++ VideoCaptureImpl::SetCaptureRotation (rotation); ++} ++ ++int32_t VideoCaptureModuleSFOS::CaptureSettings(VideoCaptureCapability& settings) ++{ ++ settings = _requestedCapability; ++ return 0; ++} ++} // namespace videocapturemodule ++} // namespace webrtc +diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/sfos/video_capture_sfos.h b/media/webrtc/trunk/webrtc/modules/video_capture/sfos/video_capture_sfos.h +new file mode 100644 +index 000000000000..0b9c5911ff50 +--- /dev/null ++++ b/media/webrtc/trunk/webrtc/modules/video_capture/sfos/video_capture_sfos.h +@@ -0,0 +1,55 @@ ++/* This Source Code Form is subject to the terms of the Mozilla Public ++ * License, v. 2.0. If a copy of the MPL was not distributed with this file, ++ * You can obtain one at http://mozilla.org/MPL/2.0/. */ ++ ++#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_SFOS_VIDEO_CAPTURE_SFOS_H_ ++#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_SFOS_VIDEO_CAPTURE_SFOS_H_ ++ ++#include ++ ++#include "mozilla/Hal.h" ++#include "mozilla/dom/ScreenOrientation.h" ++ ++#include "webrtc/common_types.h" ++#include "webrtc/modules/video_capture/video_capture_impl.h" ++ ++#include "ambercamera.h" ++ ++namespace webrtc ++{ ++class CriticalSectionWrapper; ++namespace videocapturemodule ++{ ++class VideoCaptureModuleSFOS ++ : public VideoCaptureImpl, ++ public amber::media::CameraListener, ++ public mozilla::hal::ScreenConfigurationObserver ++{ ++public: ++ VideoCaptureModuleSFOS(); ++ virtual ~VideoCaptureModuleSFOS(); ++ virtual int32_t Init(const char* deviceUniqueId); ++ virtual int32_t StartCapture(const VideoCaptureCapability& capability); ++ virtual int32_t StopCapture(); ++ virtual bool CaptureStarted(); ++ virtual int32_t CaptureSettings(VideoCaptureCapability& settings); ++ ++ void onCameraFrame(std::shared_ptr frame); ++ void onCameraError(std::string errorDescription); ++ ++ virtual void Notify(const mozilla::hal::ScreenConfiguration& aConfiguration) override; ++ ++private: ++ int ScreenOrientationToAngle(mozilla::dom::ScreenOrientationInternal orientation); ++ void UpdateCaptureRotation(); ++ ++ int _screenRotationAngle = 0; ++ int _sensorMountAngle = 0; ++ bool _rearFacingCamera = false; ++ uint64_t _startNtpTimeMs = 0; ++ std::shared_ptr _camera; ++}; ++} // namespace videocapturemodule ++} // namespace webrtc ++ ++#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_SFOS_VIDEO_CAPTURE_SFOS_H_ +diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.cc b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.cc +index ad8ab3b39f35..fb9f97870955 100644 +--- a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.cc ++++ b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.cc +@@ -286,5 +286,17 @@ uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) + + return nrOfFrames; + } ++ ++int32_t VideoCaptureImpl::IncomingVideoBuffer(const rtc::scoped_refptr& buffer, uint64_t captureTime) ++{ ++ CriticalSectionScoped cs(&_apiCs); ++ VideoFrame captureFrame( ++ buffer, 0, 
rtc::TimeMillis(), ++ !apply_rotation_ ? _rotateFrame : kVideoRotation_0); ++ captureFrame.set_ntp_time_ms(captureTime); ++ captureFrame.set_rotation(_rotateFrame); ++ DeliverCapturedFrame(captureFrame); ++ return 0; ++} + } // namespace videocapturemodule + } // namespace webrtc +diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.h b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.h +index c5d62b4b54d7..cf30037bcfe5 100644 +--- a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.h ++++ b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_impl.h +@@ -79,6 +79,8 @@ public: + const VideoCaptureCapability& frameInfo, + int64_t captureTime = 0) override; + ++ int32_t IncomingVideoBuffer(const rtc::scoped_refptr& buffer, uint64_t captureTime); ++ + // Platform dependent + int32_t StartCapture(const VideoCaptureCapability& capability) override + { +diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build +index 32d4302d2550..2ba787efd459 100644 +--- a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build ++++ b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build +@@ -128,11 +128,20 @@ if CONFIG["OS_TARGET"] == "Linux": + "rt" + ] + ++ LOCAL_INCLUDES += [ "/media/libyuv/libyuv/include/" ] ++ OS_LIBS += CONFIG["LIBAMBERCAMERA_LIBS"] ++ + UNIFIED_SOURCES += [ +- "/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc", +- "/media/webrtc/trunk/webrtc/modules/video_capture/linux/video_capture_linux.cc" ++ "/media/webrtc/trunk/webrtc/modules/video_capture/sfos/device_info_sfos.cc", ++ "/media/webrtc/trunk/webrtc/modules/video_capture/sfos/video_capture_sfos.cc" + ] + ++ CXXFLAGS += CONFIG["LIBAMBERCAMERA_CFLAGS"] ++ ++ # Make Hal.h buildable ++ CXXFLAGS += CONFIG["NSPR_CFLAGS"] ++ CXXFLAGS += CONFIG["MOZ_PIXMAN_CFLAGS"] ++ + if CONFIG["OS_TARGET"] == "NetBSD": + + DEFINES["USE_X11"] = "1" +-- +2.17.1 + diff --git a/rpm/xulrunner-qt5.spec b/rpm/xulrunner-qt5.spec index e3cec87c81c58..820fbabc22efe 100644 --- a/rpm/xulrunner-qt5.spec +++ b/rpm/xulrunner-qt5.spec @@ -120,8 +120,9 @@ Patch65: 0065-Fix-flipped-FBO-textures-when-rendering-to-an-offscr.patch Patch66: 0066-sailfishos-webrtc-Adapt-build-configuration-for-Sail.patch Patch67: 0067-sailfishos-webrtc-Regenerate-moz.build-files.-JB-537.patch Patch68: 0068-sailfishos-webrtc-Disable-desktop-sharing-feature-on.patch -Patch69: 0069-sailfishos-webrtc-Disable-enumeration-of-video-devic.patch -Patch70: 0070-Do-not-flip-scissor-rects-when-rendering-to-an-offsc.patch +Patch69: 0069-Do-not-flip-scissor-rects-when-rendering-to-an-offsc.patch +Patch70: 0070-sailfishos-webrtc-Enable-GMP-for-encoding.-JB-53982.patch +Patch71: 0071-sailfishos-webrtc-Implement-video-capture-module.-JB.patch BuildRequires: rust BuildRequires: rust-std-static @@ -150,6 +151,7 @@ BuildRequires: pkgconfig(libswscale) BuildRequires: pkgconfig(Qt5Positioning) BuildRequires: pkgconfig(contentaction5) BuildRequires: pkgconfig(dconf) +BuildRequires: pkgconfig(ambercamera) BuildRequires: qt5-qttools BuildRequires: qt5-default BuildRequires: autoconf213