diff --git a/dom/media/MediaData.cpp b/dom/media/MediaData.cpp index ef393760fff7..95d6f3860207 100644 --- a/dom/media/MediaData.cpp +++ b/dom/media/MediaData.cpp @@ -323,14 +323,18 @@ VideoData::CreateAndCopyData(const VideoInfo& aInfo, #if XP_WIN // We disable this code path on Windows version earlier of Windows 8 due to // intermittent crashes with old drivers. See bug 1405110. - if (IsWin8OrLater() && !XRE_IsParentProcess() && - aAllocator && aAllocator->SupportsD3D11()) { + // D3D11YCbCrImage can only handle YCbCr images using 3 non-interleaved planes + // non-zero mSkip value indicates that one of the plane would be interleaved. + if (IsWin8OrLater() && !XRE_IsParentProcess() && aAllocator && + aAllocator->SupportsD3D11() && aBuffer.mPlanes[0].mSkip == 0 && + aBuffer.mPlanes[1].mSkip == 0 && aBuffer.mPlanes[2].mSkip == 0) { RefPtr<layers::D3D11YCbCrImage> d3d11Image = new layers::D3D11YCbCrImage(); PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture); if (d3d11Image->SetData(layers::ImageBridgeChild::GetSingleton() - ? layers::ImageBridgeChild::GetSingleton().get() - : aAllocator, - aContainer, data)) { + ? layers::ImageBridgeChild::GetSingleton().get() + : aAllocator, + aContainer, + data)) { v->mImage = d3d11Image; return v.forget(); } diff --git a/dom/media/platforms/wmf/MFTDecoder.cpp b/dom/media/platforms/wmf/MFTDecoder.cpp index 3ed976deff36..dd7938e76dc7 100644 --- a/dom/media/platforms/wmf/MFTDecoder.cpp +++ b/dom/media/platforms/wmf/MFTDecoder.cpp @@ -88,7 +88,6 @@ MFTDecoder::SetMediaTypes(IMFMediaType* aInputType, std::function<HRESULT(MFTDecoder*)>&& aCallback) { MOZ_ASSERT(mscom::IsCurrentThreadMTA()); - mOutputType = aOutputType; // Set the input type to the one the caller gave us... 
HRESULT hr = mDecoder->SetInputType(0, aInputType, 0); @@ -132,10 +131,7 @@ MFTDecoder::FindDecoderOutputType(bool aMatchAllAttributes) MOZ_ASSERT(mscom::IsCurrentThreadMTA()); MOZ_ASSERT(mOutputType, "SetDecoderTypes must have been called once"); - GUID currentSubtype = {0}; - HRESULT hr = mOutputType->GetGUID(MF_MT_SUBTYPE, &currentSubtype); - NS_ENSURE_TRUE(SUCCEEDED(hr), hr); - return FindDecoderOutputTypeWithSubtype(currentSubtype, aMatchAllAttributes); + return FindDecoderOutputTypeWithSubtype(mOutputSubType, aMatchAllAttributes); } HRESULT @@ -191,6 +187,7 @@ MFTDecoder::SetDecoderOutputType( MFT_OUTPUT_STREAM_PROVIDES_SAMPLES); mOutputType = outputType; + mOutputSubType = outSubtype; return S_OK; } diff --git a/dom/media/platforms/wmf/MFTDecoder.h b/dom/media/platforms/wmf/MFTDecoder.h index 926eadc36ed1..fc2ae73cae26 100644 --- a/dom/media/platforms/wmf/MFTDecoder.h +++ b/dom/media/platforms/wmf/MFTDecoder.h @@ -49,6 +49,7 @@ public: // Retrieves the media type being output. This may not be valid until // the first sample is decoded. HRESULT GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType); + const GUID& GetOutputMediaSubType() const { return mOutputSubType; } // Submits data into the MFT for processing. // @@ -105,6 +106,7 @@ private: RefPtr<IMFTransform> mDecoder; RefPtr<IMFMediaType> mOutputType; + GUID mOutputSubType; // True if the IMFTransform allocates the samples that it returns. bool mMFTProvidesOutputSamples = false; diff --git a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp index ed83fd8dee0d..06f74cd86473 100644 --- a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp +++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp @@ -899,8 +899,18 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample, stride = mVideoStride; } - // YV12, planar format: [YYYY....][VVVV....][UUUU....] 
+ const GUID& subType = mDecoder->GetOutputMediaSubType(); + MOZ_DIAGNOSTIC_ASSERT(subType == MFVideoFormat_YV12 || + subType == MFVideoFormat_P010 || + subType == MFVideoFormat_P016); + const gfx::ColorDepth colorDepth = subType == MFVideoFormat_YV12 + ? gfx::ColorDepth::COLOR_8 + : gfx::ColorDepth::COLOR_16; + + // YV12, planar format (3 planes): [YYYY....][VVVV....][UUUU....] // i.e., Y, then V, then U. + // P010, P016 planar format (2 planes) [YYYY....][UVUV...] + // See https://docs.microsoft.com/en-us/windows/desktop/medfound/10-bit-and-16-bit-yuv-video-formats VideoData::YCbCrBuffer b; uint32_t videoWidth = mImageSize.width; @@ -922,24 +932,43 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample, uint32_t halfHeight = (videoHeight + 1) / 2; uint32_t halfWidth = (videoWidth + 1) / 2; - // U plane (Cb) - b.mPlanes[1].mData = data + y_size + v_size; - b.mPlanes[1].mStride = halfStride; - b.mPlanes[1].mHeight = halfHeight; - b.mPlanes[1].mWidth = halfWidth; - b.mPlanes[1].mOffset = 0; - b.mPlanes[1].mSkip = 0; + if (subType == MFVideoFormat_YV12) { + // U plane (Cb) + b.mPlanes[1].mData = data + y_size + v_size; + b.mPlanes[1].mStride = halfStride; + b.mPlanes[1].mHeight = halfHeight; + b.mPlanes[1].mWidth = halfWidth; + b.mPlanes[1].mOffset = 0; + b.mPlanes[1].mSkip = 0; - // V plane (Cr) - b.mPlanes[2].mData = data + y_size; - b.mPlanes[2].mStride = halfStride; - b.mPlanes[2].mHeight = halfHeight; - b.mPlanes[2].mWidth = halfWidth; - b.mPlanes[2].mOffset = 0; - b.mPlanes[2].mSkip = 0; + // V plane (Cr) + b.mPlanes[2].mData = data + y_size; + b.mPlanes[2].mStride = halfStride; + b.mPlanes[2].mHeight = halfHeight; + b.mPlanes[2].mWidth = halfWidth; + b.mPlanes[2].mOffset = 0; + b.mPlanes[2].mSkip = 0; + } else { + // U plane (Cb) + b.mPlanes[1].mData = data + y_size; + b.mPlanes[1].mStride = stride; + b.mPlanes[1].mHeight = halfHeight; + b.mPlanes[1].mWidth = halfWidth; + b.mPlanes[1].mOffset = 0; + b.mPlanes[1].mSkip = 1; + + // V plane (Cr) + 
b.mPlanes[2].mData = data + y_size + sizeof(short); + b.mPlanes[2].mStride = stride; + b.mPlanes[2].mHeight = halfHeight; + b.mPlanes[2].mWidth = halfWidth; + b.mPlanes[2].mOffset = 0; + b.mPlanes[2].mSkip = 1; + } // YuvColorSpace b.mYUVColorSpace = mYUVColorSpace; + b.mColorDepth = colorDepth; TimeUnit pts = GetSampleTime(aSample); NS_ENSURE_TRUE(pts.IsValid(), E_FAIL); @@ -948,7 +977,8 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample, gfx::IntRect pictureRegion = mVideoInfo.ScaledImageRect(videoWidth, videoHeight); - if (!mKnowsCompositor || !mKnowsCompositor->SupportsD3D11() || !mIMFUsable) { + if (colorDepth != gfx::ColorDepth::COLOR_8 || !mKnowsCompositor || + !mKnowsCompositor->SupportsD3D11() || !mIMFUsable) { RefPtr<VideoData> v = VideoData::CreateAndCopyData(mVideoInfo, mImageContainer, @@ -1065,15 +1095,14 @@ WMFVideoMFTManager::Output(int64_t aStreamOffset, // Attempt to find an appropriate OutputType, trying in order: // if HW accelerated: NV12, P010, P016 - // if SW: YV12 + // if SW: YV12, P010, P016 if (FAILED((hr = (mDecoder->FindDecoderOutputTypeWithSubtype( mUseHwAccel ? 
MFVideoFormat_NV12 : MFVideoFormat_YV12, false)))) && - (!mUseHwAccel || - (FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype( - MFVideoFormat_P010, false))) && - FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype( - MFVideoFormat_P016, false)))))) { + FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype( + MFVideoFormat_P010, false))) && + FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype( + MFVideoFormat_P016, false)))) { LOG("No suitable output format found"); return hr; } diff --git a/gfx/layers/client/TextureClient.cpp b/gfx/layers/client/TextureClient.cpp index 5b16b64b672c..7b29a829c1ad 100644 --- a/gfx/layers/client/TextureClient.cpp +++ b/gfx/layers/client/TextureClient.cpp @@ -1895,13 +1895,6 @@ MappedYCbCrChannelData::CopyInto(MappedYCbCrChannelData& aDst) if (bytesPerPixel == 1) { copyData(aDst.data, aDst, data, *this); } else if (bytesPerPixel == 2) { - if (skip != 0) { - // The skip value definition doesn't specify if it's in bytes, or in - // "pixels". We will assume the later. There are currently no decoders - // returning HDR content with a skip value different than zero anyway. - NS_WARNING("skip value non zero for HDR content, please verify code " - "(see bug 1421187)"); - } copyData(reinterpret_cast<uint16_t*>(aDst.data), aDst, reinterpret_cast<uint16_t*>(data),