Bug 1841195 - Use VideoColorSpaceInternal in VideoFrame for greater flexibility r=media-playback-reviewers,padenot

This patch replaces `VideoColorSpaceInit` with `VideoColorSpaceInternal` in
`VideoFrame`, enabling more flexible utilities and conversion functions. By
adopting `VideoColorSpaceInternal`, we can more easily log and map color space
details to underlying graphics properties, simplifying future color-related
enhancements and ensuring consistent handling of color space information
throughout the pipeline.

Differential Revision: https://phabricator.services.mozilla.com/D246533
Author: Chun-Min Chang
Date:   2025-04-28 16:04:04 +00:00
Parent: a88340e8c9
Commit: 3ca6a203c6

6 changed files with 111 additions and 109 deletions
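Context for the diff below: `VideoColorSpaceInternal` stores its four fields as `Maybe<>` members and, with this patch, gains value constructors, `operator==`/`operator!=`, and a `ToString()` helper (see the WebCodecsUtils.h hunk), so `VideoFrame` can compare and log color spaces directly instead of going through the removed `IsSameColorSpace` and `ColorSpaceInitToString` helpers. Below is a minimal standalone sketch of that pattern only, using `std::optional` as a stand-in for `mozilla::Maybe` and placeholder enums; none of these names are the actual Gecko definitions.

// Standalone illustration only (compile with -std=c++20). std::optional plays
// the role of mozilla::Maybe; the enum names below are placeholders, not the
// WebCodecs bindings.
#include <cstdio>
#include <optional>
#include <string>

enum class Matrix { Rgb, Bt709 };
enum class Primaries { Bt709, Smpte432 };
enum class Transfer { Bt709, Iec61966_2_1 };

struct ColorSpaceSketch {
  std::optional<bool> fullRange;
  std::optional<Matrix> matrix;
  std::optional<Primaries> primaries;
  std::optional<Transfer> transfer;

  // Defaulted member-wise equality: callers write `a != b` directly instead of
  // calling a free helper such as the removed IsSameColorSpace().
  bool operator==(const ColorSpaceSketch&) const = default;

  // Rough analogue of ToString(): unset ("null") fields print as "none".
  std::string ToString() const {
    auto num = [](const auto& field) -> std::string {
      return field ? std::to_string(static_cast<int>(*field)) : "none";
    };
    std::string range = fullRange ? (*fullRange ? "true" : "false") : "none";
    return "VideoColorSpace range: " + range + " matrix: " + num(matrix) +
           " primaries: " + num(primaries) + " transfer: " + num(transfer);
  }
};

int main() {
  // Analogue of FallbackColorSpaceForVideoContent(): fully populated BT.709,
  // limited range.
  ColorSpaceSketch bt709{false, Matrix::Bt709, Primaries::Bt709,
                         Transfer::Bt709};
  ColorSpaceSketch unset;  // every field still unset, i.e. "null"

  std::printf("%s\n", bt709.ToString().c_str());
  std::printf("same? %s\n", bt709 == unset ? "yes" : "no");  // prints "no"
  return 0;
}

The same simplification shows up in VideoFrame::CopyTo below, where the IsSameColorSpace(...) call becomes a plain mColorSpace != ConvertToColorSpace(colorSpace) comparison.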


@@ -76,7 +76,7 @@ void ImageTrack::OnDecodeFramesSuccess(
            aResult.mFrames.Length());
   for (const auto& f : aResult.mFrames) {
-    VideoColorSpaceInit colorSpace;
+    VideoColorSpaceInternal colorSpace;
     gfx::IntSize size = f.mSurface->GetSize();
     gfx::IntRect rect(gfx::IntPoint(0, 0), size);


@@ -559,10 +559,10 @@ static RefPtr<VideoFrame> CreateVideoFrame(
     }
   }
-  return MakeRefPtr<VideoFrame>(
-      aGlobalObject, aData->mImage, format, aData->mImage->GetSize(),
-      aData->mImage->GetPictureRect(), displaySize, Some(aDuration), aTimestamp,
-      aColorSpace.ToColorSpaceInit());
+  return MakeRefPtr<VideoFrame>(aGlobalObject, aData->mImage, format,
+                                aData->mImage->GetSize(),
+                                aData->mImage->GetPictureRect(), displaySize,
+                                Some(aDuration), aTimestamp, aColorSpace);
 }
 /* static */


@@ -14,7 +14,6 @@
#include "ImageConversion.h"
#include "MediaResult.h"
#include "VideoColorSpace.h"
#include "WebCodecsUtils.h"
#include "js/StructuredClone.h"
#include "mozilla/Maybe.h"
#include "mozilla/ResultVariant.h"
@@ -287,8 +286,9 @@ static Result<RefPtr<layers::Image>, MediaResult> CreateRGBAImageFromBuffer(
 }
 static Result<RefPtr<layers::Image>, MediaResult> CreateYUVImageFromBuffer(
-    const VideoFrame::Format& aFormat, const VideoColorSpaceInit& aColorSpace,
-    const gfx::IntSize& aSize, const Span<uint8_t>& aBuffer) {
+    const VideoFrame::Format& aFormat,
+    const VideoColorSpaceInternal& aColorSpace, const gfx::IntSize& aSize,
+    const Span<uint8_t>& aBuffer) {
   if (aFormat.PixelFormat() == VideoPixelFormat::I420 ||
       aFormat.PixelFormat() == VideoPixelFormat::I420A) {
     UniquePtr<I420BufferReader> reader;
@@ -327,17 +327,17 @@ static Result<RefPtr<layers::Image>, MediaResult> CreateYUVImageFromBuffer(
     data.mCbCrStride = reader->mStrideU;
     data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
     // Color settings.
-    if (!aColorSpace.mFullRange.IsNull()) {
-      data.mColorRange = ToColorRange(aColorSpace.mFullRange.Value());
+    if (aColorSpace.mFullRange) {
+      data.mColorRange = ToColorRange(aColorSpace.mFullRange.value());
     }
-    MOZ_RELEASE_ASSERT(!aColorSpace.mMatrix.IsNull());
-    data.mYUVColorSpace = ToColorSpace(aColorSpace.mMatrix.Value());
-    if (!aColorSpace.mTransfer.IsNull()) {
+    MOZ_RELEASE_ASSERT(aColorSpace.mMatrix);
+    data.mYUVColorSpace = ToColorSpace(aColorSpace.mMatrix.value());
+    if (aColorSpace.mTransfer) {
       data.mTransferFunction =
-          ToTransferFunction(aColorSpace.mTransfer.Value());
+          ToTransferFunction(aColorSpace.mTransfer.value());
     }
-    if (!aColorSpace.mPrimaries.IsNull()) {
-      data.mColorPrimaries = ToPrimaries(aColorSpace.mPrimaries.Value());
+    if (aColorSpace.mPrimaries) {
+      data.mColorPrimaries = ToPrimaries(aColorSpace.mPrimaries.value());
     }
     RefPtr<layers::PlanarYCbCrImage> image =
@@ -374,17 +374,17 @@ static Result<RefPtr<layers::Image>, MediaResult> CreateYUVImageFromBuffer(
     data.mCbCrStride = reader.mStrideUV;
     data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
     // Color settings.
-    if (!aColorSpace.mFullRange.IsNull()) {
-      data.mColorRange = ToColorRange(aColorSpace.mFullRange.Value());
+    if (aColorSpace.mFullRange) {
+      data.mColorRange = ToColorRange(aColorSpace.mFullRange.value());
     }
-    MOZ_RELEASE_ASSERT(!aColorSpace.mMatrix.IsNull());
-    data.mYUVColorSpace = ToColorSpace(aColorSpace.mMatrix.Value());
-    if (!aColorSpace.mTransfer.IsNull()) {
+    MOZ_RELEASE_ASSERT(aColorSpace.mMatrix);
+    data.mYUVColorSpace = ToColorSpace(aColorSpace.mMatrix.value());
+    if (aColorSpace.mTransfer) {
       data.mTransferFunction =
-          ToTransferFunction(aColorSpace.mTransfer.Value());
+          ToTransferFunction(aColorSpace.mTransfer.value());
     }
-    if (!aColorSpace.mPrimaries.IsNull()) {
-      data.mColorPrimaries = ToPrimaries(aColorSpace.mPrimaries.Value());
+    if (aColorSpace.mPrimaries) {
+      data.mColorPrimaries = ToPrimaries(aColorSpace.mPrimaries.value());
     }
     RefPtr<layers::NVImage> image = new layers::NVImage();
@@ -403,8 +403,9 @@ static Result<RefPtr<layers::Image>, MediaResult> CreateYUVImageFromBuffer(
 }
 static Result<RefPtr<layers::Image>, MediaResult> CreateImageFromBuffer(
-    const VideoFrame::Format& aFormat, const VideoColorSpaceInit& aColorSpace,
-    const gfx::IntSize& aSize, const Span<uint8_t>& aBuffer) {
+    const VideoFrame::Format& aFormat,
+    const VideoColorSpaceInternal& aColorSpace, const gfx::IntSize& aSize,
+    const Span<uint8_t>& aBuffer) {
   switch (aFormat.PixelFormat()) {
     case VideoPixelFormat::I420:
     case VideoPixelFormat::I420A:
@@ -914,16 +915,16 @@ static bool IsYUVFormat(const VideoPixelFormat& aFormat) {
 }
 // https://w3c.github.io/webcodecs/#videoframe-pick-color-space
-static VideoColorSpaceInit PickColorSpace(
+static VideoColorSpaceInternal PickColorSpace(
     const VideoColorSpaceInit* aInitColorSpace,
     const VideoPixelFormat& aFormat) {
-  VideoColorSpaceInit colorSpace;
+  VideoColorSpaceInternal colorSpace;
   if (aInitColorSpace) {
-    colorSpace = *aInitColorSpace;
+    colorSpace = VideoColorSpaceInternal(*aInitColorSpace);
     // By spec, we MAY replace null members of aInitColorSpace with guessed
     // values so we can always use these in CreateYUVImageFromBuffer.
-    if (IsYUVFormat(aFormat) && colorSpace.mMatrix.IsNull()) {
-      colorSpace.mMatrix.SetValue(VideoMatrixCoefficients::Bt709);
+    if (IsYUVFormat(aFormat) && colorSpace.mMatrix.isNothing()) {
+      colorSpace.mMatrix.emplace(VideoMatrixCoefficients::Bt709);
     }
     return colorSpace;
   }
@@ -949,20 +950,20 @@ static VideoColorSpaceInit PickColorSpace(
     case VideoPixelFormat::I444AP12:
     case VideoPixelFormat::NV12:
       // https://w3c.github.io/webcodecs/#rec709-color-space
-      colorSpace.mFullRange.SetValue(false);
-      colorSpace.mMatrix.SetValue(VideoMatrixCoefficients::Bt709);
-      colorSpace.mPrimaries.SetValue(VideoColorPrimaries::Bt709);
-      colorSpace.mTransfer.SetValue(VideoTransferCharacteristics::Bt709);
+      colorSpace.mFullRange.emplace(false);
+      colorSpace.mMatrix.emplace(VideoMatrixCoefficients::Bt709);
+      colorSpace.mPrimaries.emplace(VideoColorPrimaries::Bt709);
+      colorSpace.mTransfer.emplace(VideoTransferCharacteristics::Bt709);
       break;
     case VideoPixelFormat::RGBA:
     case VideoPixelFormat::RGBX:
     case VideoPixelFormat::BGRA:
     case VideoPixelFormat::BGRX:
       // https://w3c.github.io/webcodecs/#srgb-color-space
-      colorSpace.mFullRange.SetValue(true);
-      colorSpace.mMatrix.SetValue(VideoMatrixCoefficients::Rgb);
-      colorSpace.mPrimaries.SetValue(VideoColorPrimaries::Bt709);
-      colorSpace.mTransfer.SetValue(VideoTransferCharacteristics::Iec61966_2_1);
+      colorSpace.mFullRange.emplace(true);
+      colorSpace.mMatrix.emplace(VideoMatrixCoefficients::Rgb);
+      colorSpace.mPrimaries.emplace(VideoColorPrimaries::Bt709);
+      colorSpace.mTransfer.emplace(VideoTransferCharacteristics::Iec61966_2_1);
       break;
   }
@@ -1041,7 +1042,7 @@ static Result<RefPtr<VideoFrame>, MediaResult> CreateVideoFrameFromBuffer(
   Maybe<uint64_t> duration = OptionalToMaybe(aInit.mDuration);
-  VideoColorSpaceInit colorSpace =
+  VideoColorSpaceInternal colorSpace =
       PickColorSpace(OptionalToPointer(aInit.mColorSpace), aInit.mFormat);
   RefPtr<layers::Image> data;
@@ -1159,7 +1160,7 @@ InitializeFrameWithResourceAndSize(nsIGlobalObject* aGlobal,
   Maybe<uint64_t> duration = OptionalToMaybe(aInit.mDuration);
-  VideoColorSpaceInit colorSpace{};
+  VideoColorSpaceInternal colorSpace;
   if (IsYUVFormat(
           SurfaceFormatToVideoPixelFormat(surface->GetFormat()).ref())) {
     colorSpace = FallbackColorSpaceForVideoContent();
@@ -1292,25 +1293,25 @@ static Result<RefPtr<layers::Image>, MediaResult> ConvertToRGBAImage(
                              surfaceFormat, data);
 }
-static VideoColorSpaceInit ConvertToColorSpace(
+static VideoColorSpaceInternal ConvertToColorSpace(
     const PredefinedColorSpace& aColorSpace) {
-  VideoColorSpaceInit colorSpace;
+  VideoColorSpaceInternal colorSpace;
   switch (aColorSpace) {
     case PredefinedColorSpace::Srgb:
       // https://w3c.github.io/webcodecs/#srgb-color-space
-      colorSpace.mFullRange.SetValue(true);
-      colorSpace.mMatrix.SetValue(VideoMatrixCoefficients::Rgb);
-      colorSpace.mPrimaries.SetValue(VideoColorPrimaries::Bt709);
-      colorSpace.mTransfer.SetValue(VideoTransferCharacteristics::Iec61966_2_1);
+      colorSpace.mFullRange.emplace(true);
+      colorSpace.mMatrix.emplace(VideoMatrixCoefficients::Rgb);
+      colorSpace.mPrimaries.emplace(VideoColorPrimaries::Bt709);
+      colorSpace.mTransfer.emplace(VideoTransferCharacteristics::Iec61966_2_1);
       break;
     case PredefinedColorSpace::Display_p3:
-      colorSpace.mFullRange.SetValue(true);
-      colorSpace.mMatrix.SetValue(VideoMatrixCoefficients::Rgb);
-      colorSpace.mPrimaries.SetValue(VideoColorPrimaries::Smpte432);
-      colorSpace.mTransfer.SetValue(VideoTransferCharacteristics::Iec61966_2_1);
+      colorSpace.mFullRange.emplace(true);
+      colorSpace.mMatrix.emplace(VideoMatrixCoefficients::Rgb);
+      colorSpace.mPrimaries.emplace(VideoColorPrimaries::Smpte432);
+      colorSpace.mTransfer.emplace(VideoTransferCharacteristics::Iec61966_2_1);
       break;
   }
-  MOZ_ASSERT(!colorSpace.mFullRange.IsNull());
+  MOZ_ASSERT(colorSpace.mFullRange.isSome());
   return colorSpace;
 }
@@ -1323,7 +1324,7 @@ VideoFrameData::VideoFrameData(layers::Image* aImage,
                                gfx::IntRect aVisibleRect,
                                gfx::IntSize aDisplaySize,
                                Maybe<uint64_t> aDuration, int64_t aTimestamp,
-                               const VideoColorSpaceInit& aColorSpace)
+                               const VideoColorSpaceInternal& aColorSpace)
     : mImage(aImage),
       mFormat(aFormat),
       mVisibleRect(aVisibleRect),
@@ -1346,7 +1347,7 @@ VideoFrame::VideoFrame(nsIGlobalObject* aParent,
                        gfx::IntSize aCodedSize, gfx::IntRect aVisibleRect,
                        gfx::IntSize aDisplaySize,
                        const Maybe<uint64_t>& aDuration, int64_t aTimestamp,
-                       const VideoColorSpaceInit& aColorSpace)
+                       const VideoColorSpaceInternal& aColorSpace)
     : mParent(aParent),
       mCodedSize(aCodedSize),
       mVisibleRect(aVisibleRect),
@@ -1872,7 +1873,8 @@ int64_t VideoFrame::Timestamp() const {
 already_AddRefed<VideoColorSpace> VideoFrame::ColorSpace() const {
   AssertIsOnOwningThread();
-  return MakeAndAddRef<VideoColorSpace>(mParent, mColorSpace);
+  return MakeAndAddRef<VideoColorSpace>(mParent,
+                                        mColorSpace.ToColorSpaceInit());
 }
 // https://w3c.github.io/webcodecs/#dom-videoframe-allocationsize
@@ -1953,7 +1955,7 @@ already_AddRefed<Promise> VideoFrame::CopyTo(
                              : PredefinedColorSpace::Srgb;
   if (mResource->mFormat->PixelFormat() != aOptions.mFormat.Value() ||
-      !IsSameColorSpace(ConvertToColorSpace(colorSpace), mColorSpace)) {
+      mColorSpace != ConvertToColorSpace(colorSpace)) {
     AutoJSAPI jsapi;
     if (!jsapi.Init(mParent.get())) {
       p->MaybeRejectWithTypeError("Failed to get JS context");
@@ -2047,7 +2049,7 @@ void VideoFrame::Close() {
   mCodedSize = gfx::IntSize();
   mVisibleRect = gfx::IntRect();
   mDisplaySize = gfx::IntSize();
-  mColorSpace = VideoColorSpaceInit();
+  mColorSpace = VideoColorSpaceInternal();
   StopAutoClose();
}
@@ -2079,7 +2081,7 @@ nsCString VideoFrame::ToString() const {
       format ? dom::GetEnumString(*format).get() : "unknown pixel format",
       mCodedSize.width, mCodedSize.height, mVisibleRect.width,
       mVisibleRect.height, mDisplaySize.width, mDisplaySize.height,
-      ColorSpaceInitToString(mColorSpace).get());
+      mColorSpace.ToString().get());
   if (mDuration) {
     rv.AppendPrintf(" dur: %" PRId64, mDuration.value());


@@ -15,6 +15,7 @@
#include "mozilla/dom/BindingDeclarations.h"
#include "mozilla/dom/TypedArray.h"
#include "mozilla/dom/VideoColorSpaceBinding.h"
#include "mozilla/dom/WebCodecsUtils.h"
#include "mozilla/gfx/Point.h"
#include "mozilla/gfx/Rect.h"
#include "mozilla/media/MediaUtils.h"
@@ -58,7 +59,7 @@ struct VideoFrameData {
   VideoFrameData(layers::Image* aImage, const Maybe<VideoPixelFormat>& aFormat,
                  gfx::IntRect aVisibleRect, gfx::IntSize aDisplaySize,
                  Maybe<uint64_t> aDuration, int64_t aTimestamp,
-                 const VideoColorSpaceInit& aColorSpace);
+                 const VideoColorSpaceInternal& aColorSpace);
   VideoFrameData(const VideoFrameData& aData) = default;
   const RefPtr<layers::Image> mImage;
@@ -67,7 +68,7 @@ struct VideoFrameData {
   const gfx::IntSize mDisplaySize;
   const Maybe<uint64_t> mDuration;
   const int64_t mTimestamp;
-  const VideoColorSpaceInit mColorSpace;
+  const VideoColorSpaceInternal mColorSpace;
 };
 struct VideoFrameSerializedData : VideoFrameData {
@@ -89,7 +90,7 @@ class VideoFrame final : public nsISupports,
              const Maybe<VideoPixelFormat>& aFormat, gfx::IntSize aCodedSize,
              gfx::IntRect aVisibleRect, gfx::IntSize aDisplaySize,
              const Maybe<uint64_t>& aDuration, int64_t aTimestamp,
-             const VideoColorSpaceInit& aColorSpace);
+             const VideoColorSpaceInternal& aColorSpace);
   VideoFrame(nsIGlobalObject* aParent, const VideoFrameSerializedData& aData);
   VideoFrame(const VideoFrame& aOther);
@@ -260,7 +261,7 @@ class VideoFrame final : public nsISupports,
   Maybe<uint64_t> mDuration;
   int64_t mTimestamp;
-  VideoColorSpaceInit mColorSpace;
+  VideoColorSpaceInternal mColorSpace;
   // The following are used to help monitoring mResource release.
   RefPtr<media::ShutdownWatcher> mShutdownWatcher = nullptr;


@@ -101,12 +101,6 @@ Maybe<nsString> ParseCodecString(const nsAString& aCodec) {
   return Some(codecs[0]);
 }
-bool IsSameColorSpace(const VideoColorSpaceInit& aLhs,
-                      const VideoColorSpaceInit& aRhs) {
-  return aLhs.mFullRange == aRhs.mFullRange && aLhs.mMatrix == aRhs.mMatrix &&
-         aLhs.mPrimaries == aRhs.mPrimaries && aLhs.mTransfer == aRhs.mTransfer;
-}
 /*
  * The below are helpers to operate ArrayBuffer or ArrayBufferView.
  */
@@ -214,6 +208,21 @@ VideoColorSpaceInit VideoColorSpaceInternal::ToColorSpaceInit() const {
   return init;
 }
+nsCString VideoColorSpaceInternal::ToString() const {
+  nsCString rv("VideoColorSpace");
+  rv.AppendPrintf(" range: %s",
+                  mFullRange ? mFullRange.value() ? "true" : "false" : "none");
+  rv.AppendPrintf(" matrix: %s",
+                  mMatrix ? GetEnumString(mMatrix.value()).get() : "none");
+  rv.AppendPrintf(
+      " primaries: %s",
+      mPrimaries ? GetEnumString(mPrimaries.value()).get() : "none");
+  rv.AppendPrintf(" transfer: %s",
+                  mTransfer ? GetEnumString(mTransfer.value()).get() : "none");
+  return rv;
+}
 gfx::ColorRange ToColorRange(bool aIsFullRange) {
   return aIsFullRange ? gfx::ColorRange::FULL : gfx::ColorRange::LIMITED;
 }
@@ -527,60 +536,30 @@ WebCodecsConfigurationChangeList::ToPEMChangeList() const {
   return rv.forget();
 }
-nsCString ColorSpaceInitToString(
-    const dom::VideoColorSpaceInit& aColorSpaceInit) {
-  nsCString rv("VideoColorSpace");
-  if (!aColorSpaceInit.mFullRange.IsNull()) {
-    rv.AppendPrintf(" range: %s",
-                    aColorSpaceInit.mFullRange.Value() ? "true" : "false");
-  }
-  if (!aColorSpaceInit.mMatrix.IsNull()) {
-    rv.AppendPrintf(" matrix: %s",
-                    GetEnumString(aColorSpaceInit.mMatrix.Value()).get());
-  }
-  if (!aColorSpaceInit.mTransfer.IsNull()) {
-    rv.AppendPrintf(" transfer: %s",
-                    GetEnumString(aColorSpaceInit.mTransfer.Value()).get());
-  }
-  if (!aColorSpaceInit.mPrimaries.IsNull()) {
-    rv.AppendPrintf(" primaries: %s",
-                    GetEnumString(aColorSpaceInit.mPrimaries.Value()).get());
-  }
-  return rv;
-}
 RefPtr<TaskQueue> GetWebCodecsEncoderTaskQueue() {
   return TaskQueue::Create(
       GetMediaThreadPool(MediaThreadType::PLATFORM_ENCODER),
       "WebCodecs encoding", false);
 }
-VideoColorSpaceInit FallbackColorSpaceForVideoContent() {
+VideoColorSpaceInternal FallbackColorSpaceForVideoContent() {
   // If we're unable to determine the color space, but we think this is video
   // content (e.g. because it's in YUV or NV12 or something like that,
   // consider it's in BT709).
   // This is step 3 of
   // https://w3c.github.io/webcodecs/#videoframe-pick-color-space
-  VideoColorSpaceInit colorSpace;
-  colorSpace.mFullRange = false;
-  colorSpace.mMatrix = VideoMatrixCoefficients::Bt709;
-  colorSpace.mTransfer = VideoTransferCharacteristics::Bt709;
-  colorSpace.mPrimaries = VideoColorPrimaries::Bt709;
-  return colorSpace;
+  return VideoColorSpaceInternal(false, VideoMatrixCoefficients::Bt709,
+                                 VideoColorPrimaries::Bt709,
+                                 VideoTransferCharacteristics::Bt709);
 }
-VideoColorSpaceInit FallbackColorSpaceForWebContent() {
+VideoColorSpaceInternal FallbackColorSpaceForWebContent() {
   // If we're unable to determine the color space, but we think this is from
   // Web content (canvas, image, svg, etc.), consider it's in sRGB.
   // This is step 2 of
   // https://w3c.github.io/webcodecs/#videoframe-pick-color-space
-  VideoColorSpaceInit colorSpace;
-  colorSpace.mFullRange = true;
-  colorSpace.mMatrix = VideoMatrixCoefficients::Rgb;
-  colorSpace.mTransfer = VideoTransferCharacteristics::Iec61966_2_1;
-  colorSpace.mPrimaries = VideoColorPrimaries::Bt709;
-  return colorSpace;
+  return VideoColorSpaceInternal(true, VideoMatrixCoefficients::Rgb,
+                                 VideoColorPrimaries::Bt709,
+                                 VideoTransferCharacteristics::Iec61966_2_1);
 }
 Maybe<CodecType> CodecStringToCodecType(const nsAString& aCodecString) {


@@ -170,12 +170,32 @@ bool CopyExtradataToDescription(JSContext* aCx, Span<const uint8_t>& aSrc,
 struct VideoColorSpaceInternal {
   explicit VideoColorSpaceInternal(const VideoColorSpaceInit& aColorSpaceInit);
   VideoColorSpaceInternal() = default;
-  VideoColorSpaceInit ToColorSpaceInit() const;
+  VideoColorSpaceInternal(const bool& aFullRange,
+                          const VideoMatrixCoefficients& aMatrix,
+                          const VideoColorPrimaries& aPrimaries,
+                          const VideoTransferCharacteristics& aTransfer)
+      : mFullRange(Some(aFullRange)),
+        mMatrix(Some(aMatrix)),
+        mPrimaries(Some(aPrimaries)),
+        mTransfer(Some(aTransfer)) {}
+  VideoColorSpaceInternal(const VideoColorSpaceInternal& aOther) = default;
+  VideoColorSpaceInternal(VideoColorSpaceInternal&& aOther) = default;
+  VideoColorSpaceInternal& operator=(const VideoColorSpaceInternal& aOther) =
+      default;
+  VideoColorSpaceInternal& operator=(VideoColorSpaceInternal&& aOther) =
+      default;
+  bool operator==(const VideoColorSpaceInternal& aOther) const {
+    return mFullRange == aOther.mFullRange && mMatrix == aOther.mMatrix &&
+           mPrimaries == aOther.mPrimaries && mTransfer == aOther.mTransfer;
+  }
+  bool operator!=(const VideoColorSpaceInternal& aOther) const {
+    return !(*this == aOther);
+  }
+  VideoColorSpaceInit ToColorSpaceInit() const;
+  nsCString ToString() const;
   Maybe<bool> mFullRange;
   Maybe<VideoMatrixCoefficients> mMatrix;
@@ -311,8 +331,8 @@ nsCString ColorSpaceInitToString(
     const dom::VideoColorSpaceInit& aColorSpaceInit);
 RefPtr<TaskQueue> GetWebCodecsEncoderTaskQueue();
-VideoColorSpaceInit FallbackColorSpaceForVideoContent();
-VideoColorSpaceInit FallbackColorSpaceForWebContent();
+VideoColorSpaceInternal FallbackColorSpaceForVideoContent();
+VideoColorSpaceInternal FallbackColorSpaceForWebContent();
 Maybe<CodecType> CodecStringToCodecType(const nsAString& aCodecString);