Bug 761018 - GStreamer video buffer handling optimization; r=doublec

commit 22d1883693
parent 52deb46b82
Author: Alessandro Decina
Date:   2013-03-13 16:11:15 -04:00
6 changed files with 388 additions and 165 deletions

@@ -142,6 +142,7 @@ VideoData::~VideoData()
 VideoData* VideoData::Create(VideoInfo& aInfo,
                              ImageContainer* aContainer,
+                             Image* aImage,
                              int64_t aOffset,
                              int64_t aTime,
                              int64_t aEndTime,
@@ -150,7 +151,7 @@ VideoData* VideoData::Create(VideoInfo& aInfo,
                              int64_t aTimecode,
                              nsIntRect aPicture)
 {
-  if (!aContainer) {
+  if (!aImage && !aContainer) {
     // Create a dummy VideoData with no image. This gives us something to
     // send to media streams if necessary.
     nsAutoPtr<VideoData> v(new VideoData(aOffset,
@@ -204,6 +205,7 @@ VideoData* VideoData::Create(VideoInfo& aInfo,
   const YCbCrBuffer::Plane &Cb = aBuffer.mPlanes[1];
   const YCbCrBuffer::Plane &Cr = aBuffer.mPlanes[2];
+  if (!aImage) {
     // Currently our decoder only knows how to output to PLANAR_YCBCR
     // format.
     ImageFormat format[2] = {PLANAR_YCBCR, GRALLOC_PLANAR_YCBCR};
@@ -212,6 +214,10 @@ VideoData* VideoData::Create(VideoInfo& aInfo,
     } else {
       v->mImage = aContainer->CreateImage(format, 1);
     }
+  } else {
+    v->mImage = aImage;
+  }
   if (!v->mImage) {
     return nullptr;
   }
@@ -237,10 +243,43 @@ VideoData* VideoData::Create(VideoInfo& aInfo,
   data.mStereoMode = aInfo.mStereoMode;
   videoImage->SetDelayedConversion(true);
+  if (!aImage) {
     videoImage->SetData(data);
+  } else {
+    videoImage->SetDataNoCopy(data);
+  }
   return v.forget();
 }
+
+VideoData* VideoData::Create(VideoInfo& aInfo,
+                             ImageContainer* aContainer,
+                             int64_t aOffset,
+                             int64_t aTime,
+                             int64_t aEndTime,
+                             const YCbCrBuffer& aBuffer,
+                             bool aKeyframe,
+                             int64_t aTimecode,
+                             nsIntRect aPicture)
+{
+  return Create(aInfo, aContainer, nullptr, aOffset, aTime, aEndTime, aBuffer,
+                aKeyframe, aTimecode, aPicture);
+}
+
+VideoData* VideoData::Create(VideoInfo& aInfo,
+                             Image* aImage,
+                             int64_t aOffset,
+                             int64_t aTime,
+                             int64_t aEndTime,
+                             const YCbCrBuffer& aBuffer,
+                             bool aKeyframe,
+                             int64_t aTimecode,
+                             nsIntRect aPicture)
+{
+  return Create(aInfo, nullptr, aImage, aOffset, aTime, aEndTime, aBuffer,
+                aKeyframe, aTimecode, aPicture);
+}
+
 VideoData* VideoData::CreateFromImage(VideoInfo& aInfo,
                                       ImageContainer* aContainer,
                                       int64_t aOffset,
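The master Create() above now takes both an ImageContainer and an optional pre-allocated Image, and the two new overloads simply forward to it. A hedged caller-side sketch of the two resulting paths (BuildFrame and its arguments are hypothetical; only VideoData::Create comes from this patch):

```cpp
// Hypothetical helper showing how a reader picks a path after this patch.
VideoData* BuildFrame(VideoInfo& aInfo, ImageContainer* aContainer,
                      Image* aPreallocatedImage,
                      const VideoData::YCbCrBuffer& aBuffer,
                      int64_t aOffset, int64_t aTime, int64_t aEndTime,
                      bool aKeyframe, int64_t aTimecode, nsIntRect aPicture)
{
  if (aPreallocatedImage) {
    // Zero-copy path: aBuffer's planes are assumed to point into
    // aPreallocatedImage, so Create() stores the image and calls
    // SetDataNoCopy() instead of copying the pixels.
    return VideoData::Create(aInfo, aPreallocatedImage, aOffset, aTime,
                             aEndTime, aBuffer, aKeyframe, aTimecode,
                             aPicture);
  }
  // Copying path: Create() allocates a fresh image from aContainer and
  // copies the YCbCr planes into it via SetData().
  return VideoData::Create(aInfo, aContainer, aOffset, aTime, aEndTime,
                           aBuffer, aKeyframe, aTimecode, aPicture);
}
```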

@@ -132,14 +132,39 @@ public:
   Plane mPlanes[3];
 };

-  // Constructs a VideoData object. Makes a copy of YCbCr data in aBuffer.
-  // aTimecode is a codec specific number representing the timestamp of
-  // the frame of video data. Returns nullptr if an error occurs. This may
-  // indicate that memory couldn't be allocated to create the VideoData
-  // object, or it may indicate some problem with the input data (e.g.
-  // negative stride).
+  // Constructs a VideoData object. If aImage is NULL, creates a new Image
+  // holding a copy of the YCbCr data passed in aBuffer. If aImage is not NULL,
+  // it's stored as the underlying video image and aBuffer is assumed to point
+  // to memory within aImage, so no copy is made. aTimecode is a codec specific
+  // number representing the timestamp of the frame of video data. Returns
+  // nullptr if an error occurs. This may indicate that memory couldn't be
+  // allocated to create the VideoData object, or it may indicate some problem
+  // with the input data (e.g. negative stride).
   static VideoData* Create(VideoInfo& aInfo,
                            ImageContainer* aContainer,
+                           Image* aImage,
+                           int64_t aOffset,
+                           int64_t aTime,
+                           int64_t aEndTime,
+                           const YCbCrBuffer &aBuffer,
+                           bool aKeyframe,
+                           int64_t aTimecode,
+                           nsIntRect aPicture);
+
+  // Variant that always makes a copy of aBuffer.
+  static VideoData* Create(VideoInfo& aInfo,
+                           ImageContainer* aContainer,
+                           int64_t aOffset,
+                           int64_t aTime,
+                           int64_t aEndTime,
+                           const YCbCrBuffer &aBuffer,
+                           bool aKeyframe,
+                           int64_t aTimecode,
+                           nsIntRect aPicture);
+
+  // Variant to create a VideoData instance given an existing aImage.
+  static VideoData* Create(VideoInfo& aInfo,
+                           Image* aImage,
                            int64_t aOffset,
                            int64_t aTime,
                            int64_t aEndTime,

@@ -27,12 +27,19 @@ extern PRLogModuleInfo* gMediaDecoderLog;
 #define LOG(type, msg)
 #endif

+extern bool
+IsYV12Format(const VideoData::YCbCrBuffer::Plane& aYPlane,
+             const VideoData::YCbCrBuffer::Plane& aCbPlane,
+             const VideoData::YCbCrBuffer::Plane& aCrPlane);
+
 static const int MAX_CHANNELS = 4;
 // Let the demuxer work in pull mode for short files
 static const int SHORT_FILE_SIZE = 1024 * 1024;
 // The default resource->Read() size when working in push mode
 static const int DEFAULT_SOURCE_READ_SIZE = 50 * 1024;

+G_DEFINE_BOXED_TYPE(BufferData, buffer_data, BufferData::Copy, BufferData::Free);
+
 typedef enum {
   GST_PLAY_FLAG_VIDEO = (1 << 0),
   GST_PLAY_FLAG_AUDIO = (1 << 1),
@@ -49,13 +56,13 @@ typedef enum {
 GStreamerReader::GStreamerReader(AbstractMediaDecoder* aDecoder)
   : MediaDecoderReader(aDecoder),
-  mPlayBin(NULL),
-  mBus(NULL),
-  mSource(NULL),
-  mVideoSink(NULL),
-  mVideoAppSink(NULL),
-  mAudioSink(NULL),
-  mAudioAppSink(NULL),
+  mPlayBin(nullptr),
+  mBus(nullptr),
+  mSource(nullptr),
+  mVideoSink(nullptr),
+  mVideoAppSink(nullptr),
+  mAudioSink(nullptr),
+  mAudioAppSink(nullptr),
   mFormat(GST_VIDEO_FORMAT_UNKNOWN),
   mVideoSinkBufferCount(0),
   mAudioSinkBufferCount(0),
@@ -75,7 +82,7 @@ GStreamerReader::GStreamerReader(AbstractMediaDecoder* aDecoder)
   mSinkCallbacks.eos = GStreamerReader::EosCb;
   mSinkCallbacks.new_preroll = GStreamerReader::NewPrerollCb;
   mSinkCallbacks.new_buffer = GStreamerReader::NewBufferCb;
-  mSinkCallbacks.new_buffer_list = NULL;
+  mSinkCallbacks.new_buffer_list = nullptr;

   gst_segment_init(&mVideoSegment, GST_FORMAT_UNDEFINED);
   gst_segment_init(&mAudioSegment, GST_FORMAT_UNDEFINED);
@@ -92,66 +99,70 @@ GStreamerReader::~GStreamerReader()
     gst_object_unref(mSource);
     gst_element_set_state(mPlayBin, GST_STATE_NULL);
     gst_object_unref(mPlayBin);
-    mPlayBin = NULL;
-    mVideoSink = NULL;
-    mVideoAppSink = NULL;
-    mAudioSink = NULL;
-    mAudioAppSink = NULL;
+    mPlayBin = nullptr;
+    mVideoSink = nullptr;
+    mVideoAppSink = nullptr;
+    mAudioSink = nullptr;
+    mAudioAppSink = nullptr;
     gst_object_unref(mBus);
-    mBus = NULL;
+    mBus = nullptr;
   }
 }

 nsresult GStreamerReader::Init(MediaDecoderReader* aCloneDonor)
 {
-  GError *error = NULL;
+  GError* error = nullptr;
   if (!gst_init_check(0, 0, &error)) {
     LOG(PR_LOG_ERROR, ("gst initialization failed: %s", error->message));
     g_error_free(error);
     return NS_ERROR_FAILURE;
   }

-  mPlayBin = gst_element_factory_make("playbin2", NULL);
-  if (mPlayBin == NULL) {
+  mPlayBin = gst_element_factory_make("playbin2", nullptr);
+  if (!mPlayBin) {
     LOG(PR_LOG_ERROR, ("couldn't create playbin2"));
     return NS_ERROR_FAILURE;
   }
-  g_object_set(mPlayBin, "buffer-size", 0, NULL);
+  g_object_set(mPlayBin, "buffer-size", 0, nullptr);
   mBus = gst_pipeline_get_bus(GST_PIPELINE(mPlayBin));

   mVideoSink = gst_parse_bin_from_description("capsfilter name=filter ! "
       "appsink name=videosink sync=true max-buffers=1 "
       "caps=video/x-raw-yuv,format=(fourcc)I420"
-      , TRUE, NULL);
+      , TRUE, nullptr);
   mVideoAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mVideoSink),
       "videosink"));
   gst_app_sink_set_callbacks(mVideoAppSink, &mSinkCallbacks,
-      (gpointer) this, NULL);
+      (gpointer) this, nullptr);
   GstPad* sinkpad = gst_element_get_pad(GST_ELEMENT(mVideoAppSink), "sink");
   gst_pad_add_event_probe(sinkpad,
       G_CALLBACK(&GStreamerReader::EventProbeCb), this);
   gst_object_unref(sinkpad);
+#if GST_VERSION_MICRO >= 36
+  gst_pad_set_bufferalloc_function(sinkpad, GStreamerReader::AllocateVideoBufferCb);
+#endif
+  gst_pad_set_element_private(sinkpad, this);

   mAudioSink = gst_parse_bin_from_description("capsfilter name=filter ! "
 #ifdef MOZ_SAMPLE_TYPE_FLOAT32
       "appsink name=audiosink sync=true caps=audio/x-raw-float,"
 #ifdef IS_LITTLE_ENDIAN
-      "channels={1,2},width=32,endianness=1234", TRUE, NULL);
+      "channels={1,2},width=32,endianness=1234", TRUE, nullptr);
 #else
-      "channels={1,2},width=32,endianness=4321", TRUE, NULL);
+      "channels={1,2},width=32,endianness=4321", TRUE, nullptr);
 #endif
 #else
       "appsink name=audiosink sync=true caps=audio/x-raw-int,"
 #ifdef IS_LITTLE_ENDIAN
-      "channels={1,2},width=16,endianness=1234", TRUE, NULL);
+      "channels={1,2},width=16,endianness=1234", TRUE, nullptr);
 #else
-      "channels={1,2},width=16,endianness=4321", TRUE, NULL);
+      "channels={1,2},width=16,endianness=4321", TRUE, nullptr);
 #endif
 #endif
   mAudioAppSink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(mAudioSink),
       "audiosink"));
   gst_app_sink_set_callbacks(mAudioAppSink, &mSinkCallbacks,
-      (gpointer) this, NULL);
+      (gpointer) this, nullptr);
   sinkpad = gst_element_get_pad(GST_ELEMENT(mAudioAppSink), "sink");
   gst_pad_add_event_probe(sinkpad,
       G_CALLBACK(&GStreamerReader::EventProbeCb), this);
@@ -160,10 +171,10 @@ nsresult GStreamerReader::Init(MediaDecoderReader* aCloneDonor)
   g_object_set(mPlayBin, "uri", "appsrc://",
       "video-sink", mVideoSink,
       "audio-sink", mAudioSink,
-      NULL);
+      nullptr);

-  g_signal_connect(G_OBJECT(mPlayBin), "notify::source",
-      G_CALLBACK(GStreamerReader::PlayBinSourceSetupCb), this);
+  g_object_connect(mPlayBin, "signal::source-setup",
+      GStreamerReader::PlayBinSourceSetupCb, this, nullptr);

   return NS_OK;
 }
@@ -182,7 +193,7 @@ void GStreamerReader::PlayBinSourceSetupCb(GstElement *aPlayBin,
 void GStreamerReader::PlayBinSourceSetup(GstAppSrc* aSource)
 {
   mSource = GST_APP_SRC(aSource);
-  gst_app_src_set_callbacks(mSource, &mSrcCallbacks, (gpointer) this, NULL);
+  gst_app_src_set_callbacks(mSource, &mSrcCallbacks, (gpointer) this, nullptr);
   MediaResource* resource = mDecoder->GetResource();

   /* do a short read to trigger a network request so that GetLength() below
@@ -194,19 +205,19 @@ void GStreamerReader::PlayBinSourceSetup(GstAppSrc *aSource)
   resource->Seek(SEEK_SET, 0);

   /* now we should have a length */
-  int64_t len = resource->GetLength();
-  gst_app_src_set_size(mSource, len);
+  int64_t resourceLength = resource->GetLength();
+  gst_app_src_set_size(mSource, resourceLength);
   if (resource->IsDataCachedToEndOfResource(0) ||
-      (len != -1 && len <= SHORT_FILE_SIZE)) {
+      (resourceLength != -1 && resourceLength <= SHORT_FILE_SIZE)) {
     /* let the demuxer work in pull mode for local files (or very short files)
      * so that we get optimal seeking accuracy/performance
      */
-    LOG(PR_LOG_DEBUG, ("configuring random access, len %lld", len));
+    LOG(PR_LOG_DEBUG, ("configuring random access, len %lld", resourceLength));
     gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_RANDOM_ACCESS);
   } else {
     /* make the demuxer work in push mode so that seeking is kept to a minimum
      */
-    LOG(PR_LOG_DEBUG, ("configuring push mode, len %lld", len));
+    LOG(PR_LOG_DEBUG, ("configuring push mode, len %lld", resourceLength));
     gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_SEEKABLE);
   }
 }
@@ -224,24 +235,24 @@ nsresult GStreamerReader::ReadMetadata(VideoInfo* aInfo,
   guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
       static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
   guint default_flags, current_flags;
-  g_object_get(mPlayBin, "flags", &default_flags, NULL);
+  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

-  GstMessage *message = NULL;
+  GstMessage* message = nullptr;
   for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
     current_flags = default_flags & flags[i];
-    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, NULL);
+    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

     /* reset filter caps to ANY */
     GstCaps* caps = gst_caps_new_any();
     GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
-    g_object_set(filter, "caps", caps, NULL);
+    g_object_set(filter, "caps", caps, nullptr);
     gst_object_unref(filter);

     filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
-    g_object_set(filter, "caps", caps, NULL);
+    g_object_set(filter, "caps", caps, nullptr);
     gst_object_unref(filter);
     gst_caps_unref(caps);
-    filter = NULL;
+    filter = nullptr;

     if (!(current_flags & GST_PLAY_FLAG_AUDIO))
       filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
@@ -252,8 +263,8 @@ nsresult GStreamerReader::ReadMetadata(VideoInfo* aInfo,
       /* Little trick: set the target caps to "skip" so that playbin2 fails to
        * find a decoder for the stream we want to skip.
        */
-      GstCaps *filterCaps = gst_caps_new_simple ("skip", NULL);
-      g_object_set(filter, "caps", filterCaps, NULL);
+      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr);
+      g_object_set(filter, "caps", filterCaps, nullptr);
       gst_caps_unref(filterCaps);
       gst_object_unref(filter);
     }
@@ -316,7 +327,7 @@ nsresult GStreamerReader::ReadMetadata(VideoInfo* aInfo,
   }

   int n_video = 0, n_audio = 0;
-  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, NULL);
+  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);
   mInfo.mHasVideo = n_video != 0;
   mInfo.mHasAudio = n_audio != 0;
@@ -415,7 +426,7 @@ bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
 {
   NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

-  GstBuffer *buffer = NULL;
+  GstBuffer* buffer = nullptr;
   int64_t timestamp, nextTimestamp;
   while (true)
   {
@@ -429,7 +440,7 @@ bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
     bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT);
     if ((aKeyFrameSkip && !isKeyframe)) {
       gst_buffer_unref(buffer);
-      buffer = NULL;
+      buffer = nullptr;
       continue;
     }
@@ -453,17 +464,44 @@ bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
           " threshold %" GST_TIME_FORMAT,
           GST_TIME_ARGS(timestamp), GST_TIME_ARGS(aTimeThreshold)));
       gst_buffer_unref(buffer);
-      buffer = NULL;
+      buffer = nullptr;
       continue;
     }

     break;
   }

-  if (buffer == NULL)
+  if (!buffer)
     /* no more frames */
     return false;

+  nsRefPtr<PlanarYCbCrImage> image;
+#if GST_VERSION_MICRO >= 36
+  const GstStructure* structure = gst_buffer_get_qdata(buffer,
+      g_quark_from_string("moz-reader-data"));
+  const GValue* value = gst_structure_get_value(structure, "image");
+  if (value) {
+    BufferData* data = reinterpret_cast<BufferData*>(g_value_get_boxed(value));
+    image = data->mImage;
+  }
+#endif
+  if (!image) {
+    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fall back to
+     * allocating a PlanarYCbCrImage-backed GstBuffer here and memcpy.
+     */
+    GstBuffer* tmp = nullptr;
+    AllocateVideoBufferFull(nullptr, GST_BUFFER_OFFSET(buffer),
+        GST_BUFFER_SIZE(buffer), nullptr, &tmp, image);
+
+    /* copy */
+    gst_buffer_copy_metadata(tmp, buffer, GST_BUFFER_COPY_ALL);
+    memcpy(GST_BUFFER_DATA(tmp), GST_BUFFER_DATA(buffer),
+        GST_BUFFER_SIZE(tmp));
+    gst_buffer_unref(buffer);
+    buffer = tmp;
+  }
+
   guint8* data = GST_BUFFER_DATA(buffer);

   int width = mPicture.width;
@@ -487,15 +525,9 @@ bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
       GST_BUFFER_FLAG_DELTA_UNIT);
   /* XXX ? */
   int64_t offset = 0;
-  VideoData *video = VideoData::Create(mInfo,
-                                       mDecoder->GetImageContainer(),
-                                       offset,
-                                       timestamp,
-                                       nextTimestamp,
-                                       b,
-                                       isKeyframe,
-                                       -1,
-                                       mPicture);
+  VideoData* video = VideoData::Create(mInfo, image, offset,
+                                       timestamp, nextTimestamp, b,
+                                       isKeyframe, -1, mPicture);
   mVideoQueue.Push(video);
   gst_buffer_unref(buffer);
@@ -532,7 +564,6 @@ nsresult GStreamerReader::GetBuffered(TimeRanges* aBuffered,
   GstFormat format = GST_FORMAT_TIME;
   MediaResource* resource = mDecoder->GetResource();
-  gint64 resourceLength = resource->GetLength();
   nsTArray<MediaByteRange> ranges;
   resource->GetCachedRanges(ranges);
@@ -549,7 +580,7 @@ nsresult GStreamerReader::GetBuffered(TimeRanges* aBuffered,
     duration = QueryDuration();
     double end = (double) duration / GST_MSECOND;
     LOG(PR_LOG_DEBUG, ("complete range [0, %f] for [0, %li]",
-          end, resourceLength));
+          end, resource->GetLength()));
     aBuffered->Add(0, end);
     return NS_OK;
   }
@@ -569,7 +600,7 @@ nsresult GStreamerReader::GetBuffered(TimeRanges* aBuffered,
     double start = (double) GST_TIME_AS_USECONDS (startTime) / GST_MSECOND;
     double end = (double) GST_TIME_AS_USECONDS (endTime) / GST_MSECOND;
     LOG(PR_LOG_DEBUG, ("adding range [%f, %f] for [%li %li] size %li",
-          start, end, startOffset, endOffset, resourceLength));
+          start, end, startOffset, endOffset, resource->GetLength()));
     aBuffered->Add(start, end);
   }
@@ -598,12 +629,14 @@ void GStreamerReader::ReadAndPushData(guint aLength)
   mByteOffset += bytesRead;

   GstFlowReturn ret = gst_app_src_push_buffer(mSource, gst_buffer_ref(buffer));
-  if (ret != GST_FLOW_OK)
+  if (ret != GST_FLOW_OK) {
     LOG(PR_LOG_ERROR, ("ReadAndPushData push ret %s", gst_flow_get_name(ret)));
+  }

-  if (GST_BUFFER_SIZE (buffer) < aLength)
+  if (GST_BUFFER_SIZE (buffer) < aLength) {
     /* If we read less than what we wanted, we reached the end */
     gst_app_src_end_of_stream(mSource);
+  }

   gst_buffer_unref(buffer);
 }
@@ -636,20 +669,20 @@ void GStreamerReader::NeedDataCb(GstAppSrc *aSrc,
                                  guint aLength,
                                  gpointer aUserData)
 {
-  GStreamerReader *reader = (GStreamerReader *) aUserData;
+  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   reader->NeedData(aSrc, aLength);
 }

 void GStreamerReader::NeedData(GstAppSrc* aSrc, guint aLength)
 {
-  if (aLength == -1)
+  if (aLength == static_cast<guint>(-1))
     aLength = DEFAULT_SOURCE_READ_SIZE;
   ReadAndPushData(aLength);
 }

 void GStreamerReader::EnoughDataCb(GstAppSrc* aSrc, gpointer aUserData)
 {
-  GStreamerReader *reader = (GStreamerReader *) aUserData;
+  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   reader->EnoughData(aSrc);
 }
@@ -661,7 +694,7 @@ gboolean GStreamerReader::SeekDataCb(GstAppSrc *aSrc,
                                      guint64 aOffset,
                                      gpointer aUserData)
 {
-  GStreamerReader *reader = (GStreamerReader *) aUserData;
+  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   return reader->SeekData(aSrc, aOffset);
 }
@@ -669,21 +702,25 @@ gboolean GStreamerReader::SeekData(GstAppSrc *aSrc, guint64 aOffset)
 {
   ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
   MediaResource* resource = mDecoder->GetResource();
+  int64_t resourceLength = resource->GetLength();

-  if (gst_app_src_get_size(mSource) == -1)
+  if (gst_app_src_get_size(mSource) == -1) {
     /* It's possible that we didn't know the length when we initialized mSource
      * but maybe we do now
      */
-    gst_app_src_set_size(mSource, resource->GetLength());
+    gst_app_src_set_size(mSource, resourceLength);
+  }

   nsresult rv = NS_ERROR_FAILURE;
-  if (aOffset < resource->GetLength())
+  if (aOffset < static_cast<guint64>(resourceLength)) {
     rv = resource->Seek(SEEK_SET, aOffset);
+  }

-  if (NS_SUCCEEDED(rv))
+  if (NS_SUCCEEDED(rv)) {
     mByteOffset = mLastReportedByteOffset = aOffset;
-  else
+  } else {
     LOG(PR_LOG_ERROR, ("seek at %lu failed", aOffset));
+  }

   return NS_SUCCEEDED(rv);
 }
@@ -692,7 +729,7 @@ gboolean GStreamerReader::EventProbeCb(GstPad *aPad,
                                        GstEvent* aEvent,
                                        gpointer aUserData)
 {
-  GStreamerReader *reader = (GStreamerReader *) aUserData;
+  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   return reader->EventProbe(aPad, aEvent);
 }
@@ -734,10 +771,72 @@ gboolean GStreamerReader::EventProbe(GstPad *aPad, GstEvent *aEvent)
   return TRUE;
 }

+GstFlowReturn GStreamerReader::AllocateVideoBufferFull(GstPad* aPad,
+                                                       guint64 aOffset,
+                                                       guint aSize,
+                                                       GstCaps* aCaps,
+                                                       GstBuffer** aBuf,
+                                                       nsRefPtr<PlanarYCbCrImage>& aImage)
+{
+  /* allocate an image using the container */
+  ImageContainer* container = mDecoder->GetImageContainer();
+  ImageFormat format = PLANAR_YCBCR;
+  PlanarYCbCrImage* img = reinterpret_cast<PlanarYCbCrImage*>(container->CreateImage(&format, 1).get());
+  nsRefPtr<PlanarYCbCrImage> image = dont_AddRef(img);
+
+  /* prepare a GstBuffer pointing to the underlying PlanarYCbCrImage buffer */
+  GstBuffer* buf = gst_buffer_new();
+  GST_BUFFER_SIZE(buf) = aSize;
+  /* allocate the actual YUV buffer */
+  GST_BUFFER_DATA(buf) = image->AllocateAndGetNewBuffer(aSize);
+
+  aImage = image;
+
+#if GST_VERSION_MICRO >= 36
+  /* create a GBoxed handle to hold the image */
+  BufferData* data = new BufferData(image);
+
+  /* store it in a GValue so we can put it in a GstStructure */
+  GValue value = {0,};
+  g_value_init(&value, buffer_data_get_type());
+  g_value_take_boxed(&value, data);
+
+  /* store the value in the structure */
+  GstStructure* structure = gst_structure_new("moz-reader-data", nullptr);
+  gst_structure_take_value(structure, "image", &value);
+
+  /* and attach the structure to the buffer */
+  gst_buffer_set_qdata(buf, g_quark_from_string("moz-reader-data"), structure);
+#endif
+
+  *aBuf = buf;
+  return GST_FLOW_OK;
+}
+
+GstFlowReturn GStreamerReader::AllocateVideoBufferCb(GstPad* aPad,
+                                                     guint64 aOffset,
+                                                     guint aSize,
+                                                     GstCaps* aCaps,
+                                                     GstBuffer** aBuf)
+{
+  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(gst_pad_get_element_private(aPad));
+  return reader->AllocateVideoBuffer(aPad, aOffset, aSize, aCaps, aBuf);
+}
+
+GstFlowReturn GStreamerReader::AllocateVideoBuffer(GstPad* aPad,
+                                                   guint64 aOffset,
+                                                   guint aSize,
+                                                   GstCaps* aCaps,
+                                                   GstBuffer** aBuf)
+{
+  nsRefPtr<PlanarYCbCrImage> image;
+  return AllocateVideoBufferFull(aPad, aOffset, aSize, aCaps, aBuf, image);
+}
+
 GstFlowReturn GStreamerReader::NewPrerollCb(GstAppSink* aSink,
                                             gpointer aUserData)
 {
-  GStreamerReader *reader = (GStreamerReader *) aUserData;
+  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);

   if (aSink == reader->mVideoAppSink)
     reader->VideoPreroll();
@@ -784,7 +883,7 @@ void GStreamerReader::VideoPreroll()
 GstFlowReturn GStreamerReader::NewBufferCb(GstAppSink* aSink,
                                            gpointer aUserData)
 {
-  GStreamerReader *reader = (GStreamerReader *) aUserData;
+  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);

   if (aSink == reader->mVideoAppSink)
     reader->NewVideoBuffer();
@@ -817,7 +916,7 @@ void GStreamerReader::NewAudioBuffer()
 void GStreamerReader::EosCb(GstAppSink* aSink, gpointer aUserData)
 {
-  GStreamerReader *reader = (GStreamerReader *) aUserData;
+  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
   reader->Eos(aSink);
 }
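The zero-copy handshake above hinges on the buffer qdata API added in GStreamer 0.10.36: AllocateVideoBufferFull() tags each buffer it hands to the pipeline with the image that owns the memory, and DecodeVideoFrame() later recovers that tag. A condensed sketch of the round trip (TagBuffer/UntagBuffer are hypothetical names; the GStreamer/GLib calls mirror the patch, and `aImage` is assumed to be a live PlanarYCbCrImage):

```cpp
static const char* kQDataName = "moz-reader-data";

// Producer side (as in AllocateVideoBufferFull): box the image, attach it.
void TagBuffer(GstBuffer* aBuf, PlanarYCbCrImage* aImage)
{
  GValue value = {0,};
  g_value_init(&value, buffer_data_get_type());
  g_value_take_boxed(&value, new BufferData(aImage));   // value owns the box

  GstStructure* structure = gst_structure_new(kQDataName, nullptr);
  gst_structure_take_value(structure, "image", &value); // structure owns value

  // The buffer now keeps the image alive; BufferData::Free runs (dropping
  // the nsRefPtr) when the buffer and its qdata are destroyed.
  gst_buffer_set_qdata(aBuf, g_quark_from_string(kQDataName), structure);
}

// Consumer side (as in DecodeVideoFrame): recover the image if the tag exists.
already_AddRefed<PlanarYCbCrImage> UntagBuffer(GstBuffer* aBuf)
{
  const GstStructure* structure =
      gst_buffer_get_qdata(aBuf, g_quark_from_string(kQDataName));
  const GValue* value =
      structure ? gst_structure_get_value(structure, "image") : nullptr;
  if (!value) {
    return nullptr;  // upstream allocated its own buffer; caller must copy
  }
  BufferData* data = reinterpret_cast<BufferData*>(g_value_get_boxed(value));
  nsRefPtr<PlanarYCbCrImage> image = data->mImage;
  return image.forget();
}
```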

@@ -9,6 +9,7 @@
 #include <gst/app/gstappsrc.h>
 #include <gst/app/gstappsink.h>
 #include <gst/video/video.h>
+#include <map>
 #include "MediaDecoderReader.h"

 namespace mozilla {
@@ -81,6 +82,18 @@ private:
   static gboolean EventProbeCb(GstPad* aPad, GstEvent* aEvent, gpointer aUserData);
   gboolean EventProbe(GstPad* aPad, GstEvent* aEvent);

+  /* Called when elements in the video branch of the pipeline call
+   * gst_pad_alloc_buffer(). Used to provide PlanarYCbCrImage-backed GstBuffers
+   * to the pipeline so that a memory copy can be avoided when handing YUV
+   * buffers from the pipeline over to the gfx side.
+   */
+  static GstFlowReturn AllocateVideoBufferCb(GstPad* aPad, guint64 aOffset, guint aSize,
+                                             GstCaps* aCaps, GstBuffer** aBuf);
+  GstFlowReturn AllocateVideoBufferFull(GstPad* aPad, guint64 aOffset, guint aSize,
+                                        GstCaps* aCaps, GstBuffer** aBuf,
+                                        nsRefPtr<layers::PlanarYCbCrImage>& aImage);
+  GstFlowReturn AllocateVideoBuffer(GstPad* aPad, guint64 aOffset, guint aSize,
+                                    GstCaps* aCaps, GstBuffer** aBuf);
+
   /* Called when the pipeline is prerolled, that is when at start or after a
    * seek, the first audio and video buffers are queued in the sinks.
    */
@@ -135,6 +148,21 @@ private:
   int fpsDen;
 };

+class BufferData {
+public:
+  BufferData(layers::PlanarYCbCrImage* aImage) : mImage(aImage) {}
+
+  static void* Copy(void* aData) {
+    return new BufferData(reinterpret_cast<BufferData*>(aData)->mImage);
+  }
+
+  static void Free(void* aData) {
+    delete reinterpret_cast<BufferData*>(aData);
+  }
+
+  nsRefPtr<layers::PlanarYCbCrImage> mImage;
+};
+
 } // namespace mozilla

 #endif
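BufferData exists so a refcounted Mozilla image can travel inside GLib's type system: the G_DEFINE_BOXED_TYPE line in the .cpp registers Copy/Free as the GBoxed copy and free callbacks, and the nsRefPtr member then keeps the PlanarYCbCrImage alive for as long as any boxed copy survives. A minimal sketch of those semantics (the G_DEFINE_BOXED_TYPE line is from the patch; the demo function is illustrative only):

```cpp
// From GStreamerReader.cpp: generates buffer_data_get_type() and wires
// BufferData::Copy/Free in as the boxed copy/free callbacks.
G_DEFINE_BOXED_TYPE(BufferData, buffer_data, BufferData::Copy, BufferData::Free);

// Illustrative only: every boxed copy shares the image through nsRefPtr,
// so the image is released when the last copy is freed.
void BoxedSemanticsDemo(layers::PlanarYCbCrImage* aImage)
{
  BufferData* original = new BufferData(aImage);  // image refcount +1
  BufferData* copy = reinterpret_cast<BufferData*>(BufferData::Copy(original));
  BufferData::Free(original);                     // image still alive
  BufferData::Free(copy);                         // last ref dropped here
}
```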

@@ -486,6 +486,24 @@ PlanarYCbCrImage::GetOffscreenFormat()
     mOffscreenFormat;
 }

+void
+PlanarYCbCrImage::SetDataNoCopy(const Data &aData)
+{
+  mData = aData;
+  mSize = aData.mPicSize;
+}
+
+uint8_t*
+PlanarYCbCrImage::AllocateAndGetNewBuffer(uint32_t aSize)
+{
+  // update buffer size
+  mBufferSize = aSize;
+
+  // get new buffer
+  mBuffer = AllocateBuffer(mBufferSize);
+  return mBuffer;
+}
+
 already_AddRefed<gfxASurface>
 PlanarYCbCrImage::GetAsSurface()
 {
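SetDataNoCopy() deliberately records only the plane geometry; it assumes the pixels already live in storage previously handed out by AllocateAndGetNewBuffer(). A hedged sketch of the intended sequence (the wrapper functions here are hypothetical; the two PlanarYCbCrImage calls are from this patch):

```cpp
// 1. Reserve frame storage owned by the image; the decoder writes its
//    Y/Cb/Cr planes straight into the returned buffer.
uint8_t* ReserveFrameStorage(PlanarYCbCrImage* aImage, uint32_t aFrameSize)
{
  return aImage->AllocateAndGetNewBuffer(aFrameSize);
}

// 2. After decoding, publish plane pointers, strides and picture size.
//    Unlike SetData(), no memcpy of the pixel data happens here.
void PublishFrame(PlanarYCbCrImage* aImage,
                  const PlanarYCbCrImage::Data& aPlanes)
{
  aImage->SetDataNoCopy(aPlanes);
}
```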

@@ -688,6 +688,20 @@ public:
    */
   virtual void SetData(const Data& aData);

+  /**
+   * This doesn't make a copy of the data buffers. It can be used when mBuffer
+   * is pre-allocated with AllocateAndGetNewBuffer(size), with SetDataNoCopy
+   * then called to update only the picture size, planes, etc. fields in
+   * mData. The GStreamer media backend uses this to decode directly into
+   * PlanarYCbCrImage(s).
+   */
+  virtual void SetDataNoCopy(const Data &aData);
+
+  /**
+   * This allocates and returns a new buffer.
+   */
+  virtual uint8_t* AllocateAndGetNewBuffer(uint32_t aSize);
+
   /**
    * Ask this Image to not convert YUV to RGB during SetData, and make
    * the original data available through GetData. This is optional,