Bug 1932772 - part1 : add d3d11va2 vp9 decode path from ffmpeg 7.0 to bundled ffvpx on Windows. r=media-playback-reviewers,chunmin

Differential Revision: https://phabricator.services.mozilla.com/D226067
This commit is contained in:
alwu
2024-12-08 15:20:14 +00:00
parent 9321893df5
commit 21d4d59774
15 changed files with 3283 additions and 11 deletions

View File

@@ -11,7 +11,7 @@ All source files match their path from the library's source archive.
Currently, we only use the vp8, vp9, av1 (via libdav1d), mp3, flac, vorbis (via
libvorbis), opus (via libopus) and PCM portion of the library. If this changes,
configuration files will most likely need to be updated.
configuration files will most likely need to be updated. On Windows, we use d3d11va for vp9 and av1.
Decoding AV1 via libdav1d and libvorbis is supported, although the decoder
libraries are vendored separately, `ffvpx` only contains the code to use

View File

@@ -8,11 +8,11 @@
#if HAVE_WINDOWS_H
#include <windows.h>
#endif
-#include "time.h"
+#include "fftime.h"
#include "error.h"
int64_t av_gettime(void)
{
#if HAVE_GETTIMEOFDAY
@@ -22,9 +22,9 @@
--- a/libavutil/parseutils.c 2024-03-26 14:03:12.080640731 +0100
+++ b/libavutil/parseutils.c 2024-04-05 14:44:56.508766832 +0200
@@ -23,20 +23,20 @@
#include <time.h>
#include "avstring.h"
#include "avutil.h"
#include "common.h"
@@ -36,14 +36,14 @@
#include "parseutils.h"
-#include "time.h"
+#include "fftime.h"
#ifdef TEST
#define av_get_random_seed av_get_random_seed_deterministic
static uint32_t av_get_random_seed_deterministic(void);
#define av_gettime() 1331972053200000
@@ -370,17 +370,17 @@
av_strlcpy(color_string2, color_string + hex_offset,
FFMIN(slen-hex_offset+1, sizeof(color_string2)));
@@ -51,7 +51,7 @@
*tail++ = 0;
len = strlen(color_string2);
rgba_color[3] = 255;
if (!av_strcasecmp(color_string2, "random") || !av_strcasecmp(color_string2, "bikeshed")) {
- int rgba = av_get_random_seed();
+ int rgba = 0xffffffff; /* av_get_random_seed(); */
@@ -78,9 +78,30 @@
+#include "fftime.h"
#include "avstring.h"
#include "reverse.h"
typedef struct Parser {
const AVClass *class;
int stack_index;
char *s;
const double *const_values;
--- a/media/ffvpx/libavcodec/dxva2.c
+++ b/media/ffvpx/libavcodec/dxva2.c
@@ -27,7 +27,7 @@
#include "libavutil/common.h"
#include "libavutil/log.h"
#include "libavutil/mem.h"
-#include "libavutil/time.h"
+#include "libavutil/fftime.h"
#include "avcodec.h"
#include "decode.h"
--- a/media/ffvpx/libavcodec/dxva2_internal.h
+++ b/media/ffvpx/libavcodec/dxva2_internal.h
@@ -65,6 +65,7 @@
#endif
#include "libavutil/hwcontext.h"
+#include "libavutil/mem.h"
#include "avcodec.h"
#include "internal.h"

View File

@@ -0,0 +1,150 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef COMPAT_W32DLFCN_H
#define COMPAT_W32DLFCN_H
#ifdef _WIN32
#include <stdint.h>
#include <windows.h>
#include "config.h"
#include "libavutil/macros.h"
#include "libavutil/wchar_filename.h"
/**
 * Query the full file-system path of a loaded module, growing the buffer
 * until the whole path fits.
 *
 * @param module module handle, or NULL for the main executable
 * @return newly allocated wide-char path (release with av_free()), or NULL
 *         on allocation failure or if GetModuleFileNameW() fails
 */
static inline wchar_t *get_module_filename(HMODULE module)
{
    wchar_t *path = NULL, *new_path;
    DWORD path_size = 0, path_len;

    do {
        // Start at MAX_PATH, then double each round, capped at INT16_MAX + 1.
        path_size = path_size ? FFMIN(2 * path_size, INT16_MAX + 1) : MAX_PATH;
        new_path = av_realloc_array(path, path_size, sizeof *path);
        if (!new_path) {
            av_free(path);
            return NULL;
        }
        path = new_path;
        // Returns path_size in case of insufficient buffer.
        // Whether the error is set or not and whether the output
        // is null-terminated or not depends on the version of Windows.
        path_len = GetModuleFileNameW(module, path, path_size);
    } while (path_len && path_size <= INT16_MAX && path_size <= path_len);

    if (!path_len) {
        av_free(path);
        return NULL;
    }
    return path;
}
/**
* Safe function used to open dynamic libs. This attempts to improve program security
* by removing the current directory from the dll search path. Only dll's found in the
* executable or system directory are allowed to be loaded.
* @param name The dynamic lib name.
* @return A handle to the opened lib.
*/
/**
 * Safe function used to open dynamic libs. This attempts to improve program security
 * by removing the current directory from the dll search path. Only dll's found in the
 * executable or system directory are allowed to be loaded.
 * @param name The dynamic lib name.
 * @return A handle to the opened lib, or NULL on failure.
 */
static inline HMODULE win32_dlopen(const char *name)
{
    wchar_t *name_w;
    HMODULE module = NULL;
    if (utf8towchar(name, &name_w))
        name_w = NULL;
#if _WIN32_WINNT < 0x0602
    // On Win7 and earlier we check if KB2533623 is available
    if (!GetProcAddress(GetModuleHandleW(L"kernel32.dll"), "SetDefaultDllDirectories")) {
        wchar_t *path = NULL, *new_path;
        DWORD pathlen, pathsize, namelen;
        if (!name_w)
            goto exit;
        namelen = wcslen(name_w);
        // Try local directory first
        path = get_module_filename(NULL);
        if (!path)
            goto exit;
        new_path = wcsrchr(path, '\\');
        if (!new_path)
            goto exit;
        // Reuse the executable's path up to (and including) the final '\',
        // then append the dll name after it.
        pathlen = new_path - path;
        pathsize = pathlen + namelen + 2;
        new_path = av_realloc_array(path, pathsize, sizeof *path);
        if (!new_path)
            goto exit;
        path = new_path;
        wcscpy(path + pathlen + 1, name_w);
        module = LoadLibraryExW(path, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
        if (module == NULL) {
            // Next try System32 directory
            pathlen = GetSystemDirectoryW(path, pathsize);
            if (!pathlen)
                goto exit;
            // Buffer is not enough in two cases:
            // 1. system directory + \ + module name
            // 2. system directory even without the module name.
            if (pathlen + namelen + 2 > pathsize) {
                pathsize = pathlen + namelen + 2;
                new_path = av_realloc_array(path, pathsize, sizeof *path);
                if (!new_path)
                    goto exit;
                path = new_path;
                // Query again to handle the case #2.
                pathlen = GetSystemDirectoryW(path, pathsize);
                if (!pathlen)
                    goto exit;
            }
            path[pathlen] = L'\\';
            wcscpy(path + pathlen + 1, name_w);
            module = LoadLibraryExW(path, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
        }
exit:
        av_free(path);
        av_free(name_w);
        return module;
    }
#endif
// Fallback definitions for toolchains whose SDK predates these flags.
#ifndef LOAD_LIBRARY_SEARCH_APPLICATION_DIR
#   define LOAD_LIBRARY_SEARCH_APPLICATION_DIR 0x00000200
#endif
#ifndef LOAD_LIBRARY_SEARCH_SYSTEM32
#   define LOAD_LIBRARY_SEARCH_SYSTEM32        0x00000800
#endif
#if HAVE_WINRT
    if (!name_w)
        return NULL;
    module = LoadPackagedLibrary(name_w, 0);
#else
#define LOAD_FLAGS (LOAD_LIBRARY_SEARCH_APPLICATION_DIR | LOAD_LIBRARY_SEARCH_SYSTEM32)
    /* filename may be in CP_ACP */
    if (!name_w)
        return LoadLibraryExA(name, NULL, LOAD_FLAGS);
    module = LoadLibraryExW(name_w, NULL, LOAD_FLAGS);
#undef LOAD_FLAGS
#endif
    av_free(name_w);
    return module;
}
#define dlopen(name, flags) win32_dlopen(name)
#define dlclose FreeLibrary
#define dlsym GetProcAddress
#else
#include <dlfcn.h>
#endif
#endif /* COMPAT_W32DLFCN_H */

View File

@@ -12,6 +12,9 @@
#define EXTERN_ASM
#endif
/**
* Linux Hardware Video Decoding
**/
#undef CONFIG_VAAPI
#undef CONFIG_VAAPI_1
#undef CONFIG_VP8_VAAPI_HWACCEL
@@ -31,3 +34,20 @@
#define CONFIG_VP9_VAAPI_HWACCEL 0
#define CONFIG_AV1_VAAPI_HWACCEL 0
#endif
/**
* Windows Hardware Video Decoding
**/
#undef CONFIG_D3D11VA
#undef CONFIG_VP9_D3D11VA_HWACCEL
#undef CONFIG_VP9_D3D11VA2_HWACCEL
#if defined (XP_WIN) && !defined(MOZ_FFVPX_AUDIOONLY)
#define CONFIG_D3D11VA 1
#define CONFIG_VP9_D3D11VA_HWACCEL 1
#define CONFIG_VP9_D3D11VA2_HWACCEL 1
#else
#define CONFIG_D3D11VA 0
#define CONFIG_VP9_D3D11VA_HWACCEL 0
#define CONFIG_VP9_D3D11VA2_HWACCEL 0
#endif

View File

@@ -0,0 +1,48 @@
/*
* Direct3D11 HW acceleration
*
* copyright (c) 2015 Steve Lhomme
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <stddef.h>
#include "config.h"
#if CONFIG_D3D11VA
#include "libavutil/error.h"
#include "libavutil/mem.h"
#include "d3d11va.h"
/**
 * Allocate a zero-initialized AVD3D11VAContext.
 * The context starts without a mutex (context_mutex == INVALID_HANDLE_VALUE);
 * callers that need locking assign one themselves.
 */
AVD3D11VAContext *av_d3d11va_alloc_context(void)
{
    AVD3D11VAContext *ctx = av_mallocz(sizeof(*ctx));

    if (!ctx)
        return NULL;

    ctx->context_mutex = INVALID_HANDLE_VALUE;

    return ctx;
}
#else
/* D3D11VA disabled at build time: keep the symbol, always report failure. */
struct AVD3D11VAContext *av_d3d11va_alloc_context(void);

struct AVD3D11VAContext *av_d3d11va_alloc_context(void)
{
    return NULL;
}
#endif /* CONFIG_D3D11VA */

View File

@@ -0,0 +1,109 @@
/*
* Direct3D11 HW acceleration
*
* copyright (c) 2009 Laurent Aimar
* copyright (c) 2015 Steve Lhomme
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_D3D11VA_H
#define AVCODEC_D3D11VA_H
/**
* @file
* @ingroup lavc_codec_hwaccel_d3d11va
* Public libavcodec D3D11VA header.
*/
#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0602
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x0602
#endif
#include <stdint.h>
#include <d3d11.h>
/**
* @defgroup lavc_codec_hwaccel_d3d11va Direct3D11
* @ingroup lavc_codec_hwaccel
*
* @{
*/
/**
 * This structure is used to provide the necessary configurations and data
 * to the Direct3D11 FFmpeg HWAccel implementation.
*
* The application must make it available as AVCodecContext.hwaccel_context.
*
* Use av_d3d11va_alloc_context() exclusively to allocate an AVD3D11VAContext.
*/
typedef struct AVD3D11VAContext {
    /**
     * D3D11 decoder object
     */
    ID3D11VideoDecoder *decoder;

    /**
     * D3D11 VideoContext
     */
    ID3D11VideoContext *video_context;

    /**
     * D3D11 configuration used to create the decoder
     */
    D3D11_VIDEO_DECODER_CONFIG *cfg;

    /**
     * The number of surfaces in the surface array
     */
    unsigned surface_count;

    /**
     * The array of Direct3D surfaces used to create the decoder
     */
    ID3D11VideoDecoderOutputView **surface;

    /**
     * A bit field configuring the workarounds needed for using the decoder
     */
    uint64_t workaround;

    /**
     * Private to the FFmpeg AVHWAccel implementation
     */
    unsigned report_id;

    /**
     * Mutex to access video_context
     */
    HANDLE context_mutex;
} AVD3D11VAContext;
/**
* Allocate an AVD3D11VAContext.
*
* @return Newly-allocated AVD3D11VAContext or NULL on failure.
*/
AVD3D11VAContext *av_d3d11va_alloc_context(void);
/**
* @}
*/
#endif /* AVCODEC_D3D11VA_H */

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,90 @@
/*
* DXVA2 HW acceleration
*
* copyright (c) 2009 Laurent Aimar
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_DXVA2_H
#define AVCODEC_DXVA2_H
/**
* @file
* @ingroup lavc_codec_hwaccel_dxva2
* Public libavcodec DXVA2 header.
*/
#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0602
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x0602
#endif
#include <stdint.h>
#include <d3d9.h>
#include <dxva2api.h>
/**
* @defgroup lavc_codec_hwaccel_dxva2 DXVA2
* @ingroup lavc_codec_hwaccel
*
* @{
*/
/**
 * This structure is used to provide the necessary configurations and data
 * to the DXVA2 FFmpeg HWAccel implementation.
*
* The application must make it available as AVCodecContext.hwaccel_context.
*/
struct dxva_context {
    /**
     * DXVA2 decoder object
     */
    IDirectXVideoDecoder *decoder;

    /**
     * DXVA2 configuration used to create the decoder
     */
    const DXVA2_ConfigPictureDecode *cfg;

    /**
     * The number of surfaces in the surface array
     */
    unsigned surface_count;

    /**
     * The array of Direct3D surfaces used to create the decoder
     */
    LPDIRECT3DSURFACE9 *surface;

    /**
     * A bit field configuring the workarounds needed for using the decoder
     */
    uint64_t workaround;

    /**
     * Private to the FFmpeg AVHWAccel implementation
     */
    unsigned report_id;
};
/**
* @}
*/
#endif /* AVCODEC_DXVA2_H */

View File

@@ -0,0 +1,195 @@
/*
* DXVA2 HW acceleration
*
* copyright (c) 2010 Laurent Aimar
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_DXVA2_INTERNAL_H
#define AVCODEC_DXVA2_INTERNAL_H
#define COBJMACROS
#include "config.h"
#include "config_components.h"
/* define the proper COM entries before forcing desktop APIs */
#include <objbase.h>
#define FF_DXVA2_WORKAROUND_SCALING_LIST_ZIGZAG 1 ///< Work around for DXVA2/Direct3D11 and old UVD/UVD+ ATI video cards
#define FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO 2 ///< Work around for DXVA2/Direct3D11 and old Intel GPUs with ClearVideo interface
#if CONFIG_DXVA2
#include "dxva2.h"
#include "libavutil/hwcontext_dxva2.h"
#define DXVA2_VAR(ctx, var) ctx->dxva2.var
#else
#define DXVA2_VAR(ctx, var) 0
#endif
#if CONFIG_D3D11VA
#include "d3d11va.h"
#include "libavutil/hwcontext_d3d11va.h"
#define D3D11VA_VAR(ctx, var) ctx->d3d11va.var
#else
#define D3D11VA_VAR(ctx, var) 0
#endif
#if CONFIG_D3D12VA
#include "d3d12va_decode.h"
#endif
#if HAVE_DXVA_H
/* When targeting WINAPI_FAMILY_PHONE_APP or WINAPI_FAMILY_APP, dxva.h
* defines nothing. Force the struct definitions to be visible. */
#undef WINAPI_FAMILY
#define WINAPI_FAMILY WINAPI_FAMILY_DESKTOP_APP
#undef _CRT_BUILD_DESKTOP_APP
#define _CRT_BUILD_DESKTOP_APP 0
#include <dxva.h>
#endif
#include "libavutil/hwcontext.h"
#include "libavutil/mem.h"
#include "avcodec.h"
#include "internal.h"
/* Opaque per-API buffer-descriptor type; the concrete type behind it is
 * D3D11_VIDEO_DECODER_BUFFER_DESC or DXVA2_DecodeBufferDesc depending on
 * which decode path is active (see the casts in the hwaccel backends). */
typedef void DECODER_BUFFER_DESC;

/* Union of the per-API public hwaccel contexts. Which member is active is
 * decided by the negotiated pixel format (see the DXVA_CONTEXT_* macros
 * and ff_dxva2_is_d3d11() below). */
typedef union {
#if CONFIG_D3D11VA
    struct AVD3D11VAContext d3d11va;
#endif
#if CONFIG_DXVA2
    struct dxva_context dxva2;
#endif
#if CONFIG_D3D12VA
    struct D3D12VADecodeContext d3d12va;
#endif
} AVDXVAContext;
/* Private state shared by the DXVA2/D3D11VA hwaccel implementations; stored
 * in AVCodecContext.internal->hwaccel_priv_data (see DXVA_SHARED_CONTEXT). */
typedef struct FFDXVASharedContext {
    AVBufferRef *decoder_ref;

    // FF_DXVA2_WORKAROUND_* flags
    uint64_t workaround;

    // E.g. AV_PIX_FMT_D3D11 (same as AVCodecContext.pix_fmt, except during init)
    enum AVPixelFormat pix_fmt;

    AVHWDeviceContext *device_ctx;

#if CONFIG_D3D11VA
    // Decoder objects owned by the generic (non-user-supplied-context) mode.
    ID3D11VideoDecoder *d3d11_decoder;
    D3D11_VIDEO_DECODER_CONFIG d3d11_config;
    ID3D11VideoDecoderOutputView **d3d11_views;
    int nb_d3d11_views;
    ID3D11Texture2D *d3d11_texture;
#endif

#if CONFIG_DXVA2
    IDirectXVideoDecoder *dxva2_decoder;
    IDirectXVideoDecoderService *dxva2_service;
    DXVA2_ConfigPictureDecode dxva2_config;
#endif

    // Legacy (but used by code outside of setup)
    // In generic mode, DXVA_CONTEXT() will return a pointer to this.
    AVDXVAContext ctx;
} FFDXVASharedContext;
#define DXVA_SHARED_CONTEXT(avctx) ((FFDXVASharedContext *)((avctx)->internal->hwaccel_priv_data))
#define DXVA_CONTEXT(avctx) (AVDXVAContext *)((avctx)->hwaccel_context ? (avctx)->hwaccel_context : (&(DXVA_SHARED_CONTEXT(avctx)->ctx)))
#define D3D11VA_CONTEXT(ctx) (&ctx->d3d11va)
#define DXVA2_CONTEXT(ctx) (&ctx->dxva2)
#define DXVA2_CONTEXT_VAR(avctx, ctx, var) (avctx->pix_fmt == AV_PIX_FMT_D3D12 ? 0 : (ff_dxva2_is_d3d11(avctx) ? D3D11VA_VAR(ctx, var) : DXVA2_VAR(ctx, var)))
#define DXVA_CONTEXT_REPORT_ID(avctx, ctx) (*ff_dxva2_get_report_id(avctx, ctx))
#define DXVA_CONTEXT_WORKAROUND(avctx, ctx) DXVA2_CONTEXT_VAR(avctx, ctx, workaround)
#define DXVA_CONTEXT_COUNT(avctx, ctx) DXVA2_CONTEXT_VAR(avctx, ctx, surface_count)
#define DXVA_CONTEXT_DECODER(avctx, ctx) (avctx->pix_fmt == AV_PIX_FMT_D3D12 ? 0 : (ff_dxva2_is_d3d11(avctx) ? (void *)D3D11VA_VAR(ctx, decoder) : (void *)DXVA2_VAR(ctx, decoder)))
#define DXVA_CONTEXT_CFG(avctx, ctx) (avctx->pix_fmt == AV_PIX_FMT_D3D12 ? 0 : (ff_dxva2_is_d3d11(avctx) ? (void *)D3D11VA_VAR(ctx, cfg) : (void *)DXVA2_VAR(ctx, cfg)))
#define DXVA_CONTEXT_CFG_BITSTREAM(avctx, ctx) DXVA2_CONTEXT_VAR(avctx, ctx, cfg->ConfigBitstreamRaw)
#define DXVA_CONTEXT_CFG_INTRARESID(avctx, ctx) DXVA2_CONTEXT_VAR(avctx, ctx, cfg->ConfigIntraResidUnsigned)
#define DXVA_CONTEXT_CFG_RESIDACCEL(avctx, ctx) DXVA2_CONTEXT_VAR(avctx, ctx, cfg->ConfigResidDiffAccelerator)
#define DXVA_CONTEXT_VALID(avctx, ctx) (DXVA_CONTEXT_DECODER(avctx, ctx) && \
DXVA_CONTEXT_CFG(avctx, ctx) && \
(ff_dxva2_is_d3d11(avctx) || DXVA2_VAR(ctx, surface_count)))
#if CONFIG_D3D12VA
unsigned ff_d3d12va_get_surface_index(const AVCodecContext *avctx,
D3D12VADecodeContext *ctx, const AVFrame *frame,
int curr);
#endif
unsigned ff_dxva2_get_surface_index(const AVCodecContext *avctx,
AVDXVAContext *, const AVFrame *frame, int curr);
int ff_dxva2_commit_buffer(AVCodecContext *, AVDXVAContext *,
DECODER_BUFFER_DESC *,
unsigned type, const void *data, unsigned size,
unsigned mb_count);
int ff_dxva2_common_end_frame(AVCodecContext *, AVFrame *,
const void *pp, unsigned pp_size,
const void *qm, unsigned qm_size,
int (*commit_bs_si)(AVCodecContext *,
DECODER_BUFFER_DESC *bs,
DECODER_BUFFER_DESC *slice));
int ff_dxva2_decode_init(AVCodecContext *avctx);
int ff_dxva2_decode_uninit(AVCodecContext *avctx);
int ff_dxva2_common_frame_params(AVCodecContext *avctx,
AVBufferRef *hw_frames_ctx);
int ff_dxva2_is_d3d11(const AVCodecContext *avctx);
unsigned *ff_dxva2_get_report_id(const AVCodecContext *avctx, AVDXVAContext *ctx);
void ff_dxva2_h264_fill_picture_parameters(const AVCodecContext *avctx, AVDXVAContext *ctx, DXVA_PicParams_H264 *pp);
void ff_dxva2_h264_fill_scaling_lists(const AVCodecContext *avctx, AVDXVAContext *ctx, DXVA_Qmatrix_H264 *qm);
void ff_dxva2_hevc_fill_picture_parameters(const AVCodecContext *avctx, AVDXVAContext *ctx, DXVA_PicParams_HEVC *pp);
void ff_dxva2_hevc_fill_scaling_lists(const AVCodecContext *avctx, AVDXVAContext *ctx, DXVA_Qmatrix_HEVC *qm);
int ff_dxva2_vp9_fill_picture_parameters(const AVCodecContext *avctx, AVDXVAContext *ctx, DXVA_PicParams_VP9 *pp);
#if CONFIG_AV1_D3D12VA_HWACCEL || CONFIG_AV1_D3D11VA_HWACCEL || CONFIG_AV1_D3D11VA2_HWACCEL || CONFIG_AV1_DXVA2_HWACCEL
int ff_dxva2_av1_fill_picture_parameters(const AVCodecContext *avctx, AVDXVAContext *ctx, DXVA_PicParams_AV1 *pp);
#endif
void ff_dxva2_mpeg2_fill_picture_parameters(AVCodecContext *avctx, AVDXVAContext *ctx, DXVA_PictureParameters *pp);
void ff_dxva2_mpeg2_fill_quantization_matrices(AVCodecContext *avctx, AVDXVAContext *ctx, DXVA_QmatrixData *qm);
void ff_dxva2_mpeg2_fill_slice(AVCodecContext *avctx, DXVA_SliceInfo *slice, unsigned position, const uint8_t *buffer, unsigned size);
void ff_dxva2_vc1_fill_picture_parameters(AVCodecContext *avctx, AVDXVAContext *ctx, DXVA_PictureParameters *pp);
void ff_dxva2_vc1_fill_slice(AVCodecContext *avctx, DXVA_SliceInfo *slice, unsigned position, unsigned size);
#endif /* AVCODEC_DXVA2_INTERNAL_H */

View File

@@ -0,0 +1,360 @@
/*
* DXVA2 VP9 HW acceleration.
*
* copyright (c) 2015 Hendrik Leppkes
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "config_components.h"
#include "libavutil/avassert.h"
#include "libavutil/pixdesc.h"
#include "dxva2_internal.h"
#include "hwaccel_internal.h"
#include "vp9shared.h"
/* Per-frame hwaccel private data: the translated picture parameters, the
 * single slice descriptor, and the accumulated frame bitstream. */
struct vp9_dxva2_picture_context {
    DXVA_PicParams_VP9   pp;
    DXVA_Slice_VPx_Short slice;
    const uint8_t        *bitstream;       // start of the frame's compressed data
    unsigned             bitstream_size;   // total bytes accumulated via decode_slice
};
/* Pack a 7-bit surface index and a 1-bit flag into a DXVA picture entry. */
static void fill_picture_entry(DXVA_PicEntry_VPx *pic,
                               unsigned index, unsigned flag)
{
    /* Both values must fit their respective bit fields. */
    av_assert0(!(index & ~0x7fu) && !(flag & ~0x01u));
    pic->bPicEntry = (flag << 7) | index;
}
/**
 * Translate the parsed VP9 frame header (VP9SharedContext) into the DXVA
 * DXVA_PicParams_VP9 structure consumed by the hardware decoder.
 *
 * @return 0 on success, -1 if the software pixel format is unknown
 */
int ff_dxva2_vp9_fill_picture_parameters(const AVCodecContext *avctx, AVDXVAContext *ctx,
                                         DXVA_PicParams_VP9 *pp)
{
    const VP9SharedContext *h = avctx->priv_data;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
    int i;

    if (!pixdesc)
        return -1;

    memset(pp, 0, sizeof(*pp));

    pp->profile = h->h.profile;
    // Bit-packed format/picture flags, laid out per the DXVA VP9 spec.
    pp->wFormatAndPictureInfoFlags = ((h->h.keyframe == 0) << 0) |
                                     ((h->h.invisible == 0) << 1) |
                                     (h->h.errorres << 2) |
                                     (pixdesc->log2_chroma_w << 3) | /* subsampling_x */
                                     (pixdesc->log2_chroma_h << 4) | /* subsampling_y */
                                     (0 << 5) | /* extra_plane */
                                     (h->h.refreshctx << 6) |
                                     (h->h.parallelmode << 7) |
                                     (h->h.intraonly << 8) |
                                     (h->h.framectxid << 9) |
                                     (h->h.resetctx << 11) |
                                     ((h->h.keyframe ? 0 : h->h.highprecisionmvs) << 13) |
                                     (0 << 14); /* ReservedFormatInfo2Bits */
    pp->width  = avctx->width;
    pp->height = avctx->height;
    pp->BitDepthMinus8Luma   = pixdesc->comp[0].depth - 8;
    pp->BitDepthMinus8Chroma = pixdesc->comp[1].depth - 8;
    /* swap 0/1 to match the reference */
    pp->interp_filter = h->h.filtermode ^ (h->h.filtermode <= 1);
    pp->Reserved8Bits = 0;

    // Map all 8 reference slots; unused slots are flagged with 0xFF.
    for (i = 0; i < 8; i++) {
        if (h->refs[i].f->buf[0]) {
            fill_picture_entry(&pp->ref_frame_map[i], ff_dxva2_get_surface_index(avctx, ctx, h->refs[i].f, 0), 0);
            pp->ref_frame_coded_width[i]  = h->refs[i].f->width;
            pp->ref_frame_coded_height[i] = h->refs[i].f->height;
        } else
            pp->ref_frame_map[i].bPicEntry = 0xFF;
    }

    // The 3 active references (last/golden/altref) and their sign biases.
    for (i = 0; i < 3; i++) {
        uint8_t refidx = h->h.refidx[i];
        if (h->refs[refidx].f->buf[0])
            fill_picture_entry(&pp->frame_refs[i], ff_dxva2_get_surface_index(avctx, ctx, h->refs[refidx].f, 0), 0);
        else
            pp->frame_refs[i].bPicEntry = 0xFF;

        pp->ref_frame_sign_bias[i + 1] = h->h.signbias[i];
    }

    fill_picture_entry(&pp->CurrPic, ff_dxva2_get_surface_index(avctx, ctx, h->frames[CUR_FRAME].tf.f, 1), 0);

    // Loop-filter parameters.
    pp->filter_level    = h->h.filter.level;
    pp->sharpness_level = h->h.filter.sharpness;

    pp->wControlInfoFlags = (h->h.lf_delta.enabled << 0) |
                            (h->h.lf_delta.updated << 1) |
                            (h->h.use_last_frame_mvs << 2) |
                            (0 << 3); /* ReservedControlInfo5Bits */

    for (i = 0; i < 4; i++)
        pp->ref_deltas[i]  = h->h.lf_delta.ref[i];

    for (i = 0; i < 2; i++)
        pp->mode_deltas[i] = h->h.lf_delta.mode[i];

    // Quantizer base index and per-plane deltas.
    pp->base_qindex   = h->h.yac_qi;
    pp->y_dc_delta_q  = h->h.ydc_qdelta;
    pp->uv_dc_delta_q = h->h.uvdc_qdelta;
    pp->uv_ac_delta_q = h->h.uvac_qdelta;

    /* segmentation data */
    pp->stVP9Segments.wSegmentInfoFlags = (h->h.segmentation.enabled << 0) |
                                          (h->h.segmentation.update_map << 1) |
                                          (h->h.segmentation.temporal << 2) |
                                          (h->h.segmentation.absolute_vals << 3) |
                                          (0 << 4); /* ReservedSegmentFlags4Bits */

    for (i = 0; i < 7; i++)
        pp->stVP9Segments.tree_probs[i] = h->h.segmentation.prob[i];

    if (h->h.segmentation.temporal)
        for (i = 0; i < 3; i++)
            pp->stVP9Segments.pred_probs[i] = h->h.segmentation.pred_prob[i];
    else
        memset(pp->stVP9Segments.pred_probs, 255, sizeof(pp->stVP9Segments.pred_probs));

    for (i = 0; i < 8; i++) {
        pp->stVP9Segments.feature_mask[i] = (h->h.segmentation.feat[i].q_enabled << 0) |
                                            (h->h.segmentation.feat[i].lf_enabled << 1) |
                                            (h->h.segmentation.feat[i].ref_enabled << 2) |
                                            (h->h.segmentation.feat[i].skip_enabled << 3);

        pp->stVP9Segments.feature_data[i][0] = h->h.segmentation.feat[i].q_val;
        pp->stVP9Segments.feature_data[i][1] = h->h.segmentation.feat[i].lf_val;
        pp->stVP9Segments.feature_data[i][2] = h->h.segmentation.feat[i].ref_val;
        pp->stVP9Segments.feature_data[i][3] = 0; /* no data for skip */
    }

    // Tiling and header sizes.
    pp->log2_tile_cols = h->h.tiling.log2_tile_cols;
    pp->log2_tile_rows = h->h.tiling.log2_tile_rows;

    pp->uncompressed_header_size_byte_aligned = h->h.uncompressed_header_size;
    pp->first_partition_size = h->h.compressed_header_size;

    pp->StatusReportFeedbackNumber = 1 + DXVA_CONTEXT_REPORT_ID(avctx, ctx)++;
    return 0;
}
/* Describe a bitstream span as a single, unchopped slice. */
static void fill_slice_short(DXVA_Slice_VPx_Short *slice,
                             unsigned position, unsigned size)
{
    DXVA_Slice_VPx_Short tmp;

    memset(&tmp, 0, sizeof(tmp));
    tmp.BSNALunitDataLocation = position;
    tmp.SliceBytesInBuffer    = size;
    tmp.wBadSliceChopping     = 0;   /* whole slice fits in the buffer */

    *slice = tmp;
}
/**
 * Copy the frame's bitstream into the decoder's bitstream buffer, pad it to a
 * 128-byte boundary, release the buffer, and fill the bitstream and
 * slice-control buffer descriptors.
 *
 * @param bs bitstream buffer descriptor to fill (API-specific type)
 * @param sc slice-control buffer descriptor to fill (API-specific type)
 * @return 0 on success, -1 on failure
 */
static int commit_bitstream_and_slice_buffer(AVCodecContext *avctx,
                                             DECODER_BUFFER_DESC *bs,
                                             DECODER_BUFFER_DESC *sc)
{
    const VP9SharedContext *h = avctx->priv_data;
    AVDXVAContext *ctx = DXVA_CONTEXT(avctx);
    struct vp9_dxva2_picture_context *ctx_pic = h->frames[CUR_FRAME].hwaccel_picture_private;
    void *dxva_data_ptr = NULL;
    uint8_t *dxva_data;
    unsigned dxva_size = 0;
    unsigned padding;
    unsigned type;

#if CONFIG_D3D11VA
    if (ff_dxva2_is_d3d11(avctx)) {
        type = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
        if (FAILED(ID3D11VideoContext_GetDecoderBuffer(D3D11VA_CONTEXT(ctx)->video_context,
                                                       D3D11VA_CONTEXT(ctx)->decoder,
                                                       type,
                                                       &dxva_size, &dxva_data_ptr)))
            return -1;
    }
#endif
#if CONFIG_DXVA2
    if (avctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD) {
        type = DXVA2_BitStreamDateBufferType;
        if (FAILED(IDirectXVideoDecoder_GetBuffer(DXVA2_CONTEXT(ctx)->decoder,
                                                  type,
                                                  &dxva_data_ptr, &dxva_size)))
            return -1;
    }
#endif
    // Neither API branch matched (should not happen with a correctly
    // negotiated pixel format); bail out rather than use an unset pointer.
    if (!dxva_data_ptr)
        return -1;

    dxva_data = dxva_data_ptr;

    if (ctx_pic->slice.SliceBytesInBuffer > dxva_size) {
        av_log(avctx, AV_LOG_ERROR, "Failed to build bitstream\n");
        return -1;
    }

    memcpy(dxva_data, ctx_pic->bitstream, ctx_pic->slice.SliceBytesInBuffer);

    // Pad the bitstream up to a 128-byte boundary, bounded by buffer space.
    padding = FFMIN(128 - ((ctx_pic->slice.SliceBytesInBuffer) & 127), dxva_size - ctx_pic->slice.SliceBytesInBuffer);
    if (padding > 0) {
        memset(dxva_data + ctx_pic->slice.SliceBytesInBuffer, 0, padding);
        ctx_pic->slice.SliceBytesInBuffer += padding;
    }

#if CONFIG_D3D11VA
    if (ff_dxva2_is_d3d11(avctx))
        if (FAILED(ID3D11VideoContext_ReleaseDecoderBuffer(D3D11VA_CONTEXT(ctx)->video_context, D3D11VA_CONTEXT(ctx)->decoder, type)))
            return -1;
#endif
#if CONFIG_DXVA2
    if (avctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD)
        if (FAILED(IDirectXVideoDecoder_ReleaseBuffer(DXVA2_CONTEXT(ctx)->decoder, type)))
            return -1;
#endif

#if CONFIG_D3D11VA
    if (ff_dxva2_is_d3d11(avctx)) {
        D3D11_VIDEO_DECODER_BUFFER_DESC *dsc11 = bs;
        memset(dsc11, 0, sizeof(*dsc11));
        dsc11->BufferType           = type;
        dsc11->DataSize             = ctx_pic->slice.SliceBytesInBuffer;
        dsc11->NumMBsInBuffer       = 0;

        type = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
    }
#endif
#if CONFIG_DXVA2
    if (avctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD) {
        DXVA2_DecodeBufferDesc *dsc2 = bs;
        memset(dsc2, 0, sizeof(*dsc2));
        dsc2->CompressedBufferType = type;
        dsc2->DataSize             = ctx_pic->slice.SliceBytesInBuffer;
        dsc2->NumMBsInBuffer       = 0;

        type = DXVA2_SliceControlBufferType;
    }
#endif

    return ff_dxva2_commit_buffer(avctx, ctx, sc,
                                  type,
                                  &ctx_pic->slice, sizeof(ctx_pic->slice), 0);
}
/**
 * Begin a hardware frame: validate the hwaccel context, translate the parsed
 * VP9 header into DXVA picture parameters, and reset bitstream accumulation.
 */
static int dxva2_vp9_start_frame(AVCodecContext *avctx,
                                 av_unused const uint8_t *buffer,
                                 av_unused uint32_t size)
{
    const VP9SharedContext *h = avctx->priv_data;
    AVDXVAContext *ctx = DXVA_CONTEXT(avctx);
    struct vp9_dxva2_picture_context *ctx_pic = h->frames[CUR_FRAME].hwaccel_picture_private;

    if (!DXVA_CONTEXT_VALID(avctx, ctx))
        return -1;
    av_assert0(ctx_pic);

    /* Fill up DXVA_PicParams_VP9 */
    if (ff_dxva2_vp9_fill_picture_parameters(avctx, ctx, &ctx_pic->pp) < 0)
        return -1;

    ctx_pic->bitstream_size = 0;
    ctx_pic->bitstream      = NULL;
    return 0;
}
/**
 * Accumulate compressed frame data and record it in the slice descriptor.
 *
 * NOTE(review): fill_slice_short() overwrites the descriptor on every call,
 * so it only covers the most recent chunk even though bitstream_size keeps
 * accumulating — this assumes the VP9 decoder submits the whole frame in a
 * single decode_slice call; confirm against the caller.
 */
static int dxva2_vp9_decode_slice(AVCodecContext *avctx,
                                  const uint8_t *buffer,
                                  uint32_t size)
{
    const VP9SharedContext *h = avctx->priv_data;
    struct vp9_dxva2_picture_context *ctx_pic = h->frames[CUR_FRAME].hwaccel_picture_private;
    unsigned position;

    if (!ctx_pic->bitstream)
        ctx_pic->bitstream = buffer;
    ctx_pic->bitstream_size += size;

    // Offset of this chunk relative to the start of the frame's bitstream.
    position = buffer - ctx_pic->bitstream;
    fill_slice_short(&ctx_pic->slice, position, size);
    return 0;
}
/**
 * Finish the hardware frame: submit picture parameters and the accumulated
 * bitstream to the decoder via the common DXVA2/D3D11 path.
 */
static int dxva2_vp9_end_frame(AVCodecContext *avctx)
{
    VP9SharedContext *h = avctx->priv_data;
    struct vp9_dxva2_picture_context *ctx_pic =
        h->frames[CUR_FRAME].hwaccel_picture_private;

    /* Nothing was accumulated for this frame — nothing to submit. */
    if (ctx_pic->bitstream_size <= 0)
        return -1;

    return ff_dxva2_common_end_frame(avctx, h->frames[CUR_FRAME].tf.f,
                                     &ctx_pic->pp, sizeof(ctx_pic->pp),
                                     NULL, 0,
                                     commit_bitstream_and_slice_buffer);
}
#if CONFIG_VP9_DXVA2_HWACCEL
/* VP9 decoding through the legacy D3D9-based DXVA2 API. */
const FFHWAccel ff_vp9_dxva2_hwaccel = {
    .p.name               = "vp9_dxva2",
    .p.type               = AVMEDIA_TYPE_VIDEO,
    .p.id                 = AV_CODEC_ID_VP9,
    .p.pix_fmt            = AV_PIX_FMT_DXVA2_VLD,
    .init                 = ff_dxva2_decode_init,
    .uninit               = ff_dxva2_decode_uninit,
    .start_frame          = dxva2_vp9_start_frame,
    .decode_slice         = dxva2_vp9_decode_slice,
    .end_frame            = dxva2_vp9_end_frame,
    .frame_params         = ff_dxva2_common_frame_params,
    .frame_priv_data_size = sizeof(struct vp9_dxva2_picture_context),
    .priv_data_size       = sizeof(FFDXVASharedContext),
};
#endif

#if CONFIG_VP9_D3D11VA_HWACCEL
/* VP9 decoding through D3D11VA with a user-supplied surface array
 * (AV_PIX_FMT_D3D11VA_VLD). */
const FFHWAccel ff_vp9_d3d11va_hwaccel = {
    .p.name               = "vp9_d3d11va",
    .p.type               = AVMEDIA_TYPE_VIDEO,
    .p.id                 = AV_CODEC_ID_VP9,
    .p.pix_fmt            = AV_PIX_FMT_D3D11VA_VLD,
    .init                 = ff_dxva2_decode_init,
    .uninit               = ff_dxva2_decode_uninit,
    .start_frame          = dxva2_vp9_start_frame,
    .decode_slice         = dxva2_vp9_decode_slice,
    .end_frame            = dxva2_vp9_end_frame,
    .frame_params         = ff_dxva2_common_frame_params,
    .frame_priv_data_size = sizeof(struct vp9_dxva2_picture_context),
    .priv_data_size       = sizeof(FFDXVASharedContext),
};
#endif

#if CONFIG_VP9_D3D11VA2_HWACCEL
/* VP9 decoding through the newer D3D11 hwframes path (AV_PIX_FMT_D3D11). */
const FFHWAccel ff_vp9_d3d11va2_hwaccel = {
    .p.name               = "vp9_d3d11va2",
    .p.type               = AVMEDIA_TYPE_VIDEO,
    .p.id                 = AV_CODEC_ID_VP9,
    .p.pix_fmt            = AV_PIX_FMT_D3D11,
    .init                 = ff_dxva2_decode_init,
    .uninit               = ff_dxva2_decode_uninit,
    .start_frame          = dxva2_vp9_start_frame,
    .decode_slice         = dxva2_vp9_decode_slice,
    .end_frame            = dxva2_vp9_end_frame,
    .frame_params         = ff_dxva2_common_frame_params,
    .frame_priv_data_size = sizeof(struct vp9_dxva2_picture_context),
    .priv_data_size       = sizeof(FFDXVASharedContext),
};
#endif

View File

@@ -134,6 +134,12 @@ if not CONFIG['MOZ_FFVPX_AUDIOONLY']:
USE_LIBS += [
'mozva'
]
elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows":
SOURCES += [
'd3d11va.c',
'dxva2.c',
'dxva2_vp9.c',
]
LOCAL_INCLUDES += [
'/media/libopus/include',

View File

@@ -0,0 +1,727 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "config.h"
#include <windows.h>
#define COBJMACROS
#include <initguid.h>
#include <d3d11.h>
#include <dxgi1_2.h>
#if HAVE_DXGIDEBUG_H
#include <dxgidebug.h>
#endif
#include "avassert.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_d3d11va.h"
#include "hwcontext_internal.h"
#include "imgutils.h"
#include "pixdesc.h"
#include "pixfmt.h"
#include "thread.h"
#include "compat/w32dlfcn.h"
typedef HRESULT(WINAPI *PFN_CREATE_DXGI_FACTORY)(REFIID riid, void **ppFactory);
static AVOnce functions_loaded = AV_ONCE_INIT;
static PFN_CREATE_DXGI_FACTORY mCreateDXGIFactory;
static PFN_D3D11_CREATE_DEVICE mD3D11CreateDevice;
/* Resolve the D3D11/DXGI entry points exactly once (guarded by
 * functions_loaded). On desktop Windows the DLLs are loaded dynamically;
 * on UWP the statically linked symbols are used instead. On failure the
 * function pointers are simply left NULL and checked by the caller. */
static av_cold void load_functions(void)
{
#if !HAVE_UWP
    // We let these "leak" - this is fine, as unloading has no great benefit, and
    // Windows will mark a DLL as loaded forever if its internal refcount overflows
    // from too many LoadLibrary calls.
    HANDLE d3dlib, dxgilib;

    d3dlib  = dlopen("d3d11.dll", 0);
    dxgilib = dlopen("dxgi.dll", 0);
    if (!d3dlib || !dxgilib)
        return;

    mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) GetProcAddress(d3dlib, "D3D11CreateDevice");

    /* Prefer the DXGI 1.1 factory; fall back to the original entry point. */
    mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) GetProcAddress(dxgilib, "CreateDXGIFactory1");
    if (!mCreateDXGIFactory)
        mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) GetProcAddress(dxgilib, "CreateDXGIFactory");
#else
    // In UWP (which lacks LoadLibrary), CreateDXGIFactory isn't available,
    // only CreateDXGIFactory1
    mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) D3D11CreateDevice;
    mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) CreateDXGIFactory1;
#endif
}
/* Private frames-context state wrapping the public AVD3D11VAFramesContext. */
typedef struct D3D11VAFramesContext {
    /**
     * The public AVD3D11VAFramesContext. See hwcontext_d3d11va.h for it.
     */
    AVD3D11VAFramesContext p;

    int nb_surfaces;        // capacity of p.texture_infos
    int nb_surfaces_used;   // number of entries handed out so far

    DXGI_FORMAT format;     // DXGI equivalent of AVHWFramesContext.sw_format

    /* Lazily created CPU-accessible texture used by transfer_data(). */
    ID3D11Texture2D *staging_texture;
} D3D11VAFramesContext;
/* Mapping between DXGI texture formats and the libavutil pixel formats
 * this hwcontext can expose as sw_format. */
static const struct {
    DXGI_FORMAT d3d_format;
    enum AVPixelFormat pix_fmt;
} supported_formats[] = {
    { DXGI_FORMAT_NV12,              AV_PIX_FMT_NV12 },
    { DXGI_FORMAT_P010,              AV_PIX_FMT_P010 },
    { DXGI_FORMAT_B8G8R8A8_UNORM,    AV_PIX_FMT_BGRA },
    { DXGI_FORMAT_R10G10B10A2_UNORM, AV_PIX_FMT_X2BGR10 },
    { DXGI_FORMAT_R16G16B16A16_FLOAT, AV_PIX_FMT_RGBAF16 },
    { DXGI_FORMAT_AYUV,              AV_PIX_FMT_VUYX },
    { DXGI_FORMAT_YUY2,              AV_PIX_FMT_YUYV422 },
    { DXGI_FORMAT_Y210,              AV_PIX_FMT_Y210 },
    { DXGI_FORMAT_Y410,              AV_PIX_FMT_XV30 },
    { DXGI_FORMAT_P016,              AV_PIX_FMT_P012 },
    { DXGI_FORMAT_Y216,              AV_PIX_FMT_Y212 },
    { DXGI_FORMAT_Y416,              AV_PIX_FMT_XV36 },
    // Special opaque formats. The pix_fmt is merely a place holder, as the
    // opaque format cannot be accessed directly.
    { DXGI_FORMAT_420_OPAQUE,        AV_PIX_FMT_YUV420P },
};
/* Default lock callback: ctx is the mutex HANDLE created in device_init(). */
static void d3d11va_default_lock(void *ctx)
{
    WaitForSingleObjectEx(ctx, INFINITE, FALSE);
}
/* Default unlock callback paired with d3d11va_default_lock(). */
static void d3d11va_default_unlock(void *ctx)
{
    ReleaseMutex(ctx);
}
/* Release the pool texture (user-provided or internally created), the lazily
 * created staging texture, and the texture_infos bookkeeping array. */
static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
{
    D3D11VAFramesContext *s = ctx->hwctx;
    AVD3D11VAFramesContext *frames_hwctx = &s->p;

    if (frames_hwctx->texture)
        ID3D11Texture2D_Release(frames_hwctx->texture);
    frames_hwctx->texture = NULL;

    if (s->staging_texture)
        ID3D11Texture2D_Release(s->staging_texture);
    s->staging_texture = NULL;

    av_freep(&frames_hwctx->texture_infos);
}
/* Fill in the frame constraints: sw formats are those from supported_formats
 * that the device can actually create 2D textures in; the only hw format is
 * AV_PIX_FMT_D3D11. The caller owns (and frees) the constraint arrays. */
static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx,
                                          const void *hwconfig,
                                          AVHWFramesConstraints *constraints)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
    int nb_sw_formats = 0;
    HRESULT hr;
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_formats) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    /* Probe each candidate format against the device. */
    for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
        UINT format_support = 0;
        hr = ID3D11Device_CheckFormatSupport(device_hwctx->device, supported_formats[i].d3d_format, &format_support);
        if (SUCCEEDED(hr) && (format_support & D3D11_FORMAT_SUPPORT_TEXTURE2D))
            constraints->valid_sw_formats[nb_sw_formats++] = supported_formats[i].pix_fmt;
    }
    constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;  /* terminator */

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_D3D11;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}
/* AVBuffer free callback: opaque holds the texture reference taken in
 * wrap_texture_buf(), data is the heap-allocated AVD3D11FrameDescriptor. */
static void free_texture(void *opaque, uint8_t *data)
{
    ID3D11Texture2D_Release((ID3D11Texture2D *)opaque);
    av_free(data);
}
/* Wrap a texture reference plus array-slice index into an AVBufferRef whose
 * data is an AVD3D11FrameDescriptor, and record the pair in texture_infos.
 * Takes ownership of the reference held on tex: on any failure path it is
 * released and NULL is returned; on success the buffer's free callback
 * (free_texture) releases it. */
static AVBufferRef *wrap_texture_buf(AVHWFramesContext *ctx, ID3D11Texture2D *tex, int index)
{
    AVBufferRef *buf;
    AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
    D3D11VAFramesContext *s = ctx->hwctx;
    AVD3D11VAFramesContext *frames_hwctx = &s->p;

    if (!desc) {
        ID3D11Texture2D_Release(tex);
        return NULL;
    }

    /* Grow the bookkeeping array when allocating past the initial pool size
     * (the dynamic, one-texture-per-frame case). av_realloc_f frees the old
     * array itself on failure. */
    if (s->nb_surfaces <= s->nb_surfaces_used) {
        frames_hwctx->texture_infos = av_realloc_f(frames_hwctx->texture_infos,
                                                   s->nb_surfaces_used + 1,
                                                   sizeof(*frames_hwctx->texture_infos));
        if (!frames_hwctx->texture_infos) {
            ID3D11Texture2D_Release(tex);
            av_free(desc);
            return NULL;
        }
        s->nb_surfaces = s->nb_surfaces_used + 1;
    }

    frames_hwctx->texture_infos[s->nb_surfaces_used].texture = tex;
    frames_hwctx->texture_infos[s->nb_surfaces_used].index = index;

    s->nb_surfaces_used++;

    desc->texture = tex;
    desc->index   = index;

    buf = av_buffer_create((uint8_t *)desc, sizeof(*desc), free_texture, tex, 0);
    if (!buf) {
        ID3D11Texture2D_Release(tex);
        av_free(desc);
        return NULL;
    }

    return buf;
}
/* Allocate one standalone (non-array, ArraySize = 1) texture for a frame and
 * wrap it in an AVBufferRef. Used when no fixed-size pool texture exists. */
static AVBufferRef *d3d11va_alloc_single(AVHWFramesContext *ctx)
{
    D3D11VAFramesContext       *s = ctx->hwctx;
    AVD3D11VAFramesContext *hwctx = &s->p;
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    HRESULT hr;
    ID3D11Texture2D *tex;
    D3D11_TEXTURE2D_DESC texDesc = {
        .Width      = ctx->width,
        .Height     = ctx->height,
        .MipLevels  = 1,
        .Format     = s->format,
        .SampleDesc = { .Count = 1 },
        .ArraySize  = 1,
        .Usage      = D3D11_USAGE_DEFAULT,
        .BindFlags  = hwctx->BindFlags,
        .MiscFlags  = hwctx->MiscFlags,
    };

    hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &tex);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
        return NULL;
    }

    /* Index 0: a standalone texture has no array slices. */
    return wrap_texture_buf(ctx, tex, 0);
}
/* Buffer-pool allocator. With an array pool texture, hand out its slices one
 * by one (each wrapped buffer holds its own reference on the texture); with
 * no pool texture, fall back to allocating an individual texture. */
static AVBufferRef *d3d11va_pool_alloc(void *opaque, size_t size)
{
    AVHWFramesContext        *ctx = (AVHWFramesContext*)opaque;
    D3D11VAFramesContext       *s = ctx->hwctx;
    AVD3D11VAFramesContext *hwctx = &s->p;
    D3D11_TEXTURE2D_DESC  texDesc;

    if (!hwctx->texture) {
        return d3d11va_alloc_single(ctx);
    }

    ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc);

    if (s->nb_surfaces_used >= texDesc.ArraySize) {
        av_log(ctx, AV_LOG_ERROR, "Static surface pool size exceeded.\n");
        return NULL;
    }

    /* wrap_texture_buf consumes one reference; take it here. */
    ID3D11Texture2D_AddRef(hwctx->texture);
    return wrap_texture_buf(ctx, hwctx->texture, s->nb_surfaces_used);
}
/* Initialize the frames context: translate sw_format to a DXGI format,
 * validate or create the pool array texture, allocate the texture_infos
 * array and set up the internal buffer pool. */
static int d3d11va_frames_init(AVHWFramesContext *ctx)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext              *s = ctx->hwctx;
    AVD3D11VAFramesContext        *hwctx = &s->p;

    int i;
    HRESULT hr;
    D3D11_TEXTURE2D_DESC texDesc;

    /* Map ctx->sw_format to its DXGI equivalent. */
    for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
        if (ctx->sw_format == supported_formats[i].pix_fmt) {
            s->format = supported_formats[i].d3d_format;
            break;
        }
    }
    if (i == FF_ARRAY_ELEMS(supported_formats)) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(EINVAL);
    }

    texDesc = (D3D11_TEXTURE2D_DESC){
        .Width      = ctx->width,
        .Height     = ctx->height,
        .MipLevels  = 1,
        .Format     = s->format,
        .SampleDesc = { .Count = 1 },
        .ArraySize  = ctx->initial_pool_size,
        .Usage      = D3D11_USAGE_DEFAULT,
        .BindFlags  = hwctx->BindFlags,
        .MiscFlags  = hwctx->MiscFlags,
    };

    if (hwctx->texture) {
        /* User-provided array texture: verify it matches the context, then
         * adopt its pool size and creation flags. */
        D3D11_TEXTURE2D_DESC texDesc2;
        ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc2);

        if (texDesc.Width != texDesc2.Width ||
            texDesc.Height != texDesc2.Height ||
            texDesc.Format != texDesc2.Format) {
            av_log(ctx, AV_LOG_ERROR, "User-provided texture has mismatching parameters\n");
            return AVERROR(EINVAL);
        }

        ctx->initial_pool_size = texDesc2.ArraySize;
        hwctx->BindFlags = texDesc2.BindFlags;
        hwctx->MiscFlags = texDesc2.MiscFlags;
    } else if (!(texDesc.BindFlags & D3D11_BIND_RENDER_TARGET) && texDesc.ArraySize > 0) {
        /* Non-render-target pools use a single array texture; render-target
         * pools allocate individual textures via d3d11va_alloc_single(). */
        hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &hwctx->texture);
        if (FAILED(hr)) {
            av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
            return AVERROR_UNKNOWN;
        }
    }

    hwctx->texture_infos = av_realloc_f(NULL, ctx->initial_pool_size, sizeof(*hwctx->texture_infos));
    if (!hwctx->texture_infos)
        return AVERROR(ENOMEM);
    s->nb_surfaces = ctx->initial_pool_size;

    ffhwframesctx(ctx)->pool_internal =
        av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
                             ctx, d3d11va_pool_alloc, NULL);
    if (!ffhwframesctx(ctx)->pool_internal)
        return AVERROR(ENOMEM);

    return 0;
}
/* Take a frame from the pool and expose the descriptor through the D3D11
 * AVFrame convention: data[0] = ID3D11Texture2D*, data[1] = array index. */
static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    AVD3D11FrameDescriptor *desc;

    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    desc = (AVD3D11FrameDescriptor *)frame->buf[0]->data;

    frame->data[0] = (uint8_t *)desc->texture;
    frame->data[1] = (uint8_t *)desc->index;

    frame->format  = AV_PIX_FMT_D3D11;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}
/* Report transferable sw formats: just ctx->sw_format, except for
 * DXGI_FORMAT_420_OPAQUE which cannot be read or written by the CPU. */
static int d3d11va_transfer_get_formats(AVHWFramesContext *ctx,
                                        enum AVHWFrameTransferDirection dir,
                                        enum AVPixelFormat **formats)
{
    D3D11VAFramesContext *s = ctx->hwctx;
    enum AVPixelFormat *fmts;

    fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = ctx->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    // Don't signal support for opaque formats. Actual access would fail.
    if (s->format == DXGI_FORMAT_420_OPAQUE)
        fmts[0] = AV_PIX_FMT_NONE;

    *formats = fmts;

    return 0;
}
/* Create the CPU-accessible staging texture (USAGE_STAGING, read+write) used
 * as the intermediate copy target/source in d3d11va_transfer_data(). */
static int d3d11va_create_staging_texture(AVHWFramesContext *ctx, DXGI_FORMAT format)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext *s = ctx->hwctx;
    HRESULT hr;
    D3D11_TEXTURE2D_DESC texDesc = {
        .Width          = ctx->width,
        .Height         = ctx->height,
        .MipLevels      = 1,
        .Format         = format,
        .SampleDesc     = { .Count = 1 },
        .ArraySize      = 1,
        .Usage          = D3D11_USAGE_STAGING,
        .CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE,
    };

    hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &s->staging_texture);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Could not create the staging texture (%lx)\n", (long)hr);
        return AVERROR_UNKNOWN;
    }

    return 0;
}
/* Compute per-plane data pointers/linesizes for a mapped staging texture.
 * All planes share the mapped RowPitch; av_image_fill_pointers derives the
 * plane offsets from sw_format and the texture height. */
static void fill_texture_ptrs(uint8_t *data[4], int linesize[4],
                              AVHWFramesContext *ctx,
                              D3D11_TEXTURE2D_DESC *desc,
                              D3D11_MAPPED_SUBRESOURCE *map)
{
    int i;

    for (i = 0; i < 4; i++)
        linesize[i] = map->RowPitch;

    av_image_fill_pointers(data, ctx->sw_format, desc->Height,
                           (uint8_t*)map->pData, linesize);
}
/*
 * Copy frame data between a D3D11 texture and system memory, in either
 * direction, via the lazily created staging texture.
 *
 * The copy is done under the device lock: GPU<->staging with
 * CopySubresourceRegion, staging<->CPU with Map/Unmap plus av_image_copy2.
 * Returns 0 on success, AVERROR(EINVAL) on mismatched contexts/formats,
 * AVERROR_UNKNOWN on D3D failures.
 *
 * Fix over the original: when the staging-texture creation failed, the
 * function returned while still holding the device lock, deadlocking any
 * later transfer or decode call. The lock is now released on that path.
 */
static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
                                 const AVFrame *src)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext *s = ctx->hwctx;
    /* download = GPU->CPU: the hw frame is the source. */
    int download = src->format == AV_PIX_FMT_D3D11;
    const AVFrame *frame = download ? src : dst;
    const AVFrame *other = download ? dst : src;
    // (The interface types are compatible.)
    ID3D11Resource *texture = (ID3D11Resource *)(ID3D11Texture2D *)frame->data[0];
    int index = (intptr_t)frame->data[1];
    ID3D11Resource *staging;
    int w = FFMIN(dst->width, src->width);
    int h = FFMIN(dst->height, src->height);
    uint8_t *map_data[4];
    int map_linesize[4];
    D3D11_TEXTURE2D_DESC desc;
    D3D11_MAPPED_SUBRESOURCE map;
    HRESULT hr;
    int res;

    if (frame->hw_frames_ctx->data != (uint8_t *)ctx || other->format != ctx->sw_format)
        return AVERROR(EINVAL);

    device_hwctx->lock(device_hwctx->lock_ctx);

    if (!s->staging_texture) {
        ID3D11Texture2D_GetDesc((ID3D11Texture2D *)texture, &desc);
        res = d3d11va_create_staging_texture(ctx, desc.Format);
        if (res < 0) {
            /* Must not return with the device lock held. */
            device_hwctx->unlock(device_hwctx->lock_ctx);
            return res;
        }
    }

    staging = (ID3D11Resource *)s->staging_texture;

    ID3D11Texture2D_GetDesc(s->staging_texture, &desc);

    if (download) {
        /* GPU texture -> staging -> mapped CPU copy into dst. */
        ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
                                                  staging, 0, 0, 0, 0,
                                                  texture, index, NULL);

        hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                                     staging, 0, D3D11_MAP_READ, 0, &map);
        if (FAILED(hr))
            goto map_failed;

        fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);

        av_image_copy2(dst->data, dst->linesize, map_data, map_linesize,
                       ctx->sw_format, w, h);

        ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
    } else {
        /* src CPU copy -> mapped staging -> GPU texture. */
        hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                                     staging, 0, D3D11_MAP_WRITE, 0, &map);
        if (FAILED(hr))
            goto map_failed;

        fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);

        av_image_copy2(map_data, map_linesize, src->data, src->linesize,
                       ctx->sw_format, w, h);

        ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);

        ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
                                                  texture, index, 0, 0, 0,
                                                  staging, 0, NULL);
    }

    device_hwctx->unlock(device_hwctx->lock_ctx);
    return 0;

map_failed:
    av_log(ctx, AV_LOG_ERROR, "Unable to lock D3D11VA surface (%lx)\n", (long)hr);
    device_hwctx->unlock(device_hwctx->lock_ctx);
    return AVERROR_UNKNOWN;
}
/*
 * Finish device-context setup from the mandatory device field: install a
 * default mutex-based lock if the user provided none, and query the
 * immediate context plus the video device/context interfaces if unset.
 *
 * Fix over the original: CreateMutex reports failure by returning NULL,
 * not INVALID_HANDLE_VALUE (per the Win32 documentation), so the previous
 * check could never fire. Both values are now treated as failure; the
 * INVALID_HANDLE_VALUE check is kept because d3d11va_device_uninit() uses
 * it as its "closed" sentinel.
 */
static int d3d11va_device_init(AVHWDeviceContext *hwdev)
{
    AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;
    HRESULT hr;

    if (!device_hwctx->lock) {
        device_hwctx->lock_ctx = CreateMutex(NULL, 0, NULL);
        if (!device_hwctx->lock_ctx ||
            device_hwctx->lock_ctx == INVALID_HANDLE_VALUE) {
            av_log(NULL, AV_LOG_ERROR, "Failed to create a mutex\n");
            return AVERROR(EINVAL);
        }
        device_hwctx->lock   = d3d11va_default_lock;
        device_hwctx->unlock = d3d11va_default_unlock;
    }

    if (!device_hwctx->device_context) {
        ID3D11Device_GetImmediateContext(device_hwctx->device, &device_hwctx->device_context);
        if (!device_hwctx->device_context)
            return AVERROR_UNKNOWN;
    }

    if (!device_hwctx->video_device) {
        hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device, &IID_ID3D11VideoDevice,
                                                (void **)&device_hwctx->video_device);
        if (FAILED(hr))
            return AVERROR_UNKNOWN;
    }

    if (!device_hwctx->video_context) {
        hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device_context, &IID_ID3D11VideoContext,
                                                (void **)&device_hwctx->video_context);
        if (FAILED(hr))
            return AVERROR_UNKNOWN;
    }

    return 0;
}
/* Release all COM interfaces held by the device context, and destroy the
 * internal mutex only if the default lock callbacks were installed (a
 * user-provided lock_ctx is the user's to clean up). */
static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
{
    AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;

    if (device_hwctx->device) {
        ID3D11Device_Release(device_hwctx->device);
        device_hwctx->device = NULL;
    }

    if (device_hwctx->device_context) {
        ID3D11DeviceContext_Release(device_hwctx->device_context);
        device_hwctx->device_context = NULL;
    }

    if (device_hwctx->video_device) {
        ID3D11VideoDevice_Release(device_hwctx->video_device);
        device_hwctx->video_device = NULL;
    }

    if (device_hwctx->video_context) {
        ID3D11VideoContext_Release(device_hwctx->video_context);
        device_hwctx->video_context = NULL;
    }

    if (device_hwctx->lock == d3d11va_default_lock) {
        CloseHandle(device_hwctx->lock_ctx);
        device_hwctx->lock_ctx = INVALID_HANDLE_VALUE;
        device_hwctx->lock = NULL;
    }
}
/* Enumerate DXGI adapters and return the index of the first one whose
 * VendorId matches vendor_id (parsed with strtol, so hex like "0x8086"
 * works) and on which a D3D11 device can be created. Returns -1 if no
 * adapter matches or the factory cannot be created. */
static int d3d11va_device_find_adapter_by_vendor_id(AVHWDeviceContext *ctx, uint32_t flags, const char *vendor_id)
{
    HRESULT hr;
    IDXGIAdapter *adapter = NULL;
    IDXGIFactory2 *factory;
    int adapter_id = 0;
    long int id = strtol(vendor_id, NULL, 0);

    hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&factory);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "CreateDXGIFactory returned error\n");
        return -1;
    }

    while (IDXGIFactory2_EnumAdapters(factory, adapter_id++, &adapter) != DXGI_ERROR_NOT_FOUND) {
        ID3D11Device* device = NULL;
        DXGI_ADAPTER_DESC adapter_desc;

        /* Probe that the adapter can actually create a device before
         * considering its vendor id. */
        hr = mD3D11CreateDevice(adapter, D3D_DRIVER_TYPE_UNKNOWN, NULL, flags, NULL, 0, D3D11_SDK_VERSION, &device, NULL, NULL);
        if (FAILED(hr)) {
            av_log(ctx, AV_LOG_DEBUG, "D3D11CreateDevice returned error, try next adapter\n");
            IDXGIAdapter_Release(adapter);
            continue;
        }

        /* NOTE(review): IDXGIAdapter2_GetDesc is invoked on an IDXGIAdapter;
         * the COBJMACROS expansion calls the base-interface GetDesc slot, so
         * this works, but confirm against the DXGI headers. */
        hr = IDXGIAdapter2_GetDesc(adapter, &adapter_desc);
        ID3D11Device_Release(device);
        IDXGIAdapter_Release(adapter);
        if (FAILED(hr)) {
            av_log(ctx, AV_LOG_DEBUG, "IDXGIAdapter2_GetDesc returned error, try next adapter\n");
            continue;
        } else if (adapter_desc.VendorId == id) {
            IDXGIFactory2_Release(factory);
            /* adapter_id was post-incremented by EnumAdapters. */
            return adapter_id - 1;
        }
    }

    IDXGIFactory2_Release(factory);
    return -1;
}
/* Create a D3D11 device for the hwdevice context.
 *
 * The adapter is chosen by, in order: an explicit index in `device`, the
 * "vendor_id" dictionary option, or the default hardware adapter. The
 * device is created with D3D11_CREATE_DEVICE_VIDEO_SUPPORT (plus the debug
 * layer when the "debug" option is set) and multithread protection is
 * enabled on it. Returns 0 on success or an AVERROR code. */
static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
                                 AVDictionary *opts, int flags)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;

    HRESULT hr;
    IDXGIAdapter           *pAdapter = NULL;
    ID3D10Multithread      *pMultithread;
    UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
    int is_debug       = !!av_dict_get(opts, "debug", NULL, 0);
    int ret;
    int adapter = -1;

    if (is_debug)
    {
        creationFlags |= D3D11_CREATE_DEVICE_DEBUG;
        av_log(ctx, AV_LOG_INFO, "Enabling d3d11 debugging.\n");
    }

    /* Resolve D3D11CreateDevice/CreateDXGIFactory exactly once. */
    if ((ret = ff_thread_once(&functions_loaded, load_functions)) != 0)
        return AVERROR_UNKNOWN;
    if (!mD3D11CreateDevice || !mCreateDXGIFactory) {
        av_log(ctx, AV_LOG_ERROR, "Failed to load D3D11 library or its functions\n");
        return AVERROR_UNKNOWN;
    }

    if (device) {
        /* Explicit adapter index string takes precedence. */
        adapter = atoi(device);
    } else {
        AVDictionaryEntry *e = av_dict_get(opts, "vendor_id", NULL, 0);
        if (e && e->value) {
            adapter = d3d11va_device_find_adapter_by_vendor_id(ctx, creationFlags, e->value);
            if (adapter < 0) {
                av_log(ctx, AV_LOG_ERROR, "Failed to find d3d11va adapter by "
                       "vendor id %s\n", e->value);
                return AVERROR_UNKNOWN;
            }
        }
    }

    if (adapter >= 0) {
        IDXGIFactory2 *pDXGIFactory;

        av_log(ctx, AV_LOG_VERBOSE, "Selecting d3d11va adapter %d\n", adapter);
        hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&pDXGIFactory);
        if (SUCCEEDED(hr)) {
            if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
                pAdapter = NULL;
            IDXGIFactory2_Release(pDXGIFactory);
        }
    }

    if (pAdapter) {
        DXGI_ADAPTER_DESC desc;
        hr = IDXGIAdapter2_GetDesc(pAdapter, &desc);
        if (!FAILED(hr)) {
            av_log(ctx, AV_LOG_INFO, "Using device %04x:%04x (%ls).\n",
                   desc.VendorId, desc.DeviceId, desc.Description);
        }
    }

    /* With an explicit adapter the driver type must be UNKNOWN. */
    hr = mD3D11CreateDevice(pAdapter, pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE, NULL, creationFlags, NULL, 0,
                   D3D11_SDK_VERSION, &device_hwctx->device, NULL, NULL);
    if (pAdapter)
        IDXGIAdapter_Release(pAdapter);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create Direct3D device (%lx)\n", (long)hr);
        return AVERROR_UNKNOWN;
    }

    /* Allow the (multi-threaded) decoder to use the device safely. */
    hr = ID3D11Device_QueryInterface(device_hwctx->device, &IID_ID3D10Multithread, (void **)&pMultithread);
    if (SUCCEEDED(hr)) {
        ID3D10Multithread_SetMultithreadProtected(pMultithread, TRUE);
        ID3D10Multithread_Release(pMultithread);
    }

#if !HAVE_UWP && HAVE_DXGIDEBUG_H
    if (is_debug) {
        /* Best-effort: report live objects via the DXGI debug layer. */
        HANDLE dxgidebug_dll = LoadLibrary("dxgidebug.dll");
        if (dxgidebug_dll) {
            HRESULT (WINAPI  * pf_DXGIGetDebugInterface)(const GUID *riid, void **ppDebug)
                = (void *)GetProcAddress(dxgidebug_dll, "DXGIGetDebugInterface");
            if (pf_DXGIGetDebugInterface) {
                IDXGIDebug *dxgi_debug = NULL;
                hr = pf_DXGIGetDebugInterface(&IID_IDXGIDebug, (void**)&dxgi_debug);
                if (SUCCEEDED(hr) && dxgi_debug) {
                    IDXGIDebug_ReportLiveObjects(dxgi_debug, DXGI_DEBUG_ALL, DXGI_DEBUG_RLO_ALL);
                    av_log(ctx, AV_LOG_INFO, "Enabled dxgi debugging.\n");
                } else {
                    av_log(ctx, AV_LOG_WARNING, "Failed enabling dxgi debugging.\n");
                }
            } else {
                av_log(ctx, AV_LOG_WARNING, "Failed getting dxgi debug interface.\n");
            }
        } else {
            av_log(ctx, AV_LOG_WARNING, "Failed loading dxgi debug library.\n");
        }
    }
#endif

    return 0;
}
/* Registration of the D3D11VA hwcontext implementation: the same transfer
 * routine handles both directions, and AV_PIX_FMT_D3D11 is the only hw
 * pixel format. */
const HWContextType ff_hwcontext_type_d3d11va = {
    .type                 = AV_HWDEVICE_TYPE_D3D11VA,
    .name                 = "D3D11VA",

    .device_hwctx_size    = sizeof(AVD3D11VADeviceContext),
    .frames_hwctx_size    = sizeof(D3D11VAFramesContext),

    .device_create        = d3d11va_device_create,
    .device_init          = d3d11va_device_init,
    .device_uninit        = d3d11va_device_uninit,
    .frames_get_constraints = d3d11va_frames_get_constraints,
    .frames_init          = d3d11va_frames_init,
    .frames_uninit        = d3d11va_frames_uninit,
    .frames_get_buffer    = d3d11va_get_buffer,
    .transfer_get_formats = d3d11va_transfer_get_formats,
    .transfer_data_to     = d3d11va_transfer_data,
    .transfer_data_from   = d3d11va_transfer_data,

    .pix_fmts             = (const enum AVPixelFormat[]){ AV_PIX_FMT_D3D11, AV_PIX_FMT_NONE },
};

View File

@@ -0,0 +1,178 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_HWCONTEXT_D3D11VA_H
#define AVUTIL_HWCONTEXT_D3D11VA_H
/**
* @file
* An API-specific header for AV_HWDEVICE_TYPE_D3D11VA.
*
* The default pool implementation will be fixed-size if initial_pool_size is
* set (and allocate elements from an array texture). Otherwise it will allocate
* individual textures. Be aware that decoding requires a single array texture.
*
* Using sw_format==AV_PIX_FMT_YUV420P has special semantics, and maps to
* DXGI_FORMAT_420_OPAQUE. av_hwframe_transfer_data() is not supported for
* this format. Refer to MSDN for details.
*
* av_hwdevice_ctx_create() for this device type supports a key named "debug"
* for the AVDictionary entry. If this is set to any value, the device creation
* code will try to load various supported D3D debugging layers.
*/
#include <d3d11.h>
#include <stdint.h>
/**
* This struct is allocated as AVHWDeviceContext.hwctx
*/
typedef struct AVD3D11VADeviceContext {
/**
* Device used for texture creation and access. This can also be used to
* set the libavcodec decoding device.
*
* Must be set by the user. This is the only mandatory field - the other
* device context fields are set from this and are available for convenience.
*
* Deallocating the AVHWDeviceContext will always release this interface,
* and it does not matter whether it was user-allocated.
*/
ID3D11Device *device;
/**
* If unset, this will be set from the device field on init.
*
* Deallocating the AVHWDeviceContext will always release this interface,
* and it does not matter whether it was user-allocated.
*/
ID3D11DeviceContext *device_context;
/**
* If unset, this will be set from the device field on init.
*
* Deallocating the AVHWDeviceContext will always release this interface,
* and it does not matter whether it was user-allocated.
*/
ID3D11VideoDevice *video_device;
/**
* If unset, this will be set from the device_context field on init.
*
* Deallocating the AVHWDeviceContext will always release this interface,
* and it does not matter whether it was user-allocated.
*/
ID3D11VideoContext *video_context;
/**
* Callbacks for locking. They protect accesses to device_context and
* video_context calls. They also protect access to the internal staging
* texture (for av_hwframe_transfer_data() calls). They do NOT protect
* access to hwcontext or decoder state in general.
*
* If unset on init, the hwcontext implementation will set them to use an
* internal mutex.
*
* The underlying lock must be recursive. lock_ctx is for free use by the
* locking implementation.
*/
void (*lock)(void *lock_ctx);
void (*unlock)(void *lock_ctx);
void *lock_ctx;
} AVD3D11VADeviceContext;
/**
* D3D11 frame descriptor for pool allocation.
*
* In user-allocated pools, AVHWFramesContext.pool must return AVBufferRefs
* with the data pointer pointing at an object of this type describing the
* planes of the frame.
*
* This has no use outside of custom allocation, and AVFrame AVBufferRef do not
* necessarily point to an instance of this struct.
*/
typedef struct AVD3D11FrameDescriptor {
/**
* The texture in which the frame is located. The reference count is
* managed by the AVBufferRef, and destroying the reference will release
* the interface.
*
* Normally stored in AVFrame.data[0].
*/
ID3D11Texture2D *texture;
/**
* The index into the array texture element representing the frame, or 0
* if the texture is not an array texture.
*
* Normally stored in AVFrame.data[1] (cast from intptr_t).
*/
intptr_t index;
} AVD3D11FrameDescriptor;
/**
 * This struct is allocated as AVHWFramesContext.hwctx
 */
typedef struct AVD3D11VAFramesContext {
    /**
     * The canonical texture used for pool allocation. If this is set to NULL
     * on init, the hwframes implementation will allocate and set an array
     * texture if initial_pool_size > 0.
     *
     * The only situation when the API user should set this is:
     * - the user wants to do manual pool allocation (setting
     *   AVHWFramesContext.pool), instead of letting AVHWFramesContext
     *   allocate the pool
     * - of an array texture
     * - and wants it to use it for decoding
     * - this has to be done before calling av_hwframe_ctx_init()
     *
     * Deallocating the AVHWFramesContext will always release this interface,
     * and it does not matter whether it was user-allocated.
     *
     * This is in particular used by the libavcodec D3D11VA hwaccel, which
     * requires a single array texture. It will create ID3D11VideoDecoderOutputView
     * objects for each array texture element on decoder initialization.
     */
    ID3D11Texture2D *texture;

    /**
     * D3D11_TEXTURE2D_DESC.BindFlags used for texture creation. The user must
     * at least set D3D11_BIND_DECODER if the frames context is to be used for
     * video decoding.
     * This field is ignored/invalid if a user-allocated texture is provided.
     */
    UINT BindFlags;

    /**
     * D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
     * This field is ignored/invalid if a user-allocated texture is provided.
     */
    UINT MiscFlags;

    /**
     * If the texture field above is non-NULL, every element holds that same
     * texture pointer together with a distinct index into the array texture.
     * If the texture field is NULL, each element points to a separate
     * non-array texture and has index 0.
     * This field is ignored/invalid if a user-allocated texture is provided.
     */
    AVD3D11FrameDescriptor *texture_infos;
} AVD3D11VAFramesContext;
#endif /* AVUTIL_HWCONTEXT_D3D11VA_H */

View File

@@ -73,6 +73,10 @@ if not CONFIG['MOZ_FFVPX_AUDIOONLY']:
'hwcontext_vaapi.c',
]
USE_LIBS += ['mozva']
elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows":
SOURCES += [
'hwcontext_d3d11va.c',
]
EXPORTS.ffvpx = [
"tx.h"

View File

@@ -0,0 +1,281 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_WCHAR_FILENAME_H
#define AVUTIL_WCHAR_FILENAME_H
#ifdef _WIN32
#include <errno.h>
#include <stddef.h>
#include <windows.h>
#include "mem.h"
/* Convert a UTF-8 string to a freshly av_calloc()ed wide string.
 * Returns 0 on success; -1 with errno set (EINVAL on invalid UTF-8,
 * ENOMEM on allocation failure) and *filename_w NULL/unset on failure.
 * The caller owns and must av_free() *filename_w. */
av_warn_unused_result
static inline int utf8towchar(const char *filename_utf8, wchar_t **filename_w)
{
    int num_chars;

    /* First pass: query the required length (includes the terminator);
     * MB_ERR_INVALID_CHARS makes malformed UTF-8 fail instead of being
     * silently replaced. */
    num_chars = MultiByteToWideChar(CP_UTF8, MB_ERR_INVALID_CHARS, filename_utf8, -1, NULL, 0);
    if (num_chars <= 0) {
        *filename_w = NULL;
        errno = EINVAL;
        return -1;
    }
    *filename_w = (wchar_t *)av_calloc(num_chars, sizeof(wchar_t));
    if (!*filename_w) {
        errno = ENOMEM;
        return -1;
    }
    /* Second pass: perform the actual conversion. */
    MultiByteToWideChar(CP_UTF8, 0, filename_utf8, -1, *filename_w, num_chars);
    return 0;
}
/* Convert a wide string to the given Windows code page into a freshly
 * allocated buffer. Returns 0 on success; -1 with errno set (EINVAL on
 * unconvertible input, ENOMEM on allocation failure). The caller owns and
 * must av_free() *filename. */
av_warn_unused_result
static inline int wchartocp(unsigned int code_page, const wchar_t *filename_w,
                            char **filename)
{
    /* Only UTF-8 supports strict error reporting for invalid characters. */
    DWORD flags = code_page == CP_UTF8 ? WC_ERR_INVALID_CHARS : 0;
    /* First pass: query the required byte count (includes the terminator). */
    int num_chars = WideCharToMultiByte(code_page, flags, filename_w, -1,
                                        NULL, 0, NULL, NULL);
    if (num_chars <= 0) {
        *filename = NULL;
        errno = EINVAL;
        return -1;
    }
    *filename = (char*)av_malloc_array(num_chars, sizeof *filename);
    if (!*filename) {
        errno = ENOMEM;
        return -1;
    }
    /* Second pass: perform the actual conversion. */
    WideCharToMultiByte(code_page, flags, filename_w, -1,
                        *filename, num_chars, NULL, NULL);
    return 0;
}
/* Convert a wide string to UTF-8; thin wrapper around wchartocp(). */
av_warn_unused_result
static inline int wchartoutf8(const wchar_t *filename_w, char **filename)
{
    return wchartocp(CP_UTF8, filename_w, filename);
}
/* Convert a wide string to the system ANSI code page (CP_ACP); thin
 * wrapper around wchartocp(). */
av_warn_unused_result
static inline int wchartoansi(const wchar_t *filename_w, char **filename)
{
    return wchartocp(CP_ACP, filename_w, filename);
}
/* Convert UTF-8 to the ANSI code page by round-tripping through wchar_t.
 * Returns 0 on success, -1 on failure (errno set by the helpers); the
 * caller owns and must av_free() *filename. */
av_warn_unused_result
static inline int utf8toansi(const char *filename_utf8, char **filename)
{
    wchar_t *filename_w = NULL;
    int ret = -1;

    if (utf8towchar(filename_utf8, &filename_w))
        return -1;

    /* Defensive: utf8towchar() is not expected to succeed with a NULL
     * result, but treat it as an empty conversion if it ever does. */
    if (!filename_w) {
        *filename = NULL;
        return 0;
    }

    ret = wchartoansi(filename_w, filename);
    av_free(filename_w);
    return ret;
}
/**
* Checks for extended path prefixes for which normalization needs to be skipped.
* see .NET6: PathInternal.IsExtended()
* https://github.com/dotnet/runtime/blob/9260c249140ef90b4299d0fe1aa3037e25228518/src/libraries/Common/src/System/IO/PathInternal.Windows.cs#L165
*/
static inline int path_is_extended(const wchar_t *path)
{
    /* An extended path starts with "\\?\" or "\??\". */
    if (path[0] != L'\\')
        return 0;
    if (path[1] != L'\\' && path[1] != L'?')
        return 0;
    return path[2] == L'?' && path[3] == L'\\';
}
/**
* Checks for a device path prefix.
* see .NET6: PathInternal.IsDevice()
* we don't check forward slashes and extended paths (as already done)
* https://github.com/dotnet/runtime/blob/9260c249140ef90b4299d0fe1aa3037e25228518/src/libraries/Common/src/System/IO/PathInternal.Windows.cs#L132
*/
static inline int path_is_device_path(const wchar_t *path)
{
    /* A device path starts with the literal prefix "\\.\". */
    static const wchar_t device_prefix[] = L"\\\\.\\";
    int i;

    for (i = 0; i < 4; i++) {
        if (path[i] != device_prefix[i])
            return 0;
    }
    return 1;
}
/**
* Performs path normalization by calling GetFullPathNameW().
* see .NET6: PathHelper.GetFullPathName()
* https://github.com/dotnet/runtime/blob/2a99e18eedabcf1add064c099da59d9301ce45e0/src/libraries/System.Private.CoreLib/src/System/IO/PathHelper.Windows.cs#L70
*/
static inline int get_full_path_name(wchar_t **ppath_w)
{
    int num_chars;
    wchar_t *temp_w;

    /* First call: query the required buffer size (in wchar_t, including
     * the terminator). */
    num_chars = GetFullPathNameW(*ppath_w, 0, NULL, NULL);
    if (num_chars <= 0) {
        errno = EINVAL;
        return -1;
    }

    temp_w = (wchar_t *)av_calloc(num_chars, sizeof(wchar_t));
    if (!temp_w) {
        errno = ENOMEM;
        return -1;
    }

    /* Second call: write the normalized absolute path. */
    num_chars = GetFullPathNameW(*ppath_w, num_chars, temp_w, NULL);
    if (num_chars <= 0) {
        av_free(temp_w);
        errno = EINVAL;
        return -1;
    }

    /* Replace the caller's path with the normalized copy. */
    av_freep(ppath_w);
    *ppath_w = temp_w;

    return 0;
}
/**
* Normalizes a Windows file or folder path.
* Expansion of short paths (with 8.3 path components) is currently omitted
* as it is not required for accessing long paths.
* see .NET6: PathHelper.Normalize()
* https://github.com/dotnet/runtime/blob/2a99e18eedabcf1add064c099da59d9301ce45e0/src/libraries/System.Private.CoreLib/src/System/IO/PathHelper.Windows.cs#L25
*/
static inline int path_normalize(wchar_t **ppath_w)
{
    int ret;

    /* Normalization is currently just GetFullPathNameW() (absolute path,
     * '.'/'..' resolution); errno is set by the helper on failure. */
    if ((ret = get_full_path_name(ppath_w)) < 0)
        return ret;

    /* What .NET does at this point is to call PathHelper.TryExpandShortFileName()
     * in case the path contains a '~' character.
     * We don't need to do this as we don't need to normalize the file name
     * for presentation, and the extended path prefix works with 8.3 path
     * components as well
     */
    return 0;
}
/**
* Adds an extended path or UNC prefix to longs paths or paths ending
* with a space or a dot. (' ' or '.').
* This function expects that the path has been normalized before by
* calling path_normalize() and it doesn't check whether the path is
* actually long (> MAX_PATH).
* see .NET6: PathInternal.EnsureExtendedPrefix()
* https://github.com/dotnet/runtime/blob/9260c249140ef90b4299d0fe1aa3037e25228518/src/libraries/Common/src/System/IO/PathInternal.Windows.cs#L107
*/
static inline int add_extended_prefix(wchar_t **ppath_w)
{
    const wchar_t *unc_prefix           = L"\\\\?\\UNC\\";
    const wchar_t *extended_path_prefix = L"\\\\?\\";
    const wchar_t *path_w               = *ppath_w;
    const size_t len                    = wcslen(path_w);
    wchar_t *temp_w;

    /* We're skipping the check IsPartiallyQualified() because
     * we expect to have called GetFullPathNameW() already. */
    if (len < 2 || path_is_extended(*ppath_w) || path_is_device_path(*ppath_w)) {
        /* Already prefixed (or too short to need one): nothing to do. */
        return 0;
    }

    if (path_w[0] == L'\\' && path_w[1] == L'\\') {
        /* UNC path "\\server\..." becomes "\\?\UNC\server\...". */
        /* unc_prefix length is 8 plus 1 for terminating zeros,
         * we subtract 2 for the leading '\\' of the original path */
        temp_w = (wchar_t *)av_calloc(len - 2 + 8 + 1, sizeof(wchar_t));
        if (!temp_w) {
            errno = ENOMEM;
            return -1;
        }
        wcscpy(temp_w, unc_prefix);
        wcscat(temp_w, path_w + 2);
    } else {
        /* Plain path "C:\..." becomes "\\?\C:\...". */
        // The length of extended_path_prefix is 4 plus 1 for terminating zeros
        temp_w = (wchar_t *)av_calloc(len + 4 + 1, sizeof(wchar_t));
        if (!temp_w) {
            errno = ENOMEM;
            return -1;
        }
        wcscpy(temp_w, extended_path_prefix);
        wcscat(temp_w, path_w);
    }

    /* Replace the caller's path with the prefixed copy. */
    av_freep(ppath_w);
    *ppath_w = temp_w;

    return 0;
}
/**
* Converts a file or folder path to wchar_t for use with Windows file
 * APIs. Paths with an extended path prefix (either '\\?\' or '\??\') are
* left unchanged.
* All other paths are normalized and converted to absolute paths.
 * Long paths (>= MAX_PATH) are prefixed with the extended path or extended
* UNC path prefix.
* see .NET6: Path.GetFullPath() and Path.GetFullPathInternal()
* https://github.com/dotnet/runtime/blob/2a99e18eedabcf1add064c099da59d9301ce45e0/src/libraries/System.Private.CoreLib/src/System/IO/Path.Windows.cs#L126
*/
static inline int get_extended_win32_path(const char *path, wchar_t **ppath_w)
{
    int ret;

    if ((ret = utf8towchar(path, ppath_w)) < 0)
        return ret;

    /* Paths already carrying a '\\?\' or '\??\' prefix are considered
     * normalized by definition; Windows passes them through untouched
     * and so do we. */
    if (path_is_extended(*ppath_w))
        return 0;

    ret = path_normalize(ppath_w);
    if (ret < 0)
        goto fail;

    /* see .NET6: PathInternal.EnsureExtendedPrefixIfNeeded()
     * https://github.com/dotnet/runtime/blob/9260c249140ef90b4299d0fe1aa3037e25228518/src/libraries/Common/src/System/IO/PathInternal.Windows.cs#L92
     */
    if (wcslen(*ppath_w) >= MAX_PATH) {
        ret = add_extended_prefix(ppath_w);
        if (ret < 0)
            goto fail;
    }
    return 0;

fail:
    /* On any failure after conversion, release the wide-char buffer so
     * the caller never sees a dangling *ppath_w. */
    av_freep(ppath_w);
    return ret;
}
#endif
#endif /* AVUTIL_WCHAR_FILENAME_H */