Step 1: Create the ID3D11Texture2D
ID3D11Texture2D is the Direct3D 11 texture object used to store and process image data. It lets the developer operate on two-dimensional image data directly on the GPU, enabling efficient rendering and texture mapping. An ID3D11Texture2D is not limited to static image data; its contents can also be updated dynamically, which is essential in real-time graphics. By recreating the texture with different dimensions, the renderer can implement image scaling, texture rebuilding, and dynamic texture updates to meet different processing needs.
public Renderer(VideoDecoder videoDecoder, IntPtr handle = new IntPtr(), int uniqueId = -1)
{
UniqueId = uniqueId == -1 ? Utils.GetUniqueId() : uniqueId;
VideoDecoder = videoDecoder;
Config = videoDecoder.Config;
singleStageDesc = new Texture2DDescription() // CPU-readable staging texture, used for single-frame grabs
{
Usage = ResourceUsage.Staging,
Format = Format.B8G8R8A8_UNorm,
ArraySize = 1,
MipLevels = 1,
BindFlags = BindFlags.None,
CPUAccessFlags = CpuAccessFlags.Read,
SampleDescription = new SampleDescription(1, 0),
Width = -1,  // set later, once the actual frame size is known
Height = -1
};
singleGpuDesc = new Texture2DDescription() // GPU-side frame texture (render target + shader resource)
{
Usage = ResourceUsage.Default,
Format = Format.B8G8R8A8_UNorm,
ArraySize = 1,
MipLevels = 1,
BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
SampleDescription = new SampleDescription(1, 0)
};
wndProcDelegate = new(WndProc);
wndProcDelegatePtr = Marshal.GetFunctionPointerForDelegate(wndProcDelegate);
ControlHandle = handle;
Initialize();
}
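Note that singleStageDesc deliberately leaves Width and Height at -1; they are filled in once the real frame size is known. As an illustration, here is a minimal sketch of how such a staging texture could then be created and read back on the CPU inside the renderer (the frame size and the gpuFrameTexture variable are assumptions; the calls are the standard Vortice ID3D11Device/ID3D11DeviceContext API):

// Hypothetical readback path: size the description, create the staging
// texture, copy a GPU frame into it, and map it for CPU access.
singleStageDesc.Width = 1920;   // assumed frame width
singleStageDesc.Height = 1080;  // assumed frame height
ID3D11Texture2D stage = Device.CreateTexture2D(singleStageDesc);
context.CopyResource(stage, gpuFrameTexture); // gpuFrameTexture: hypothetical decoded frame
MappedSubresource mapped = context.Map(stage, 0, MapMode.Read);
// mapped.DataPointer now points at BGRA pixel rows; the row stride is mapped.RowPitch.
context.Unmap(stage, 0);
stage.Dispose();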
Step 2: Create the Device (D3D11CreateDevice)
D3D11CreateDevice plays a central role in Direct3D 11 programming: it creates the device (Device) and the device context (DeviceContext), and it is the key step of Direct3D 11 initialization. Through it the developer chooses the driver type, hardware acceleration (HAL) or the software reference rasterizer (REF), selects a specific display adapter (usually the primary one), and specifies which feature levels to request, either the full list or a particular subset.

The device is a core Direct3D 11 concept. Exposed through the ID3D11Device interface, it creates resources, shader objects, state objects, and query objects, and can check hardware capabilities and assist debugging; think of it as the provider of resources. The device context is what actually uses those resources and drives the rendering pipeline itself: it binds resources, shader objects, and state objects to the pipeline and controls the execution of the rendering and compute pipelines. The ID3D11DeviceContext interface supports two kinds of context: the immediate context (ImmediateContext), which feeds the pipeline directly, and the deferred context (DeferredContext), which provides a thread-safe mechanism for recording commands from worker threads. In short, D3D11CreateDevice is not only the entry point for creating a Direct3D 11 device but also the step where the rendering environment and hardware usage are configured, which makes it essential for efficient rendering and game development.
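Initialize below references two feature-level arrays, featureLevels and featureLevelsAll, that this excerpt never defines. A plausible sketch of their definitions, assuming the Vortice FeatureLevel enum; the D3D11CreateDevice calls first try the full list and fall back to the reduced one when the runtime rejects the newer levels:

// Assumed definitions (not shown in the original excerpt).
static readonly FeatureLevel[] featureLevelsAll =
{
FeatureLevel.Level_12_1,
FeatureLevel.Level_12_0,
FeatureLevel.Level_11_1,
FeatureLevel.Level_11_0,
FeatureLevel.Level_10_1,
FeatureLevel.Level_10_0
};
static readonly FeatureLevel[] featureLevels =
{
FeatureLevel.Level_11_0,
FeatureLevel.Level_10_1,
FeatureLevel.Level_10_0
};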
public void Initialize(bool swapChain = true)
{
ID3D11Device tempDevice;
IDXGIAdapter1 adapter = null;
var creationFlags = DeviceCreationFlags.BgraSupport /*| DeviceCreationFlags.VideoSupport*/; // let FFmpeg fail video acceleration on its own if the device does not support it
var creationFlagsWarp = DeviceCreationFlags.None;
if (Environment.OSVersion.Version.Major <= 7) // legacy Windows (pre-10 reports major version 6.x): just use the primary adapter
{
// EnumAdapters1 with index 0 returns the primary adapter; the loop body breaks immediately.
for (int i = 0; Engine.Video.Factory.EnumAdapters1(i, out adapter).Success; i++)
{
break;
}
if (D3D11.D3D11CreateDevice(adapter, adapter == null ? DriverType.Hardware : DriverType.Unknown, creationFlags, featureLevelsAll, out tempDevice).Failure)
{
if (D3D11.D3D11CreateDevice(adapter, adapter == null ? DriverType.Hardware : DriverType.Unknown, creationFlags, featureLevels, out tempDevice).Failure)
{
Config.Video.GPUAdapter = "WARP";
D3D11.D3D11CreateDevice(null, DriverType.Warp, creationFlagsWarp, featureLevels, out tempDevice).CheckError();
}
}
Device = tempDevice.QueryInterface<ID3D11Device1>();
}
else
{
if (!string.IsNullOrWhiteSpace(Config.Video.GPUAdapter) && Config.Video.GPUAdapter.ToUpper() != "WARP")
{
for (int i = 0; Engine.Video.Factory.EnumAdapters1(i, out adapter).Success; i++)
{
if (adapter.Description1.Description == Config.Video.GPUAdapter)
break;
if (Regex.IsMatch(adapter.Description1.Description + " luid=" + adapter.Description1.Luid, Config.Video.GPUAdapter, RegexOptions.IgnoreCase))
break;
adapter.Dispose();
}
if (adapter == null)
{
Config.Video.GPUAdapter = null;
}
}
if (!string.IsNullOrWhiteSpace(Config.Video.GPUAdapter) && Config.Video.GPUAdapter.ToUpper() == "WARP")
{
D3D11.D3D11CreateDevice(null, DriverType.Warp, creationFlagsWarp, featureLevels, out tempDevice).CheckError();
}
else
{
if (D3D11.D3D11CreateDevice(adapter, adapter == null ? DriverType.Hardware : DriverType.Unknown, creationFlags, featureLevelsAll, out tempDevice).Failure)
{
if (D3D11.D3D11CreateDevice(adapter, adapter == null ? DriverType.Hardware : DriverType.Unknown, creationFlags, featureLevels, out tempDevice).Failure)
{
Config.Video.GPUAdapter = "WARP";
D3D11.D3D11CreateDevice(null, DriverType.Warp, creationFlagsWarp, featureLevels, out tempDevice).CheckError();
}
}
}
Device = tempDevice.QueryInterface<ID3D11Device1>();
}
context = Device.ImmediateContext;
if (adapter == null)
{
Device.Tag = new Luid().ToString();
using var deviceTmp = Device.QueryInterface<IDXGIDevice1>();
using var adapterTmp = deviceTmp.GetAdapter();
adapter = adapterTmp.QueryInterface<IDXGIAdapter1>();
}
else
{
Device.Tag = adapter.Description.Luid.ToString();
}
GPUAdapter = Engine.Video.GPUAdapters[adapter.Description1.Luid];
Config.Video.MaxVerticalResolutionAuto = GPUAdapter.MaxHeight;
tempDevice.Dispose();
adapter.Dispose();
using (var mthread = Device.QueryInterface<ID3D11Multithread>()) mthread.SetMultithreadProtected(true);
using (var dxgidevice = Device.QueryInterface<IDXGIDevice1>()) dxgidevice.MaximumFrameLatency = 1;
ReadOnlySpan<float> vertexBufferData = new float[]
{
-1.0f, -1.0f, 0, 0.0f, 1.0f,
-1.0f, 1.0f, 0, 0.0f, 0.0f,
1.0f, -1.0f, 0, 1.0f, 1.0f,
1.0f, -1.0f, 0, 1.0f, 1.0f,
-1.0f, 1.0f, 0, 0.0f, 0.0f,
1.0f, 1.0f, 0, 1.0f, 0.0f
};
vertexBuffer = Device.CreateBuffer(vertexBufferData, new BufferDescription() { BindFlags = BindFlags.VertexBuffer });
context.IASetVertexBuffer(0, vertexBuffer, sizeof(float) * 5);
InitPS();
ShaderVS = Device.CreateVertexShader(ShaderCompiler.VSBlob);
vertexLayout = Device.CreateInputLayout(inputElements, ShaderCompiler.VSBlob);
context.IASetInputLayout(vertexLayout);
context.IASetPrimitiveTopology(PrimitiveTopology.TriangleList);
context.VSSetShader(ShaderVS);
psBuffer = Device.CreateBuffer(new BufferDescription()
{
Usage = ResourceUsage.Default,
BindFlags = BindFlags.ConstantBuffer,
CPUAccessFlags = CpuAccessFlags.None,
ByteWidth = sizeof(PSBufferType) + (16 - (sizeof(PSBufferType) % 16))
});
context.PSSetConstantBuffer(0, psBuffer);
psBufferData.hdrmethod = HDRtoSDRMethod.None;
context.UpdateSubresource(psBufferData, psBuffer);
vsBuffer = Device.CreateBuffer(new BufferDescription()
{
Usage = ResourceUsage.Default,
BindFlags = BindFlags.ConstantBuffer,
CPUAccessFlags = CpuAccessFlags.None,
ByteWidth = sizeof(VSBufferType) + (16 - (sizeof(VSBufferType) % 16))
});
context.VSSetConstantBuffer(0, vsBuffer);
vsBufferData.mat = Matrix4x4.Identity;
context.UpdateSubresource(vsBufferData, vsBuffer);
InitializeVideoProcessor();
if (swapChain)
{
if (ControlHandle != IntPtr.Zero)
InitializeSwapChain(ControlHandle);
else if (SwapChainWinUIClbk != null)
InitializeWinUISwapChain();
}
InitializeChildSwapChain();
}
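Initialize only ever touches the immediate context, but the deferred-context model mentioned above deserves a quick illustration. A minimal sketch, not part of the renderer (someRtv is a placeholder render target view):

// Record commands on a worker thread into a deferred context...
ID3D11DeviceContext deferred = Device.CreateDeferredContext();
deferred.ClearRenderTargetView(someRtv, new Color4(0, 0, 0, 1));
ID3D11CommandList commands = deferred.FinishCommandList(false); // false: do not restore the deferred state
deferred.Dispose();
// ...then replay them on the immediate context from the render thread.
context.ExecuteCommandList(commands, false);
commands.Dispose();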
Step 3: Create the Video Processor (CreateVideoProcessor)
The CreateVideoProcessor used here is ID3D11VideoDevice::CreateVideoProcessor from the Direct3D 11 video API (not the similarly named Media Foundation facility). It creates a video processor: an object that encapsulates the graphics hardware's capabilities for processing uncompressed video frames. It also covers video-processor blit and image-composition operations such as color-space conversion, scaling, and deinterlacing, which extends its usefulness across a wide range of video-processing tasks.
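InitializeVideoProcessor below fills in a vpcd field whose declaration the excerpt omits; presumably it is a Vortice VideoProcessorContentDescription along these lines (the frame format and usage values are assumptions):

// Assumed declaration; InitializeVideoProcessor only overwrites the sizes.
VideoProcessorContentDescription vpcd = new()
{
InputFrameFormat = VideoFrameFormat.InterlacedTopFieldFirst, // leaves room for deinterlacing
InputFrameRate = new Rational(1, 1),
OutputFrameRate = new Rational(1, 1),
Usage = VideoUsage.PlaybackNormal
};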
void InitializeVideoProcessor()
{
vpcd.InputWidth = 1;
vpcd.InputHeight = 1;
vpcd.OutputWidth = vpcd.InputWidth;
vpcd.OutputHeight = vpcd.InputHeight;
outputColorSpace = new VideoProcessorColorSpace()
{
Usage = 0,
RGB_Range = 0,
YCbCr_Matrix = 1,
YCbCr_xvYCC = 0,
Nominal_Range = 2
};
if (VideoProcessorsCapsCache.ContainsKey(Device.Tag.ToString()))
{
if (VideoProcessorsCapsCache[Device.Tag.ToString()].Failed)
{
InitializeFilters();
return;
}
vd1 = Device.QueryInterface<ID3D11VideoDevice1>();
vc = context.QueryInterface<ID3D11VideoContext1>();
vd1.CreateVideoProcessorEnumerator(ref vpcd, out vpe);
if (vpe == null)
{
VPFailed();
return;
}
vd1.CreateVideoProcessor(vpe, VideoProcessorsCapsCache[Device.Tag.ToString()].TypeIndex, out vp);
InitializeFilters();
return;
}
VideoProcessorCapsCache cache = new();
VideoProcessorsCapsCache.Add(Device.Tag.ToString(), cache);
vd1 = Device.QueryInterface<ID3D11VideoDevice1>();
vc = context.QueryInterface<ID3D11VideoContext1>(); // same interface as the cached path above
vd1.CreateVideoProcessorEnumerator(ref vpcd, out vpe);
if (vpe == null || Device.FeatureLevel < Vortice.Direct3D.FeatureLevel.Level_10_0)
{
VPFailed();
return;
}
var vpe1 = vpe.QueryInterface<ID3D11VideoProcessorEnumerator1>();
bool supportHLG = vpe1.CheckVideoProcessorFormatConversion(Format.P010, ColorSpaceType.YcbcrStudioGhlgTopLeftP2020, Format.B8G8R8A8_UNorm, ColorSpaceType.RgbFullG22NoneP709);
bool supportHDR10Limited = vpe1.CheckVideoProcessorFormatConversion(Format.P010, ColorSpaceType.YcbcrStudioG2084TopLeftP2020, Format.B8G8R8A8_UNorm, ColorSpaceType.RgbStudioG2084NoneP2020);
var vpCaps = vpe.VideoProcessorCaps;
foreach (VideoProcessorFilterCaps filter in Enum.GetValues(typeof(VideoProcessorFilterCaps)))
{
if ((vpCaps.FilterCaps & filter) != 0)
{
vpe1.GetVideoProcessorFilterRange(ConvertFromVideoProcessorFilterCaps(filter), out var range);
var vf = ConvertFromVideoProcessorFilterRange(range);
vf.Filter = (VideoFilters)filter;
cache.Filters.Add((VideoFilters)filter, vf);
}
}
int typeIndex = -1;
VideoProcessorRateConversionCaps rcCap = new();
for (int i = 0; i < vpCaps.RateConversionCapsCount; i++)
{
vpe.GetVideoProcessorRateConversionCaps(i, out rcCap);
VideoProcessorProcessorCaps pCaps = (VideoProcessorProcessorCaps)rcCap.ProcessorCaps;
typeIndex = i;
if ((pCaps & VideoProcessorProcessorCaps.DeinterlaceBob) != 0) // prefer a rate-conversion type that supports bob deinterlacing
break;
}
vpe1.Dispose();
cache.TypeIndex = typeIndex;
cache.HLG = supportHLG;
cache.HDR10Limited = supportHDR10Limited;
cache.VideoProcessorCaps = vpCaps;
cache.VideoProcessorRateConversionCaps = rcCap;
vd1.CreateVideoProcessor(vpe, typeIndex, out vp);
if (vp == null)
{
VPFailed();
return;
}
cache.Failed = false;
InitializeFilters();
}
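Once vp and an output view exist, each decoded frame is pushed through the processor with a blit. A hedged sketch of that per-frame path (decodedTexture is a placeholder for the decoder's NV12/P010 surface; vpov is the output view created over the back buffer in the next step; the Vpiv type names follow the Vortice bindings and may differ between versions):

// Hypothetical per-frame blit: wrap the decoded surface in an input view,
// then let the video processor convert and scale it into the output view.
VideoProcessorInputViewDescription vpivd = new()
{
ViewDimension = VpivDimension.Texture2D,
Texture2D = new Texture2DVpiv() { MipSlice = 0, ArraySlice = 0 }
};
vd1.CreateVideoProcessorInputView(decodedTexture, vpe, vpivd, out var vpiv);
VideoProcessorStream stream = new() { Enable = true, InputSurface = vpiv };
vc.VideoProcessorBlt(vp, vpov, 0, 1, new[] { stream });
vpiv.Dispose();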
Step 4: Create the Render Target View (CreateRenderTargetView)
The main purpose of CreateRenderTargetView is to bind a render target to the GPU so that it can serve as the destination of rendering output. In graphics programming, a render target is a surface that receives the rendered image: the screen, a texture, or an off-screen buffer. When we create a render target in DirectX or a similar graphics API, we must bind it to the GPU so that the GPU knows where to perform its rendering operations.
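InitializeSwapChain below relies on a GetSwapChainDesc helper that the excerpt does not show. A plausible sketch of what it returns, assuming a flip-model BGRA description; the buffer count, alpha handling, and the exact effect of the composition flag are assumptions:

// Assumed helper (not shown in the original excerpt).
SwapChainDescription1 GetSwapChainDesc(int width, int height, bool isComp = false, bool alpha = false) => new()
{
Format = Format.B8G8R8A8_UNorm,
Width = width,
Height = height,
BufferCount = 2,
SwapEffect = SwapEffect.FlipSequential, // flip model; required when isComp (DirectComposition) is used
AlphaMode = alpha ? AlphaMode.Premultiplied : AlphaMode.Ignore,
BufferUsage = Usage.RenderTargetOutput,
SampleDescription = new SampleDescription(1, 0)
};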
internal void InitializeSwapChain(IntPtr handle)
{
lock (lockDevice)
{
ControlHandle = handle;
RECT rect = new();
GetWindowRect(ControlHandle, ref rect);
ControlWidth = rect.Right - rect.Left;
ControlHeight = rect.Bottom - rect.Top;
if (cornerRadius == zeroCornerRadius)
{
swapChain = Engine.Video.Factory.CreateSwapChainForHwnd(Device, handle, GetSwapChainDesc(ControlWidth, ControlHeight));
}
else
{
swapChain = Engine.Video.Factory.CreateSwapChainForComposition(Device, GetSwapChainDesc(ControlWidth, ControlHeight, true, true));
using (var dxgiDevice = Device.QueryInterface<IDXGIDevice>())
dCompDevice = DComp.DCompositionCreateDevice<IDCompositionDevice>(dxgiDevice);
dCompDevice.CreateTargetForHwnd(handle, false, out dCompTarget).CheckError();
dCompDevice.CreateVisual(out dCompVisual).CheckError();
dCompVisual.SetContent(swapChain).CheckError();
dCompTarget.SetRoot(dCompVisual).CheckError();
dCompDevice.Commit().CheckError();
int styleEx = GetWindowLong(handle, (int)WindowLongFlags.GWL_EXSTYLE).ToInt32() | WS_EX_NOREDIRECTIONBITMAP;
SetWindowLong(handle, (int)WindowLongFlags.GWL_EXSTYLE, new IntPtr(styleEx));
}
backBuffer = swapChain.GetBuffer<ID3D11Texture2D>(0);
backBufferRtv = Device.CreateRenderTargetView(backBuffer);
SCDisposed = false;
ResizeBuffers(ControlWidth, ControlHeight); // maybe not required (only for vp)?
}
}
public void ResizeBuffers(int width, int height)
{
lock (lockDevice)
{
ControlWidth = width;
ControlHeight = height;
backBufferRtv.Dispose();
vpov?.Dispose();
backBuffer.Dispose();
swapChain.ResizeBuffers(0, ControlWidth, ControlHeight, Format.Unknown, SwapChainFlags.None);
UpdateCornerRadius();
backBuffer = swapChain.GetBuffer<ID3D11Texture2D>(0);
backBufferRtv = Device.CreateRenderTargetView(backBuffer);
if (videoProcessor == VideoProcessors.D3D11)
{
vd1.CreateVideoProcessorOutputView(backBuffer, vpe, vpovd, out vpov);
}
SetViewport();
}
}
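With the back-buffer RTV recreated, a frame is presented by binding it, drawing the full-screen quad prepared in Initialize, and calling Present. A simplified sketch of that loop body (the real present path also runs the video processor or pixel-shader conversion first):

// Minimal per-frame usage of the render target view.
context.OMSetRenderTargets(backBufferRtv);
context.ClearRenderTargetView(backBufferRtv, new Color4(0, 0, 0, 1));
context.RSSetViewport(0, 0, ControlWidth, ControlHeight);
context.Draw(6, 0);                      // two triangles = full-screen quad
swapChain.Present(1, PresentFlags.None); // sync interval 1 = vsync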