VirtualBox

source: vbox/trunk/src/libs/dxvk-native-1.9.2a/tests/d3d11/test_d3d11_video.cpp@ 99553

最後變更 在這個檔案從99553是 96497,由 vboxsync 提交於 3 年 前

libs/dxvk-native-1.9.2a: export to OSE

  • 屬性 svn:eol-style 設為 native
檔案大小: 15.9 KB
 
#include <d3d11_1.h>

#include <windows.h>
#include <windowsx.h>

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstring>
#include <fstream>
#include <iostream>
#include <vector>

#include "../test_utils.h"
11
12using namespace dxvk;
13
14class VideoApp {
15
16public:
17
18 VideoApp(HINSTANCE instance, HWND window)
19 : m_window(window) {
20 // Create base D3D11 device and swap chain
21 DXGI_SWAP_CHAIN_DESC swapchainDesc = { };
22 swapchainDesc.BufferDesc.Width = m_windowSizeX;
23 swapchainDesc.BufferDesc.Height = m_windowSizeY;
24 swapchainDesc.BufferDesc.RefreshRate = { 0, 0 };
25 swapchainDesc.BufferDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
26 swapchainDesc.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
27 swapchainDesc.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;
28 swapchainDesc.BufferCount = 2;
29 swapchainDesc.SampleDesc = { 1, 0 };
30 swapchainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
31 swapchainDesc.OutputWindow = m_window;
32 swapchainDesc.Windowed = true;
33 swapchainDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;
34 swapchainDesc.Flags = 0;
35
36 HRESULT hr = D3D11CreateDeviceAndSwapChain(nullptr,
37 D3D_DRIVER_TYPE_HARDWARE, nullptr, 0, nullptr, 0,
38 D3D11_SDK_VERSION, &swapchainDesc, &m_swapchain,
39 &m_device, nullptr, &m_context);
40
41 if (FAILED(hr)) {
42 std::cerr << "Failed to initialize D3D11 device and swap chain" << std::endl;
43 return;
44 }
45
46 if (FAILED(hr = m_device->QueryInterface(IID_PPV_ARGS(&m_vdevice)))) {
47 std::cerr << "Failed to query D3D11 video device" << std::endl;
48 return;
49 }
50
51 if (FAILED(hr = m_context->QueryInterface(IID_PPV_ARGS(&m_vcontext)))) {
52 std::cerr << "Failed to query D3D11 video context" << std::endl;
53 return;
54 }
55
56 if (FAILED(hr = m_swapchain->ResizeTarget(&swapchainDesc.BufferDesc))) {
57 std::cerr << "Failed to resize target" << std::endl;
58 return;
59 }
60
61 if (FAILED(hr = m_swapchain->GetBuffer(0, IID_PPV_ARGS(&m_swapImage)))) {
62 std::cerr << "Failed to query swap chain image" << std::endl;
63 return;
64 }
65
66 if (FAILED(hr = m_device->CreateRenderTargetView(m_swapImage.ptr(), nullptr, &m_swapImageView))) {
67 std::cerr << "Failed to create render target view" << std::endl;
68 return;
69 }
70
71 // Create video processor instance
72 D3D11_VIDEO_PROCESSOR_CONTENT_DESC videoEnumDesc = { };
73 videoEnumDesc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
74 videoEnumDesc.InputFrameRate = { 60, 1 };
75 videoEnumDesc.InputWidth = 128;
76 videoEnumDesc.InputHeight = 128;
77 videoEnumDesc.OutputFrameRate = { 60, 1 };
78 videoEnumDesc.OutputWidth = 256;
79 videoEnumDesc.OutputHeight = 256;
80 videoEnumDesc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
81
82 if (FAILED(hr = m_vdevice->CreateVideoProcessorEnumerator(&videoEnumDesc, &m_venum))) {
83 std::cerr << "Failed to create D3D11 video processor enumerator" << std::endl;
84 return;
85 }
86
87 if (FAILED(hr = m_vdevice->CreateVideoProcessor(m_venum.ptr(), 0, &m_vprocessor))) {
88 std::cerr << "Failed to create D3D11 video processor" << std::endl;
89 return;
90 }
91
92 // Video output image and view
93 D3D11_TEXTURE2D_DESC textureDesc = { };
94 textureDesc.Width = 256;
95 textureDesc.Height = 256;
96 textureDesc.MipLevels = 1;
97 textureDesc.ArraySize = 1;
98 textureDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
99 textureDesc.SampleDesc = { 1, 0 };
100 textureDesc.Usage = D3D11_USAGE_DEFAULT;
101 textureDesc.BindFlags = D3D11_BIND_RENDER_TARGET;
102
103 if (FAILED(hr = m_device->CreateTexture2D(&textureDesc, nullptr, &m_videoOutput))) {
104 std::cerr << "Failed to create D3D11 video output image" << std::endl;
105 return;
106 }
107
108 D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outputDesc = { };
109 outputDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;
110 outputDesc.Texture2D.MipSlice = 0;
111
112 if (FAILED(hr = m_vdevice->CreateVideoProcessorOutputView(m_videoOutput.ptr(), m_venum.ptr(), &outputDesc, &m_videoOutputView))) {
113 std::cerr << "Failed to create D3D11 video output view" << std::endl;
114 return;
115 }
116
117 if (FAILED(hr = m_device->CreateRenderTargetView(m_videoOutput.ptr(), nullptr, &m_videoOutputRtv))) {
118 std::cerr << "Failed to create video render target view" << std::endl;
119 return;
120 }
121
122 // RGBA input image and view
123 textureDesc.Width = 128;
124 textureDesc.Height = 128;
125 textureDesc.BindFlags = 0;
126
127 size_t pixelCount = textureDesc.Width * textureDesc.Height;
128
129 size_t rowSizeRgba = textureDesc.Width * 4;
130 size_t rowSizeNv12 = textureDesc.Width;
131 size_t rowSizeYuy2 = textureDesc.Width * 2;
132 size_t imageSizeRgba = textureDesc.Height * rowSizeRgba;
133 size_t imageSizeNv12 = pixelCount + pixelCount / 2;
134 size_t imageSizeYuy2 = textureDesc.Height * rowSizeYuy2;
135
136 std::vector<uint8_t> srcData(pixelCount * 3);
137 std::vector<uint8_t> imgDataRgba(imageSizeRgba);
138 std::vector<uint8_t> imgDataNv12(imageSizeNv12);
139 std::vector<uint8_t> imgDataYuy2(imageSizeYuy2);
140 std::ifstream ifile("video_image.raw", std::ios::binary);
141
142 if (!ifile || !ifile.read(reinterpret_cast<char*>(srcData.data()), srcData.size())) {
143 std::cerr << "Failed to read image file" << std::endl;
144 return;
145 }
146
147 for (size_t i = 0; i < pixelCount; i++) {
148 imgDataRgba[4 * i + 0] = srcData[3 * i + 0];
149 imgDataRgba[4 * i + 1] = srcData[3 * i + 1];
150 imgDataRgba[4 * i + 2] = srcData[3 * i + 2];
151 imgDataRgba[4 * i + 3] = 0xFF;
152
153 imgDataNv12[i] = y_coeff(&srcData[3 * i], 0.299000f, 0.587000f, 0.114000f);
154
155 imgDataYuy2[2 * i + 0] = y_coeff(&srcData[3 * i], 0.299000f, 0.587000f, 0.114000f);
156 imgDataYuy2[2 * i + 1] = i % 2
157 ? c_coeff(&srcData[3 * i], -0.168736f, -0.331264f, 0.500000f)
158 : c_coeff(&srcData[3 * i], 0.500000f, -0.418688f, -0.081312f);
159 }
160
161 for (size_t y = 0; y < textureDesc.Height / 2; y++) {
162 for (size_t x = 0; x < textureDesc.Width / 2; x++) {
163 size_t p = textureDesc.Width * (2 * y) + 2 * x;
164 size_t i = pixelCount + textureDesc.Width * y + 2 * x;
165 imgDataNv12[i + 0] = c_coeff(&srcData[3 * p], 0.500000f, -0.418688f, -0.081312f);
166 imgDataNv12[i + 1] = c_coeff(&srcData[3 * p], -0.168736f, -0.331264f, 0.500000f);
167 }
168 }
169
170 D3D11_SUBRESOURCE_DATA subresourceData = { };
171 subresourceData.pSysMem = imgDataRgba.data();
172 subresourceData.SysMemPitch = rowSizeRgba;
173 subresourceData.SysMemSlicePitch = rowSizeRgba * textureDesc.Height;
174
175 if (FAILED(hr = m_device->CreateTexture2D(&textureDesc, &subresourceData, &m_videoInput))) {
176 std::cerr << "Failed to create D3D11 video input image" << std::endl;
177 return;
178 }
179
180 D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputDesc = { };
181 inputDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
182 inputDesc.Texture2D.MipSlice = 0;
183
184 if (FAILED(hr = m_vdevice->CreateVideoProcessorInputView(m_videoInput.ptr(), m_venum.ptr(), &inputDesc, &m_videoInputView))) {
185 std::cerr << "Failed to create D3D11 video input view" << std::endl;
186 return;
187 }
188
189 // NV12 input image and view
190 textureDesc.Format = DXGI_FORMAT_NV12;
191 textureDesc.BindFlags = 0;
192
193 subresourceData.pSysMem = imgDataNv12.data();
194 subresourceData.SysMemPitch = rowSizeNv12;
195 subresourceData.SysMemSlicePitch = rowSizeNv12 * textureDesc.Height;
196
197 if (SUCCEEDED(hr = m_device->CreateTexture2D(&textureDesc, nullptr, &m_videoInputNv12))) {
198 if (FAILED(hr = m_vdevice->CreateVideoProcessorInputView(m_videoInputNv12.ptr(), m_venum.ptr(), &inputDesc, &m_videoInputViewNv12))) {
199 std::cerr << "Failed to create D3D11 video input view for NV12" << std::endl;
200 return;
201 }
202 } else {
203 std::cerr << "NV12 not supported" << std::endl;
204 }
205
206 textureDesc.Usage = D3D11_USAGE_STAGING;
207 textureDesc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE | D3D11_CPU_ACCESS_READ;
208
209 if (SUCCEEDED(hr = m_device->CreateTexture2D(&textureDesc, nullptr, &m_videoInputNv12Host))) {
210 D3D11_MAPPED_SUBRESOURCE mr = { };
211 m_context->Map(m_videoInputNv12Host.ptr(), 0, D3D11_MAP_WRITE, D3D11_MAP_FLAG_DO_NOT_WAIT, &mr);
212 memcpy(mr.pData, imgDataNv12.data(), imgDataNv12.size());
213 m_context->Unmap(m_videoInputNv12Host.ptr(), 0);
214 D3D11_BOX box = { 0, 0, 0, 128, 128, 1 };
215 m_context->CopySubresourceRegion(m_videoInputNv12.ptr(), 0, 0, 0, 0, m_videoInputNv12Host.ptr(), 0, &box);
216 }
217
218 // YUY2 input image and view
219 textureDesc.Format = DXGI_FORMAT_YUY2;
220 textureDesc.BindFlags = 0;
221 textureDesc.Usage = D3D11_USAGE_DEFAULT;
222 textureDesc.CPUAccessFlags = 0;
223
224 subresourceData.pSysMem = imgDataYuy2.data();
225 subresourceData.SysMemPitch = rowSizeYuy2;
226 subresourceData.SysMemSlicePitch = imageSizeYuy2;
227
228 if (SUCCEEDED(hr = m_device->CreateTexture2D(&textureDesc, &subresourceData, &m_videoInputYuy2))) {
229 if (FAILED(hr = m_vdevice->CreateVideoProcessorInputView(m_videoInputYuy2.ptr(), m_venum.ptr(), &inputDesc, &m_videoInputViewYuy2))) {
230 std::cerr << "Failed to create D3D11 video input view for YUY2" << std::endl;
231 return;
232 }
233 } else {
234 std::cerr << "YUY2 not supported" << std::endl;
235 }
236
237 m_initialized = true;
238 }
239
240
241 ~VideoApp() {
242
243 }
244
245
246 void run() {
247 this->adjustBackBuffer();
248
249 float color[4] = { 0.5f, 0.5f, 0.5f, 1.0f };
250 m_context->ClearRenderTargetView(m_swapImageView.ptr(), color);
251
252 // Full range RGB output color space
253 D3D11_VIDEO_PROCESSOR_COLOR_SPACE csOut = { };
254 csOut.Usage = 0; // Present
255 csOut.RGB_Range = 0; // Full range
256 csOut.Nominal_Range = 1; // Full range
257
258 D3D11_VIDEO_PROCESSOR_COLOR_SPACE csIn = { };
259 csIn.Usage = 0; // Present
260 csIn.RGB_Range = 0; // Full range
261 csIn.Nominal_Range = 1; // Full range
262 csIn.YCbCr_Matrix = 0; // BT.601
263
264 m_vcontext->VideoProcessorSetStreamAutoProcessingMode(m_vprocessor.ptr(), 0, false);
265 m_vcontext->VideoProcessorSetOutputColorSpace(m_vprocessor.ptr(), &csOut);
266 m_vcontext->VideoProcessorSetStreamColorSpace(m_vprocessor.ptr(), 0, &csIn);
267 blit(m_videoInputView.ptr(), 32, 32);
268 blit(m_videoInputViewNv12.ptr(), 32, 320);
269 blit(m_videoInputViewYuy2.ptr(), 32, 608);
270
271 csIn.RGB_Range = 1; // Limited range
272 csIn.Nominal_Range = 0; // Limited range
273 m_vcontext->VideoProcessorSetStreamColorSpace(m_vprocessor.ptr(), 0, &csIn);
274 blit(m_videoInputView.ptr(), 320, 32);
275 blit(m_videoInputViewNv12.ptr(), 320, 320);
276 blit(m_videoInputViewYuy2.ptr(), 320, 608);
277
278 // Limited range RGB output color space
279 csOut.RGB_Range = 1;
280 csOut.Nominal_Range = 0;
281 m_vcontext->VideoProcessorSetOutputColorSpace(m_vprocessor.ptr(), &csOut);
282
283 csIn.RGB_Range = 0; // Full range
284 csIn.Nominal_Range = 1; // Full range
285 m_vcontext->VideoProcessorSetStreamColorSpace(m_vprocessor.ptr(), 0, &csIn);
286 blit(m_videoInputView.ptr(), 608, 32);
287 blit(m_videoInputViewNv12.ptr(), 608, 320);
288 blit(m_videoInputViewYuy2.ptr(), 608, 608);
289
290 csIn.RGB_Range = 1; // Limited range
291 csIn.Nominal_Range = 0; // Limited range
292 m_vcontext->VideoProcessorSetStreamColorSpace(m_vprocessor.ptr(), 0, &csIn);
293 blit(m_videoInputView.ptr(), 896, 32);
294 blit(m_videoInputViewNv12.ptr(), 896, 320);
295 blit(m_videoInputViewYuy2.ptr(), 896, 608);
296
297 m_swapchain->Present(1, 0);
298 }
299
300
301 void blit(ID3D11VideoProcessorInputView* pView, uint32_t x, uint32_t y) {
302 if (!pView)
303 return;
304
305 D3D11_VIDEO_PROCESSOR_STREAM stream = { };
306 stream.Enable = true;
307 stream.pInputSurface = pView;
308
309 D3D11_BOX box;
310 box.left = 0;
311 box.top = 0;
312 box.front = 0;
313 box.right = 256;
314 box.bottom = 256;
315 box.back = 1;
316
317 FLOAT red[4] = { 1.0f, 0.0f, 0.0f, 1.0f };
318 m_context->ClearRenderTargetView(m_videoOutputRtv.ptr(), red);
319 m_vcontext->VideoProcessorBlt(m_vprocessor.ptr(), m_videoOutputView.ptr(), 0, 1, &stream);
320 m_context->CopySubresourceRegion(m_swapImage.ptr(), 0, x, y, 0, m_videoOutput.ptr(), 0, &box);
321 }
322
323
324 void adjustBackBuffer() {
325 RECT windowRect = { };
326 GetClientRect(m_window, &windowRect);
327
328 if (uint32_t(windowRect.right - windowRect.left) != m_windowSizeX
329 || uint32_t(windowRect.bottom - windowRect.top) != m_windowSizeY) {
330 m_windowSizeX = windowRect.right - windowRect.left;
331 m_windowSizeY = windowRect.bottom - windowRect.top;
332
333 m_swapImage = nullptr;
334 m_swapImageView = nullptr;
335
336 HRESULT hr = m_swapchain->ResizeBuffers(0,
337 m_windowSizeX, m_windowSizeY, DXGI_FORMAT_UNKNOWN, 0);
338
339 if (FAILED(hr)) {
340 std::cerr << "Failed to resize swap chain buffer" << std::endl;
341 return;
342 }
343
344 if (FAILED(hr = m_swapchain->GetBuffer(0, IID_PPV_ARGS(&m_swapImage)))) {
345 std::cerr << "Failed to query swap chain image" << std::endl;
346 return;
347 }
348
349 if (FAILED(hr = m_device->CreateRenderTargetView(m_swapImage.ptr(), nullptr, &m_swapImageView))) {
350 std::cerr << "Failed to create render target view" << std::endl;
351 return;
352 }
353 }
354 }
355
356 operator bool () const {
357 return m_initialized;
358 }
359
360private:
361
362 HWND m_window;
363 uint32_t m_windowSizeX = 1280;
364 uint32_t m_windowSizeY = 720;
365
366 Com<IDXGISwapChain> m_swapchain;
367 Com<ID3D11Device> m_device;
368 Com<ID3D11DeviceContext> m_context;
369 Com<ID3D11VideoDevice> m_vdevice;
370 Com<ID3D11VideoContext> m_vcontext;
371 Com<ID3D11VideoProcessorEnumerator> m_venum;
372 Com<ID3D11VideoProcessor> m_vprocessor;
373 Com<ID3D11Texture2D> m_swapImage;
374 Com<ID3D11RenderTargetView> m_swapImageView;
375 Com<ID3D11Texture2D> m_videoOutput;
376 Com<ID3D11VideoProcessorOutputView> m_videoOutputView;
377 Com<ID3D11RenderTargetView> m_videoOutputRtv;
378 Com<ID3D11Texture2D> m_videoInput;
379 Com<ID3D11VideoProcessorInputView> m_videoInputView;
380 Com<ID3D11Texture2D> m_videoInputNv12;
381 Com<ID3D11Texture2D> m_videoInputNv12Host;
382 Com<ID3D11Texture2D> m_videoInputYuy2;
383 Com<ID3D11VideoProcessorInputView> m_videoInputViewNv12;
384 Com<ID3D11VideoProcessorInputView> m_videoInputViewYuy2;
385
386 bool m_initialized = false;
387
388 static inline uint8_t y_coeff(const uint8_t* rgb, float r, float g, float b) {
389 float x = (rgb[0] * r + rgb[1] * g + rgb[2] * b) / 255.0f;
390 return 16 + uint8_t(std::roundf(219.0f * std::clamp(x, 0.0f, 1.0f)));
391 }
392
393 static inline uint8_t c_coeff(const uint8_t* rgb, float r, float g, float b) {
394 float x = ((rgb[0] * r + rgb[1] * g + rgb[2] * b) / 255.0f) + 0.5f;
395 return uint8_t(std::roundf(255.0f * std::clamp(x, 0.0f, 1.0f)));
396 }
397
398};
399
400LRESULT CALLBACK WindowProc(HWND hWnd,
401 UINT message,
402 WPARAM wParam,
403 LPARAM lParam);
404
405int WINAPI WinMain(HINSTANCE hInstance,
406 HINSTANCE hPrevInstance,
407 LPSTR lpCmdLine,
408 int nCmdShow) {
409 HWND hWnd;
410 WNDCLASSEXW wc;
411 ZeroMemory(&wc, sizeof(WNDCLASSEX));
412 wc.cbSize = sizeof(WNDCLASSEX);
413 wc.style = CS_HREDRAW | CS_VREDRAW;
414 wc.lpfnWndProc = WindowProc;
415 wc.hInstance = hInstance;
416 wc.hCursor = LoadCursor(nullptr, IDC_ARROW);
417 wc.hbrBackground = (HBRUSH)COLOR_WINDOW;
418 wc.lpszClassName = L"WindowClass1";
419 RegisterClassExW(&wc);
420
421 hWnd = CreateWindowExW(0,
422 L"WindowClass1",
423 L"Our First Windowed Program",
424 WS_OVERLAPPEDWINDOW,
425 300, 300,
426 1280, 720,
427 nullptr,
428 nullptr,
429 hInstance,
430 nullptr);
431 ShowWindow(hWnd, nCmdShow);
432
433 MSG msg;
434 VideoApp app(hInstance, hWnd);
435
436 while (app) {
437 if (PeekMessage(&msg, nullptr, 0, 0, PM_REMOVE)) {
438 TranslateMessage(&msg);
439 DispatchMessage(&msg);
440
441 if (msg.message == WM_QUIT)
442 return msg.wParam;
443 } else {
444 app.run();
445 }
446 }
447
448 return 0;
449}
450
451LRESULT CALLBACK WindowProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam) {
452 switch (message) {
453 case WM_CLOSE:
454 PostQuitMessage(0);
455 return 0;
456 }
457
458 return DefWindowProc(hWnd, message, wParam, lParam);
459}
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette