/xbmc/cores/VideoPlayer/VideoRenderers/HwDecRender/DXVAHD.cpp

https://github.com/arnova/xbmc · C++ · 629 lines · 476 code · 106 blank · 47 comment · 122 complexity · ca7dbaed30e0786c7cfccf483be0f372 MD5 · raw file

  1. /*
  2. * Copyright (C) 2005-2018 Team Kodi
  3. * This file is part of Kodi - https://kodi.tv
  4. *
  5. * SPDX-License-Identifier: GPL-2.0-or-later
  6. * See LICENSES/README.md for more information.
  7. */
  8. // setting that here because otherwise SampleFormat is defined to AVSampleFormat
  9. // which we don't use here
  10. #define FF_API_OLD_SAMPLE_FMT 0
  11. #define DEFAULT_STREAM_INDEX (0)
  12. #include "DXVAHD.h"
  13. #include "VideoRenderers/RenderFlags.h"
  14. #include "VideoRenderers/RenderManager.h"
  15. #include "VideoRenderers/windows/RendererBase.h"
  16. #include "rendering/dx/RenderContext.h"
  17. #include "utils/log.h"
  18. #include <Windows.h>
  19. #include <d3d11_4.h>
  20. #include <dxgi1_5.h>
  21. using namespace DXVA;
  22. using namespace Microsoft::WRL;
  23. #define LOGIFERROR(a) \
  24. do \
  25. { \
  26. HRESULT res = a; \
  27. if (FAILED(res)) \
  28. { \
  29. CLog::LogF(LOGERROR, "failed executing " #a " at line {} with error {:x}", __LINE__, res); \
  30. } \
  31. } while (0);
CProcessorHD::CProcessorHD()
{
  // Register with the windowing layer so this object receives its callbacks
  // (presumably device lost/reset notifications — see UnInit/Close usage).
  DX::Windowing()->Register(this);
}
CProcessorHD::~CProcessorHD()
{
  // Stop receiving windowing callbacks first, then release all resources.
  DX::Windowing()->Unregister(this);
  UnInit();
}
void CProcessorHD::UnInit()
{
  // Thread-safe teardown. NOTE(review): Close() locks m_section again, so
  // the section must be re-entrant — TODO confirm against CSingleLock docs.
  CSingleLock lock(m_section);
  Close();
}
void CProcessorHD::Close()
{
  // Drop all held D3D11 video interfaces (ComPtr = nullptr releases each).
  CSingleLock lock(m_section);
  m_pEnumerator = nullptr;
  m_pVideoProcessor = nullptr;
  m_pVideoContext = nullptr;
  m_pVideoDevice = nullptr;
}
  54. bool CProcessorHD::PreInit() const
  55. {
  56. ComPtr<ID3D11VideoDevice> pVideoDevice;
  57. ComPtr<ID3D11VideoProcessorEnumerator> pEnumerator;
  58. ComPtr<ID3D11Device> pD3DDevice = DX::DeviceResources::Get()->GetD3DDevice();
  59. if (FAILED(pD3DDevice.As(&pVideoDevice)))
  60. {
  61. CLog::LogF(LOGWARNING, "failed to get video device.");
  62. return false;
  63. }
  64. D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc1 = {};
  65. desc1.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
  66. desc1.InputWidth = 640;
  67. desc1.InputHeight = 480;
  68. desc1.OutputWidth = 640;
  69. desc1.OutputHeight = 480;
  70. desc1.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
  71. // try to create video enum
  72. if (FAILED(pVideoDevice->CreateVideoProcessorEnumerator(&desc1, &pEnumerator)))
  73. {
  74. CLog::LogF(LOGWARNING, "failed to create Video Enumerator.");
  75. return false;
  76. }
  77. return true;
  78. }
  79. bool CProcessorHD::InitProcessor()
  80. {
  81. m_pVideoDevice = nullptr;
  82. m_pVideoContext = nullptr;
  83. m_pEnumerator = nullptr;
  84. ComPtr<ID3D11DeviceContext1> pD3DDeviceContext = DX::DeviceResources::Get()->GetImmediateContext();
  85. ComPtr<ID3D11Device> pD3DDevice = DX::DeviceResources::Get()->GetD3DDevice();
  86. if (FAILED(pD3DDeviceContext.As(&m_pVideoContext)))
  87. {
  88. CLog::LogF(LOGWARNING, "video context initialization is failed.");
  89. return false;
  90. }
  91. if (FAILED(pD3DDevice.As(&m_pVideoDevice)))
  92. {
  93. CLog::LogF(LOGWARNING, "video device initialization is failed.");
  94. return false;
  95. }
  96. CLog::LogF(LOGDEBUG, "initing video enumerator with params: {}x{}.", m_width, m_height);
  97. D3D11_VIDEO_PROCESSOR_CONTENT_DESC contentDesc = {};
  98. contentDesc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
  99. contentDesc.InputWidth = m_width;
  100. contentDesc.InputHeight = m_height;
  101. contentDesc.OutputWidth = m_width;
  102. contentDesc.OutputHeight = m_height;
  103. contentDesc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
  104. if (FAILED(m_pVideoDevice->CreateVideoProcessorEnumerator(&contentDesc, m_pEnumerator.ReleaseAndGetAddressOf())))
  105. {
  106. CLog::LogF(LOGWARNING, "failed to init video enumerator with params: {}x{}.", m_width,
  107. m_height);
  108. return false;
  109. }
  110. if (FAILED(m_pEnumerator->GetVideoProcessorCaps(&m_vcaps)))
  111. {
  112. CLog::LogF(LOGWARNING, "failed to get processor caps.");
  113. return false;
  114. }
  115. CLog::LogF(LOGDEBUG, "video processor has {} rate conversion.", m_vcaps.RateConversionCapsCount);
  116. CLog::LogF(LOGDEBUG, "video processor has %#x feature caps.", m_vcaps.FeatureCaps);
  117. CLog::LogF(LOGDEBUG, "video processor has %#x device caps.", m_vcaps.DeviceCaps);
  118. CLog::LogF(LOGDEBUG, "video processor has %#x input format caps.", m_vcaps.InputFormatCaps);
  119. CLog::LogF(LOGDEBUG, "video processor has {} max input streams.", m_vcaps.MaxInputStreams);
  120. CLog::LogF(LOGDEBUG, "video processor has {} max stream states.", m_vcaps.MaxStreamStates);
  121. if (m_vcaps.FeatureCaps & D3D11_VIDEO_PROCESSOR_FEATURE_CAPS_METADATA_HDR10)
  122. CLog::LogF(LOGDEBUG, "video processor supports HDR10.");
  123. if (0 != (m_vcaps.FeatureCaps & D3D11_VIDEO_PROCESSOR_FEATURE_CAPS_LEGACY))
  124. CLog::LogF(LOGWARNING, "the video driver does not support full video processing capabilities.");
  125. m_max_back_refs = 0;
  126. m_max_fwd_refs = 0;
  127. m_procIndex = 0;
  128. unsigned maxProcCaps = 0;
  129. // try to find best processor
  130. for (unsigned int i = 0; i < m_vcaps.RateConversionCapsCount; i++)
  131. {
  132. D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS convCaps;
  133. LOGIFERROR(m_pEnumerator->GetVideoProcessorRateConversionCaps(i, &convCaps))
  134. // check only deinterlace caps
  135. if ((convCaps.ProcessorCaps & 15) > maxProcCaps)
  136. {
  137. m_procIndex = i;
  138. maxProcCaps = convCaps.ProcessorCaps & 15;
  139. }
  140. }
  141. CLog::LogF(LOGDEBUG, "selected video processor index: {}.", m_procIndex);
  142. LOGIFERROR(m_pEnumerator->GetVideoProcessorRateConversionCaps(m_procIndex, &m_rateCaps))
  143. m_max_fwd_refs = std::min(m_rateCaps.FutureFrames, 2u);
  144. m_max_back_refs = std::min(m_rateCaps.PastFrames, 4u);
  145. CLog::LogF(LOGINFO, "supported deinterlace methods: blend:{}, bob:{}, adaptive:{}, mocomp:{}.",
  146. (m_rateCaps.ProcessorCaps & 0x1) != 0 ? "yes" : "no", // BLEND
  147. (m_rateCaps.ProcessorCaps & 0x2) != 0 ? "yes" : "no", // BOB
  148. (m_rateCaps.ProcessorCaps & 0x4) != 0 ? "yes" : "no", // ADAPTIVE
  149. (m_rateCaps.ProcessorCaps & 0x8) != 0 ? "yes" : "no" // MOTION_COMPENSATION
  150. );
  151. CLog::LogF(LOGDEBUG, "selected video processor allows {} future frames and {} past frames.",
  152. m_rateCaps.FutureFrames, m_rateCaps.PastFrames);
  153. //m_size = m_max_back_refs + 1 + m_max_fwd_refs; // refs + 1 display
  154. // Get the image filtering capabilities.
  155. for (size_t i = 0; i < NUM_FILTERS; i++)
  156. {
  157. if (m_vcaps.FilterCaps & (1 << i))
  158. {
  159. m_Filters[i].Range = {};
  160. m_Filters[i].bSupported = SUCCEEDED(m_pEnumerator->GetVideoProcessorFilterRange(PROCAMP_FILTERS[i], &m_Filters[i].Range));
  161. if (m_Filters[i].bSupported)
  162. {
  163. CLog::LogF(LOGDEBUG, "filter {} has following params - max: {}, min: {}, default: {}",
  164. PROCAMP_FILTERS[i], m_Filters[i].Range.Maximum, m_Filters[i].Range.Minimum,
  165. m_Filters[i].Range.Default);
  166. }
  167. }
  168. else
  169. {
  170. CLog::LogF(LOGDEBUG, "filter {} not supported by processor.", PROCAMP_FILTERS[i]);
  171. m_Filters[i].bSupported = false;
  172. }
  173. }
  174. ComPtr<ID3D11VideoProcessorEnumerator1> pEnumerator1;
  175. if (SUCCEEDED(m_pEnumerator.As(&pEnumerator1)))
  176. {
  177. DXGI_FORMAT format = DX::Windowing()->GetBackBuffer().GetFormat();
  178. BOOL supported = 0;
  179. HRESULT hr;
  180. // Check if HLG color space conversion is supported by driver
  181. hr = pEnumerator1->CheckVideoProcessorFormatConversion(
  182. DXGI_FORMAT_P010, DXGI_COLOR_SPACE_YCBCR_STUDIO_GHLG_TOPLEFT_P2020, format,
  183. DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709, &supported);
  184. m_bSupportHLG = SUCCEEDED(hr) && !!supported;
  185. // Check if HDR10 RGB limited range output is supported by driver
  186. hr = pEnumerator1->CheckVideoProcessorFormatConversion(
  187. DXGI_FORMAT_P010, DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020, format,
  188. DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020, &supported);
  189. m_bSupportHDR10Limited = SUCCEEDED(hr) && !!supported;
  190. }
  191. CLog::LogF(LOGDEBUG, "HLG color space conversion is{}supported.", m_bSupportHLG ? " " : " NOT ");
  192. CLog::LogF(LOGDEBUG, "HDR10 RGB limited range output is{}supported.",
  193. m_bSupportHDR10Limited ? " " : " NOT ");
  194. return true;
  195. }
  196. bool CProcessorHD::IsFormatSupported(DXGI_FORMAT format, D3D11_VIDEO_PROCESSOR_FORMAT_SUPPORT support) const
  197. {
  198. UINT uiFlags;
  199. if (S_OK == m_pEnumerator->CheckVideoProcessorFormat(format, &uiFlags))
  200. {
  201. if (uiFlags & support)
  202. return true;
  203. }
  204. CLog::LogF(LOGERROR, "unsupported format {} for {}.", format, support);
  205. return false;
  206. }
  207. bool CProcessorHD::CheckFormats() const
  208. {
  209. // check default output format (as render target)
  210. return IsFormatSupported(DX::Windowing()->GetBackBuffer().GetFormat(), D3D11_VIDEO_PROCESSOR_FORMAT_SUPPORT_OUTPUT);
  211. }
  212. bool CProcessorHD::Open(UINT width, UINT height)
  213. {
  214. Close();
  215. CSingleLock lock(m_section);
  216. m_width = width;
  217. m_height = height;
  218. if (!InitProcessor())
  219. return false;
  220. if (!CheckFormats())
  221. return false;
  222. return OpenProcessor();
  223. }
  224. bool CProcessorHD::ReInit()
  225. {
  226. CSingleLock lock(m_section);
  227. Close();
  228. if (!InitProcessor())
  229. return false;
  230. if (!CheckFormats())
  231. return false;
  232. return true;
  233. }
  234. bool CProcessorHD::OpenProcessor()
  235. {
  236. CSingleLock lock(m_section);
  237. // restore the device if it was lost
  238. if (!m_pEnumerator && !ReInit())
  239. return false;
  240. CLog::LogF(LOGDEBUG, "creating processor.");
  241. // create processor
  242. HRESULT hr = m_pVideoDevice->CreateVideoProcessor(m_pEnumerator.Get(), m_procIndex, m_pVideoProcessor.ReleaseAndGetAddressOf());
  243. if (FAILED(hr))
  244. {
  245. CLog::LogF(LOGDEBUG, "failed creating video processor with error {:x}.", hr);
  246. return false;
  247. }
  248. // Output background color (black)
  249. D3D11_VIDEO_COLOR color;
  250. color.YCbCr = { 0.0625f, 0.5f, 0.5f, 1.0f }; // black color
  251. m_pVideoContext->VideoProcessorSetOutputBackgroundColor(m_pVideoProcessor.Get(), TRUE, &color);
  252. return true;
  253. }
  254. void CProcessorHD::ApplyFilter(D3D11_VIDEO_PROCESSOR_FILTER filter, int value, int min, int max, int def) const
  255. {
  256. if (filter >= static_cast<D3D11_VIDEO_PROCESSOR_FILTER>(NUM_FILTERS))
  257. return;
  258. // Unsupported filter. Ignore.
  259. if (!m_Filters[filter].bSupported)
  260. return;
  261. D3D11_VIDEO_PROCESSOR_FILTER_RANGE range = m_Filters[filter].Range;
  262. int val;
  263. if(value > def)
  264. val = range.Default + (range.Maximum - range.Default) * (value - def) / (max - def);
  265. else if(value < def)
  266. val = range.Default + (range.Minimum - range.Default) * (value - def) / (min - def);
  267. else
  268. val = range.Default;
  269. m_pVideoContext->VideoProcessorSetStreamFilter(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, filter, val != range.Default, val);
  270. }
  271. ID3D11VideoProcessorInputView* CProcessorHD::GetInputView(CRenderBuffer* view) const
  272. {
  273. ComPtr<ID3D11VideoProcessorInputView> inputView;
  274. D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC vpivd = {0, D3D11_VPIV_DIMENSION_TEXTURE2D, {0, 0}};
  275. ComPtr<ID3D11Resource> resource;
  276. unsigned arrayIdx = 0;
  277. HRESULT hr = view->GetResource(resource.GetAddressOf(), &arrayIdx);
  278. if (SUCCEEDED(hr))
  279. {
  280. vpivd.Texture2D.ArraySlice = arrayIdx;
  281. hr = m_pVideoDevice->CreateVideoProcessorInputView(resource.Get(), m_pEnumerator.Get(), &vpivd, inputView.GetAddressOf());
  282. }
  283. if (FAILED(hr) || hr == S_FALSE)
  284. CLog::LogF(LOGERROR, "cannot create processor input view.");
  285. return inputView.Detach();
  286. }
DXGI_COLOR_SPACE_TYPE CProcessorHD::GetDXGIColorSpaceSource(CRenderBuffer* view, bool supportHDR, bool supportHLG)
{
  // Maps the buffer's avcodec color metadata (matrix, primaries, transfer,
  // range) to a DXGI input color space for the video processor. Branch
  // order is significant: RGB first, then BT.2020 (UHD/HDR), then SD
  // primaries, and finally the HD (BT.709) fallbacks.

  // RGB
  if (view->color_space == AVCOL_SPC_RGB)
  {
    if (!view->full_range)
    {
      // limited-range RGB
      if (view->primaries == AVCOL_PRI_BT2020)
      {
        // PQ only when the HDR path is actually usable
        if (view->color_transfer == AVCOL_TRC_SMPTEST2084 && supportHDR)
          return DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020;
        return DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P2020;
      }
      return DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709;
    }
    // full-range RGB
    if (view->primaries == AVCOL_PRI_BT2020)
    {
      if (view->color_transfer == AVCOL_TRC_SMPTEST2084)
        return DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020;
      return DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020 == DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P2020 ? DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P2020 : DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P2020;
    }
    // linear/log transfers map to the scRGB-style G10 space
    if (view->color_transfer == AVCOL_TRC_LINEAR ||
        view->color_transfer == AVCOL_TRC_LOG)
      return DXGI_COLOR_SPACE_RGB_FULL_G10_NONE_P709;
    return DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709;
  }
  // UHDTV
  if (view->primaries == AVCOL_PRI_BT2020)
  {
    // Windows 10 doesn't support HLG passthrough, always is used PQ for HDR passthrough
    if ((view->color_transfer == AVCOL_TRC_SMPTEST2084 ||
         view->color_transfer == AVCOL_TRC_ARIB_STD_B67) && supportHDR) // is HDR display ON
      return DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020;
    // HLG transfer can be used for HLG source in SDR display if is supported
    if (view->color_transfer == AVCOL_TRC_ARIB_STD_B67 && supportHLG) // driver supports HLG
      return DXGI_COLOR_SPACE_YCBCR_STUDIO_GHLG_TOPLEFT_P2020;
    if (view->full_range)
      return DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P2020;
    return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P2020;
  }
  // SDTV
  if (view->primaries == AVCOL_PRI_BT470BG ||
      view->primaries == AVCOL_PRI_SMPTE170M)
  {
    if (view->full_range)
      return DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P601;
    return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P601;
  }
  // HDTV (fallback when no earlier branch matched)
  if (view->full_range)
  {
    // full-range SMPTE 170M transfer gets the cross-matrix X601 space
    if (view->color_transfer == AVCOL_TRC_SMPTE170M)
      return DXGI_COLOR_SPACE_YCBCR_FULL_G22_NONE_P709_X601;
    return DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709;
  }
  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709;
}
  344. DXGI_COLOR_SPACE_TYPE CProcessorHD::GetDXGIColorSpaceTarget(CRenderBuffer* view, bool supportHDR)
  345. {
  346. DXGI_COLOR_SPACE_TYPE color;
  347. color = DX::Windowing()->UseLimitedColor() ? DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709
  348. : DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709;
  349. if (!DX::Windowing()->IsHDROutput())
  350. return color;
  351. // HDR10 or HLG
  352. if (view->primaries == AVCOL_PRI_BT2020 && (view->color_transfer == AVCOL_TRC_SMPTE2084 ||
  353. view->color_transfer == AVCOL_TRC_ARIB_STD_B67))
  354. {
  355. if (supportHDR)
  356. {
  357. color = DX::Windowing()->UseLimitedColor() ? DXGI_COLOR_SPACE_RGB_STUDIO_G2084_NONE_P2020
  358. : DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020;
  359. }
  360. else
  361. {
  362. color = DX::Windowing()->UseLimitedColor() ? DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P2020
  363. : DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P2020;
  364. }
  365. }
  366. return color;
  367. }
bool CProcessorHD::Render(CRect src, CRect dst, ID3D11Resource* target, CRenderBuffer** views, DWORD flags, UINT frameIdx, UINT rotation, float contrast, float brightness)
{
  // Issues one VideoProcessorBlt: converts/deinterlaces the current frame
  // into 'target'. Layout of 'views' (as used by the loops below):
  // indices 0..1 hold future frames, index 2 the current frame, and
  // indices 3..7 past frames.
  CSingleLock lock(m_section);

  // restore processor if it was lost
  if (!m_pVideoProcessor && !OpenProcessor())
    return false;

  // the current frame is mandatory
  if (!views[2])
    return false;

  RECT sourceRECT = {static_cast<LONG>(src.x1), static_cast<LONG>(src.y1),
                     static_cast<LONG>(src.x2), static_cast<LONG>(src.y2)};
  RECT dstRECT = {static_cast<LONG>(dst.x1), static_cast<LONG>(dst.y1), static_cast<LONG>(dst.x2),
                  static_cast<LONG>(dst.y2)};

  D3D11_VIDEO_FRAME_FORMAT dxvaFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;

  // count the reference frames the renderer actually provided
  unsigned int providedPast = 0;
  for (int i = 3; i < 8; i++)
  {
    if (views[i])
      providedPast++;
  }
  unsigned int providedFuture = 0;
  for (int i = 1; i >= 0; i--)
  {
    if (views[i])
      providedFuture++;
  }
  // never pass more references than the selected rate-conversion caps allow
  const int futureFrames = std::min(providedFuture, m_rateCaps.FutureFrames);
  const int pastFrames = std::min(providedPast, m_rateCaps.PastFrames);

  std::vector<ID3D11VideoProcessorInputView*> pastViews(pastFrames, nullptr);
  std::vector<ID3D11VideoProcessorInputView*> futureViews(futureFrames, nullptr);

  D3D11_VIDEO_PROCESSOR_STREAM stream_data = {};
  stream_data.Enable = TRUE;
  stream_data.PastFrames = pastFrames;
  stream_data.FutureFrames = futureFrames;
  stream_data.ppPastSurfaces = pastViews.data();
  stream_data.ppFutureSurfaces = futureViews.data();

  // keeps every created input view alive until the Blt below has been issued
  std::vector<ComPtr<ID3D11VideoProcessorInputView>> all_views;
  const int start = 2 - futureFrames;
  const int end = 2 + pastFrames;
  int count = 0;

  for (int i = start; i <= end; i++)
  {
    if (!views[i])
      continue;

    ComPtr<ID3D11VideoProcessorInputView> view;
    view.Attach(GetInputView(views[i]));

    if (i > 2)
    {
      // frames order should be { ?, T-3, T-2, T-1 }
      pastViews[2 + pastFrames - i] = view.Get();
    }
    else if (i == 2)
    {
      // current frame
      stream_data.pInputSurface = view.Get();
    }
    else if (i < 2)
    {
      // frames order should be { T+1, T+2, T+3, .. }
      futureViews[1 - i] = view.Get();
    }
    if (view)
    {
      count++;
      all_views.push_back(view);
    }
  }

  // abort if any input view failed to create; the declared Past/FutureFrames
  // counts would otherwise not match the arrays handed to the driver
  if (count != pastFrames + futureFrames + 1)
  {
    CLog::LogF(LOGERROR, "incomplete views set.");
    return false;
  }

  // derive the DXVA frame format from the render flags
  // (FIELD1 with BOT maps to top-field-first: second field of a TFF frame)
  if (flags & RENDER_FLAG_FIELD0 && flags & RENDER_FLAG_TOP)
    dxvaFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
  else if (flags & RENDER_FLAG_FIELD1 && flags & RENDER_FLAG_BOT)
    dxvaFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
  if (flags & RENDER_FLAG_FIELD0 && flags & RENDER_FLAG_BOT)
    dxvaFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
  if (flags & RENDER_FLAG_FIELD1 && flags & RENDER_FLAG_TOP)
    dxvaFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;

  bool frameProgressive = dxvaFrameFormat == D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;

  // Progressive or Interlaced video at normal rate.
  stream_data.InputFrameOrField = frameIdx;
  // second field of an interlaced frame selects output index 1
  stream_data.OutputIndex = flags & RENDER_FLAG_FIELD1 && !frameProgressive ? 1 : 0;

  // input format
  m_pVideoContext->VideoProcessorSetStreamFrameFormat(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, dxvaFrameFormat);
  // Source rect
  m_pVideoContext->VideoProcessorSetStreamSourceRect(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, TRUE, &sourceRECT);
  // Stream dest rect
  m_pVideoContext->VideoProcessorSetStreamDestRect(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, TRUE, &dstRECT);
  // Output rect
  m_pVideoContext->VideoProcessorSetOutputTargetRect(m_pVideoProcessor.Get(), TRUE, &dstRECT);

  ComPtr<ID3D11VideoContext1> videoCtx1;
  if (SUCCEEDED(m_pVideoContext.As(&videoCtx1)))
  {
    // modern path: explicit DXGI color spaces via ID3D11VideoContext1
    bool supportHDR = DX::Windowing()->IsHDROutput() &&
                      (m_bSupportHDR10Limited || !DX::Windowing()->UseLimitedColor());
    const DXGI_COLOR_SPACE_TYPE sourceColor =
        GetDXGIColorSpaceSource(views[2], supportHDR, m_bSupportHLG);
    const DXGI_COLOR_SPACE_TYPE targetColor = GetDXGIColorSpaceTarget(views[2], supportHDR);

    videoCtx1->VideoProcessorSetStreamColorSpace1(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX,
                                                  sourceColor);
    videoCtx1->VideoProcessorSetOutputColorSpace1(m_pVideoProcessor.Get(), targetColor);
    // makes target available for processing in shaders
    videoCtx1->VideoProcessorSetOutputShaderUsage(m_pVideoProcessor.Get(), 1);
  }
  else
  {
    // legacy path: describe color spaces with the older bitfield struct
    // input colorspace
    bool isBT601 = views[2]->color_space == AVCOL_SPC_BT470BG || views[2]->color_space == AVCOL_SPC_SMPTE170M;
    // clang-format off
    D3D11_VIDEO_PROCESSOR_COLOR_SPACE colorSpace
    {
      0u,                             // 0 - Playback, 1 - Processing
      views[2]->full_range ? 0u : 1u, // 0 - Full (0-255), 1 - Limited (16-235) (RGB)
      isBT601 ? 1u : 0u,              // 0 - BT.601, 1 - BT.709
      0u,                             // 0 - Conventional YCbCr, 1 - xvYCC
      views[2]->full_range ? 2u : 1u  // 0 - driver defaults, 2 - Full range [0-255], 1 - Studio range [16-235] (YUV)
    };
    // clang-format on
    m_pVideoContext->VideoProcessorSetStreamColorSpace(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, &colorSpace);
    // Output color space
    // don't apply any color range conversion, this will be fixed at later stage.
    colorSpace.Usage = 0;         // 0 - playback, 1 - video processing
    colorSpace.RGB_Range = DX::Windowing()->UseLimitedColor() ? 1 : 0; // 0 - 0-255, 1 - 16-235
    colorSpace.YCbCr_Matrix = 1;  // 0 - BT.601, 1 = BT.709
    colorSpace.YCbCr_xvYCC = 1;   // 0 - Conventional YCbCr, 1 - xvYCC
    colorSpace.Nominal_Range = 0; // 2 - 0-255, 1 = 16-235, 0 - undefined
    m_pVideoContext->VideoProcessorSetOutputColorSpace(m_pVideoProcessor.Get(), &colorSpace);
  }

  // brightness
  ApplyFilter(D3D11_VIDEO_PROCESSOR_FILTER_BRIGHTNESS, static_cast<int>(brightness), 0, 100, 50);
  // contrast
  ApplyFilter(D3D11_VIDEO_PROCESSOR_FILTER_CONTRAST, static_cast<int>(contrast), 0, 100, 50);
  // unused filters: pinned to their neutral midpoint
  ApplyFilter(D3D11_VIDEO_PROCESSOR_FILTER_HUE, 50, 0, 100, 50);
  ApplyFilter(D3D11_VIDEO_PROCESSOR_FILTER_SATURATION, 50, 0, 100, 50);
  // Rotation (degrees mapped to the D3D11 90-degree-step enum)
  m_pVideoContext->VideoProcessorSetStreamRotation(m_pVideoProcessor.Get(), DEFAULT_STREAM_INDEX, rotation != 0,
                                                   static_cast<D3D11_VIDEO_PROCESSOR_ROTATION>(rotation / 90));

  // create output view for surface.
  D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC OutputViewDesc = { D3D11_VPOV_DIMENSION_TEXTURE2D, { 0 }};
  ComPtr<ID3D11VideoProcessorOutputView> pOutputView;
  HRESULT hr = m_pVideoDevice->CreateVideoProcessorOutputView(target, m_pEnumerator.Get(), &OutputViewDesc, &pOutputView);
  if (S_OK != hr)
    CLog::LogF(FAILED(hr) ? LOGERROR : LOGWARNING,
               "video device returns result '{:x}' while creating processor output view.", hr);

  if (SUCCEEDED(hr))
  {
    hr = m_pVideoContext->VideoProcessorBlt(m_pVideoProcessor.Get(), pOutputView.Get(), frameIdx, 1, &stream_data);
    if (S_OK != hr)
    {
      CLog::LogF(FAILED(hr) ? LOGERROR : LOGWARNING,
                 "video device returns result '{:x}' while VideoProcessorBlt execution.", hr);
    }
  }

  // S_FALSE (logged above as a warning) still counts as success here
  return !FAILED(hr);
}