/content/media/wmf/WMFReader.cpp

https://bitbucket.org/iamer/mozilla-central · C++ · 779 lines · 585 code · 130 blank · 64 comment · 49 complexity · b9621356d5e683352c56363b694251d5 MD5 · raw file

  1. /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
  2. /* vim:set ts=2 sw=2 sts=2 et cindent: */
  3. /* This Source Code Form is subject to the terms of the Mozilla Public
  4. * License, v. 2.0. If a copy of the MPL was not distributed with this
  5. * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
  6. #include "WMFReader.h"
  7. #include "WMFDecoder.h"
  8. #include "WMFUtils.h"
  9. #include "WMFByteStream.h"
  10. #include "WMFSourceReaderCallback.h"
  11. #ifndef MOZ_SAMPLE_TYPE_FLOAT32
  12. #error We expect 32bit float audio samples on desktop for the Windows Media Foundation media backend.
  13. #endif
  14. #include "MediaDecoder.h"
  15. #include "VideoUtils.h"
namespace mozilla {

#ifdef PR_LOGGING
// Shared media-decoder log module; defined elsewhere in the media code.
extern PRLogModuleInfo* gMediaDecoderLog;
#define LOG(...) PR_LOG(gMediaDecoderLog, PR_LOG_DEBUG, (__VA_ARGS__))
#else
// Logging compiled out in non-logging builds.
#define LOG(...)
#endif

// Uncomment to enable verbose per-sample logging.
//#define LOG_SAMPLE_DECODE 1
// Reader that decodes media via Windows Media Foundation. Constructed on the
// main thread; stream parameters (rate, channels, geometry) remain zeroed
// until ReadMetadata() configures the decoders.
WMFReader::WMFReader(AbstractMediaDecoder* aDecoder)
  : MediaDecoderReader(aDecoder),
    mSourceReader(nullptr),
    mAudioChannels(0),
    mAudioBytesPerSample(0),
    mAudioRate(0),
    mVideoHeight(0),
    mVideoStride(0),
    mHasAudio(false),
    mHasVideo(false),
    mCanSeek(false)
{
  NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");
  // Leak-checking bookkeeping; paired with MOZ_COUNT_DTOR in the destructor.
  MOZ_COUNT_CTOR(WMFReader);
}
// Destructor; main thread only. Tears down the byte stream, then shuts down
// Media Foundation (balancing the wmf::MFStartup() performed in Init()).
WMFReader::~WMFReader()
{
  NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");
  // Note: We must shutdown the byte stream before calling MFShutdown, else we
  // get assertion failures when unlocking the byte stream's work queue.
  if (mByteStream) {
    DebugOnly<nsresult> rv = mByteStream->Shutdown();
    NS_ASSERTION(NS_SUCCEEDED(rv), "Failed to shutdown WMFByteStream");
  }
  DebugOnly<HRESULT> hr = wmf::MFShutdown();
  NS_ASSERTION(SUCCEEDED(hr), "MFShutdown failed");
  MOZ_COUNT_DTOR(WMFReader);
}
  53. void
  54. WMFReader::OnDecodeThreadStart()
  55. {
  56. NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  57. HRESULT hr = CoInitializeEx(0, COINIT_MULTITHREADED);
  58. NS_ENSURE_TRUE_VOID(SUCCEEDED(hr));
  59. }
  60. void
  61. WMFReader::OnDecodeThreadFinish()
  62. {
  63. NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  64. CoUninitialize();
  65. }
// One-time initialization, main thread only: loads the WMF DLLs, starts up
// Media Foundation, and creates the source-reader callback and byte stream
// that ReadMetadata() will use. aCloneDonor is unused.
nsresult
WMFReader::Init(MediaDecoderReader* aCloneDonor)
{
  NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");

  nsresult rv = WMFDecoder::LoadDLLs();
  NS_ENSURE_SUCCESS(rv, rv);

  if (FAILED(wmf::MFStartup())) {
    NS_WARNING("Failed to initialize Windows Media Foundation");
    return NS_ERROR_FAILURE;
  }

  mSourceReaderCallback = new WMFSourceReaderCallback();

  // Must be created on main thread.
  mByteStream = new WMFByteStream(mDecoder->GetResource(), mSourceReaderCallback);
  return mByteStream->Init();
}
  81. bool
  82. WMFReader::HasAudio()
  83. {
  84. NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  85. return mHasAudio;
  86. }
  87. bool
  88. WMFReader::HasVideo()
  89. {
  90. NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  91. return mHasVideo;
  92. }
// Configures stream aStreamIndex of aReader to decode into the uncompressed
// format aOutputSubType, provided the stream's native subtype appears in the
// aNumAllowedInSubTypes-element whitelist aAllowedInSubTypes.
// Returns E_FAIL if the input subtype is not whitelisted, or a failure code
// if the decoder cannot produce the requested output type.
static HRESULT
ConfigureSourceReaderStream(IMFSourceReader *aReader,
                            const DWORD aStreamIndex,
                            const GUID& aOutputSubType,
                            const GUID* aAllowedInSubTypes,
                            const uint32_t aNumAllowedInSubTypes)
{
  NS_ENSURE_TRUE(aReader, E_POINTER);
  NS_ENSURE_TRUE(aAllowedInSubTypes, E_POINTER);

  RefPtr<IMFMediaType> nativeType;
  RefPtr<IMFMediaType> type;
  HRESULT hr;

  // Find the native format of the stream.
  hr = aReader->GetNativeMediaType(aStreamIndex, 0, byRef(nativeType));
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  // Get the native output subtype of the stream. This denotes the uncompressed
  // type.
  GUID subType;
  hr = nativeType->GetGUID(MF_MT_SUBTYPE, &subType);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  // Ensure the input type of the media is in the allowed formats list.
  bool isSubTypeAllowed = false;
  for (uint32_t i = 0; i < aNumAllowedInSubTypes; i++) {
    if (aAllowedInSubTypes[i] == subType) {
      isSubTypeAllowed = true;
      break;
    }
  }
  if (!isSubTypeAllowed) {
    nsCString name = GetGUIDName(subType);
    LOG("ConfigureSourceReaderStream subType=%s is not allowed to be decoded", name.get());
    return E_FAIL;
  }

  // Find the major type (audio vs. video) so the output type matches it.
  GUID majorType;
  hr = nativeType->GetGUID(MF_MT_MAJOR_TYPE, &majorType);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  // Define the output type.
  hr = wmf::MFCreateMediaType(byRef(type));
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  hr = type->SetGUID(MF_MT_MAJOR_TYPE, majorType);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  hr = type->SetGUID(MF_MT_SUBTYPE, aOutputSubType);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  // Set the uncompressed format. This can fail if the decoder can't produce
  // that type.
  return aReader->SetCurrentMediaType(aStreamIndex, NULL, type);
}
  141. // Returns the duration of the resource, in microseconds.
  142. HRESULT
  143. GetSourceReaderDuration(IMFSourceReader *aReader,
  144. int64_t& aOutDuration)
  145. {
  146. AutoPropVar var;
  147. HRESULT hr = aReader->GetPresentationAttribute(MF_SOURCE_READER_MEDIASOURCE,
  148. MF_PD_DURATION,
  149. &var);
  150. NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  151. // WMF stores duration in hundred nanosecond units.
  152. int64_t duration_hns = 0;
  153. hr = wmf::PropVariantToInt64(var, &duration_hns);
  154. NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  155. aOutDuration = HNsToUsecs(duration_hns);
  156. return S_OK;
  157. }
  158. HRESULT
  159. GetSourceReaderCanSeek(IMFSourceReader* aReader, bool& aOutCanSeek)
  160. {
  161. NS_ENSURE_TRUE(aReader, E_FAIL);
  162. HRESULT hr;
  163. AutoPropVar var;
  164. hr = aReader->GetPresentationAttribute(MF_SOURCE_READER_MEDIASOURCE,
  165. MF_SOURCE_READER_MEDIASOURCE_CHARACTERISTICS,
  166. &var);
  167. NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  168. ULONG flags = 0;
  169. hr = wmf::PropVariantToUInt32(var, &flags);
  170. NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  171. aOutCanSeek = ((flags & MFMEDIASOURCE_CAN_SEEK) == MFMEDIASOURCE_CAN_SEEK);
  172. return S_OK;
  173. }
// Retrieves the image stride (bytes per row) of aType into aOutStride.
// Prefers the MF_MT_DEFAULT_STRIDE attribute; if absent, computes the stride
// from the subtype's FOURCC and the frame width.
static HRESULT
GetDefaultStride(IMFMediaType *aType, uint32_t* aOutStride)
{
  // Try to get the default stride from the media type.
  HRESULT hr = aType->GetUINT32(MF_MT_DEFAULT_STRIDE, aOutStride);
  if (SUCCEEDED(hr)) {
    return S_OK;
  }

  // Stride attribute not set, calculate it.
  GUID subtype = GUID_NULL;
  uint32_t width = 0;
  uint32_t height = 0;

  hr = aType->GetGUID(MF_MT_SUBTYPE, &subtype);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  hr = MFGetAttributeSize(aType, MF_MT_FRAME_SIZE, &width, &height);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  // subtype.Data1 holds the format's FOURCC. NOTE(review): the API writes a
  // LONG (which may be negative for bottom-up frames) through a uint32_t* —
  // assumes callers only see top-down, positive strides; confirm.
  hr = wmf::MFGetStrideForBitmapInfoHeader(subtype.Data1, width, (LONG*)(aOutStride));
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  return hr;
}
  194. static int32_t
  195. MFOffsetToInt32(const MFOffset& aOffset)
  196. {
  197. return int32_t(aOffset.value + (aOffset.fract / 65536.0f));
  198. }
// Gets the sub-region of the video frame that should be displayed.
// Tries, in order: the pan-and-scan aperture (if pan-and-scan is enabled),
// the minimum display aperture, the geometric aperture, and finally the
// whole frame.
// See: http://msdn.microsoft.com/en-us/library/windows/desktop/bb530115(v=vs.85).aspx
static HRESULT
GetPictureRegion(IMFMediaType* aMediaType, nsIntRect& aOutPictureRegion)
{
  // Determine if "pan and scan" is enabled for this media. If it is, we
  // only display a region of the video frame, not the entire frame.
  BOOL panScan = MFGetAttributeUINT32(aMediaType, MF_MT_PAN_SCAN_ENABLED, FALSE);

  // If pan and scan mode is enabled. Try to get the display region.
  HRESULT hr = E_FAIL;
  MFVideoArea videoArea;
  memset(&videoArea, 0, sizeof(MFVideoArea));
  if (panScan) {
    hr = aMediaType->GetBlob(MF_MT_PAN_SCAN_APERTURE,
                             (UINT8*)&videoArea,
                             sizeof(MFVideoArea),
                             NULL);
  }

  // If we're not in pan-and-scan mode, or the pan-and-scan region is not set,
  // check for a minimum display aperture.
  if (!panScan || hr == MF_E_ATTRIBUTENOTFOUND) {
    hr = aMediaType->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
                             (UINT8*)&videoArea,
                             sizeof(MFVideoArea),
                             NULL);
  }

  if (hr == MF_E_ATTRIBUTENOTFOUND) {
    // Minimum display aperture is not set, for "backward compatibility with
    // some components", check for a geometric aperture.
    hr = aMediaType->GetBlob(MF_MT_GEOMETRIC_APERTURE,
                             (UINT8*)&videoArea,
                             sizeof(MFVideoArea),
                             NULL);
  }

  if (SUCCEEDED(hr)) {
    // The media specified a picture region, return it.
    aOutPictureRegion = nsIntRect(MFOffsetToInt32(videoArea.OffsetX),
                                  MFOffsetToInt32(videoArea.OffsetY),
                                  videoArea.Area.cx,
                                  videoArea.Area.cy);
    return S_OK;
  }

  // No picture region defined, fall back to using the entire video area.
  UINT32 width = 0, height = 0;
  hr = MFGetAttributeSize(aMediaType, MF_MT_FRAME_SIZE, &width, &height);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  aOutPictureRegion = nsIntRect(0, 0, width, height);
  return S_OK;
}
  248. HRESULT
  249. WMFReader::ConfigureVideoFrameGeometry(IMFMediaType* aMediaType)
  250. {
  251. NS_ENSURE_TRUE(aMediaType != nullptr, E_POINTER);
  252. nsIntRect pictureRegion;
  253. HRESULT hr = GetPictureRegion(aMediaType, pictureRegion);
  254. NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  255. UINT32 width = 0, height = 0;
  256. hr = MFGetAttributeSize(aMediaType, MF_MT_FRAME_SIZE, &width, &height);
  257. NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  258. uint32_t aspectNum = 0, aspectDenom = 0;
  259. hr = MFGetAttributeRatio(aMediaType,
  260. MF_MT_PIXEL_ASPECT_RATIO,
  261. &aspectNum,
  262. &aspectDenom);
  263. NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  264. // Calculate and validate the picture region and frame dimensions after
  265. // scaling by the pixel aspect ratio.
  266. nsIntSize frameSize = nsIntSize(width, height);
  267. nsIntSize displaySize = nsIntSize(pictureRegion.width, pictureRegion.height);
  268. ScaleDisplayByAspectRatio(displaySize, float(aspectNum) / float(aspectDenom));
  269. if (!VideoInfo::ValidateVideoRegion(frameSize, pictureRegion, displaySize)) {
  270. // Video track's frame sizes will overflow. Ignore the video track.
  271. return E_FAIL;
  272. }
  273. // Success! Save state.
  274. mInfo.mDisplay = displaySize;
  275. GetDefaultStride(aMediaType, &mVideoStride);
  276. mVideoHeight = height;
  277. mPictureRegion = pictureRegion;
  278. LOG("WMFReader frame geometry frame=(%u,%u) stride=%u picture=(%d, %d, %d, %d) display=(%d,%d) PAR=%d:%d",
  279. width, height,
  280. mVideoStride,
  281. mPictureRegion.x, mPictureRegion.y, mPictureRegion.width, mPictureRegion.height,
  282. displaySize.width, displaySize.height,
  283. aspectNum, aspectDenom);
  284. return S_OK;
  285. }
  286. HRESULT
  287. WMFReader::ConfigureVideoDecoder()
  288. {
  289. NS_ASSERTION(mSourceReader, "Must have a SourceReader before configuring decoders!");
  290. // Determine if we have video.
  291. if (!mSourceReader ||
  292. !SourceReaderHasStream(mSourceReader, MF_SOURCE_READER_FIRST_VIDEO_STREAM)) {
  293. // No stream, no error.
  294. return S_OK;
  295. }
  296. static const GUID MP4VideoTypes[] = {
  297. MFVideoFormat_H264
  298. };
  299. HRESULT hr = ConfigureSourceReaderStream(mSourceReader,
  300. MF_SOURCE_READER_FIRST_VIDEO_STREAM,
  301. MFVideoFormat_YV12,
  302. MP4VideoTypes,
  303. NS_ARRAY_LENGTH(MP4VideoTypes));
  304. if (FAILED(hr)) {
  305. LOG("Failed to configured video output for MFVideoFormat_YV12");
  306. return hr;
  307. }
  308. RefPtr<IMFMediaType> mediaType;
  309. hr = mSourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
  310. byRef(mediaType));
  311. if (FAILED(hr)) {
  312. NS_WARNING("Failed to get configured video media type");
  313. return hr;
  314. }
  315. if (FAILED(ConfigureVideoFrameGeometry(mediaType))) {
  316. NS_WARNING("Failed configured video frame dimensions");
  317. return hr;
  318. }
  319. LOG("Successfully configured video stream");
  320. mHasVideo = mInfo.mHasVideo = true;
  321. return S_OK;
  322. }
// Configures the first audio stream (AAC or MP3 input) to decode to float
// PCM, and records the stream's sample rate, channel count, and bytes per
// sample. Returns S_OK (with mHasAudio unchanged) when there is no audio
// stream.
HRESULT
WMFReader::ConfigureAudioDecoder()
{
  NS_ASSERTION(mSourceReader, "Must have a SourceReader before configuring decoders!");

  if (!mSourceReader ||
      !SourceReaderHasStream(mSourceReader, MF_SOURCE_READER_FIRST_AUDIO_STREAM)) {
    // No stream, no error.
    return S_OK;
  }

  static const GUID MP4AudioTypes[] = {
    MFAudioFormat_AAC,
    MFAudioFormat_MP3
  };
  HRESULT hr = ConfigureSourceReaderStream(mSourceReader,
                                           MF_SOURCE_READER_FIRST_AUDIO_STREAM,
                                           MFAudioFormat_Float,
                                           MP4AudioTypes,
                                           NS_ARRAY_LENGTH(MP4AudioTypes));
  if (FAILED(hr)) {
    NS_WARNING("Failed to configure WMF Audio decoder for PCM output");
    return hr;
  }

  RefPtr<IMFMediaType> mediaType;
  hr = mSourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM,
                                          byRef(mediaType));
  if (FAILED(hr)) {
    NS_WARNING("Failed to get configured audio media type");
    return hr;
  }

  mAudioRate = MFGetAttributeUINT32(mediaType, MF_MT_AUDIO_SAMPLES_PER_SECOND, 0);
  mAudioChannels = MFGetAttributeUINT32(mediaType, MF_MT_AUDIO_NUM_CHANNELS, 0);
  // Bits-per-sample defaults to 16 when the attribute is absent; stored here
  // converted to bytes.
  mAudioBytesPerSample = MFGetAttributeUINT32(mediaType, MF_MT_AUDIO_BITS_PER_SAMPLE, 16) / 8;

  mInfo.mAudioChannels = mAudioChannels;
  mInfo.mAudioRate = mAudioRate;
  mHasAudio = mInfo.mHasAudio = true;

  LOG("Successfully configured audio stream. rate=%u channels=%u bitsPerSample=%u",
      mAudioRate, mAudioChannels, mAudioBytesPerSample);

  return S_OK;
}
// Creates the WMF source reader over mByteStream (with the asynchronous
// callback registered via MF_SOURCE_READER_ASYNC_CALLBACK) and configures
// the audio and video decoders. Fails unless at least one stream configured
// successfully. On success fills *aInfo, sets *aTags to nullptr, and records
// the duration and seekability on the decoder.
nsresult
WMFReader::ReadMetadata(VideoInfo* aInfo,
                        MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  LOG("WMFReader::ReadMetadata()");
  HRESULT hr;

  RefPtr<IMFAttributes> attr;
  hr = wmf::MFCreateAttributes(byRef(attr), 1);
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  hr = attr->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, mSourceReaderCallback);
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  hr = wmf::MFCreateSourceReaderFromByteStream(mByteStream, attr, byRef(mSourceReader));
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  hr = ConfigureVideoDecoder();
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  hr = ConfigureAudioDecoder();
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);

  // Abort if both video and audio failed to initialize.
  NS_ENSURE_TRUE(mInfo.mHasAudio || mInfo.mHasVideo, NS_ERROR_FAILURE);

  int64_t duration = 0;
  if (SUCCEEDED(GetSourceReaderDuration(mSourceReader, duration))) {
    // The decoder's duration is accessed under its monitor.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mDecoder->SetMediaDuration(duration);
  }

  hr = GetSourceReaderCanSeek(mSourceReader, mCanSeek);
  NS_ASSERTION(SUCCEEDED(hr), "Can't determine if resource is seekable");

  *aInfo = mInfo;
  *aTags = nullptr;
  // aTags can be retrieved using techniques like used here:
  // http://blogs.msdn.com/b/mf/archive/2010/01/12/mfmediapropdump.aspx

  return NS_OK;
}
  395. static int64_t
  396. GetSampleDuration(IMFSample* aSample)
  397. {
  398. int64_t duration = 0;
  399. aSample->GetSampleDuration(&duration);
  400. return HNsToUsecs(duration);
  401. }
// Reads and decodes one audio sample and pushes the resulting PCM frames
// onto mAudioQueue. Returns false (finishing the audio queue) on error or
// end of stream; returns true when decoding should continue, including the
// "no sample yet" case.
bool
WMFReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  HRESULT hr;
  hr = mSourceReader->ReadSample(MF_SOURCE_READER_FIRST_AUDIO_STREAM,
                                 0, // control flags
                                 0, // read stream index
                                 nullptr,
                                 nullptr,
                                 nullptr);

  if (FAILED(hr)) {
    LOG("WMFReader::DecodeAudioData() ReadSample failed with hr=0x%x", hr);
    // End the stream.
    mAudioQueue.Finish();
    return false;
  }

  // ReadSample() is asynchronous (see the callback set in ReadMetadata());
  // block here until the callback delivers the decoded sample and flags.
  DWORD flags = 0;
  LONGLONG timestampHns = 0;
  RefPtr<IMFSample> sample;
  hr = mSourceReaderCallback->Wait(&flags, &timestampHns, byRef(sample));
  if (FAILED(hr) ||
      (flags & MF_SOURCE_READERF_ERROR) ||
      (flags & MF_SOURCE_READERF_ENDOFSTREAM) ||
      (flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)) {
    LOG("WMFReader::DecodeAudioData() ReadSample failed with hr=0x%x flags=0x%x",
        hr, flags);
    // End the stream.
    mAudioQueue.Finish();
    return false;
  }

  if (!sample) {
    // Not enough data? Try again...
    return true;
  }

  RefPtr<IMFMediaBuffer> buffer;
  hr = sample->ConvertToContiguousBuffer(byRef(buffer));
  NS_ENSURE_TRUE(SUCCEEDED(hr), false);

  BYTE* data = nullptr; // Note: *data will be owned by the IMFMediaBuffer, we don't need to free it.
  DWORD maxLength = 0, currentLength = 0;
  hr = buffer->Lock(&data, &maxLength, &currentLength);
  NS_ENSURE_TRUE(SUCCEEDED(hr), false);

  uint32_t numFrames = currentLength / mAudioBytesPerSample / mAudioChannels;
  NS_ASSERTION(sizeof(AudioDataValue) == mAudioBytesPerSample, "Size calculation is wrong");
  // Copy the interleaved PCM out of the locked buffer so the buffer can be
  // unlocked before the AudioData is queued.
  nsAutoArrayPtr<AudioDataValue> pcmSamples(new AudioDataValue[numFrames * mAudioChannels]);
  memcpy(pcmSamples.get(), data, currentLength);
  buffer->Unlock();

  // WMF timestamps are in hundred-nanosecond units; convert to microseconds.
  int64_t offset = mDecoder->GetResource()->Tell();
  int64_t timestamp = HNsToUsecs(timestampHns);
  int64_t duration = GetSampleDuration(sample);
  mAudioQueue.Push(new AudioData(offset,
                                 timestamp,
                                 duration,
                                 numFrames,
                                 pcmSamples.forget(),
                                 mAudioChannels));

#ifdef LOG_SAMPLE_DECODE
  LOG("Decoded audio sample! timestamp=%lld duration=%lld currentLength=%u",
      timestamp, duration, currentLength);
#endif

  return true;
}
// Reads and decodes one video frame and pushes it onto mVideoQueue.
// Frames with a timestamp earlier than aTimeThreshold are dropped.
// Returns false (finishing the video queue) on fatal error or end of
// stream; returns true when decoding should continue. aKeyframeSkip is
// accepted but not consulted in this implementation.
bool
WMFReader::DecodeVideoFrame(bool &aKeyframeSkip,
                            int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  // Record number of frames decoded and parsed. Automatically update the
  // stats counters using the AutoNotifyDecoded stack-based class.
  uint32_t parsed = 0, decoded = 0;
  AbstractMediaDecoder::AutoNotifyDecoded autoNotify(mDecoder, parsed, decoded);

  HRESULT hr;

  hr = mSourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                                 0, // control flags
                                 0, // read stream index
                                 nullptr,
                                 nullptr,
                                 nullptr);
  if (FAILED(hr)) {
    LOG("WMFReader::DecodeVideoData() ReadSample failed with hr=0x%x", hr);
    // End the stream.
    mVideoQueue.Finish();
    return false;
  }

  // ReadSample() is asynchronous; block until the callback delivers the
  // decoded sample (or a status flag).
  DWORD flags = 0;
  LONGLONG timestampHns = 0;
  RefPtr<IMFSample> sample;
  hr = mSourceReaderCallback->Wait(&flags, &timestampHns, byRef(sample));

  if (flags & MF_SOURCE_READERF_ERROR) {
    NS_WARNING("WMFReader: Catastrophic failure reading video sample");
    // Future ReadSample() calls will fail, so give up and report end of stream.
    mVideoQueue.Finish();
    return false;
  }

  if (FAILED(hr)) {
    // Unknown failure, ask caller to try again?
    return true;
  }

  if (!sample) {
    if ((flags & MF_SOURCE_READERF_ENDOFSTREAM)) {
      LOG("WMFReader; Null sample after video decode, at end of stream");
      // End the stream.
      mVideoQueue.Finish();
      return false;
    }
    LOG("WMFReader; Null sample after video decode. Maybe insufficient data...");
    return true;
  }

  if ((flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)) {
    LOG("WMFReader: Video media type changed!");
    // The decoder changed its output format; re-read the frame geometry
    // before interpreting this sample's data.
    RefPtr<IMFMediaType> mediaType;
    hr = mSourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                                            byRef(mediaType));
    if (FAILED(hr) ||
        FAILED(ConfigureVideoFrameGeometry(mediaType))) {
      NS_WARNING("Failed to reconfigure video media type");
      mVideoQueue.Finish();
      return false;
    }
  }

  int64_t timestamp = HNsToUsecs(timestampHns);
  if (timestamp < aTimeThreshold) {
    // Frame is earlier than the caller's threshold; drop it without decoding
    // the buffer contents.
    return true;
  }
  int64_t offset = mDecoder->GetResource()->Tell();
  int64_t duration = GetSampleDuration(sample);

  RefPtr<IMFMediaBuffer> buffer;

  // Must convert to contiguous buffer to use IMD2DBuffer interface.
  hr = sample->ConvertToContiguousBuffer(byRef(buffer));
  if (FAILED(hr)) {
    NS_WARNING("ConvertToContiguousBuffer() failed!");
    return true;
  }

  // Try and use the IMF2DBuffer interface if available, otherwise fallback
  // to the IMFMediaBuffer interface. Apparently IMF2DBuffer is more efficient,
  // but only some systems (Windows 8?) support it.
  BYTE* data = nullptr;
  LONG stride = 0;
  RefPtr<IMF2DBuffer> twoDBuffer;
  hr = buffer->QueryInterface(static_cast<IMF2DBuffer**>(byRef(twoDBuffer)));
  if (SUCCEEDED(hr)) {
    hr = twoDBuffer->Lock2D(&data, &stride);
    NS_ENSURE_TRUE(SUCCEEDED(hr), false);
  } else {
    hr = buffer->Lock(&data, NULL, NULL);
    NS_ENSURE_TRUE(SUCCEEDED(hr), false);
    // Plain IMFMediaBuffer reports no stride; use the stride recorded when
    // the video stream's geometry was configured.
    stride = mVideoStride;
  }

  // YV12, planar format: [YYYY....][VVVV....][UUUU....]
  // i.e., Y, then V, then U.
  VideoData::YCbCrBuffer b;

  // Y (Y') plane
  b.mPlanes[0].mData = data;
  b.mPlanes[0].mStride = stride;
  b.mPlanes[0].mHeight = mVideoHeight;
  b.mPlanes[0].mWidth = stride;
  b.mPlanes[0].mOffset = 0;
  b.mPlanes[0].mSkip = 0;

  // The V and U planes are stored 16-row-aligned, so we need to add padding
  // to the row heights to ensure the Y'CbCr planes are referenced properly.
  uint32_t padding = 0;
  if (mVideoHeight % 16 != 0) {
    padding = 16 - (mVideoHeight % 16);
  }
  uint32_t y_size = stride * (mVideoHeight + padding);
  uint32_t v_size = stride * (mVideoHeight + padding) / 4;

  uint32_t halfStride = (stride + 1) / 2;
  uint32_t halfHeight = (mVideoHeight + 1) / 2;

  // U plane (Cb) follows the V plane, per the YV12 layout above.
  b.mPlanes[1].mData = data + y_size + v_size;
  b.mPlanes[1].mStride = halfStride;
  b.mPlanes[1].mHeight = halfHeight;
  b.mPlanes[1].mWidth = halfStride;
  b.mPlanes[1].mOffset = 0;
  b.mPlanes[1].mSkip = 0;

  // V plane (Cr) immediately follows the Y plane.
  b.mPlanes[2].mData = data + y_size;
  b.mPlanes[2].mStride = halfStride;
  b.mPlanes[2].mHeight = halfHeight;
  b.mPlanes[2].mWidth = halfStride;
  b.mPlanes[2].mOffset = 0;
  b.mPlanes[2].mSkip = 0;

  VideoData *v = VideoData::Create(mInfo,
                                   mDecoder->GetImageContainer(),
                                   offset,
                                   timestamp,
                                   timestamp + duration,
                                   b,
                                   false,
                                   -1,
                                   mPictureRegion);
  // NOTE(review): the buffer is unlocked immediately after Create(), which
  // assumes VideoData::Create() copies the frame data — confirm.
  if (twoDBuffer) {
    twoDBuffer->Unlock2D();
  } else {
    buffer->Unlock();
  }

  if (!v) {
    NS_WARNING("Failed to create VideoData");
    return false;
  }
  parsed++;
  decoded++;
  mVideoQueue.Push(v);

#ifdef LOG_SAMPLE_DECODE
  LOG("Decoded video sample timestamp=%lld duration=%lld stride=%d height=%u flags=%u",
      timestamp, duration, stride, mVideoHeight, flags);
#endif

  if ((flags & MF_SOURCE_READERF_ENDOFSTREAM)) {
    // End of stream.
    mVideoQueue.Finish();
    LOG("End of video stream");
    return false;
  }

  return true;
}
// Seeks to aTargetUs (microseconds). Fails when the resource is not
// seekable (as determined in ReadMetadata()). Resets pending decoded
// samples, repositions the source reader, then decodes forward to the
// target time. aStartTime/aEndTime/aCurrentTime are unused here.
nsresult
WMFReader::Seek(int64_t aTargetUs,
                int64_t aStartTime,
                int64_t aEndTime,
                int64_t aCurrentTime)
{
  LOG("WMFReader::Seek() %lld", aTargetUs);
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  if (!mCanSeek) {
    return NS_ERROR_FAILURE;
  }
  nsresult rv = ResetDecode();
  NS_ENSURE_SUCCESS(rv, rv);
  AutoPropVar var;
  // Source reader positions are expressed in hundred-nanosecond units.
  HRESULT hr = InitPropVariantFromInt64(UsecsToHNs(aTargetUs), &var);
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
  hr = mSourceReader->SetCurrentPosition(GUID_NULL, var);
  NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
  return DecodeToTarget(aTargetUs);
}
  637. nsresult
  638. WMFReader::GetBuffered(nsTimeRanges* aBuffered, int64_t aStartTime)
  639. {
  640. MediaResource* stream = mDecoder->GetResource();
  641. int64_t durationUs = 0;
  642. {
  643. ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  644. durationUs = mDecoder->GetMediaDuration();
  645. }
  646. GetEstimatedBufferedTimeRanges(stream, durationUs, aBuffered);
  647. return NS_OK;
  648. }
  649. } // namespace mozilla