
/H264Dec/SoftAVC.cpp

http://github.com/mbebenita/Broadway
C++ | 560 lines | 455 code | 84 blank | 21 comment | 72 complexity | a5c56cde4ded20ea1ab3ba97ba1e7010 MD5
Possible License(s): BSD-3-Clause
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVC"
#include <utils/Log.h>

#include "SoftAVC.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/IOMX.h>

namespace android {

static const CodecProfileLevel kProfileLevels[] = {
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel21 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel22 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel3  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel31 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel32 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel4  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel42 },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel5  },
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel51 },
};

template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}
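
// The component starts out advertising 320x240 and a YUV420 planar output
// picture of width * height * 3 / 2 bytes; the real stream dimensions are
// adopted once the SPS/PPS headers have been decoded.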
SoftAVC::SoftAVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mHandle(NULL),
      mInputBufferCount(0),
      mWidth(320),
      mHeight(240),
      mPictureSize(mWidth * mHeight * 3 / 2),
      mCropLeft(0),
      mCropTop(0),
      mCropWidth(mWidth),
      mCropHeight(mHeight),
      mFirstPicture(NULL),
      mFirstPictureId(-1),
      mPicId(0),
      mHeadersDecoded(false),
      mEOSStatus(INPUT_DATA_AVAILABLE),
      mOutputPortSettingsChange(NONE) {
    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}

SoftAVC::~SoftAVC() {
    H264SwDecRelease(mHandle);
    mHandle = NULL;

    while (mPicToHeaderMap.size() != 0) {
        OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.editValueAt(0);
        mPicToHeaderMap.removeItemsAt(0);
        delete header;
        header = NULL;
    }

    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());

    delete[] mFirstPicture;
}
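
// Port 0 (input) accepts the compressed AVC elementary stream; port 1 (output)
// delivers raw YUV420 planar frames, with nBufferSize set to
// width * height * 3 / 2.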
void SoftAVC::initPorts() {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    def.nPortIndex = kInputPortIndex;
    def.eDir = OMX_DirInput;
    def.nBufferCountMin = kNumInputBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = 8192;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 1;

    def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_AVC);
    def.format.video.pNativeRender = NULL;
    def.format.video.nFrameWidth = mWidth;
    def.format.video.nFrameHeight = mHeight;
    def.format.video.nStride = def.format.video.nFrameWidth;
    def.format.video.nSliceHeight = def.format.video.nFrameHeight;
    def.format.video.nBitrate = 0;
    def.format.video.xFramerate = 0;
    def.format.video.bFlagErrorConcealment = OMX_FALSE;
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
    def.format.video.pNativeWindow = NULL;

    addPort(def);

    def.nPortIndex = kOutputPortIndex;
    def.eDir = OMX_DirOutput;
    def.nBufferCountMin = kNumOutputBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 2;

    def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
    def.format.video.pNativeRender = NULL;
    def.format.video.nFrameWidth = mWidth;
    def.format.video.nFrameHeight = mHeight;
    def.format.video.nStride = def.format.video.nFrameWidth;
    def.format.video.nSliceHeight = def.format.video.nFrameHeight;
    def.format.video.nBitrate = 0;
    def.format.video.xFramerate = 0;
    def.format.video.bFlagErrorConcealment = OMX_FALSE;
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
    def.format.video.pNativeWindow = NULL;

    def.nBufferSize =
        (def.format.video.nFrameWidth * def.format.video.nFrameHeight * 3) / 2;

    addPort(def);
}

status_t SoftAVC::initDecoder() {
    // Force decoder to output buffers in display order.
    if (H264SwDecInit(&mHandle, 0) == H264SWDEC_OK) {
        return OK;
    }
    return UNKNOWN_ERROR;
}

OMX_ERRORTYPE SoftAVC::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > kOutputPortIndex) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex != 0) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == kInputPortIndex) {
                formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
                formatParams->xFramerate = 0;
            } else {
                CHECK(formatParams->nPortIndex == kOutputPortIndex);

                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
                formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
                formatParams->xFramerate = 0;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoProfileLevelQuerySupported:
        {
            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
                    (OMX_VIDEO_PARAM_PROFILELEVELTYPE *) params;

            if (profileLevel->nPortIndex != kInputPortIndex) {
                LOGE("Invalid port index: %ld", profileLevel->nPortIndex);
                return OMX_ErrorUnsupportedIndex;
            }

            size_t index = profileLevel->nProfileIndex;
            size_t nProfileLevels =
                    sizeof(kProfileLevels) / sizeof(kProfileLevels[0]);
            if (index >= nProfileLevels) {
                return OMX_ErrorNoMore;
            }

            profileLevel->eProfile = kProfileLevels[index].mProfile;
            profileLevel->eLevel = kProfileLevels[index].mLevel;
            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalGetParameter(index, params);
    }
}

OMX_ERRORTYPE SoftAVC::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamStandardComponentRole:
        {
            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
                (const OMX_PARAM_COMPONENTROLETYPE *)params;

            if (strncmp((const char *)roleParams->cRole,
                        "video_decoder.avc",
                        OMX_MAX_STRINGNAME_SIZE - 1)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > kOutputPortIndex) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex != 0) {
                return OMX_ErrorNoMore;
            }

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalSetParameter(index, params);
    }
}

OMX_ERRORTYPE SoftAVC::getConfig(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexConfigCommonOutputCrop:
        {
            OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)params;

            if (rectParams->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            rectParams->nLeft = mCropLeft;
            rectParams->nTop = mCropTop;
            rectParams->nWidth = mCropWidth;
            rectParams->nHeight = mCropHeight;

            return OMX_ErrorNone;
        }

        default:
            return OMX_ErrorUnsupportedIndex;
    }
}
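
// Main work loop: consume queued input buffers, feed them to the H264SwDec
// library, and copy any ready pictures into output buffers. Decoding stops
// early while an output port reconfiguration (resolution or crop change) is
// outstanding, and switches to drainAllOutputBuffers() once input EOS is seen.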
void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
    if (mOutputPortSettingsChange != NONE) {
        return;
    }

    if (mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
        return;
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    H264SwDecRet ret = H264SWDEC_PIC_RDY;
    status_t err = OK;
    bool portSettingsChanged = false;

    while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
            && outQueue.size() == kNumOutputBuffers) {

        if (mEOSStatus == INPUT_EOS_SEEN) {
            drainAllOutputBuffers();
            return;
        }

        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        ++mPicId;
        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
            inQueue.erase(inQueue.begin());
            inInfo->mOwnedByUs = false;
            notifyEmptyBufferDone(inHeader);
            mEOSStatus = INPUT_EOS_SEEN;
            continue;
        }

        OMX_BUFFERHEADERTYPE *header = new OMX_BUFFERHEADERTYPE;
        memset(header, 0, sizeof(OMX_BUFFERHEADERTYPE));
        header->nTimeStamp = inHeader->nTimeStamp;
        header->nFlags = inHeader->nFlags;
        mPicToHeaderMap.add(mPicId, header);
        inQueue.erase(inQueue.begin());

        H264SwDecInput inPicture;
        H264SwDecOutput outPicture;
        memset(&inPicture, 0, sizeof(inPicture));
        inPicture.dataLen = inHeader->nFilledLen;
        inPicture.pStream = inHeader->pBuffer + inHeader->nOffset;
        inPicture.picId = mPicId;
        inPicture.intraConcealmentMethod = 1;
        H264SwDecPicture decodedPicture;

        while (inPicture.dataLen > 0) {
            ret = H264SwDecDecode(mHandle, &inPicture, &outPicture);
            if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY ||
                ret == H264SWDEC_PIC_RDY_BUFF_NOT_EMPTY) {
                inPicture.dataLen -= (u32)(outPicture.pStrmCurrPos - inPicture.pStream);
                inPicture.pStream = outPicture.pStrmCurrPos;
                if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY) {
                    mHeadersDecoded = true;
                    H264SwDecInfo decoderInfo;
                    CHECK(H264SwDecGetInfo(mHandle, &decoderInfo) == H264SWDEC_OK);

                    if (handlePortSettingChangeEvent(&decoderInfo)) {
                        portSettingsChanged = true;
                    }

                    if (decoderInfo.croppingFlag &&
                        handleCropRectEvent(&decoderInfo.cropParams)) {
                        portSettingsChanged = true;
                    }
                }
            } else {
                if (portSettingsChanged) {
                    if (H264SwDecNextPicture(mHandle, &decodedPicture, 0)
                            == H264SWDEC_PIC_RDY) {

                        // Save this output buffer; otherwise, it will be
                        // lost during dynamic port reconfiguration because
                        // OpenMAX client will delete _all_ output buffers
                        // in the process.
                        saveFirstOutputBuffer(
                                decodedPicture.picId,
                                (uint8_t *)decodedPicture.pOutputPicture);
                    }
                }
                inPicture.dataLen = 0;
                if (ret < 0) {
                    LOGE("Decoder failed: %d", ret);
                    err = ERROR_MALFORMED;
                }
            }
        }
        inInfo->mOwnedByUs = false;
        notifyEmptyBufferDone(inHeader);

        if (portSettingsChanged) {
            portSettingsChanged = false;
            return;
        }

        if (mFirstPicture && !outQueue.empty()) {
            drainOneOutputBuffer(mFirstPictureId, mFirstPicture);
            delete[] mFirstPicture;
            mFirstPicture = NULL;
            mFirstPictureId = -1;
        }

        while (!outQueue.empty() &&
                mHeadersDecoded &&
                H264SwDecNextPicture(mHandle, &decodedPicture, 0)
                    == H264SWDEC_PIC_RDY) {

            int32_t picId = decodedPicture.picId;
            uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
            drainOneOutputBuffer(picId, data);
        }

        if (err != OK) {
            notify(OMX_EventError, OMX_ErrorUndefined, err, NULL);
        }
    }
}
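
// Returns true, and kicks off an output port reconfiguration, when the decoded
// stream dimensions differ from the ones currently advertised on the ports.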
bool SoftAVC::handlePortSettingChangeEvent(const H264SwDecInfo *info) {
    if (mWidth != info->picWidth || mHeight != info->picHeight) {
        mWidth  = info->picWidth;
        mHeight = info->picHeight;
        mPictureSize = mWidth * mHeight * 3 / 2;
        mCropWidth = mWidth;
        mCropHeight = mHeight;
        updatePortDefinitions();
        notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
        mOutputPortSettingsChange = AWAITING_DISABLED;
        return true;
    }

    return false;
}

bool SoftAVC::handleCropRectEvent(const CropParams *crop) {
    if (mCropLeft != crop->cropLeftOffset ||
        mCropTop != crop->cropTopOffset ||
        mCropWidth != crop->cropOutWidth ||
        mCropHeight != crop->cropOutHeight) {
        mCropLeft = crop->cropLeftOffset;
        mCropTop = crop->cropTopOffset;
        mCropWidth = crop->cropOutWidth;
        mCropHeight = crop->cropOutHeight;

        notify(OMX_EventPortSettingsChanged, 1,
                OMX_IndexConfigCommonOutputCrop, NULL);

        return true;
    }

    return false;
}

void SoftAVC::saveFirstOutputBuffer(int32_t picId, uint8_t *data) {
    CHECK(mFirstPicture == NULL);
    mFirstPictureId = picId;

    mFirstPicture = new uint8_t[mPictureSize];
    memcpy(mFirstPicture, data, mPictureSize);
}

void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    BufferInfo *outInfo = *outQueue.begin();
    outQueue.erase(outQueue.begin());
    OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
    OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
    outHeader->nTimeStamp = header->nTimeStamp;
    outHeader->nFlags = header->nFlags;
    outHeader->nFilledLen = mPictureSize;
    memcpy(outHeader->pBuffer + outHeader->nOffset,
            data, mPictureSize);
    mPicToHeaderMap.removeItem(picId);
    delete header;
    outInfo->mOwnedByUs = false;
    notifyFillBufferDone(outHeader);
}
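
// Called once input EOS has been seen: flush the decoder's remaining pictures
// into the queued output buffers, then emit an empty buffer flagged with
// OMX_BUFFERFLAG_EOS and mark the output as fully flushed.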
bool SoftAVC::drainAllOutputBuffers() {
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    H264SwDecPicture decodedPicture;

    while (!outQueue.empty()) {
        BufferInfo *outInfo = *outQueue.begin();
        outQueue.erase(outQueue.begin());
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
        if (mHeadersDecoded &&
            H264SWDEC_PIC_RDY ==
                H264SwDecNextPicture(mHandle, &decodedPicture, 1 /* flush */)) {

            int32_t picId = decodedPicture.picId;
            CHECK(mPicToHeaderMap.indexOfKey(picId) >= 0);

            memcpy(outHeader->pBuffer + outHeader->nOffset,
                    decodedPicture.pOutputPicture,
                    mPictureSize);

            OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
            outHeader->nTimeStamp = header->nTimeStamp;
            outHeader->nFlags = header->nFlags;
            outHeader->nFilledLen = mPictureSize;
            mPicToHeaderMap.removeItem(picId);
            delete header;
        } else {
            outHeader->nTimeStamp = 0;
            outHeader->nFilledLen = 0;
            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
            mEOSStatus = OUTPUT_FRAMES_FLUSHED;
        }

        outInfo->mOwnedByUs = false;
        notifyFillBufferDone(outHeader);
    }

    return true;
}

void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
    if (portIndex == kInputPortIndex) {
        mEOSStatus = INPUT_DATA_AVAILABLE;
    }
}

void SoftAVC::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
    switch (mOutputPortSettingsChange) {
        case NONE:
            break;

        case AWAITING_DISABLED:
        {
            CHECK(!enabled);
            mOutputPortSettingsChange = AWAITING_ENABLED;
            break;
        }

        default:
        {
            CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
            CHECK(enabled);
            mOutputPortSettingsChange = NONE;
            break;
        }
    }
}
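
// Propagate the new stream dimensions to both port definitions and resize the
// output buffer requirement accordingly.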
void SoftAVC::updatePortDefinitions() {
    OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef;
    def->format.video.nFrameWidth = mWidth;
    def->format.video.nFrameHeight = mHeight;
    def->format.video.nStride = def->format.video.nFrameWidth;
    def->format.video.nSliceHeight = def->format.video.nFrameHeight;

    def = &editPortInfo(1)->mDef;
    def->format.video.nFrameWidth = mWidth;
    def->format.video.nFrameHeight = mHeight;
    def->format.video.nStride = def->format.video.nFrameWidth;
    def->format.video.nSliceHeight = def->format.video.nFrameHeight;

    def->nBufferSize =
        (def->format.video.nFrameWidth
            * def->format.video.nFrameHeight * 3) / 2;
}

}  // namespace android

android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVC(name, callbacks, appData, component);
}
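
// Usage sketch (illustrative, not part of the original file): an OMX plugin
// would typically resolve createSoftOMXComponent() from this library and call
// it with the registered component name and the client's OMX callbacks, e.g.:
//
//     OMX_COMPONENTTYPE *component = NULL;
//     android::SoftOMXComponent *codec = createSoftOMXComponent(
//             "OMX.google.h264.decoder", &callbacks, appData, &component);
//
// The component name, `callbacks`, and `appData` above are assumed values for
// illustration only; the actual component registration lives outside this file.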