
/core/Decoder.cpp

https://bitbucket.org/jwalraven/fobs-mac-64x
C++ | 1623 lines | 1321 code | 150 blank | 152 comment | 307 complexity
Possible License(s): LGPL-2.1, GPL-2.0, GPL-3.0, LGPL-3.0, CC-BY-SA-3.0
  1. /******************************************************************************
  2. * FOBS C++ wrapper code
  3. * Copyright (c) 2004 Omnividea Multimedia S.L
  4. * Coded by José San Pedro Wandelmer
  5. *
  6. * This file is part of FOBS.
  7. *
  8. * FOBS is free software; you can redistribute it and/or modify
  9. * it under the terms of the GNU Lesser General Public License as
  10. * published by the Free Software Foundation; either version 2.1
  11. * of the License, or (at your option) any later version.
  12. *
  13. * FOBS is distributed in the hope that it will be useful,
  14. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  15. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16. * GNU Lesser General Public License for more details.
  17. *
  18. * You should have received a copy of the GNU Lesser General Public
  19. * License along with FOBS; if not, write to the Free Software
  20. * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  21. ******************************************************************************/
  22. extern "C"
  23. {
  24. #include "libavformat/avformat.h"
  25. #include "libavdevice/avdevice.h"
  26. #include "libswscale/swscale.h"
  27. #include "libavcodec/opt.h"
  28. #include "libavutil/fifo.h"
  29. #include "libavutil/avstring.h"
  30. }
  31. #include "Decoder.h"
  32. #include <iostream>
  33. #include "PacketBuffer.h"
  34. using namespace std;
  35. using namespace omnividea::fobs;
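// Registration bootstrap: the _AVInit constructor runs avcodec_register_all() and
// av_register_all() exactly once (via the static __singleton below), so every Decoder
// can assume FFmpeg's codecs and container formats are already registered.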
  36. class _AVInit
  37. {
  38. public:
  39. _AVInit()
  40. {
  41. avcodec_register_all();
  42. //avdevice_register_all();
  43. av_register_all();
  44. }
  45. ~_AVInit()
  46. {
  47. //av_free_static();
  48. }
  49. };
  50. static _AVInit __singleton;
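// find_codec: resolve a codec name to its CodecID, checking that the codec exists and
// matches the requested media type; returns CODEC_ID_NONE for a NULL or unknown name,
// or for a type mismatch.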
  51. static enum CodecID find_codec(const char *name, int type, int encoder)
  52. {
  53. const char *codec_string = encoder ? "encoder" : "decoder";
  54. AVCodec *codec;
  55. if(!name)
  56. return CODEC_ID_NONE;
  57. codec = encoder ?
  58. avcodec_find_encoder_by_name(name) :
  59. avcodec_find_decoder_by_name(name);
  60. if(!codec) {
  61. return CODEC_ID_NONE;
  62. }
  63. if(codec->type != type) {
  64. return CODEC_ID_NONE;
  65. }
  66. return codec->id;
  67. }
  68. void Decoder::setAudioResampleFlag(bool flag)
  69. {
  70. this->audioResampleFlag = flag;
  71. }
  72. FrameIndex Decoder::frameIndexFromTimeStamp(TimeStamp t)
  73. {
  74. return (FrameIndex)(t / FOBS_TIMESTAMP_UNITS_D * getFrameRate());
  75. }
  76. TimeStamp Decoder::timeStampFromFrameIndex(FrameIndex f)
  77. {
  78. return (TimeStamp)((double)f / (double)getFrameRate() * FOBS_TIMESTAMP_UNITS_D);
  79. }
  80. bool Decoder::compareTimeStamps(TimeStamp t1, TimeStamp t2)
  81. {
  82. bool res = false;
  83. if(isVideoPresent()) res = ::abs(t2-t1) <= 1000.0/getFrameRate();
  84. else res = ::abs(t2-t1) <= 1000.0/getAudioSampleRate();
  85. //cout << "T1: " << t1 << " == T2: " << t2 << " : " << res << endl;
  86. return res;
  87. }
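// pts2TimeStamp/timeStamp2pts convert between a stream's native time base and FOBS
// timestamps. Example (assuming FOBS_TIMESTAMP_UNITS_D == 1000, i.e. millisecond
// timestamps, as getDurationMilliseconds/getDurationSeconds below suggest): with a
// 1/90000 time base, pts 90000 -> 90000 * 1/90000 * 1000 = 1000 ms.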
  88. TimeStamp Decoder::pts2TimeStamp(int64_t pts, AVRational *pts_timebase) {
  89. //cout << "(Pts) " << pts << "* (Base Num)" << pts_timebase->num << "/ (double)(Base Den)"<<pts_timebase->den<<"*"<<FOBS_TIMESTAMP_UNITS_D<<endl;
  90. TimeStamp t= (TimeStamp)(pts*pts_timebase->num/(double)pts_timebase->den*FOBS_TIMESTAMP_UNITS_D);
  91. //cout << "Result: " << t <<endl;
  92. return t;
  93. }
  94. int64_t Decoder::timeStamp2pts(TimeStamp ts, AVRational *pts_timebase) {
  95. return (int64_t)(ts * (double)pts_timebase->den / pts_timebase->num / FOBS_TIMESTAMP_UNITS_D);
  96. }
  97. ReturnCode Decoder::testOpen()
  98. {
  99. ReturnCode error = opened?OkCode:NotInitializedError;
  100. return error;
  101. }
  102. ReturnCode Decoder::testClose()
  103. {
  104. ReturnCode error = (!opened)?OkCode:AlreadyInitializedError;
  105. return error;
  106. }
  107. Decoder::Decoder(const char *filename)
  108. {
  109. strcpy(this->filename,filename);
  110. audioEnabledFlag = false;
  111. audioResampleFlag = false;
  112. incorrectPts = false;
  113. //derived from static members
  114. yuvPicture = new AVPicture();
  115. rgbPicture = new AVPicture();
  116. rgbaPicture = new AVPicture();
  117. decodedPicture = new AVPicture();
  118. transitionPicture = new AVPicture();
  119. transitionPictureRgb = new AVPicture();
  120. videoBuffer = new PacketBuffer();
  121. audioBuffer = new PacketBuffer();
  122. reset();
  123. }
  124. Decoder::~Decoder()
  125. {
  126. close();
  127. delete yuvPicture;
  128. delete rgbPicture;
  129. delete rgbaPicture;
  130. delete decodedPicture;
  131. delete transitionPicture;
  132. delete transitionPictureRgb;
  133. delete videoBuffer;
  134. delete audioBuffer;
  135. }
  136. void Decoder::reset()
  137. {
  138. rgbBuf = NULL;
  139. rgbaBuf = NULL;
  140. yuvBuf = NULL;
  141. yuvBytes = NULL;
  142. opened = false;
  143. img_resample_ctx = NULL;
  144. audioResampler = NULL;
  145. inputFile = NULL;
  146. inputFileFormat = NULL;
  147. transitionPictureBuf = NULL;
  148. transitionPictureBufRgb = NULL;
  149. currentYuv = currentRgb = currentRgba = 0;
  150. currentYuvFlag = false;
  151. currentRgbFlag = false;
  152. currentRgbaFlag = false;
  153. position = 0;
  154. positionAudio = 0;
  155. frameFlag = false;
  156. videoStreamIndex = -1;
  157. audioStreamIndex = -1;
  158. videoBuffer->clear();
  159. audioBuffer->clear();
  160. }
  161. ReturnCode Decoder::close()
  162. {
  163. ReturnCode error = testOpen();
  164. if(isOk(error))
  165. {
  166. av_free(rgbBuf);
  167. av_free(rgbaBuf);
  168. av_free(yuvBuf);
  169. delete []yuvBytes;
  170. if(videoStreamIndex >= 0 && inputFile->streams[videoStreamIndex] != NULL)
  171. {
  172. avcodec_close(inputFile->streams[videoStreamIndex]->codec);
  173. //av_free(inputStream.st);
  174. }
  175. if(audioStreamIndex >= 0 && inputFile->streams[audioStreamIndex] != NULL)
  176. {
  177. avcodec_close(inputFile->streams[audioStreamIndex]->codec);
  178. }
  179. av_close_input_file(inputFile);
  180. av_free(inputFileFormat);
  181. av_free(transitionPictureBuf);
  182. av_free(transitionPictureBufRgb);
  183. //if(img_resample_ctx)img_resample_close(img_resample_ctx);
  184. if(img_resample_ctx)sws_freeContext(img_resample_ctx);
  185. if(audioResampler)audio_resample_close(audioResampler);
  186. reset();
  187. }
  188. return error;
  189. }
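// _open: allocate the format context, fill AVFormatParameters with defaults, then
// av_open_input_file() + av_find_stream_info(); the first video and first audio stream
// are selected and their decoders opened with avcodec_find_decoder()/avcodec_open().
// An unsupported audio codec only disables audio; an unsupported video codec is an error.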
  190. ReturnCode Decoder::_open()
  191. {
  192. int ret;
  193. AVFormatParameters params, *ap = &params;
  194. ReturnCode error = testClose();
  195. if(isOk(error))
  196. {
  197. /* get default parameters */
  198. int audio_sample_rate = 44100;
  199. int audio_channels = 1;
  200. AVRational frame_rate = (AVRational) {0,0};
  201. int frame_width = 0;
  202. int frame_height = 0;
  203. int frame_padtop = 0;
  204. int frame_padbottom = 0;
  205. int frame_padleft = 0;
  206. int frame_padright = 0;
  207. enum PixelFormat frame_pix_fmt = PIX_FMT_NONE;
  208. int video_channel = 0;
  209. char *audio_codec_name = NULL;
  210. char *video_codec_name = NULL;
  211. inputFile = av_alloc_format_context();
  212. memset(ap, 0, sizeof(*ap));
  213. ap->prealloced_context = 1;
  214. ap->sample_rate = audio_sample_rate;
  215. ap->channels = audio_channels;
  216. ap->time_base.den = frame_rate.num;
  217. ap->time_base.num = frame_rate.den;
  218. ap->width = frame_width + frame_padleft + frame_padright;
  219. ap->height = frame_height + frame_padtop + frame_padbottom;
  220. ap->pix_fmt = frame_pix_fmt;
  221. ap->channel = video_channel;
  222. ap->video_codec_id = find_codec(video_codec_name, CODEC_TYPE_VIDEO, 0);
  223. ap->audio_codec_id = find_codec(audio_codec_name, CODEC_TYPE_AUDIO, 0);
  224. //AVFormatContext *avformat_opts = av_alloc_format_context();
  225. //set_context_opts(inputFile, avformat_opts, AV_OPT_FLAG_DECODING_PARAM);
  226. inputFile->video_codec_id = find_codec(video_codec_name , CODEC_TYPE_VIDEO , 0);
  227. inputFile->audio_codec_id = find_codec(audio_codec_name , CODEC_TYPE_AUDIO , 0);
  228. /* open the input file with generic libav function */
  229. ret = av_open_input_file(&inputFile, filename, inputFileFormat, 0, ap);
  230. if (ret < 0)
  231. {
  232. error = FileOpenError;
  233. }
  234. }
  235. if(isOk(error))
  236. {
  237. /* If not enough info to get the stream parameters, we decode the
  238. first frames to get it. (used in mpeg case for example) */
  239. ret = av_find_stream_info(inputFile);
  240. if (ret < 0) {
  241. error = FormatUnsupportedError;
  242. }
  243. }
  244. /*
  245. if(isOk(error))
  246. {
  247. dump_format(inputFile, 1, filename.c_str(), 0);
  248. }
  249. */
  250. if(isOk(error))
  251. {
  252. //eofReachedFlag = false;
  253. int discard = 1;
  254. //Get just two streams...First Video & First Audio
  255. for(int i=0; i < inputFile->nb_streams; i++)
  256. {
  257. if(inputFile->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO && videoStreamIndex == -1)
  258. {
  259. discard = 0;
  260. videoStreamIndex = i;
  261. }
  262. else if(inputFile->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO && audioStreamIndex == -1)
  263. {
  264. discard = 0;
  265. audioStreamIndex = i;
  266. }
  267. }
  268. if(discard == 1)
  269. {
  270. error = VideoStreamMissingError;
  271. }
  272. }
  273. if(isOk(error))
  274. {
  275. //test initialization of variables
  276. if(isVideoPresent() && (_getWidth()==0 || _getHeight() == 0))
  277. {
  278. error = FormatUnsupportedError;
  279. }
  280. }
  281. if(isOk(error) && isAudioPresent())
  282. {
  283. //cout << "Audio Resample Flag: "<<audioResampleFlag<<endl;
  284. if(inputFile->streams[audioStreamIndex]->codec->channels > 2 && audioResampleFlag)
  285. {
  286. inputFile->streams[audioStreamIndex]->codec->channels = 2;
  287. inputFile->streams[audioStreamIndex]->codec->request_channels=2;
  288. //audioResampler = audio_resample_init(2, inputFile->streams[audioStreamIndex]->codec->channels, getAudioSampleRate(), getAudioSampleRate());
  289. //cout << "AudioResampler will be needed: " << audioResampler << endl;
  290. }
  291. }
  292. if(isOk(error))
  293. {
  294. AVCodec *codec;
  295. if(videoStreamIndex >= 0)
  296. {
  297. codec = avcodec_find_decoder(inputFile->streams[videoStreamIndex]->codec->codec_id);
  298. if (!codec)
  299. {
  300. error = CodecUnsupportedError;
  301. }
  302. else if (avcodec_open(inputFile->streams[videoStreamIndex]->codec, codec) < 0)
  303. {
  304. error = CodecUnsupportedError;
  305. }
  306. }
  307. if(audioStreamIndex >= 0)
  308. {
  309. codec = avcodec_find_decoder(inputFile->streams[audioStreamIndex]->codec->codec_id);
  310. if (!codec)
  311. {
  312. //error = CodecUnsupportedError;
  313. audioStreamIndex = -1;
  314. cout << "Audio codec unsupported!\n";
  315. }
  316. else if (avcodec_open(inputFile->streams[audioStreamIndex]->codec, codec) < 0)
  317. {
  318. //error = CodecUnsupportedError;
  319. audioStreamIndex = -1;
  320. cout << "Audio codec unsupported!\n";
  321. }
  322. }
  323. }
  324. if(isOk(error) && isVideoPresent())
  325. {
  326. yuvBytes = new char[_getWidth()*_getHeight()];
  327. if(yuvBytes == NULL)
  328. {
  329. error = MemoryError;
  330. }
  331. }
  332. if(isOk(error))
  333. {
  334. if(firstVideoPositionFlag == false)
  335. {
  336. firstVideoPosition = 0;
  337. firstVideoFramePosition = -1;
  338. }
  339. if(firstAudioPositionFlag == false)
  340. {
  341. firstAudioPosition = 0;
  342. firstAudioFramePosition = -1;
  343. }
  344. opened = true;
  345. }
  346. return error;
  347. }
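// open: two-pass initialisation. The first _open() pass decodes forward to locate the
// first video/audio frame and, when the container reports no duration (AV_NOPTS_VALUE),
// walks the remaining packets to measure it; the file is then closed and reopened so
// decoding restarts from the beginning with the probed values retained.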
  348. ReturnCode Decoder::open()
  349. {
  350. ReturnCode error = testClose();
  351. if(isError(error)) return error;
  352. reset();
  353. firstVideoPositionFlag = false;
  354. firstAudioPositionFlag = false;
  355. error = _open();
  356. //find first video frame
  357. if(isVideoPresent())
  358. {
  359. while((isOk(error)|| error == NoFrameError) && firstVideoFramePosition == -1) error = nextFrame();
  360. }
  361. else
  362. {
  363. while((isOk(error)|| error == NoFrameError) && firstAudioFramePosition == -1) error = nextAudioFrame();
  364. }
  365. if(isOk(error))
  366. {
  367. if(inputFile->duration == (TimeStamp)AV_NOPTS_VALUE)
  368. {
  369. if(isVideoPresent())
  370. {
  371. while(placeAtNextFrame(true) == 0)
  372. {
  373. videoBuffer->deleteNext();
  374. duration = position;
  375. }
  376. if(duration == 0)
  377. {
  378. error = FormatUnsupportedError;
  379. }
  380. }
  381. else
  382. {
  383. while(placeAtNextFrame(false) == 0)
  384. {
  385. audioBuffer->deleteNext();
  386. duration = positionAudio;
  387. }
  388. if(duration == 0)
  389. {
  390. error = FormatUnsupportedError;
  391. }
  392. }
  393. }
  394. else
  395. {
  396. cout << ((double)inputFile->duration/(double)AV_TIME_BASE);
  397. duration = (TimeStamp) ((double)inputFile->duration*FOBS_TIMESTAMP_UNITS_D/(double)AV_TIME_BASE);
  398. }
  399. cout << "First Position: " << firstVideoPosition << ", " << firstVideoFramePosition << " Duration: " << duration << endl;
  400. if(isVideoPresent())cout << "Frame Rate: " << getFrameRate() << endl;
  401. }
  402. close();
  403. if(isOk(error))
  404. {
  405. return _open();
  406. }
  407. return error;
  408. }
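// Typical call order, sketched from the methods defined in this file (illustrative,
// not part of the original source):
//   Decoder dec("clip.avi");                 // file name is hypothetical
//   if(isOk(dec.open()))
//   {
//       while(isOk(dec.nextFrame()))
//       {
//           byte *rgb = dec.getRGB();        // or getLuminance()/getCr()/getCb()
//           double t = dec.getFrameTime();   // seconds since the first frame
//           // ...consume rgb and t...
//       }
//       dec.close();
//   }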
  409. double Decoder::getFirstFrameTime()
  410. {
  411. return firstVideoFramePosition / FOBS_TIMESTAMP_UNITS_D;
  412. }
  413. double Decoder::getFirstAudioSampleTime()
  414. {
  415. return firstAudioFramePosition / FOBS_TIMESTAMP_UNITS_D;
  416. }
  417. omnividea::fobs::uint Decoder::getWidth()
  418. {
  419. ReturnCode error = testOpen();
  420. if(isOk(error))
  421. {
  422. return _getWidth();
  423. }
  424. else
  425. {
  426. return 0;
  427. }
  428. }
  429. omnividea::fobs::uint Decoder::_getWidth()
  430. {
  431. return inputFile->streams[videoStreamIndex]->codec->width;
  432. }
  433. omnividea::fobs::uint Decoder::getHeight()
  434. {
  435. ReturnCode error = testOpen();
  436. if(isOk(error))
  437. {
  438. return _getHeight();
  439. }
  440. else
  441. {
  442. return 0;
  443. }
  444. }
  445. omnividea::fobs::uint Decoder::_getHeight()
  446. {
  447. return inputFile->streams[videoStreamIndex]->codec->height;
  448. }
  449. int Decoder::getBitRate()
  450. {
  451. ReturnCode error = testOpen();
  452. if(isOk(error))
  453. {
  454. int res = inputFile->streams[videoStreamIndex]->codec->bit_rate / 1000;
  455. if(res == 0) res = inputFile->bit_rate / 1000;
  456. return res;
  457. }
  458. else
  459. {
  460. return 0;
  461. }
  462. }
  463. double Decoder::getFrameRate()
  464. {
  465. ReturnCode error = testOpen();
  466. if(isOk(error))
  467. {
  468. //return (double)inputStream.st->codec.frame_rate / DEFAULT_FRAME_RATE_BASE; //For ffmpeg-0.4.6
  469. //cout << "FR: " << inputFile->streams[videoStreamIndex]->codec.frame_rate << " - Base: " << inputFile->streams[videoStreamIndex]->codec.frame_rate_base << endl;
  470. //return (double)inputFile->streams[videoStreamIndex]->codec->time_base.den / (double)inputFile->streams[videoStreamIndex]->codec->time_base.num; //For ffmpeg-0.4.8
  471. return (double)inputFile->streams[videoStreamIndex]->r_frame_rate.num / (double)inputFile->streams[videoStreamIndex]->r_frame_rate.den;
  472. }
  473. else
  474. {
  475. return -1;
  476. }
  477. }
  478. TimeStamp Decoder::getDurationMilliseconds()
  479. {
  480. return duration;
  481. }
  482. double Decoder::getDurationSeconds()
  483. {
  484. return duration/FOBS_TIMESTAMP_UNITS_D;
  485. }
  486. byte Decoder::getCrFactor()
  487. {
  488. return 2;
  489. }
  490. byte Decoder::getCbFactor()
  491. {
  492. return 2;
  493. }
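// getLuminance: if the decoder's native pixel format is not YUV420P the frame is first
// converted into yuvPicture with img_convert(); the luma plane is then copied row by
// row into yuvBytes, honouring the source linesize padding.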
  494. byte *Decoder::getLuminance()
  495. {
  496. if(testOpen() != 0) return NULL;
  497. int ret;
  498. AVPicture *tmpPicture;
  499. enum PixelFormat pix_fmt=PIX_FMT_YUV420P;
  500. AVCodecContext *dec = inputFile->streams[videoStreamIndex]->codec;
  501. if(currentYuvFlag && compareTimeStamps(position, currentYuv))
  502. {
  503. return (byte *)yuvBytes;
  504. }
  505. /* convert pixel format if needed */
  506. if(pix_fmt == dec->pix_fmt)
  507. {
  508. tmpPicture = decodedPicture;
  509. }
  510. else
  511. {
  512. /* create temporary picture */
  513. if(yuvPicture->data[0] == NULL)
  514. {
  515. int size = avpicture_get_size(pix_fmt, dec->width, dec->height);
  516. yuvBuf = (uint8_t *)av_malloc(size);
  517. if (!yuvBuf)
  518. {
  519. return NULL;
  520. }
  521. avpicture_fill(yuvPicture, yuvBuf, pix_fmt, dec->width, dec->height);
  522. }
  523. ret = img_convert(yuvPicture, pix_fmt,
  524. decodedPicture, dec->pix_fmt,
  525. dec->width, dec->height);
  526. if(ret < 0)
  527. {
  528. return NULL;
  529. }
  530. tmpPicture = yuvPicture;
  531. }
  532. unsigned long offset = 0, srcOffset = 0;
  533. for(unsigned i = 0; i < _getHeight(); i++)
  534. {
  535. memcpy(yuvBytes+offset, tmpPicture->data[0]+srcOffset, getWidth());
  536. offset+=getWidth();
  537. srcOffset += decodedPicture->linesize[0];
  538. }
  539. currentYuv = position;
  540. currentYuvFlag = true;
  541. return (byte *)yuvBytes;
  542. }
  543. byte *Decoder::getCr()
  544. {
  545. if(testOpen() != 0) return NULL;
  546. AVCodecContext *dec = inputFile->streams[videoStreamIndex]->codec;
  547. enum PixelFormat pix_fmt=PIX_FMT_YUV420P;
  548. getLuminance();
  549. if(pix_fmt == dec->pix_fmt)
  550. {
  551. return (byte *)(decodedPicture->data[1]);
  552. }
  553. else
  554. {
  555. return (byte *)(yuvPicture->data[1]);
  556. }
  557. }
  558. byte *Decoder::getCb()
  559. {
  560. if(testOpen() != 0) return NULL;
  561. AVCodecContext *dec = inputFile->streams[videoStreamIndex]->codec;
  562. enum PixelFormat pix_fmt=PIX_FMT_YUV420P;
  563. getLuminance();
  564. if(pix_fmt == dec->pix_fmt)
  565. {
  566. return (byte *)(decodedPicture->data[2]);
  567. }
  568. else
  569. {
  570. return (byte *)(yuvPicture->data[2]);
  571. }
  572. }
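// reallocTransitionPicture: lazily (re)allocates the intermediate scaling buffers and
// the swscale context whenever the requested output size changes; one buffer keeps the
// frame in its native pixel format, the other its RGB24 conversion.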
  573. ReturnCode Decoder::reallocTransitionPicture(int newWidth, int newHeight)
  574. {
  575. ReturnCode error = OkCode;
  576. AVCodecContext *dec = inputFile->streams[videoStreamIndex]->codec;
  577. enum PixelFormat pix_fmt = dec->pix_fmt;
  578. if(transitionPictureBuf == NULL ||
  579. transitionPictureWidth != newWidth ||
  580. transitionPictureHeight != newHeight)
  581. {
  582. av_free(transitionPictureBuf);
  583. av_free(transitionPictureBufRgb);
  584. transitionPictureBuf = NULL;
  585. transitionPictureBufRgb = NULL;
  586. if(img_resample_ctx) sws_freeContext(img_resample_ctx);
  587. //if(img_resample_ctx)img_resample_close(img_resample_ctx);
  588. img_resample_ctx = NULL;
  589. int size = avpicture_get_size(pix_fmt, newWidth, newHeight);
  590. int sizeRgb = avpicture_get_size(PIX_FMT_RGB24, newWidth, newHeight);
  591. //cerr << "Decoder Transition: size=" << size << " sizeRgb=" << sizeRgb << " W=" << newWidth << " H=" << newHeight << endl;
  592. transitionPictureBuf = (uint8_t*)av_malloc(size);
  593. if (!transitionPictureBuf)
  594. {
  595. error = MemoryError;
  596. }
  597. if(isOk(error))
  598. {
  599. transitionPictureBufRgb = (uint8_t*)av_malloc(sizeRgb);
  600. if (!transitionPictureBufRgb)
  601. {
  602. error = MemoryError;
  603. }
  604. }
  605. if(isOk(error))
  606. {
  607. enum PixelFormat pix_fmt;
  608. AVPicture *pict;
  609. error = getRawFrame(&pict, (int *)&pix_fmt);
  610. if(isOk(error))
  611. {
  612. img_resample_ctx = sws_getContext(
  613. this->_getWidth(), this->_getHeight(), pix_fmt,
  614. newWidth, newHeight, pix_fmt,
  615. 0, NULL, NULL, NULL);
  616. //img_resample_ctx = img_resample_init( newWidth, newHeight, this->_getWidth(), this->_getHeight());
  617. if(img_resample_ctx == NULL)
  618. {
  619. error = MemoryError;
  620. }
  621. }
  622. }
  623. if(isOk(error))
  624. {
  625. avpicture_fill(transitionPicture, transitionPictureBuf, pix_fmt, newWidth, newHeight);
  626. avpicture_fill(transitionPictureRgb, transitionPictureBufRgb, PIX_FMT_RGB24, newWidth, newHeight);
  627. transitionPictureWidth = newWidth;
  628. transitionPictureHeight = newHeight;
  629. }
  630. }
  631. return error;
  632. }
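// getRGB(width, height): sws_scale() resizes the decoded picture to the requested size
// in its native pixel format, then img_convert() produces RGB24 in transitionPictureRgb.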
  633. byte *Decoder::getRGB(int width, int height)
  634. {
  635. AVPicture *pict;
  636. enum PixelFormat pix_fmt;
  637. ReturnCode error = getRawFrame(&pict, (int *)&pix_fmt);
  638. if(isError(error)) return NULL;
  639. enum PixelFormat dst_pix_fmt=PIX_FMT_RGB24;
  640. if(reallocTransitionPicture(width, height) == 0)
  641. {
  642. //img_resample(img_resample_ctx, transitionPicture, decodedPicture);
  643. sws_scale(img_resample_ctx, decodedPicture->data, decodedPicture->linesize, 0, transitionPictureHeight/*?*/, transitionPicture->data, transitionPicture->linesize);
  644. if(img_convert(transitionPictureRgb, dst_pix_fmt,transitionPicture, pix_fmt, width, height) < 0)
  645. {
  646. return NULL;
  647. }
  648. }
  649. else
  650. {
  651. return NULL;
  652. }
  653. return (byte *)transitionPictureRgb->data[0];
  654. }
  655. byte *Decoder::getRGB()
  656. {
  657. if(testOpen() != 0) return NULL;
  658. int ret;
  659. AVPicture *tmpPicture;
  660. enum PixelFormat pix_fmt=PIX_FMT_RGB24;
  661. AVCodecContext *dec = inputFile->streams[videoStreamIndex]->codec;
  662. if(currentRgbFlag && compareTimeStamps(position, currentRgb))
  663. {
  664. if(pix_fmt == dec->pix_fmt)
  665. {
  666. return (byte *)decodedPicture->data[0];
  667. }
  668. else
  669. {
  670. return (byte *)rgbPicture->data[0];
  671. }
  672. }
  673. /* convert pixel format if needed */
  674. if(pix_fmt == dec->pix_fmt)
  675. {
  676. tmpPicture = decodedPicture;
  677. }
  678. else
  679. {
  680. /* create temporary picture */
  681. if(rgbBuf == NULL)
  682. {
  683. int size = avpicture_get_size(pix_fmt, dec->width, dec->height);
  684. rgbBuf = (uint8_t *)av_malloc(size);
  685. if (!rgbBuf)
  686. {
  687. return NULL;
  688. }
  689. avpicture_fill(rgbPicture, rgbBuf, pix_fmt, dec->width, dec->height);
  690. }
  691. ret = img_convert(rgbPicture, pix_fmt,
  692. decodedPicture, dec->pix_fmt,
  693. dec->width, dec->height);
  694. if(ret < 0)
  695. {
  696. return NULL;
  697. }
  698. tmpPicture = rgbPicture;
  699. }
  700. currentRgb = position;
  701. currentRgbFlag = true;
  702. return (byte *)tmpPicture->data[0];
  703. }
  704. byte *Decoder::getRGBA()
  705. {
  706. if(testOpen() != 0) return NULL;
  707. int ret;
  708. AVPicture *tmpPicture;
  709. enum PixelFormat pix_fmt=PIX_FMT_RGBA32;
  710. AVCodecContext *dec = inputFile->streams[videoStreamIndex]->codec;
  711. if(currentRgbaFlag && compareTimeStamps(position, currentRgba))
  712. {
  713. if(pix_fmt == dec->pix_fmt)
  714. {
  715. return (byte *)decodedPicture->data[0];
  716. }
  717. else
  718. {
  719. return (byte *)rgbaPicture->data[0];
  720. }
  721. }
  722. /* convert pixel format if needed */
  723. if(pix_fmt == dec->pix_fmt)
  724. {
  725. tmpPicture = decodedPicture;
  726. }
  727. else
  728. {
  729. /* create temporary picture */
  730. if(rgbaBuf == NULL)
  731. {
  732. int size = avpicture_get_size(pix_fmt, dec->width, dec->height);
  733. rgbaBuf = (uint8_t *)av_malloc(size);
  734. if (!rgbaBuf)
  735. {
  736. return NULL;
  737. }
  738. avpicture_fill(rgbaPicture, rgbaBuf, pix_fmt, dec->width, dec->height);
  739. }
  740. ret = img_convert(rgbaPicture, pix_fmt,
  741. decodedPicture, dec->pix_fmt,
  742. dec->width, dec->height);
  743. if(ret < 0)
  744. {
  745. return NULL;
  746. }
  747. tmpPicture = rgbaPicture;
  748. }
  749. currentRgba = position;
  750. currentRgbaFlag = true;
  751. return (byte *)tmpPicture->data[0];
  752. }
  753. byte *Decoder::getRGBA(char *buf)
  754. {
  755. if(testOpen() != 0) return NULL;
  756. int ret;
  757. enum PixelFormat pix_fmt=PIX_FMT_RGBA32;
  758. AVCodecContext *dec = inputFile->streams[videoStreamIndex]->codec;
  759. {
  760. AVPicture tmpPicture;
  761. avpicture_fill(&tmpPicture, (uint8_t *)buf, pix_fmt, dec->width, dec->height);
  762. ret = img_convert(&tmpPicture, pix_fmt,
  763. decodedPicture, dec->pix_fmt,
  764. dec->width, dec->height);
  765. if(ret < 0)
  766. {
  767. return NULL;
  768. }
  769. }
  770. return (byte *)buf;
  771. }
  772. ReturnCode Decoder::getRawFrame(AVPicture** pict, int* pix_fmt)
  773. {
  774. ReturnCode error = testOpen();
  775. if(isOk(error))
  776. {
  777. AVCodecContext *dec = inputFile->streams[videoStreamIndex]->codec;
  778. *pix_fmt = (int)dec->pix_fmt;
  779. *pict = decodedPicture;
  780. }
  781. return error;
  782. }
  783. uint8_t* Decoder::getAudioSamples()
  784. {
  785. //cout << "Requesting audio frame. Size="<<decodedAudioFrameSize;
  786. //cout << " which should be about "<<(double)decodedAudioFrameSize/getAudioChannelNumber()/2.0/getAudioSampleRate()*1000.0<<"ms"<<endl;
  787. return decodedAudioFrame;
  788. }
  789. int Decoder::getAudioSamplesSize()
  790. {
  791. return decodedAudioFrameSize;
  792. }
  793. char* Decoder::getFileName()
  794. {
  795. return filename;
  796. }
  797. /*bool Decoder::moreFrames()
  798. {
  799. return !(eofReachedFlag);
  800. }*/
  801. FrameIndex Decoder::getFrameIndex()
  802. {
  803. return frameIndexFromTimeStamp(position);
  804. //return (FrameIndex)((position - firstVideoFramePosition) * getFrameRate() + 0.5);
  805. }
  806. double Decoder::getFrameTime()
  807. {
  808. return (position - firstVideoFramePosition)/FOBS_TIMESTAMP_UNITS_D;
  809. }
  810. double Decoder::getNextFrameTime()
  811. {
  812. return getFrameTime() + 1.0/getFrameRate();
  813. }
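// readNextFrame: demux one packet with av_read_frame() and append it to the video or
// audio PacketBuffer; packets from any other stream are discarded, and end of file is
// reported as VideoEndError.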
  814. ReturnCode Decoder::readNextFrame()
  815. {
  816. static unsigned counter = 0;
  817. AVPacket pkt;
  818. int frameReady = false;
  819. ReturnCode error = testOpen();
  820. /*if(isOk(error))
  821. {
  822. if(eofReachedFlag) error = VideoEndError;
  823. }*/
  824. while(isOk(error) && !frameReady)
  825. {
  826. /* read a packet from it and output it in the fifo */
  827. if (av_read_frame(inputFile, &pkt) < 0)
  828. {
  829. //eofReachedFlag = true;
  830. error = VideoEndError;
  831. continue;
  832. }
  833. if(pkt.stream_index == videoStreamIndex)
  834. {
  835. videoBuffer->append(&pkt);
  836. }
  837. else if(pkt.stream_index == audioStreamIndex)
  838. {
  839. audioBuffer->append(&pkt);
  840. }
  841. else
  842. {
  843. av_free_packet(&pkt);
  844. continue;
  845. }
  846. frameReady = true;
  847. }
  848. return error;
  849. }
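// placeAtNextFrame: ensure the requested buffer holds a packet, then peek it and update
// position (video) or positionAudio (audio) from the packet dts; when dts is
// AV_NOPTS_VALUE the position is advanced synthetically (one frame period, or one audio
// chunk), and the earliest timestamp seen becomes firstVideoPosition/firstAudioPosition.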
  850. ReturnCode Decoder::placeAtNextFrame(bool videoFrame)
  851. {
  852. ReturnCode error = testOpen();
  853. PacketBuffer *buffer = videoFrame?videoBuffer:audioBuffer;
  854. /*if(isOk(error))
  855. {
  856. if(eofReachedFlag) error = VideoEndError;
  857. }*/
  858. while(isOk(error) && buffer->count() <= 0)
  859. {
  860. error = readNextFrame();
  861. }
  862. if(isOk(error))
  863. {
  864. AVPacket *pkt = buffer->readNext();
  865. //if(pkt != NULL) cout << "-DTS " << (videoFrame?"Video":"Audio") << ": "<< pkt->dts << " size:" << pkt->size << endl;
  866. if(pkt == NULL)
  867. {
  868. error = VideoPacketBufferEmptyError;
  869. }
  870. else if(pkt->size > 0)
  871. {
  872. if(videoFrame)
  873. {
  874. AVRational vTimeBase = inputFile->streams[videoStreamIndex]->time_base;
  875. if(pkt->dts == (TimeStamp)AV_NOPTS_VALUE)
  876. {
  877. position += (TimeStamp)(FOBS_TIMESTAMP_UNITS_D/getFrameRate());
  878. //cout << "PosNew=" << position <<endl;
  879. if(firstVideoPositionFlag == false)
  880. {
  881. firstVideoPosition = position;
  882. firstVideoPositionFlag = true;
  883. }
  884. else
  885. {
  886. if(position < firstVideoPosition) firstVideoPosition = position;
  887. }
  888. }
  889. else
  890. {
  891. //position += (TimeStamp)(FOBS_TIMESTAMP_UNITS_D/getFrameRate());
  892. if(firstVideoPositionFlag == false || ( pts2TimeStamp(pkt->dts, &vTimeBase) < firstVideoPosition ) )
  893. {
  894. firstVideoPosition = pts2TimeStamp(pkt->dts, &vTimeBase);
  895. firstVideoPositionFlag = true;
  896. }
  897. position = pts2TimeStamp(pkt->dts - timeStamp2pts(firstVideoPosition, &vTimeBase), &vTimeBase);
  898. //cout << "VIDEO PosNew=" << position << " DTS="<<pkt->dts<<" PTS=" << pkt->pts << " DEN=" << vTimeBase.den << " NUM=" << vTimeBase.num << " FVideoDTS : " << timeStamp2pts(firstVideoPosition, &vTimeBase) << endl;
  899. AVStream *s = (inputFile->streams[videoStreamIndex]);
  900. }
  901. frameFlag = true;
  902. }
  903. else
  904. {
  905. AVRational aTimeBase = inputFile->streams[audioStreamIndex]->time_base;
  906. int aDen = aTimeBase.den;
  907. if((videoStreamIndex < 0 && firstAudioPositionFlag != -1 && pkt->dts == 0 && pkt->pts == 0)||incorrectPts)
  908. {
  909. pkt->dts = (TimeStamp)AV_NOPTS_VALUE;
  910. incorrectPts = true;
  911. //cout << "Incorrect Time Values detected\n";
  912. }
  913. if(pkt->dts == (TimeStamp)AV_NOPTS_VALUE)
  914. {
  915. positionAudio += (TimeStamp)((double)FOBS_TIMESTAMP_UNITS_D*(double)getAudioSamplesSize()/getAudioChannelNumber()/2/getAudioSampleRate());
  916. //cout << "FiguringOut Audio Position: "<< positionAudio << endl;
  917. if(firstAudioPositionFlag == false)
  918. {
  919. firstAudioPosition = position;
  920. firstAudioPositionFlag = true;
  921. }
  922. else
  923. {
  924. if(positionAudio < firstAudioPosition) firstAudioPosition = positionAudio;
  925. }
  926. }
  927. else
  928. {
  929. //cout << "DTS" << pkt->pts <<endl;
  930. if(firstAudioPositionFlag == false || pts2TimeStamp(pkt->dts, &aTimeBase) < firstAudioPosition )
  931. {
  932. //cout << "Changing first audio" << endl;
  933. firstAudioPosition = pts2TimeStamp(pkt->dts, &aTimeBase);
  934. firstAudioPositionFlag = true;
  935. }
  936. positionAudio = pts2TimeStamp(pkt->dts - timeStamp2pts(firstAudioPosition, &aTimeBase), &aTimeBase);
  937. }
  938. //cout << "AUDIO PosNew=" << positionAudio << " DTS="<<pkt->dts<<" PTS=" << pkt->pts << " DEN=" << aTimeBase.den << " NUM=" << aTimeBase.num << " FAudioDTS : " << timeStamp2pts(firstAudioPosition, &aTimeBase)<<endl;
  939. }
  940. }
  941. else
  942. {
  943. frameFlag = false;
  944. }
  945. }
  946. /*
  947. cout << "PTS: ";
  948. if(pkt.pts == AV_NOPTS_VALUE) cout << "UNKNOWN ";
  949. else cout << ((double)pkt.pts/(double)AV_TIME_BASE);
  950. cout << endl;
  951. cout << "DTS: ";
  952. if(pkt.dts == AV_NOPTS_VALUE) cout << "UNKNOWN ";
  953. else cout << ((double)pkt.dts/(double)AV_TIME_BASE);
  954. cout << endl;
  955. */
  956. return error;
  957. }
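// decodeFrame: pop the next video packet; raw video is wrapped directly with
// avpicture_fill(), everything else goes through avcodec_decode_video(). NoFrameError
// means the codec needs more packets before it can emit a picture.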
  958. ReturnCode Decoder::decodeFrame()
  959. {
  960. ReturnCode error = testOpen();
  961. int got_picture = 0;
  962. AVPacket *pkt = videoBuffer->extractNext();
  963. if(pkt == NULL)
  964. {
  965. error = VideoPacketBufferEmptyError;
  966. }
  967. if(isOk(error))
  968. {
  969. if(pkt->size == 0)
  970. {
  971. error = BadParamsError;
  972. }
  973. }
  974. if(isOk(error))
  975. {
  976. //cout << "Decode Video: " << pkt->dts << endl;
  977. if (inputFile->streams[videoStreamIndex]->codec->codec_id == CODEC_ID_RAWVIDEO)
  978. {
  979. int size;
  980. size = (_getWidth() * _getHeight());
  981. avpicture_fill(decodedPicture, pkt->data,
  982. inputFile->streams[videoStreamIndex]->codec->pix_fmt,
  983. inputFile->streams[videoStreamIndex]->codec->width,
  984. inputFile->streams[videoStreamIndex]->codec->height);
  985. }
  986. else
  987. {
  988. AVFrame big_picture;
  989. int ret = avcodec_decode_video(inputFile->streams[videoStreamIndex]->codec,
  990. &big_picture, &got_picture, pkt->data, pkt->size);
  991. *decodedPicture= *(AVPicture*)&big_picture;
  992. inputFile->streams[videoStreamIndex]->quality= big_picture.quality;
  993. if (ret < 0 || !got_picture)
  994. {
  995. error = NoFrameError;
  996. }
  997. else if(firstVideoFramePosition == -1)
  998. {
  999. firstVideoFramePosition = position;
  1000. }
  1001. }
  1002. av_free_packet(pkt);
  1003. }
  1004. return error;
  1005. }
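// decodeAudioFrame: decode the next audio packet into decodedAudioFrame with
// avcodec_decode_audio(); if the codec consumed only part of the packet, the unconsumed
// tail is pushed back onto the audio buffer so the next call continues from there.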
  1006. ReturnCode Decoder::decodeAudioFrame()
  1007. {
  1008. ReturnCode error = testOpen();
  1009. int data_size = 0;
  1010. //short samples[AVCODEC_MAX_AUDIO_FRAME_SIZE / 2];
  1011. uint8_t *samples = decodedAudioFrame;
  1012. AVPacket *pkt = audioBuffer->extractNext();
  1013. if(pkt == NULL)
  1014. {
  1015. error = VideoPacketBufferEmptyError;
  1016. }
  1017. if(isOk(error))
  1018. {
  1019. if(pkt->size == 0)
  1020. {
  1021. error = BadParamsError;
  1022. av_free_packet(pkt);
  1023. }
  1024. }
  1025. uint64_t ts;
  1026. int ret;
  1027. uint8_t *ptr;
  1028. int len;
  1029. if(isOk(error))
  1030. {
  1031. ts = pkt->dts;
  1032. ptr = pkt->data;
  1033. len = pkt->size;
  1034. ret = avcodec_decode_audio(inputFile->streams[audioStreamIndex]->codec, (short*) samples, &data_size, ptr, len);
  1035. if(ret < 0)
  1036. {
  1037. error = GenericError;
  1038. }
  1039. else if(ret == len)
  1040. {
  1041. av_free_packet(pkt);
  1042. }
  1043. }
  1044. if(isOk(error))
  1045. {
  1046. /* Some bug in mpeg audio decoder gives */
  1047. /* data_size < 0, it seems they are overflows */
  1048. if (data_size <= 0) {
  1049. //no audio frame
  1050. decodedAudioFrameSize = 0;
  1051. //cout << "No Audio Frame!!" << endl;
  1052. }
  1053. else
  1054. {
  1055. /* Only used if setAudioResampleFlag
  1056. if(audioResampler)
  1057. {
  1058. //Resampling needed
  1059. int nSamples = data_size / _getAudioChannelNumber()/2;
  1060. audio_resample(audioResampler, (short *)resampledAudioBuffer, (short *)samples, nSamples);
  1061. data_size = nSamples * getAudioChannelNumber() * 2;
  1062. //memcpy(samples, resampled, data_size);
  1063. } */
  1064. //cout << "Audio Decoded - DTS="<<((double)ts/(double)AV_TIME_BASE)<<" Size=" << data_size << " Max=" << AVCODEC_MAX_AUDIO_FRAME_SIZE << endl;
  1065. //audioTime = ((double)ts/(double)AV_TIME_BASE);
  1066. //memcpy(decodedAudioFrame, samples, data_size);
  1067. if(firstAudioFramePosition == -1)
  1068. {
  1069. firstAudioFramePosition = positionAudio;
  1070. }
  1071. decodedAudioFrameSize = data_size;
  1072. //cout << "OK!!" << endl;
  1073. //cout << "Audio decode ret=" << ret<< " len="<<len<<" data_size="<<data_size<<endl;
  1074. if(ret != len)
  1075. {
  1076. //Some pkt data was not processed... include it again in the packet list
  1077. //cout << "Ret="<<ret<<" - Len="<<len<<endl;
  1078. if(pkt->size > 0) //Strange audio processing behaviour...
  1079. {
  1080. if(ret == 0)
  1081. {
  1082. audioBuffer->prepend(pkt);
  1083. }
  1084. else
  1085. {
  1086. AVPacket newPkt;
  1087. av_new_packet(&newPkt, pkt->size - ret);
  1088. memcpy(newPkt.data, pkt->data + ret, pkt->size - ret);
  1089. av_free_packet(pkt);
  1090. audioBuffer->prepend(&newPkt);
  1091. }
  1092. }
  1093. }
  1094. }
  1095. }
  1096. return error;
  1097. }
  1098. ReturnCode Decoder::nextFrame()
  1099. {
  1100. //cout << "Next Video Frame Time: " << getNextFrameTime() << endl;
  1101. ReturnCode error = placeAtNextFrame(true);
  1102. if(isOk(error))
  1103. {
  1104. AVPacket *pkt = videoBuffer->readNext();
  1105. if(pkt == NULL)
  1106. {
  1107. error = VideoPacketBufferEmptyError;
  1108. }
  1109. else if(pkt->size == 0)
  1110. {
  1111. videoBuffer->deleteNext();
  1112. return nextFrame();
  1113. }
  1114. else
  1115. {
  1116. error = decodeFrame();
  1117. }
  1118. }
  1119. if(error == NoFrameError) return nextFrame();
  1120. return error;
  1121. }
  1122. ReturnCode Decoder::nextAudioFrame()
  1123. {
  1124. ReturnCode error = placeAtNextFrame(false);
  1125. if(isOk(error))
  1126. {
  1127. AVPacket *pkt = audioBuffer->readNext();
  1128. if(pkt == NULL)
  1129. {
  1130. error = VideoPacketBufferEmptyError;
  1131. }
  1132. else if(pkt->size == 0)
  1133. {
  1134. audioBuffer->deleteNext();
  1135. return nextAudioFrame();
  1136. }
  1137. else
  1138. {
  1139. //cout << "Decoder AUDIO Time: " << pkt->dts << endl;
  1140. error = decodeAudioFrame();
  1141. if(isOk(error))
  1142. {
  1143. //cout << "New Audio Sample Set" << endl;
  1144. if(getAudioSamplesSize() <= 0)
  1145. {
  1146. return nextAudioFrame();
  1147. }
  1148. }
  1149. }
  1150. }
  1151. return error;
  1152. }
  1153. ReturnCode Decoder::prevFrame()
  1154. {
  1155. return setFrame(frameIndexFromTimeStamp(position) - 1);
  1156. }
  1157. TimeStamp Decoder::getAVPosition()
  1158. {
  1159. if(isVideoPresent())
  1160. {
  1161. return position;
  1162. }
  1163. if(isAudioPresent())
  1164. {
  1165. return positionAudio;
  1166. }
return 0; // added: neither video nor audio stream present (original fell off the end without a return)
  1167. }
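// _setFrame: seeking entry point. The packet buffers are cleared, av_seek_frame() is
// tried slightly before the target, and the position is then refined with the fast path
// (decode forward from the landing key frame) or, if seeking fails or timestamps are
// unreliable, the classic path (reopen the file and decode from the start).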
  1168. ReturnCode Decoder::_setFrame(TimeStamp newPosition)
  1169. {
  1170. ReturnCode error = testOpen();
  1171. if(isOk(error))
  1172. {
  1173. if(isVideoPresent() && firstVideoFramePosition == -1)
  1174. {
  1175. error = nextFrame();
  1176. error = nextAudioFrame();
  1177. }
  1178. else if(isAudioPresent() && firstAudioFramePosition == -1)
  1179. {
  1180. error = nextFrame();
  1181. error = nextAudioFrame();
  1182. }
  1183. }
  1184. if(isOk(error))
  1185. {
  1186. if(newPosition > getDurationMilliseconds()) error = BadParamsError;
  1187. }
  1188. TimeStamp currentPosition=isVideoPresent()?position:positionAudio;
  1189. TimeStamp firstPosition=isVideoPresent()?firstVideoFramePosition:firstAudioFramePosition;
  1190. if(isOk(error) && !compareTimeStamps(currentPosition, newPosition))
  1191. {
  1192. videoBuffer->clear();
  1193. audioBuffer->clear();
  1194. TimeStamp newPos = newPosition - 1000;
  1195. if(newPos < firstPosition) newPos = 0;
  1196. if(newPosition < firstPosition) newPosition = firstPosition;
  1197. int64_t realPos = newPos * 1000;
  1198. cout << "Seeking pos: " << newPosition << " - Real: " << realPos << endl;
  1199. int res = -1;
  1200. if(!incorrectPts)
  1201. {
  1202. res = av_seek_frame(inputFile, -1, realPos, AVSEEK_FLAG_BACKWARD);
  1203. }
  1204. if(isVideoPresent())
  1205. {
  1206. if(res < 0)
  1207. {
  1208. error = setFrameClassic(newPosition);
  1209. }
  1210. else
  1211. {
  1212. error = setFrameFast(newPosition);
  1213. }
  1214. }
  1215. else
  1216. {
  1217. cout << "Performing Audio seek: " << res << endl;
  1218. if(res < 0)
  1219. {
  1220. error = setAudioClassic(newPosition);
  1221. }
  1222. else
  1223. {
  1224. error = setAudioFast(newPosition);
  1225. }
  1226. }
  1227. audioBuffer->clear();
  1228. }
  1229. return error;
  1230. }
  1231. /*ReturnCode Decoder::setAudioFast(TimeStamp newPosition)
  1232. {
  1233. ReturnCode error = OkCode;
  1234. for(error = nextAudioFrame(); isOk(error);error = nextAudioFrame())
  1235. {
  1236. if(compareTimeStamps(positionAudio, newPosition)) break;
  1237. if(positionAudio > newPosition) break;
  1238. }
  1239. return error;
  1240. }*/
  1241. ReturnCode Decoder::setAudioFast(TimeStamp newPosition)
  1242. {
  1243. ReturnCode error = OkCode;
  1244. for(error = placeAtNextFrame(false); isOk(error);error = placeAtNextFrame(false))
  1245. {
  1246. if(compareTimeStamps(positionAudio, newPosition)) break;
  1247. if(positionAudio > newPosition) break;
  1248. AVPacket *pkt = audioBuffer->extractNext();
  1249. av_free_packet(pkt);
  1250. }
  1251. return error;
  1252. }
  1253. ReturnCode Decoder::setAudioClassic(TimeStamp newPosition)
  1254. {
  1255. ReturnCode error = testOpen();
  1256. cout << "SetAudioClassic Called... Use another format!\n";
  1257. if(isOk(error))
  1258. {
  1259. if(newPosition > duration) error = BadParamsError;
  1260. }
  1261. if(isOk(error))
  1262. {
  1263. if(compareTimeStamps(positionAudio, newPosition))
  1264. {
  1265. error = OkCode;
  1266. }
  1267. else if(newPosition > position)
  1268. {
  1269. setAudioFast(newPosition);
  1270. }
  1271. else
  1272. {
  1273. error = close();
  1274. if(isOk(error))
  1275. {
  1276. error = _open();
  1277. }
  1278. if(isOk(error))
  1279. {
  1280. error = setFrameByTime(newPosition);
  1281. }
  1282. }
  1283. }
  1284. return error;
  1285. }
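// setFrameFast: discard packets until a key frame, decode it, then decode every
// following frame up to the target time; buffered audio packets are then decoded until
// positionAudio catches up with the video position to keep A/V roughly in sync.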
  1286. ReturnCode Decoder::setFrameFast(TimeStamp newPosition)
  1287. {
  1288. ReturnCode error = OkCode;
  1289. bool keyFrame = false;
  1290. for(error = placeAtNextFrame(true); isOk(error);)
  1291. {
  1292. AVPacket *pkt = videoBuffer->readNext();
  1293. if(keyFrame == false && pkt->flags & PKT_FLAG_KEY)
  1294. {
  1295. keyFrame = true;
  1296. error = decodeFrame();
  1297. }
  1298. else
  1299. {
  1300. if(keyFrame) error = nextFrame();
  1301. else
  1302. {
  1303. AVPacket *pkt = videoBuffer->extractNext();
  1304. av_free_packet(pkt);
  1305. error = placeAtNextFrame(true);
  1306. }
  1307. }
  1308. //cout << "P: " << position << endl;
  1309. if(compareTimeStamps(position, newPosition) || newPosition <= position) break;
  1310. }
  1311. //Workaround to sync a/v after a seek op
  1312. //audioBuffer->clear();
  1313. positionAudio = firstAudioPosition;
  1314. while(audioBuffer->count()>0 && position>positionAudio && isOk(error))
  1315. {
  1316. error = placeAtNextFrame(false);
  1317. error = decodeAudioFrame();
  1318. /*
  1319. AVPacket *pkt = audioBuffer->readNext();
  1320. AVRational aTimeBase = inputFile->streams[audioStreamIndex]->time_base;
  1321. if(position +1000 < pts2TimeStamp(pkt->dts, &aTimeBase)) break;
  1322. pkt = audioBuffer->extractNext();
  1323. cout << "Extracting one audio packet to sync - VidPos: "<<position<<" and packet was: "<< pts2TimeStamp(pkt->pts, &aTimeBase) << endl;
  1324. av_free_packet(pkt);
  1325. */
  1326. }
  1327. return error;
  1328. }
  1329. ReturnCode Decoder::setFrameClassic(TimeStamp newPosition)
  1330. {
  1331. ReturnCode error = testOpen();
  1332. cout << "SetFrameClassic Called... Use another video format!\n";
  1333. //FrameIndex frameIndex = frameIndexFromTimeStamp(newPosition);
  1334. if(isOk(error))
  1335. {
  1336. if(newPosition > duration) error = BadParamsError;
  1337. }
  1338. if(isOk(error))
  1339. {
  1340. if(compareTimeStamps(position, newPosition) && frameFlag)
  1341. {
  1342. error = OkCode;
  1343. }
  1344. else if(newPosition > position || !frameFlag)
  1345. {
  1346. bool keyFrame = false;
  1347. for(error = placeAtNextFrame(true); isOk(error) || !frameFlag;)
  1348. {
  1349. AVPacket *pkt = videoBuffer->readNext();
  1350. if((keyFrame == false) && (((pkt->flags & PKT_FLAG_KEY) && (newPosition - position < 5000)) || (newPosition - position < 3500)))
  1351. {
  1352. keyFrame = true;
  1353. error = decodeFrame();
  1354. }
  1355. else
  1356. {
  1357. if(keyFrame) error = nextFrame();
  1358. else
  1359. {
  1360. AVPacket *pkt = videoBuffer->extractNext();
  1361. av_free_packet(pkt);
  1362. error = placeAtNextFrame(true);
  1363. }
  1364. }
  1365. if(compareTimeStamps(position, newPosition) || newPosition <= position) break;
  1366. //if(newPosition <= position) break;
  1367. /*
  1368. error = nextFrame();
  1369. if(position > newPosition) break;
  1370. */
  1371. }
  1372. }
  1373. else
  1374. {
  1375. cout << "Begin reading video again" << endl;
  1376. error = close();
  1377. if(isOk(error))
  1378. {
  1379. error = _open();
  1380. }
  1381. if(isOk(error))
  1382. {
  1383. error = setFrameByTime(newPosition);
  1384. }
  1385. }
  1386. }
  1387. return error;
  1388. }
  1389. ReturnCode Decoder::setFrame(FrameIndex frameIndex)
  1390. {
  1391. return _setFrame(timeStampFromFrameIndex(frameIndex));
  1392. }
  1393. ReturnCode Decoder::setFrameByTime(double seconds)
  1394. {
  1395. ReturnCode error = OkCode;
  1396. error = _setFrame((TimeStamp)(seconds * FOBS_TIMESTAMP_UNITS_D));
  1397. return error;
  1398. }
  1399. ReturnCode Decoder::setFrameByTime(TimeStamp milliseconds)
  1400. {
  1401. return _setFrame(milliseconds);
  1402. }
  1403. ReturnCode Decoder::setPosition(TimeStamp milliseconds)
  1404. {
  1405. if(isVideoPresent())
  1406. {
  1407. return setFrameByTime(milliseconds);
  1408. }
return _setFrame(milliseconds); // added: audio-only fallback; original had no return on this path
  1409. }
  1410. bool Decoder::isVideoPresent()
  1411. {
  1412. return videoStreamIndex >= 0;
  1413. }
  1414. bool Decoder::isAudioPresent()
  1415. {
  1416. return audioStreamIndex >= 0;
  1417. }
  1418. void Decoder::enableAudio(bool flag)
  1419. {
  1420. audioEnabledFlag = flag;
  1421. }
  1422. omnividea::fobs::uint Decoder::getAudioSampleRate()
  1423. {
  1424. uint res = 0;
  1425. if(isAudioPresent())
  1426. {
  1427. res = inputFile->streams[audioStreamIndex]->codec->sample_rate;
  1428. }
  1429. return res;
  1430. }
  1431. omnividea::fobs::uint Decoder::getAudioBitRate()
  1432. {
  1433. uint res = 0;
  1434. if(isAudioPresent())
  1435. {
  1436. res = inputFile->streams[audioStreamIndex]->codec->bit_rate / 1000;
  1437. }
  1438. return res;
  1439. }
  1440. omnividea::fobs::uint Decoder::getAudioChannelNumber()
  1441. {
  1442. uint res = 0;
  1443. if(isAudioPresent())
  1444. {
  1445. res = inputFile->streams[audioStreamIndex]->codec->channels;
  1446. }
  1447. return res;// > 2?2:res;
  1448. }
  1449. double Decoder::getAudioTime()
  1450. {
  1451. return (positionAudio - firstAudioFramePosition)/FOBS_TIMESTAMP_UNITS_D;
  1452. }
  1453. double Decoder::getTime()
  1454. {
  1455. if(isVideoPresent())
  1456. {
  1457. return getFrameTime();//*FOBS_TIMESTAMP_UNITS_D;
  1458. }
  1459. else
  1460. {
  1461. return getAudioTime();//*FOBS_TIMESTAMP_UNITS_D;
  1462. }
  1463. }
  1464. AVCodecContext *Decoder::getAudioCodec()
  1465. {
  1466. if(isAudioPresent()) return inputFile->streams[audioStreamIndex]->codec;
  1467. return NULL;
  1468. }
  1469. AVCodecContext *Decoder::getVideoCodec()
  1470. {
  1471. if(videoStreamIndex >= 0) return inputFile->streams[videoStreamIndex]->codec;
  1472. }