/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java

https://github.com/Balamurugan-R/javacv · Java · 673 lines · 515 code · 60 blank · 98 comment · 191 complexity · e0bae99dbeb6cd74f29c49be222fb874 MD5 · raw file

  1. /*
  2. * Copyright (C) 2009,2010,2011,2012,2013 Samuel Audet
  3. *
  4. * This file is part of JavaCV.
  5. *
  6. * JavaCV is free software: you can redistribute it and/or modify
  7. * it under the terms of the GNU General Public License as published by
  8. * the Free Software Foundation, either version 2 of the License, or
  9. * (at your option) any later version (subject to the "Classpath" exception
  10. * as provided in the LICENSE.txt file that accompanied this code).
  11. *
  12. * JavaCV is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. * GNU General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU General Public License
  18. * along with JavaCV. If not, see <http://www.gnu.org/licenses/>.
  19. *
  20. *
  21. * Based on the avcodec_sample.0.5.0.c file available at
  22. * http://web.me.com/dhoerl/Home/Tech_Blog/Entries/2009/1/22_Revised_avcodec_sample.c_files/avcodec_sample.0.5.0.c
  23. * by Martin Böhme, Stephen Dranger, and David Hoerl
  24. * as well as on the decoding_encoding.c file included in FFmpeg 0.11.1,
  25. * which is covered by the following copyright notice:
  26. *
  27. * Copyright (c) 2001 Fabrice Bellard
  28. *
  29. * Permission is hereby granted, free of charge, to any person obtaining a copy
  30. * of this software and associated documentation files (the "Software"), to deal
  31. * in the Software without restriction, including without limitation the rights
  32. * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  33. * copies of the Software, and to permit persons to whom the Software is
  34. * furnished to do so, subject to the following conditions:
  35. *
  36. * The above copyright notice and this permission notice shall be included in
  37. * all copies or substantial portions of the Software.
  38. *
  39. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  40. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  41. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  42. * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  43. * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  44. * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  45. * THE SOFTWARE.
  46. */
  47. package org.bytedeco.javacv;
  48. import java.io.File;
  49. import java.nio.Buffer;
  50. import java.nio.ByteBuffer;
  51. import org.bytedeco.javacpp.BytePointer;
  52. import org.bytedeco.javacpp.DoublePointer;
  53. import org.bytedeco.javacpp.IntPointer;
  54. import org.bytedeco.javacpp.Loader;
  55. import org.bytedeco.javacpp.PointerPointer;
  56. import static org.bytedeco.javacpp.avcodec.*;
  57. import static org.bytedeco.javacpp.avdevice.*;
  58. import static org.bytedeco.javacpp.avformat.*;
  59. import static org.bytedeco.javacpp.avutil.*;
  60. import static org.bytedeco.javacpp.opencv_core.*;
  61. import static org.bytedeco.javacpp.swscale.*;
  62. /**
  63. *
  64. * @author Samuel Audet
  65. */
  66. public class FFmpegFrameGrabber extends FrameGrabber {
  67. public static String[] getDeviceDescriptions() throws Exception {
  68. tryLoad();
  69. throw new UnsupportedOperationException("Device enumeration not support by FFmpeg.");
  70. }
    /** Creates a grabber for the given device file (e.g. /dev/video0), using its absolute path. */
    public static FFmpegFrameGrabber createDefault(File deviceFile) throws Exception { return new FFmpegFrameGrabber(deviceFile); }
    /** Creates a grabber for the given device path, filename, or URL. */
    public static FFmpegFrameGrabber createDefault(String devicePath) throws Exception { return new FFmpegFrameGrabber(devicePath); }
    /** FFmpeg addresses devices by path, not number, so this variant always returns null. */
    public static FFmpegFrameGrabber createDefault(int deviceNumber) throws Exception { return null; }
  74. private static Exception loadingException = null;
  75. public static void tryLoad() throws Exception {
  76. if (loadingException != null) {
  77. throw loadingException;
  78. } else {
  79. try {
  80. Loader.load(org.bytedeco.javacpp.avutil.class);
  81. Loader.load(org.bytedeco.javacpp.avcodec.class);
  82. Loader.load(org.bytedeco.javacpp.avformat.class);
  83. Loader.load(org.bytedeco.javacpp.avdevice.class);
  84. Loader.load(org.bytedeco.javacpp.swscale.class);
  85. } catch (Throwable t) {
  86. if (t instanceof Exception) {
  87. throw loadingException = (Exception)t;
  88. } else {
  89. throw loadingException = new Exception("Failed to load " + FFmpegFrameGrabber.class, t);
  90. }
  91. }
  92. }
  93. }
    static {
        // Register all formats and codecs
        // NOTE(review): order follows the upstream FFmpeg samples;
        // avformat_network_init() enables network protocols such as rtsp/http.
        avcodec_register_all();
        avdevice_register_all();
        av_register_all();
        avformat_network_init();
    }
    /** Grabs from the given file, using its absolute path. */
    public FFmpegFrameGrabber(File file) {
        this(file.getAbsolutePath());
    }
    /** Grabs from the given filename, device path, or URL; nothing is opened until start(). */
    public FFmpegFrameGrabber(String filename) {
        this.filename = filename;
    }
    /** Thread-safe release: serializes on the avcodec class, the same lock used by start(). */
    public void release() throws Exception {
        synchronized (org.bytedeco.javacpp.avcodec.class) {
            releaseUnsafe();
        }
    }
    /**
     * Frees every native resource allocated by startUnsafe() and resets all
     * per-session state. Safe to call repeatedly: each branch nulls its field.
     * Not synchronized; use release() for the thread-safe version.
     *
     * @throws Exception declared for subclass overrides; this body does not throw
     */
    public void releaseUnsafe() throws Exception {
        // A pkt2 with size > 0 means pkt still owns data from av_read_frame.
        if (pkt != null && pkt2 != null) {
            if (pkt2.size() > 0) {
                av_free_packet(pkt);
            }
            pkt = pkt2 = null;
        }
        // Free the RGB image
        if (buffer_rgb != null) {
            av_free(buffer_rgb);
            buffer_rgb = null;
        }
        if (picture_rgb != null) {
            avcodec_free_frame(picture_rgb);
            picture_rgb = null;
        }
        // Free the native format picture frame
        if (picture != null) {
            avcodec_free_frame(picture);
            picture = null;
        }
        // Close the video codec
        if (video_c != null) {
            avcodec_close(video_c);
            video_c = null;
        }
        // Free the audio samples frame
        if (samples_frame != null) {
            avcodec_free_frame(samples_frame);
            samples_frame = null;
        }
        // Close the audio codec
        if (audio_c != null) {
            avcodec_close(audio_c);
            audio_c = null;
        }
        // Close the video file
        if (oc != null && !oc.isNull()) {
            avformat_close_input(oc);
            oc = null;
        }
        if (img_convert_ctx != null) {
            sws_freeContext(img_convert_ctx);
            img_convert_ctx = null;
        }
        // Reset bookkeeping so a subsequent start() begins from a clean state.
        got_frame = null;
        return_image = null;
        frameGrabbed = false;
        frame = null;
        timestamp = 0;
        frameNumber = 0;
    }
  164. @Override protected void finalize() throws Throwable {
  165. super.finalize();
  166. release();
  167. }
    private String filename;                 // input path, device, or URL given at construction
    private AVFormatContext oc;              // demuxer context; non-null between start() and release()
    private AVStream video_st, audio_st;     // first video and audio streams found, if any
    private AVCodecContext video_c, audio_c; // codec contexts of the above streams
    private AVFrame picture, picture_rgb;    // decoded native-format frame and converted BGR/GRAY frame
    private BytePointer buffer_rgb;          // pixel buffer backing picture_rgb
    private AVFrame samples_frame;           // decoded audio frame
    private BytePointer[] samples_ptr;       // per-plane native pointers into samples_frame data
    private Buffer[] samples_buf;            // NIO views over samples_ptr, exposed via Frame.samples
    private AVPacket pkt, pkt2;              // pkt: as read; pkt2: partially consumed audio packet
    private int sizeof_pkt;                  // native sizeof(AVPacket), used to memcpy pkt into pkt2
    private int[] got_frame;                 // decoder output flag (nonzero when a full frame was produced)
    private SwsContext img_convert_ctx;      // cached swscale context for pixel-format conversion
    private IplImage return_image;           // OpenCV header wrapping the current video frame
    private boolean frameGrabbed;            // true when setTimestamp() left a decoded frame pending
    private Frame frame;                     // reusable container returned by grabFrame()
  184. @Override public double getGamma() {
  185. // default to a gamma of 2.2 for cheap Webcams, DV cameras, etc.
  186. if (gamma == 0.0) {
  187. return 2.2;
  188. } else {
  189. return gamma;
  190. }
  191. }
  192. @Override public String getFormat() {
  193. if (oc == null) {
  194. return super.getFormat();
  195. } else {
  196. return oc.iformat().name().getString();
  197. }
  198. }
  199. @Override public int getImageWidth() {
  200. return return_image == null ? super.getImageWidth() : return_image.width();
  201. }
  202. @Override public int getImageHeight() {
  203. return return_image == null ? super.getImageHeight() : return_image.height();
  204. }
  205. @Override public int getAudioChannels() {
  206. return audio_c == null ? super.getAudioChannels() : audio_c.channels();
  207. }
  208. @Override public int getPixelFormat() {
  209. if (imageMode == ImageMode.COLOR || imageMode == ImageMode.GRAY) {
  210. if (pixelFormat == AV_PIX_FMT_NONE) {
  211. return imageMode == ImageMode.COLOR ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_GRAY8;
  212. } else {
  213. return pixelFormat;
  214. }
  215. } else if (video_c != null) { // RAW
  216. return video_c.pix_fmt();
  217. } else {
  218. return super.getPixelFormat();
  219. }
  220. }
  221. @Override public double getFrameRate() {
  222. if (video_st == null) {
  223. return super.getFrameRate();
  224. } else {
  225. AVRational r = video_st.r_frame_rate();
  226. return (double)r.num() / r.den();
  227. }
  228. }
  229. @Override public int getSampleFormat() {
  230. return audio_c == null ? super.getSampleFormat() : audio_c.sample_fmt();
  231. }
  232. @Override public int getSampleRate() {
  233. return audio_c == null ? super.getSampleRate() : audio_c.sample_rate();
  234. }
  235. @Override public void setFrameNumber(int frameNumber) throws Exception {
  236. // best guess, AVSEEK_FLAG_FRAME has not been implemented in FFmpeg...
  237. setTimestamp(Math.round(1000000L * frameNumber / getFrameRate()));
  238. }
    /**
     * Seeks to the given position (microseconds) by seeking backward to the
     * nearest seek point, flushing the codecs, then decoding forward frame by
     * frame until this.timestamp reaches the target.
     *
     * @param timestamp target position in microseconds
     * @throws Exception if avformat_seek_file() fails
     */
    @Override public void setTimestamp(long timestamp) throws Exception {
        int ret;
        if (oc == null) {
            super.setTimestamp(timestamp);
        } else {
            // Convert microseconds to FFmpeg's AV_TIME_BASE units (the parameter
            // now shadows the this.timestamp field, which stays in microseconds).
            timestamp = timestamp * AV_TIME_BASE / 1000000L;
            /* add the stream start time */
            if (oc.start_time() != AV_NOPTS_VALUE) {
                timestamp += oc.start_time();
            }
            if ((ret = avformat_seek_file(oc, -1, Long.MIN_VALUE, timestamp, Long.MAX_VALUE, AVSEEK_FLAG_BACKWARD)) < 0) {
                throw new Exception("avformat_seek_file() error " + ret + ": Could not seek file to timestamp " + timestamp + ".");
            }
            // Drop frames buffered inside the decoders before the seek point.
            if (video_c != null) {
                avcodec_flush_buffers(video_c);
            }
            if (audio_c != null) {
                avcodec_flush_buffers(audio_c);
            }
            // NOTE(review): pkt2 is only allocated in startUnsafe(); this would
            // NPE if oc were non-null without start() having run — verify callers.
            if (pkt2.size() > 0) {
                pkt2.size(0);
                av_free_packet(pkt);
            }
            /* comparing to timestamp +/- 1 avoids rounding issues for framerates
               which are no proper divisors of 1000000, e.g. where
               av_frame_get_best_effort_timestamp in grabFrame sets this.timestamp
               to ...666 and the given timestamp has been rounded to ...667
               (or vice versa) */
            while (this.timestamp > timestamp + 1 && grabFrame(false) != null) {
                // flush frames if seeking backwards
            }
            while (this.timestamp < timestamp - 1 && grabFrame(false) != null) {
                // decode up to the desired frame
            }
            // Leave the decoded frame pending so the next grab() returns it
            // without decoding another frame.
            if (video_c != null) {
                frameGrabbed = true;
            }
        }
    }
  279. @Override public int getLengthInFrames() {
  280. // best guess...
  281. return (int)(getLengthInTime() * getFrameRate() / 1000000L);
  282. }
  283. @Override public long getLengthInTime() {
  284. return oc.duration() * 1000000L / AV_TIME_BASE;
  285. }
    /** Thread-safe start: serializes native setup on the avcodec class, mirroring release(). */
    public void start() throws Exception {
        synchronized (org.bytedeco.javacpp.avcodec.class) {
            startUnsafe();
        }
    }
    /**
     * Opens the input, locates the first video and audio streams, opens their
     * decoders, and allocates the frames and buffers used by grabFrame().
     * Not synchronized; use start() for the thread-safe version.
     *
     * @throws Exception if the input cannot be opened, no usable stream is
     *         found, or a codec/frame allocation fails
     */
    public void startUnsafe() throws Exception {
        int ret;
        // Reset all per-session state.
        img_convert_ctx = null;
        oc = new AVFormatContext(null);
        video_c = null;
        audio_c = null;
        pkt = new AVPacket();
        pkt2 = new AVPacket();
        sizeof_pkt = pkt.sizeof();
        got_frame = new int[1];
        return_image = null;
        frameGrabbed = false;
        frame = new Frame();
        timestamp = 0;
        frameNumber = 0;
        pkt2.size(0);
        // Open video file
        AVInputFormat f = null;
        if (format != null && format.length() > 0) {
            if ((f = av_find_input_format(format)) == null) {
                throw new Exception("av_find_input_format() error: Could not find input format \"" + format + "\".");
            }
        }
        // Pass user-requested capture parameters down to the demuxer/device.
        AVDictionary options = new AVDictionary(null);
        if (frameRate > 0) {
            AVRational r = av_d2q(frameRate, 1001000);
            av_dict_set(options, "framerate", r.num() + "/" + r.den(), 0);
        }
        if (pixelFormat >= 0) {
            av_dict_set(options, "pixel_format", av_get_pix_fmt_name(pixelFormat).getString(), 0);
        } else if (imageMode != ImageMode.RAW) {
            av_dict_set(options, "pixel_format", imageMode == ImageMode.COLOR ? "bgr24" : "gray8", 0);
        }
        if (imageWidth > 0 && imageHeight > 0) {
            av_dict_set(options, "video_size", imageWidth + "x" + imageHeight, 0);
        }
        if (sampleRate > 0) {
            av_dict_set(options, "sample_rate", "" + sampleRate, 0);
        }
        if (audioChannels > 0) {
            av_dict_set(options, "channels", "" + audioChannels, 0);
        }
        if ((ret = avformat_open_input(oc, filename, f, options)) < 0) {
            // Retry without forcing a pixel format, which some inputs reject.
            av_dict_set(options, "pixel_format", null, 0);
            if ((ret = avformat_open_input(oc, filename, f, options)) < 0) {
                throw new Exception("avformat_open_input() error " + ret + ": Could not open input \"" + filename + "\". (Has setFormat() been called?)");
            }
        }
        av_dict_free(options);
        // Retrieve stream information
        if ((ret = avformat_find_stream_info(oc, (PointerPointer)null)) < 0) {
            throw new Exception("avformat_find_stream_info() error " + ret + ": Could not find stream information.");
        }
        // Dump information about file onto standard error
        av_dump_format(oc, 0, filename, 0);
        // Find the first video and audio stream
        video_st = audio_st = null;
        int nb_streams = oc.nb_streams();
        for (int i = 0; i < nb_streams; i++) {
            AVStream st = oc.streams(i);
            // Get a pointer to the codec context for the video or audio stream
            AVCodecContext c = st.codec();
            if (video_st == null && c.codec_type() == AVMEDIA_TYPE_VIDEO) {
                video_st = st;
                video_c = c;
            } else if (audio_st == null && c.codec_type() == AVMEDIA_TYPE_AUDIO) {
                audio_st = st;
                audio_c = c;
            }
        }
        if (video_st == null && audio_st == null) {
            throw new Exception("Did not find a video or audio stream inside \"" + filename + "\".");
        }
        if (video_st != null) {
            // Find the decoder for the video stream
            AVCodec codec = avcodec_find_decoder(video_c.codec_id());
            if (codec == null) {
                throw new Exception("avcodec_find_decoder() error: Unsupported video format or codec not found: " + video_c.codec_id() + ".");
            }
            // Open video codec
            if ((ret = avcodec_open2(video_c, codec, (PointerPointer)null)) < 0) {
                throw new Exception("avcodec_open2() error " + ret + ": Could not open video codec.");
            }
            // Hack to correct wrong frame rates that seem to be generated by some codecs
            if (video_c.time_base().num() > 1000 && video_c.time_base().den() == 1) {
                video_c.time_base().den(1000);
            }
            // Allocate video frame and an AVFrame structure for the RGB image
            if ((picture = avcodec_alloc_frame()) == null) {
                throw new Exception("avcodec_alloc_frame() error: Could not allocate raw picture frame.");
            }
            if ((picture_rgb = avcodec_alloc_frame()) == null) {
                throw new Exception("avcodec_alloc_frame() error: Could not allocate RGB picture frame.");
            }
            // Requested output size wins; fall back to the codec's native size.
            int width = getImageWidth() > 0 ? getImageWidth() : video_c.width();
            int height = getImageHeight() > 0 ? getImageHeight() : video_c.height();
            switch (imageMode) {
                case COLOR:
                case GRAY:
                    int fmt = getPixelFormat();
                    // Determine required buffer size and allocate buffer
                    int size = avpicture_get_size(fmt, width, height);
                    buffer_rgb = new BytePointer(av_malloc(size));
                    // Assign appropriate parts of buffer to image planes in picture_rgb
                    // Note that picture_rgb is an AVFrame, but AVFrame is a superset of AVPicture
                    avpicture_fill(new AVPicture(picture_rgb), buffer_rgb, fmt, width, height);
                    // Channel count is corrected later in processImage() from the actual stride.
                    return_image = IplImage.createHeader(width, height, IPL_DEPTH_8U, 1);
                    break;
                case RAW:
                    buffer_rgb = null;
                    return_image = IplImage.createHeader(video_c.width(), video_c.height(), IPL_DEPTH_8U, 1);
                    break;
                default:
                    assert false;
            }
        }
        if (audio_st != null) {
            // Find the decoder for the audio stream
            AVCodec codec = avcodec_find_decoder(audio_c.codec_id());
            if (codec == null) {
                throw new Exception("avcodec_find_decoder() error: Unsupported audio format or codec not found: " + audio_c.codec_id() + ".");
            }
            // Open audio codec
            if ((ret = avcodec_open2(audio_c, codec, (PointerPointer)null)) < 0) {
                throw new Exception("avcodec_open2() error " + ret + ": Could not open audio codec.");
            }
            // Allocate audio samples frame
            if ((samples_frame = avcodec_alloc_frame()) == null) {
                throw new Exception("avcodec_alloc_frame() error: Could not allocate audio frame.");
            }
        }
    }
    /** Stops grabbing by releasing all native resources; start() may be called again afterwards. */
    public void stop() throws Exception {
        release();
    }
    /**
     * Reads and discards numBuffers+1 packets, e.g. to let a capture device settle.
     *
     * @throws Exception if start() has not been called
     */
    public void trigger() throws Exception {
        if (oc == null || oc.isNull()) {
            throw new Exception("Could not trigger: No AVFormatContext. (Has start() been called?)");
        }
        // Drop any partially consumed audio packet first.
        if (pkt2.size() > 0) {
            pkt2.size(0);
            av_free_packet(pkt);
        }
        for (int i = 0; i < numBuffers+1; i++) {
            if (av_read_frame(oc, pkt) < 0) {
                return; // end of stream: nothing more to discard
            }
            av_free_packet(pkt);
        }
    }
    /**
     * Converts the freshly decoded {@code picture} into {@code return_image}:
     * COLOR/GRAY modes run an swscale conversion (optionally deinterlacing
     * first), RAW mode wraps the native frame data without copying.
     *
     * @throws Exception if the swscale conversion context cannot be created
     */
    private void processImage() throws Exception {
        switch (imageMode) {
            case COLOR:
            case GRAY:
                // Deinterlace Picture
                if (deinterlace) {
                    AVPicture p = new AVPicture(picture);
                    avpicture_deinterlace(p, p, video_c.pix_fmt(), video_c.width(), video_c.height());
                }
                // Convert the image into BGR or GRAY format that OpenCV uses
                img_convert_ctx = sws_getCachedContext(img_convert_ctx,
                        video_c.width(), video_c.height(), video_c.pix_fmt(),
                        getImageWidth(), getImageHeight(), getPixelFormat(), SWS_BILINEAR,
                        null, null, (DoublePointer)null);
                if (img_convert_ctx == null) {
                    throw new Exception("sws_getCachedContext() error: Cannot initialize the conversion context.");
                }
                // Convert the image from its native format to RGB or GRAY
                sws_scale(img_convert_ctx, new PointerPointer(picture), picture.linesize(), 0,
                        video_c.height(), new PointerPointer(picture_rgb), picture_rgb.linesize());
                return_image.imageData(buffer_rgb);
                return_image.widthStep(picture_rgb.linesize(0));
                break;
            case RAW:
                assert video_c.width() == return_image.width() &&
                       video_c.height() == return_image.height();
                // Zero-copy: point the IplImage header at the decoder's own buffer.
                return_image.imageData(picture.data(0));
                return_image.widthStep(picture.linesize(0));
                break;
            default:
                assert false;
        }
        // Derive size and channel count from the actual stride of the frame.
        return_image.imageSize(return_image.height() * return_image.widthStep());
        return_image.nChannels(return_image.widthStep() / return_image.width());
    }
  476. public IplImage grab() throws Exception {
  477. Frame f = grabFrame(true, false, false);
  478. return f != null ? f.image : null;
  479. }
  480. @Override public Frame grabFrame() throws Exception {
  481. return grabFrame(true, true, false);
  482. }
  483. public Frame grabFrame(boolean processImage) throws Exception {
  484. return grabFrame(processImage, true, false);
  485. }
  486. public Frame grabKeyFrame() throws Exception {
  487. return grabFrame(true, false, true);
  488. }
    /**
     * Reads and decodes packets until one video or audio frame is produced.
     *
     * @param processImage convert the decoded video frame via processImage()
     * @param doAudio      also decode audio packets (otherwise they are skipped)
     * @param keyFrames    only accept key (intra-coded) video frames
     * @return the reusable {@code frame} filled with an image or samples, or null at end of stream
     * @throws Exception if start() has not been called
     */
    private Frame grabFrame(boolean processImage, boolean doAudio, boolean keyFrames) throws Exception {
        if (oc == null || oc.isNull()) {
            throw new Exception("Could not grab: No AVFormatContext. (Has start() been called?)");
        }
        // Reset the reusable frame container.
        frame.keyFrame = false;
        frame.image = null;
        frame.sampleRate = 0;
        frame.audioChannels = 0;
        frame.samples = null;
        frame.opaque = null;
        // A frame decoded by setTimestamp() may be pending; return it without reading packets.
        if (frameGrabbed) {
            frameGrabbed = false;
            if (processImage) {
                processImage();
            }
            frame.keyFrame = picture.key_frame() != 0;
            frame.image = return_image;
            frame.opaque = picture;
            return frame;
        }
        boolean done = false;
        while (!done) {
            // pkt2 holds the unconsumed remainder of an audio packet; read a
            // new packet only once it is exhausted.
            if (pkt2.size() <= 0) {
                if (av_read_frame(oc, pkt) < 0) {
                    if (video_st != null) {
                        // The video codec may have buffered some frames
                        // Feed an empty packet to drain the decoder.
                        pkt.stream_index(video_st.index());
                        pkt.flags(AV_PKT_FLAG_KEY);
                        pkt.data(null);
                        pkt.size(0);
                    } else {
                        return null;
                    }
                }
            }
            // Is this a packet from the video stream?
            if (video_st != null && pkt.stream_index() == video_st.index()
                    && (!keyFrames || pkt.flags() == AV_PKT_FLAG_KEY)) {
                // Decode video frame
                int len = avcodec_decode_video2(video_c, picture, got_frame, pkt);
                // Did we get a video frame?
                if (len >= 0 && got_frame[0] != 0
                        && (!keyFrames || picture.pict_type() == AV_PICTURE_TYPE_I)) {
                    long pts = av_frame_get_best_effort_timestamp(picture);
                    AVRational time_base = video_st.time_base();
                    timestamp = 1000000L * pts * time_base.num() / time_base.den();
                    // best guess, AVCodecContext.frame_number = number of decoded frames...
                    frameNumber = (int)(timestamp * getFrameRate() / 1000000L);
                    if (processImage) {
                        processImage();
                    }
                    done = true;
                    frame.keyFrame = picture.key_frame() != 0;
                    frame.image = return_image;
                    frame.opaque = picture;
                } else if (pkt.data() == null && pkt.size() == 0) {
                    // The drain packet produced nothing: true end of stream.
                    return null;
                }
            } else if (doAudio && audio_st != null && pkt.stream_index() == audio_st.index()) {
                if (pkt2.size() <= 0) {
                    // HashMap is unacceptably slow on Android
                    // pkt2.put(pkt);
                    BytePointer.memcpy(pkt2, pkt, sizeof_pkt);
                }
                avcodec_get_frame_defaults(samples_frame);
                // Decode audio frame
                int len = avcodec_decode_audio4(audio_c, samples_frame, got_frame, pkt2);
                if (len <= 0) {
                    // On error, trash the whole packet
                    pkt2.size(0);
                } else {
                    // Advance past the consumed bytes; one packet may contain several frames.
                    pkt2.data(pkt2.data().position(len));
                    pkt2.size(pkt2.size() - len);
                    if (got_frame[0] != 0) {
                        long pts = av_frame_get_best_effort_timestamp(samples_frame);
                        AVRational time_base = audio_st.time_base();
                        timestamp = 1000000L * pts * time_base.num() / time_base.den();
                        /* if a frame has been decoded, output it */
                        done = true;
                        int sample_format = samples_frame.format();
                        // Planar formats expose one data plane per channel; packed formats use one plane.
                        int planes = av_sample_fmt_is_planar(sample_format) != 0 ? (int)samples_frame.channels() : 1;
                        int data_size = av_samples_get_buffer_size((IntPointer)null, audio_c.channels(),
                                samples_frame.nb_samples(), audio_c.sample_fmt(), 1) / planes;
                        if (samples_buf == null || samples_buf.length != planes) {
                            samples_ptr = new BytePointer[planes];
                            samples_buf = new Buffer[planes];
                        }
                        frame.keyFrame = samples_frame.key_frame() != 0;
                        frame.sampleRate = audio_c.sample_rate();
                        frame.audioChannels = audio_c.channels();
                        frame.samples = samples_buf;
                        frame.opaque = samples_frame;
                        int sample_size = data_size / av_get_bytes_per_sample(sample_format);
                        for (int i = 0; i < planes; i++) {
                            BytePointer p = samples_frame.data(i);
                            // Rebuild the NIO views only when the native pointers change.
                            if (!p.equals(samples_ptr[i]) || samples_ptr[i].capacity() < data_size) {
                                samples_ptr[i] = p.capacity(data_size);
                                ByteBuffer b = p.asBuffer();
                                switch (sample_format) {
                                    case AV_SAMPLE_FMT_U8:
                                    case AV_SAMPLE_FMT_U8P: samples_buf[i] = b; break;
                                    case AV_SAMPLE_FMT_S16:
                                    case AV_SAMPLE_FMT_S16P: samples_buf[i] = b.asShortBuffer(); break;
                                    case AV_SAMPLE_FMT_S32:
                                    case AV_SAMPLE_FMT_S32P: samples_buf[i] = b.asIntBuffer(); break;
                                    case AV_SAMPLE_FMT_FLT:
                                    case AV_SAMPLE_FMT_FLTP: samples_buf[i] = b.asFloatBuffer(); break;
                                    case AV_SAMPLE_FMT_DBL:
                                    case AV_SAMPLE_FMT_DBLP: samples_buf[i] = b.asDoubleBuffer(); break;
                                    default: assert false;
                                }
                            }
                            samples_buf[i].position(0).limit(sample_size);
                        }
                    }
                }
            }
            if (pkt2.size() <= 0) {
                // Free the packet that was allocated by av_read_frame
                av_free_packet(pkt);
            }
        }
        return frame;
    }
  613. }