/gst-plugins-bad0.10/gst/videoparsers/gsth264parse.c

Source: https://review.tizen.org/git/ · C · 1998 lines (listing below is truncated)

  1. /* GStreamer H.264 Parser
  2. * Copyright (C) <2010> Collabora ltd
  3. * Copyright (C) <2010> Nokia Corporation
  4. * Copyright (C) <2011> Intel Corporation
  5. *
  6. * Copyright (C) <2010> Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
  7. * Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
  8. *
  9. * This library is free software; you can redistribute it and/or
  10. * modify it under the terms of the GNU Library General Public
  11. * License as published by the Free Software Foundation; either
  12. * version 2 of the License, or (at your option) any later version.
  13. *
  14. * This library is distributed in the hope that it will be useful,
  15. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  17. * Library General Public License for more details.
  18. *
  19. * You should have received a copy of the GNU Library General Public
  20. * License along with this library; if not, write to the
  21. * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
  22. * Boston, MA 02111-1307, USA.
  23. */
  24. #ifdef HAVE_CONFIG_H
  25. # include "config.h"
  26. #endif
  27. #include <gst/base/gstbytereader.h>
  28. #include <gst/base/gstbytewriter.h>
  29. #include <gst/base/gstadapter.h>
  30. #include <gst/video/video.h>
  31. #include "gsth264parse.h"
  32. #include <string.h>
  33. GST_DEBUG_CATEGORY (h264_parse_debug);
  34. #define GST_CAT_DEFAULT h264_parse_debug
  35. /* Modification: only the packetized format (3GPP) needs to be set to passthrough */
  36. #define CHECK_PACKETIZED_FOR_PASSTHROUGH
  37. #define DEFAULT_CONFIG_INTERVAL (0)
  38. enum
  39. {
  40. PROP_0,
  41. PROP_CONFIG_INTERVAL,
  42. PROP_LAST
  43. };
  44. enum
  45. {
  46. GST_H264_PARSE_FORMAT_NONE,
  47. GST_H264_PARSE_FORMAT_AVC,
  48. GST_H264_PARSE_FORMAT_BYTE
  49. };
  50. enum
  51. {
  52. GST_H264_PARSE_ALIGN_NONE = 0,
  53. GST_H264_PARSE_ALIGN_NAL,
  54. GST_H264_PARSE_ALIGN_AU
  55. };
  56. static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
  57. GST_PAD_SINK,
  58. GST_PAD_ALWAYS,
  59. GST_STATIC_CAPS ("video/x-h264"));
  60. static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
  61. GST_PAD_SRC,
  62. GST_PAD_ALWAYS,
  63. GST_STATIC_CAPS ("video/x-h264, parsed = (boolean) true, "
  64. "stream-format=(string) { avc, byte-stream }, "
  65. "alignment=(string) { au, nal }"));
  66. GST_BOILERPLATE (GstH264Parse, gst_h264_parse, GstBaseParse,
  67. GST_TYPE_BASE_PARSE);
  68. static void gst_h264_parse_finalize (GObject * object);
  69. static gboolean gst_h264_parse_start (GstBaseParse * parse);
  70. static gboolean gst_h264_parse_stop (GstBaseParse * parse);
  71. static gboolean gst_h264_parse_check_valid_frame (GstBaseParse * parse,
  72. GstBaseParseFrame * frame, guint * framesize, gint * skipsize);
  73. static GstFlowReturn gst_h264_parse_parse_frame (GstBaseParse * parse,
  74. GstBaseParseFrame * frame);
  75. static GstFlowReturn gst_h264_parse_pre_push_frame (GstBaseParse * parse,
  76. GstBaseParseFrame * frame);
  77. static void gst_h264_parse_set_property (GObject * object, guint prop_id,
  78. const GValue * value, GParamSpec * pspec);
  79. static void gst_h264_parse_get_property (GObject * object, guint prop_id,
  80. GValue * value, GParamSpec * pspec);
  81. static gboolean gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps);
  82. static GstCaps *gst_h264_parse_get_caps (GstBaseParse * parse);
  83. static GstFlowReturn gst_h264_parse_chain (GstPad * pad, GstBuffer * buffer);
  84. static gboolean gst_h264_parse_event (GstBaseParse * parse, GstEvent * event);
  85. static gboolean gst_h264_parse_src_event (GstBaseParse * parse,
  86. GstEvent * event);
  87. static void
  88. gst_h264_parse_base_init (gpointer g_class)
  89. {
  90. GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
  91. gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);
  92. gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
  93. gst_element_class_set_details_simple (gstelement_class, "H.264 parser",
  94. "Codec/Parser/Converter/Video",
  95. "Parses H.264 streams",
  96. "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
  97. GST_DEBUG_CATEGORY_INIT (h264_parse_debug, "h264parse", 0, "h264 parser");
  98. }
  99. static void
  100. gst_h264_parse_class_init (GstH264ParseClass * klass)
  101. {
  102. GObjectClass *gobject_class = (GObjectClass *) klass;
  103. GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
  104. gobject_class->finalize = gst_h264_parse_finalize;
  105. gobject_class->set_property = gst_h264_parse_set_property;
  106. gobject_class->get_property = gst_h264_parse_get_property;
  107. g_object_class_install_property (gobject_class, PROP_CONFIG_INTERVAL,
  108. g_param_spec_uint ("config-interval",
  109. "SPS PPS Send Interval",
  110. "Send SPS and PPS Insertion Interval in seconds (sprop parameter sets "
  111. "will be multiplexed in the data stream when detected.) (0 = disabled)",
  112. 0, 3600, DEFAULT_CONFIG_INTERVAL,
  113. G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  114. /* Override BaseParse vfuncs */
  115. parse_class->start = GST_DEBUG_FUNCPTR (gst_h264_parse_start);
  116. parse_class->stop = GST_DEBUG_FUNCPTR (gst_h264_parse_stop);
  117. parse_class->check_valid_frame =
  118. GST_DEBUG_FUNCPTR (gst_h264_parse_check_valid_frame);
  119. parse_class->parse_frame = GST_DEBUG_FUNCPTR (gst_h264_parse_parse_frame);
  120. parse_class->pre_push_frame =
  121. GST_DEBUG_FUNCPTR (gst_h264_parse_pre_push_frame);
  122. parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_set_caps);
  123. parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_get_caps);
  124. parse_class->event = GST_DEBUG_FUNCPTR (gst_h264_parse_event);
  125. parse_class->src_event = GST_DEBUG_FUNCPTR (gst_h264_parse_src_event);
  126. }
  127. static void
  128. gst_h264_parse_init (GstH264Parse * h264parse, GstH264ParseClass * g_class)
  129. {
  130. h264parse->frame_out = gst_adapter_new ();
  131. /* retrieve and intercept baseparse.
  132. * Quite HACKish, but fairly OK since it is needed to perform avc packet
  133. * splitting, which is the penultimate de-parsing */
  134. h264parse->parse_chain =
  135. GST_PAD_CHAINFUNC (GST_BASE_PARSE_SINK_PAD (h264parse));
  136. gst_pad_set_chain_function (GST_BASE_PARSE_SINK_PAD (h264parse),
  137. gst_h264_parse_chain);
  138. }
  139. static void
  140. gst_h264_parse_finalize (GObject * object)
  141. {
  142. GstH264Parse *h264parse = GST_H264_PARSE (object);
  143. g_object_unref (h264parse->frame_out);
  144. G_OBJECT_CLASS (parent_class)->finalize (object);
  145. }
  146. static void
  147. gst_h264_parse_reset_frame (GstH264Parse * h264parse)
  148. {
  149. GST_DEBUG_OBJECT (h264parse, "reset frame");
  150. /* done parsing; reset state */
  151. h264parse->nalu.valid = FALSE;
  152. h264parse->nalu.offset = 0;
  153. h264parse->nalu.sc_offset = 0;
  154. h264parse->nalu.size = 0;
  155. h264parse->current_off = 0;
  156. h264parse->picture_start = FALSE;
  157. h264parse->update_caps = FALSE;
  158. h264parse->idr_pos = -1;
  159. h264parse->sei_pos = -1;
  160. h264parse->keyframe = FALSE;
  161. h264parse->frame_start = FALSE;
  162. gst_adapter_clear (h264parse->frame_out);
  163. }
  164. static void
  165. gst_h264_parse_reset (GstH264Parse * h264parse)
  166. {
  167. h264parse->width = 0;
  168. h264parse->height = 0;
  169. h264parse->fps_num = 0;
  170. h264parse->fps_den = 0;
  171. h264parse->aspect_ratio_idc = 0;
  172. h264parse->sar_width = 0;
  173. h264parse->sar_height = 0;
  174. h264parse->upstream_par_n = -1;
  175. h264parse->upstream_par_d = -1;
  176. gst_buffer_replace (&h264parse->codec_data, NULL);
  177. h264parse->nal_length_size = 4;
  178. h264parse->packetized = FALSE;
  179. h264parse->align = GST_H264_PARSE_ALIGN_NONE;
  180. h264parse->format = GST_H264_PARSE_FORMAT_NONE;
  181. h264parse->last_report = GST_CLOCK_TIME_NONE;
  182. h264parse->push_codec = FALSE;
  183. h264parse->dts = GST_CLOCK_TIME_NONE;
  184. h264parse->ts_trn_nb = GST_CLOCK_TIME_NONE;
  185. h264parse->do_ts = TRUE;
  186. h264parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
  187. h264parse->force_key_unit_event = NULL;
  188. gst_h264_parse_reset_frame (h264parse);
  189. }
  190. static gboolean
  191. gst_h264_parse_start (GstBaseParse * parse)
  192. {
  193. GstH264Parse *h264parse = GST_H264_PARSE (parse);
  194. GST_DEBUG_OBJECT (parse, "start");
  195. gst_h264_parse_reset (h264parse);
  196. h264parse->nalparser = gst_h264_nal_parser_new ();
  197. h264parse->dts = GST_CLOCK_TIME_NONE;
  198. h264parse->ts_trn_nb = GST_CLOCK_TIME_NONE;
  199. h264parse->sei_pic_struct_pres_flag = FALSE;
  200. h264parse->sei_pic_struct = 0;
  201. h264parse->field_pic_flag = 0;
  202. gst_base_parse_set_min_frame_size (parse, 6);
  203. return TRUE;
  204. }
  205. static gboolean
  206. gst_h264_parse_stop (GstBaseParse * parse)
  207. {
  208. guint i;
  209. GstH264Parse *h264parse = GST_H264_PARSE (parse);
  210. GST_DEBUG_OBJECT (parse, "stop");
  211. gst_h264_parse_reset (h264parse);
  212. for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++)
  213. gst_buffer_replace (&h264parse->sps_nals[i], NULL);
  214. for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++)
  215. gst_buffer_replace (&h264parse->pps_nals[i], NULL);
  216. gst_h264_nal_parser_free (h264parse->nalparser);
  217. return TRUE;
  218. }
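/* map our format/alignment enums to the strings used in caps */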
  219. static const gchar *
  220. gst_h264_parse_get_string (GstH264Parse * parse, gboolean format, gint code)
  221. {
  222. if (format) {
  223. switch (code) {
  224. case GST_H264_PARSE_FORMAT_AVC:
  225. return "avc";
  226. case GST_H264_PARSE_FORMAT_BYTE:
  227. return "byte-stream";
  228. default:
  229. return "none";
  230. }
  231. } else {
  232. switch (code) {
  233. case GST_H264_PARSE_ALIGN_NAL:
  234. return "nal";
  235. case GST_H264_PARSE_ALIGN_AU:
  236. return "au";
  237. default:
  238. return "none";
  239. }
  240. }
  241. }
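/* read stream-format and alignment back out of fixed caps */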
  242. static void
  243. gst_h264_parse_format_from_caps (GstCaps * caps, guint * format, guint * align)
  244. {
  245. g_return_if_fail (gst_caps_is_fixed (caps));
  246. GST_DEBUG ("parsing caps: %" GST_PTR_FORMAT, caps);
  247. if (format)
  248. *format = GST_H264_PARSE_FORMAT_NONE;
  249. if (align)
  250. *align = GST_H264_PARSE_ALIGN_NONE;
  251. if (caps && gst_caps_get_size (caps) > 0) {
  252. GstStructure *s = gst_caps_get_structure (caps, 0);
  253. const gchar *str = NULL;
  254. if (format) {
  255. if ((str = gst_structure_get_string (s, "stream-format"))) {
  256. if (strcmp (str, "avc") == 0)
  257. *format = GST_H264_PARSE_FORMAT_AVC;
  258. else if (strcmp (str, "byte-stream") == 0)
  259. *format = GST_H264_PARSE_FORMAT_BYTE;
  260. }
  261. }
  262. if (align) {
  263. if ((str = gst_structure_get_string (s, "alignment"))) {
  264. if (strcmp (str, "au") == 0)
  265. *align = GST_H264_PARSE_ALIGN_AU;
  266. else if (strcmp (str, "nal") == 0)
  267. *align = GST_H264_PARSE_ALIGN_NAL;
  268. }
  269. }
  270. }
  271. }
  272. /* check downstream caps to configure format and alignment */
  273. static void
  274. gst_h264_parse_negotiate (GstH264Parse * h264parse, GstCaps * in_caps)
  275. {
  276. GstCaps *caps;
  277. guint format = GST_H264_PARSE_FORMAT_NONE;
  278. guint align = GST_H264_PARSE_ALIGN_NONE;
  279. g_return_if_fail ((in_caps == NULL) || gst_caps_is_fixed (in_caps));
  280. caps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (h264parse));
  281. GST_DEBUG_OBJECT (h264parse, "allowed caps: %" GST_PTR_FORMAT, caps);
  282. /* concentrate on leading structure, since decodebin2 parser
  283. * capsfilter always includes parser template caps */
  284. if (caps) {
  285. caps = gst_caps_make_writable (caps);
  286. gst_caps_truncate (caps);
  287. GST_DEBUG_OBJECT (h264parse, "negotiating with caps: %" GST_PTR_FORMAT,
  288. caps);
  289. }
  290. if (in_caps && caps) {
  291. if (gst_caps_can_intersect (in_caps, caps)) {
  292. GST_DEBUG_OBJECT (h264parse, "downstream accepts upstream caps");
  293. gst_h264_parse_format_from_caps (in_caps, &format, &align);
  294. gst_caps_unref (caps);
  295. caps = NULL;
  296. }
  297. }
  298. if (caps) {
  299. /* fixate to avoid ambiguity with lists when parsing */
  300. gst_pad_fixate_caps (GST_BASE_PARSE_SRC_PAD (h264parse), caps);
  301. gst_h264_parse_format_from_caps (caps, &format, &align);
  302. gst_caps_unref (caps);
  303. }
  304. /* default */
  305. if (!format)
  306. format = GST_H264_PARSE_FORMAT_BYTE;
  307. if (!align)
  308. align = GST_H264_PARSE_ALIGN_AU;
  309. GST_DEBUG_OBJECT (h264parse, "selected format %s, alignment %s",
  310. gst_h264_parse_get_string (h264parse, TRUE, format),
  311. gst_h264_parse_get_string (h264parse, FALSE, align));
  312. h264parse->format = format;
  313. h264parse->align = align;
  314. }
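/* (re)wrap a single NAL payload for output: an nal_length_size length prefix
 * for AVC, or a 4-byte 0x00000001 start code for byte-stream */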
  315. static GstBuffer *
  316. gst_h264_parse_wrap_nal (GstH264Parse * h264parse, guint format, guint8 * data,
  317. guint size)
  318. {
  319. GstBuffer *buf;
  320. guint nl = h264parse->nal_length_size;
  321. GST_DEBUG_OBJECT (h264parse, "nal length %d", size);
  322. buf = gst_buffer_new_and_alloc (size + nl + 4);
  323. if (format == GST_H264_PARSE_FORMAT_AVC) {
  324. GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf), size << (32 - 8 * nl));
  325. } else {
  326. /* HACK: nl should always be 4 here, otherwise this won't work.
  327. * There are legit cases where nl in avc stream is 2, but byte-stream
  328. * SC is still always 4 bytes. */
  329. nl = 4;
  330. GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf), 1);
  331. }
  332. GST_BUFFER_SIZE (buf) = size + nl;
  333. memcpy (GST_BUFFER_DATA (buf) + nl, data, size);
  334. return buf;
  335. }
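/* keep a copy of each SPS/PPS payload, indexed by id, so it can later be
 * re-sent or packed into codec_data */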
  336. static void
  337. gst_h264_parser_store_nal (GstH264Parse * h264parse, guint id,
  338. GstH264NalUnitType naltype, GstH264NalUnit * nalu)
  339. {
  340. GstBuffer *buf, **store;
  341. guint size = nalu->size, store_size;
  342. if (naltype == GST_H264_NAL_SPS) {
  343. store_size = GST_H264_MAX_SPS_COUNT;
  344. store = h264parse->sps_nals;
  345. GST_DEBUG_OBJECT (h264parse, "storing sps %u", id);
  346. } else if (naltype == GST_H264_NAL_PPS) {
  347. store_size = GST_H264_MAX_PPS_COUNT;
  348. store = h264parse->pps_nals;
  349. GST_DEBUG_OBJECT (h264parse, "storing pps %u", id);
  350. } else
  351. return;
  352. if (id >= store_size) {
  353. GST_DEBUG_OBJECT (h264parse, "unable to store nal, id out-of-range %d", id);
  354. return;
  355. }
  356. buf = gst_buffer_new_and_alloc (size);
  357. memcpy (GST_BUFFER_DATA (buf), nalu->data + nalu->offset, size);
  358. if (store[id])
  359. gst_buffer_unref (store[id]);
  360. store[id] = buf;
  361. }
  362. /* SPS/PPS/IDR considered key, all others DELTA;
  363. * so downstream waiting for keyframe can pick up at SPS/PPS/IDR */
  364. #define NAL_TYPE_IS_KEY(nt) (((nt) == 5) || ((nt) == 7) || ((nt) == 8))
  365. /* caller guarantees 2 bytes of nal payload */
  366. static void
  367. gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
  368. {
  369. guint nal_type;
  370. GstH264PPS pps;
  371. GstH264SPS sps;
  372. GstH264SEIMessage sei;
  373. GstH264NalParser *nalparser = h264parse->nalparser;
  374. /* nothing to do for broken input */
  375. if (G_UNLIKELY (nalu->size < 2)) {
  376. GST_DEBUG_OBJECT (h264parse, "not processing nal size %u", nalu->size);
  377. return;
  378. }
  379. /* we have a peek as well */
  380. nal_type = nalu->type;
  381. h264parse->keyframe |= NAL_TYPE_IS_KEY (nal_type);
  382. GST_DEBUG_OBJECT (h264parse, "processing nal of type %u, size %u",
  383. nal_type, nalu->size);
  384. switch (nal_type) {
  385. case GST_H264_NAL_SPS:
  386. gst_h264_parser_parse_sps (nalparser, nalu, &sps, TRUE);
  387. GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
  388. h264parse->update_caps = TRUE;
  389. /* found in stream, no need to forcibly push at start */
  390. h264parse->push_codec = FALSE;
  391. gst_h264_parser_store_nal (h264parse, sps.id, nal_type, nalu);
  392. break;
  393. case GST_H264_NAL_PPS:
  394. gst_h264_parser_parse_pps (nalparser, nalu, &pps);
  395. /* parameters might have changed, force caps check */
  396. GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
  397. h264parse->update_caps = TRUE;
  398. /* found in stream, no need to forcibly push at start */
  399. h264parse->push_codec = FALSE;
  400. gst_h264_parser_store_nal (h264parse, pps.id, nal_type, nalu);
  401. break;
  402. case GST_H264_NAL_SEI:
  403. gst_h264_parser_parse_sei (nalparser, nalu, &sei);
  404. switch (sei.payloadType) {
  405. case GST_H264_SEI_PIC_TIMING:
  406. h264parse->sei_pic_struct_pres_flag =
  407. sei.pic_timing.pic_struct_present_flag;
  408. h264parse->sei_cpb_removal_delay = sei.pic_timing.cpb_removal_delay;
  409. if (h264parse->sei_pic_struct_pres_flag)
  410. h264parse->sei_pic_struct = sei.pic_timing.pic_struct;
  411. break;
  412. case GST_H264_SEI_BUF_PERIOD:
  413. if (h264parse->ts_trn_nb == GST_CLOCK_TIME_NONE ||
  414. h264parse->dts == GST_CLOCK_TIME_NONE)
  415. h264parse->ts_trn_nb = 0;
  416. else
  417. h264parse->ts_trn_nb = h264parse->dts;
  418. GST_LOG_OBJECT (h264parse,
  419. "new buffering period; ts_trn_nb updated: %" GST_TIME_FORMAT,
  420. GST_TIME_ARGS (h264parse->ts_trn_nb));
  421. break;
  422. }
  423. /* mark SEI pos */
  424. if (h264parse->sei_pos == -1) {
  425. if (h264parse->format == GST_H264_PARSE_FORMAT_AVC)
  426. h264parse->sei_pos = gst_adapter_available (h264parse->frame_out);
  427. else
  428. h264parse->sei_pos = nalu->sc_offset;
  429. GST_DEBUG_OBJECT (h264parse, "marking SEI in frame at offset %d",
  430. h264parse->sei_pos);
  431. }
  432. break;
  433. case GST_H264_NAL_SLICE:
  434. case GST_H264_NAL_SLICE_DPA:
  435. case GST_H264_NAL_SLICE_DPB:
  436. case GST_H264_NAL_SLICE_DPC:
  437. case GST_H264_NAL_SLICE_IDR:
  438. /* don't need to parse the whole slice (header) here */
  439. if (*(nalu->data + nalu->offset + 1) & 0x80) {
  440. /* means first_mb_in_slice == 0 */
  441. /* real frame data */
  442. GST_DEBUG_OBJECT (h264parse, "first_mb_in_slice = 0");
  443. h264parse->frame_start = TRUE;
  444. }
  445. GST_DEBUG_OBJECT (h264parse, "frame start: %i", h264parse->frame_start);
  446. #ifndef GST_DISABLE_GST_DEBUG
  447. {
  448. GstH264SliceHdr slice;
  449. GstH264ParserResult pres;
  450. pres = gst_h264_parser_parse_slice_hdr (nalparser, nalu, &slice,
  451. FALSE, FALSE);
  452. GST_DEBUG_OBJECT (h264parse,
  453. "parse result %d, first MB: %u, slice type: %u",
  454. pres, slice.first_mb_in_slice, slice.type);
  455. }
  456. #endif
  457. if (G_LIKELY (nal_type != GST_H264_NAL_SLICE_IDR &&
  458. !h264parse->push_codec))
  459. break;
  460. /* if we need to sneak codec NALs into the stream,
  461. * this is a good place, so fake it as IDR
  462. * (which should be at start anyway) */
  463. /* mark where config needs to go if interval expired */
  464. /* mind replacement buffer if applicable */
  465. if (h264parse->idr_pos == -1) {
  466. if (h264parse->format == GST_H264_PARSE_FORMAT_AVC)
  467. h264parse->idr_pos = gst_adapter_available (h264parse->frame_out);
  468. else
  469. h264parse->idr_pos = nalu->sc_offset;
  470. GST_DEBUG_OBJECT (h264parse, "marking IDR in frame at offset %d",
  471. h264parse->idr_pos);
  472. }
  473. /* if SEI precedes (faked) IDR, then we have to insert config there */
  474. if (h264parse->sei_pos >= 0 && h264parse->idr_pos > h264parse->sei_pos) {
  475. h264parse->idr_pos = h264parse->sei_pos;
  476. GST_DEBUG_OBJECT (h264parse, "moved IDR mark to SEI position %d",
  477. h264parse->idr_pos);
  478. }
  479. break;
  480. default:
  481. gst_h264_parser_parse_nal (nalparser, nalu);
  482. }
  483. /* if AVC output needed, collect properly prefixed nal in adapter,
  484. * and use that to replace outgoing buffer data later on */
  485. if (h264parse->format == GST_H264_PARSE_FORMAT_AVC) {
  486. GstBuffer *buf;
  487. #ifdef GST_H264PARSE_MODIFICATION
  488. unsigned int length = 0;
  489. gboolean isnalu = FALSE;
  490. if (nalu->offset + nalu->size >= 3) {
  491. while (length < (nalu->offset + nalu->size - 3)) {
  492. if (nalu->data[length] == 0x00 &&
  493. nalu->data[length+1] == 0x00 &&
  494. nalu->data[length+2] == 0x01) {
  495. isnalu = TRUE;
  496. h264parse->format = GST_H264_PARSE_FORMAT_BYTE;
  497. GST_INFO_OBJECT(h264parse, "H264 format is NALU");
  498. break;
  499. } else {
  500. length++;
  501. }
  502. }
  503. }
  504. if (!isnalu)
  505. #endif
  506. {
  507. GST_INFO_OBJECT (h264parse, "collecting NAL in AVC frame");
  508. buf = gst_h264_parse_wrap_nal (h264parse, h264parse->format,
  509. nalu->data + nalu->offset, nalu->size);
  510. gst_adapter_push (h264parse->frame_out, buf);
  511. }
  512. }
  513. }
  514. /* caller guarantees at least 2 bytes of nal payload for each nal
  515. * returns TRUE if next_nal indicates that nal terminates an AU */
  516. static inline gboolean
  517. gst_h264_parse_collect_nal (GstH264Parse * h264parse, const guint8 * data,
  518. guint size, GstH264NalUnit * nalu)
  519. {
  520. gboolean complete;
  521. GstH264ParserResult parse_res;
  522. GstH264NalUnitType nal_type = nalu->type;
  523. GstH264NalUnit nnalu;
  524. GST_DEBUG_OBJECT (h264parse, "parsing collected nal");
  525. parse_res = gst_h264_parser_identify_nalu (h264parse->nalparser, data,
  526. nalu->offset + nalu->size, size, &nnalu);
  527. if (parse_res == GST_H264_PARSER_ERROR)
  528. return FALSE;
  529. /* determine if AU complete */
  530. GST_LOG_OBJECT (h264parse, "nal type: %d", nal_type);
  531. /* coded slice NAL starts a picture,
  532. * i.e. other types become aggregated in front of it */
  533. h264parse->picture_start |= (nal_type == GST_H264_NAL_SLICE ||
  534. nal_type == GST_H264_NAL_SLICE_DPA || nal_type == GST_H264_NAL_SLICE_IDR);
  535. /* consider a coded slices (IDR or not) to start a picture,
  536. * (so ending the previous one) if first_mb_in_slice == 0
  537. * (non-0 is part of previous one) */
  538. /* NOTE this is not entirely according to Access Unit specs in 7.4.1.2.4,
  539. * but in practice it works in sane cases, needs not much parsing,
  540. * and also works with broken frame_num in NAL
  541. * (where spec-wise would fail) */
  542. nal_type = nnalu.type;
  543. complete = h264parse->picture_start && (nal_type >= GST_H264_NAL_SEI &&
  544. nal_type <= GST_H264_NAL_AU_DELIMITER);
  545. GST_LOG_OBJECT (h264parse, "next nal type: %d", nal_type);
  546. complete |= h264parse->picture_start &&
  547. (nal_type == GST_H264_NAL_SLICE ||
  548. nal_type == GST_H264_NAL_SLICE_DPA ||
  549. nal_type == GST_H264_NAL_SLICE_IDR) &&
  550. /* first_mb_in_slice == 0 considered start of frame */
  551. (nnalu.data[nnalu.offset + 1] & 0x80);
  552. GST_LOG_OBJECT (h264parse, "au complete: %d", complete);
  553. return complete;
  554. }
  555. /* FIXME move into baseparse, or anything equivalent;
  556. * see https://bugzilla.gnome.org/show_bug.cgi?id=650093 */
  557. #define GST_BASE_PARSE_FRAME_FLAG_PARSING 0x10000
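/* scan the input for NAL units and accumulate them until a full frame
 * (AU or single NAL, depending on alignment) is available; reports the
 * resulting framesize/skipsize back to baseparse */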
  558. static gboolean
  559. gst_h264_parse_check_valid_frame (GstBaseParse * parse,
  560. GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
  561. {
  562. GstH264Parse *h264parse = GST_H264_PARSE (parse);
  563. GstBuffer *buffer = frame->buffer;
  564. guint8 *data;
  565. guint size, current_off = 0;
  566. gboolean drain;
  567. GstH264NalParser *nalparser = h264parse->nalparser;
  568. GstH264NalUnit nalu;
  569. /* expect at least 3 bytes startcode == sc, and 2 bytes NALU payload */
  570. if (G_UNLIKELY (GST_BUFFER_SIZE (buffer) < 5))
  571. return FALSE;
  572. /* need to configure aggregation */
  573. if (G_UNLIKELY (h264parse->format == GST_H264_PARSE_FORMAT_NONE))
  574. gst_h264_parse_negotiate (h264parse, NULL);
  575. /* avoid stale cached parsing state */
  576. if (!(frame->flags & GST_BASE_PARSE_FRAME_FLAG_PARSING)) {
  577. GST_LOG_OBJECT (h264parse, "parsing new frame");
  578. gst_h264_parse_reset_frame (h264parse);
  579. frame->flags |= GST_BASE_PARSE_FRAME_FLAG_PARSING;
  580. } else {
  581. GST_LOG_OBJECT (h264parse, "resuming frame parsing");
  582. }
  583. data = GST_BUFFER_DATA (buffer);
  584. size = GST_BUFFER_SIZE (buffer);
  585. drain = FALSE;
  586. nalu = h264parse->nalu;
  587. current_off = h264parse->current_off;
  588. g_assert (current_off < size);
  589. GST_DEBUG_OBJECT (h264parse, "last parse position %u", current_off);
  590. while (TRUE) {
  591. GstH264ParserResult pres;
  592. if (h264parse->packetized_chunked)
  593. pres =
  594. gst_h264_parser_identify_nalu_unchecked (nalparser, data, current_off,
  595. size, &nalu);
  596. else
  597. pres =
  598. gst_h264_parser_identify_nalu (nalparser, data, current_off, size,
  599. &nalu);
  600. switch (pres) {
  601. case GST_H264_PARSER_OK:
  602. GST_DEBUG_OBJECT (h264parse, "complete nal found. "
  603. "current offset: %u, Nal offset: %u, Nal Size: %u",
  604. current_off, nalu.offset, nalu.size);
  605. GST_DEBUG_OBJECT (h264parse, "current off. %u",
  606. nalu.offset + nalu.size);
  607. if (!h264parse->nalu.size && !h264parse->nalu.valid)
  608. h264parse->nalu = nalu;
  609. /* need 2 bytes of next nal */
  610. if (!h264parse->packetized_chunked &&
  611. (nalu.offset + nalu.size + 4 + 2 > size)) {
  612. if (GST_BASE_PARSE_DRAINING (parse)) {
  613. drain = TRUE;
  614. } else {
  615. GST_DEBUG_OBJECT (h264parse, "need more bytes of next nal");
  616. current_off = nalu.sc_offset;
  617. goto more;
  618. }
  619. } else if (h264parse->packetized_chunked) {
  620. /* normal next nal based collection not possible,
  621. * _chain will have to tell us whether this was last one for AU */
  622. drain = h264parse->packetized_last;
  623. }
  624. break;
  625. case GST_H264_PARSER_BROKEN_LINK:
  626. return FALSE;
  627. case GST_H264_PARSER_ERROR:
  628. current_off = size - 3;
  629. goto parsing_error;
  630. case GST_H264_PARSER_NO_NAL:
  631. /* don't expect to have found any NAL so far */
  632. g_assert (h264parse->nalu.size == 0);
  633. current_off = h264parse->nalu.sc_offset = size - 3;
  634. goto more;
  635. case GST_H264_PARSER_BROKEN_DATA:
  636. GST_WARNING_OBJECT (h264parse, "input stream is corrupt; "
  637. "it contains a NAL unit of length %d", nalu.size);
  638. /* broken nal at start -> arrange to skip it,
  639. * otherwise have it terminate current au
  640. * (and so it will be skipped on next frame round) */
  641. if (nalu.sc_offset == h264parse->nalu.sc_offset) {
  642. *skipsize = nalu.offset;
  643. GST_DEBUG_OBJECT (h264parse, "skipping broken nal");
  644. goto invalid;
  645. } else {
  646. nalu.size = 0;
  647. goto end;
  648. }
  649. case GST_H264_PARSER_NO_NAL_END:
  650. GST_DEBUG_OBJECT (h264parse, "not a complete nal found at offset %u",
  651. nalu.offset);
  652. current_off = nalu.sc_offset;
  653. /* We keep the reference to this nal so we start over the parsing
  654. * here */
  655. if (!h264parse->nalu.size && !h264parse->nalu.valid)
  656. h264parse->nalu = nalu;
  657. if (GST_BASE_PARSE_DRAINING (parse)) {
  658. drain = TRUE;
  659. GST_DEBUG_OBJECT (h264parse, "draining NAL %u %u %u", size,
  660. h264parse->nalu.offset, h264parse->nalu.size);
  661. /* Can't parse the nalu */
  662. if (size - h264parse->nalu.offset < 2) {
  663. *skipsize = nalu.offset;
  664. goto invalid;
  665. }
  666. /* We parse it anyway */
  667. nalu.size = size - nalu.offset;
  668. break;
  669. }
  670. goto more;
  671. }
  672. current_off = nalu.offset + nalu.size;
  673. GST_DEBUG_OBJECT (h264parse, "%p complete nal found. Off: %u, Size: %u",
  674. data, nalu.offset, nalu.size);
  675. gst_h264_parse_process_nal (h264parse, &nalu);
  676. /* simulate no next nal if none needed */
  677. drain = drain || (h264parse->align == GST_H264_PARSE_ALIGN_NAL);
  678. /* In packetized mode we know there's only one NALU in each input packet,
  679. * but we may not have seen the whole AU already, possibly need more */
  680. if (h264parse->packetized_chunked) {
  681. if (drain)
  682. break;
  683. /* next NALU expected at end of current data */
  684. current_off = size;
  685. goto more;
  686. }
  687. /* if no next nal, we know it's complete here */
  688. if (drain || gst_h264_parse_collect_nal (h264parse, data, size, &nalu))
  689. break;
  690. GST_DEBUG_OBJECT (h264parse, "Looking for more");
  691. }
  692. end:
  693. *skipsize = h264parse->nalu.sc_offset;
  694. *framesize = nalu.offset + nalu.size - h264parse->nalu.sc_offset;
  695. h264parse->current_off = current_off;
  696. return TRUE;
  697. parsing_error:
  698. GST_DEBUG_OBJECT (h264parse, "error parsing Nal Unit");
  699. more:
  700. /* ask for best next available */
  701. *framesize = G_MAXUINT;
  702. if (!h264parse->nalu.size) {
  703. /* skip up to initial startcode */
  704. *skipsize = h264parse->nalu.sc_offset;
  705. /* but mind some stuff will have been skipped */
  706. g_assert (current_off >= *skipsize);
  707. current_off -= *skipsize;
  708. h264parse->nalu.sc_offset = 0;
  709. } else {
  710. *skipsize = 0;
  711. }
  712. /* Restart parsing from here next time */
  713. h264parse->current_off = current_off;
  714. return FALSE;
  715. invalid:
  716. gst_h264_parse_reset_frame (h264parse);
  717. return FALSE;
  718. }
  719. /* byte together avc codec data based on collected pps and sps so far */
  720. static GstBuffer *
  721. gst_h264_parse_make_codec_data (GstH264Parse * h264parse)
  722. {
  723. GstBuffer *buf, *nal;
  724. gint i, sps_size = 0, pps_size = 0, num_sps = 0, num_pps = 0;
  725. guint8 profile_idc = 0, profile_comp = 0, level_idc = 0;
  726. gboolean found = FALSE;
  727. guint8 *data;
  728. /* only nal payload in stored nals */
  729. for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
  730. if ((nal = h264parse->sps_nals[i])) {
  731. num_sps++;
  732. /* size bytes also count */
  733. sps_size += GST_BUFFER_SIZE (nal) + 2;
  734. if (GST_BUFFER_SIZE (nal) >= 4) {
  735. found = TRUE;
  736. profile_idc = (GST_BUFFER_DATA (nal))[1];
  737. profile_comp = (GST_BUFFER_DATA (nal))[2];
  738. level_idc = (GST_BUFFER_DATA (nal))[3];
  739. }
  740. }
  741. }
  742. for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
  743. if ((nal = h264parse->pps_nals[i])) {
  744. num_pps++;
  745. /* size bytes also count */
  746. pps_size += GST_BUFFER_SIZE (nal) + 2;
  747. }
  748. }
  749. GST_DEBUG_OBJECT (h264parse,
  750. "constructing codec_data: num_sps=%d, num_pps=%d", num_sps, num_pps);
  751. if (!found || !num_pps)
  752. return NULL;
  753. buf = gst_buffer_new_and_alloc (5 + 1 + sps_size + 1 + pps_size);
  754. data = GST_BUFFER_DATA (buf);
  755. data[0] = 1; /* AVC Decoder Configuration Record ver. 1 */
  756. data[1] = profile_idc; /* profile_idc */
  757. data[2] = profile_comp; /* profile_compatibility */
  758. data[3] = level_idc; /* level_idc */
  759. #ifdef GST_H264PARSE_MODIFICATION
  760. data[4] = 0xfc | ( h264parse->nal_length_size - 1); /* nal_length_size_minus1 */
  761. #else
  762. data[4] = 0xfc | (4 - 1); /* nal_length_size_minus1 */
  763. #endif
  764. data[5] = 0xe0 | num_sps; /* number of SPSs */
  765. data += 6;
  766. for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
  767. if ((nal = h264parse->sps_nals[i])) {
  768. GST_WRITE_UINT16_BE (data, GST_BUFFER_SIZE (nal));
  769. memcpy (data + 2, GST_BUFFER_DATA (nal), GST_BUFFER_SIZE (nal));
  770. data += 2 + GST_BUFFER_SIZE (nal);
  771. }
  772. }
  773. data[0] = num_pps;
  774. data++;
  775. for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
  776. if ((nal = h264parse->pps_nals[i])) {
  777. GST_WRITE_UINT16_BE (data, GST_BUFFER_SIZE (nal));
  778. memcpy (data + 2, GST_BUFFER_DATA (nal), GST_BUFFER_SIZE (nal));
  779. data += 2 + GST_BUFFER_SIZE (nal);
  780. }
  781. }
  782. return buf;
  783. }
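/* derive the pixel-aspect-ratio from aspect_ratio_idc (table in Annex E);
 * an upstream-provided PAR takes precedence, 255 means explicit SAR */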
  784. static void
  785. gst_h264_parse_get_par (GstH264Parse * h264parse, gint * num, gint * den)
  786. {
  787. gint par_n, par_d;
  788. if (h264parse->upstream_par_n != -1 && h264parse->upstream_par_d != -1) {
  789. *num = h264parse->upstream_par_n;
  790. *den = h264parse->upstream_par_d;
  791. return;
  792. }
  793. par_n = par_d = 0;
  794. switch (h264parse->aspect_ratio_idc) {
  795. case 0:
  796. par_n = par_d = 0;
  797. break;
  798. case 1:
  799. par_n = 1;
  800. par_d = 1;
  801. break;
  802. case 2:
  803. par_n = 12;
  804. par_d = 11;
  805. break;
  806. case 3:
  807. par_n = 10;
  808. par_d = 11;
  809. break;
  810. case 4:
  811. par_n = 16;
  812. par_d = 11;
  813. break;
  814. case 5:
  815. par_n = 40;
  816. par_d = 33;
  817. break;
  818. case 6:
  819. par_n = 24;
  820. par_d = 11;
  821. break;
  822. case 7:
  823. par_n = 20;
  824. par_d = 11;
  825. break;
  826. case 8:
  827. par_n = 32;
  828. par_d = 11;
  829. break;
  830. case 9:
  831. par_n = 80;
  832. par_d = 33;
  833. break;
  834. case 10:
  835. par_n = 18;
  836. par_d = 11;
  837. break;
  838. case 11:
  839. par_n = 15;
  840. par_d = 11;
  841. break;
  842. case 12:
  843. par_n = 64;
  844. par_d = 33;
  845. break;
  846. case 13:
  847. par_n = 160;
  848. par_d = 99;
  849. break;
  850. case 14:
  851. par_n = 4;
  852. par_d = 3;
  853. break;
  854. case 15:
  855. par_n = 3;
  856. par_d = 2;
  857. break;
  858. case 16:
  859. par_n = 2;
  860. par_d = 1;
  861. break;
  862. case 255:
  863. par_n = h264parse->sar_width;
  864. par_d = h264parse->sar_height;
  865. break;
  866. default:
  867. par_n = par_d = 0;
  868. }
  869. *num = par_n;
  870. *den = par_d;
  871. }
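/* refresh src caps from the latest SPS (width/height, framerate, PAR) and,
 * for avc/au output, attach freshly built codec_data; caps are only pushed
 * downstream when something actually changed */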
  872. static void
  873. gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps)
  874. {
  875. GstH264SPS *sps;
  876. GstCaps *sink_caps;
  877. gboolean modified = FALSE;
  878. GstBuffer *buf = NULL;
  879. if (G_UNLIKELY (!GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (h264parse))))
  880. modified = TRUE;
  881. else if (G_UNLIKELY (!h264parse->update_caps))
  882. return;
  883. /* if this is being called from the first _setcaps call, caps on the sinkpad
  884. * aren't set yet and so they need to be passed as an argument */
  885. if (caps)
  886. sink_caps = caps;
  887. else
  888. sink_caps = GST_PAD_CAPS (GST_BASE_PARSE_SINK_PAD (h264parse));
  889. /* carry over input caps as much as possible; override with our own stuff */
  890. if (sink_caps)
  891. gst_caps_ref (sink_caps);
  892. else
  893. sink_caps = gst_caps_new_simple ("video/x-h264", NULL);
  894. sps = h264parse->nalparser->last_sps;
  895. GST_DEBUG_OBJECT (h264parse, "sps: %p", sps);
  896. /* only codec-data for nice-and-clean au aligned packetized avc format */
  897. if (h264parse->format == GST_H264_PARSE_FORMAT_AVC &&
  898. h264parse->align == GST_H264_PARSE_ALIGN_AU) {
  899. buf = gst_h264_parse_make_codec_data (h264parse);
  900. if (buf && h264parse->codec_data) {
  901. if (GST_BUFFER_SIZE (buf) != GST_BUFFER_SIZE (h264parse->codec_data) ||
  902. memcmp (GST_BUFFER_DATA (buf),
  903. GST_BUFFER_DATA (h264parse->codec_data), GST_BUFFER_SIZE (buf)))
  904. modified = TRUE;
  905. } else {
  906. if (h264parse->codec_data)
  907. buf = gst_buffer_ref (h264parse->codec_data);
  908. modified = TRUE;
  909. }
  910. }
  911. caps = NULL;
  912. if (G_UNLIKELY (!sps)) {
  913. caps = gst_caps_copy (sink_caps);
  914. } else {
  915. if (G_UNLIKELY (h264parse->width != sps->width ||
  916. h264parse->height != sps->height)) {
  917. GST_INFO_OBJECT (h264parse, "resolution changed %dx%d",
  918. sps->width, sps->height);
  919. h264parse->width = sps->width;
  920. h264parse->height = sps->height;
  921. modified = TRUE;
  922. }
  923. /* 0/1 is set as the default in the codec parser */
  924. if (sps->vui_parameters.timing_info_present_flag &&
  925. !(sps->fps_num == 0 && sps->fps_den == 1)) {
  926. if (G_UNLIKELY (h264parse->fps_num != sps->fps_num
  927. || h264parse->fps_den != sps->fps_den)) {
  928. GST_INFO_OBJECT (h264parse, "framerate changed %d/%d",
  929. sps->fps_num, sps->fps_den);
  930. h264parse->fps_num = sps->fps_num;
  931. h264parse->fps_den = sps->fps_den;
  932. gst_base_parse_set_frame_rate (GST_BASE_PARSE (h264parse),
  933. h264parse->fps_num, h264parse->fps_den, 0, 0);
  934. modified = TRUE;
  935. }
  936. }
  937. if (sps->vui_parameters.aspect_ratio_info_present_flag) {
  938. if (G_UNLIKELY (h264parse->aspect_ratio_idc !=
  939. sps->vui_parameters.aspect_ratio_idc)) {
  940. h264parse->aspect_ratio_idc = sps->vui_parameters.aspect_ratio_idc;
  941. GST_INFO_OBJECT (h264parse, "aspect ratio idc changed %d",
  942. h264parse->aspect_ratio_idc);
  943. modified = TRUE;
  944. }
  945. /* 255 means sar_width and sar_height present */
  946. if (G_UNLIKELY (sps->vui_parameters.aspect_ratio_idc == 255 &&
  947. (h264parse->sar_width != sps->vui_parameters.sar_width ||
  948. h264parse->sar_height != sps->vui_parameters.sar_height))) {
  949. h264parse->sar_width = sps->vui_parameters.sar_width;
  950. h264parse->sar_height = sps->vui_parameters.sar_height;
  951. GST_INFO_OBJECT (h264parse, "aspect ratio SAR changed %d/%d",
  952. h264parse->sar_width, h264parse->sar_height);
  953. modified = TRUE;
  954. }
  955. }
  956. if (G_UNLIKELY (modified)) {
  957. caps = gst_caps_copy (sink_caps);
  958. /* sps should give this */
  959. gst_caps_set_simple (caps, "width", G_TYPE_INT, sps->width,
  960. "height", G_TYPE_INT, sps->height, NULL);
  961. /* but not necessarily or reliably this */
  962. if (h264parse->fps_num > 0 && h264parse->fps_den > 0)
  963. gst_caps_set_simple (caps, "framerate",
  964. GST_TYPE_FRACTION, h264parse->fps_num, h264parse->fps_den, NULL);
  965. }
  966. }
  967. if (caps) {
  968. gint par_n, par_d;
  969. gst_caps_set_simple (caps, "parsed", G_TYPE_BOOLEAN, TRUE,
  970. "stream-format", G_TYPE_STRING,
  971. gst_h264_parse_get_string (h264parse, TRUE, h264parse->format),
  972. "alignment", G_TYPE_STRING,
  973. gst_h264_parse_get_string (h264parse, FALSE, h264parse->align), NULL);
  974. gst_h264_parse_get_par (h264parse, &par_n, &par_d);
  975. if (par_n != 0 && par_d != 0) {
  976. GST_INFO_OBJECT (h264parse, "PAR %d/%d", par_n, par_d);
  977. gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
  978. par_n, par_d, NULL);
  979. }
  980. if (buf) {
  981. gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, buf, NULL);
  982. gst_buffer_replace (&h264parse->codec_data, buf);
  983. gst_buffer_unref (buf);
  984. buf = NULL;
  985. } else {
  986. GstStructure *s;
  987. /* remove any left-over codec-data hanging around */
  988. s = gst_caps_get_structure (caps, 0);
  989. gst_structure_remove_field (s, "codec_data");
  990. }
  991. gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (h264parse), caps);
  992. gst_caps_unref (caps);
  993. }
  994. gst_caps_unref (sink_caps);
  995. if (buf)
  996. gst_buffer_unref (buf);
  997. }
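/* compute the frame's timestamp/duration from VUI timing info and pic_struct
 * SEI, keeping any valid upstream timestamp as the reference */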
  998. static void
  999. gst_h264_parse_get_timestamp (GstH264Parse * h264parse,
  1000. GstClockTime * out_ts, GstClockTime * out_dur, gboolean frame)
  1001. {
  1002. GstH264SPS *sps = h264parse->nalparser->last_sps;
  1003. GstClockTime upstream;
  1004. gint duration = 1;
  1005. g_return_if_fail (out_dur != NULL);
  1006. g_return_if_fail (out_ts != NULL);
  1007. upstream = *out_ts;
  1008. if (!frame) {
  1009. GST_LOG_OBJECT (h264parse, "no frame data -> 0 duration");
  1010. *out_dur = 0;
  1011. goto exit;
  1012. } else {
  1013. *out_ts = upstream;
  1014. }
  1015. if (!sps) {
  1016. GST_DEBUG_OBJECT (h264parse, "referred SPS invalid");
  1017. goto exit;
  1018. } else if (!sps->vui_parameters.timing_info_present_flag) {
  1019. GST_DEBUG_OBJECT (h264parse,
  1020. "unable to compute timestamp: timing info not present");
  1021. goto exit;
  1022. } else if (sps->vui_parameters.time_scale == 0) {
  1023. GST_DEBUG_OBJECT (h264parse,
  1024. "unable to compute timestamp: time_scale = 0 "
  1025. "(this is forbidden in spec; bitstream probably contains error)");
  1026. goto exit;
  1027. }
  1028. if (h264parse->sei_pic_struct_pres_flag &&
  1029. h264parse->sei_pic_struct != (guint8) - 1) {
  1030. /* Note that when h264parse->sei_pic_struct == -1 (unspecified), there
  1031. * are ways to infer its value. This is related to computing the
  1032. * TopFieldOrderCnt and BottomFieldOrderCnt, which looks
  1033. * complicated and thus not implemented for the time being. Yet
  1034. * the value we have here is correct for many applications
  1035. */
  1036. switch (h264parse->sei_pic_struct) {
  1037. case GST_H264_SEI_PIC_STRUCT_TOP_FIELD:
  1038. case GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
  1039. duration = 1;
  1040. break;
  1041. case GST_H264_SEI_PIC_STRUCT_FRAME:
  1042. case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
  1043. case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP:
  1044. duration = 2;
  1045. break;
  1046. case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
  1047. case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
  1048. duration = 3;
  1049. break;
  1050. case GST_H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
  1051. duration = 4;
  1052. break;
  1053. case GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
  1054. duration = 6;
  1055. break;
  1056. default:
  1057. GST_DEBUG_OBJECT (h264parse,
  1058. "h264parse->sei_pic_struct of unknown value %d. Not parsed",
  1059. h264parse->sei_pic_struct);
  1060. break;
  1061. }
  1062. } else {
  1063. duration = h264parse->field_pic_flag ? 1 : 2;
  1064. }
  1065. GST_LOG_OBJECT (h264parse, "frame tick duration %d", duration);
  1066. /*
  1067. * H.264 C.1.2 Timing of coded picture removal (equivalent to DTS):
  1068. * Tr,n(0) = initial_cpb_removal_delay[ SchedSelIdx ] / 90000
  1069. * Tr,n(n) = Tr,n(nb) + Tc * cpb_removal_delay(n)
  1070. * where
  1071. * Tc = num_units_in_tick / time_scale
  1072. */
  1073. if (h264parse->ts_trn_nb != GST_CLOCK_TIME_NONE) {
  1074. GST_LOG_OBJECT (h264parse, "buffering based ts");
  1075. /* buffering period is present */
  1076. if (upstream != GST_CLOCK_TIME_NONE) {
  1077. /* If upstream timestamp is valid, we respect it and adjust current
  1078. * reference point */
  1079. h264parse->ts_trn_nb = upstream -
  1080. (GstClockTime) gst_util_uint64_scale_int
  1081. (h264parse->sei_cpb_removal_delay * GST_SECOND,
  1082. sps->vui_parameters.num_units_in_tick,
  1083. sps->vui_parameters.time_scale);
  1084. } else {
  1085. /* If no upstream timestamp is given, we write in new timestamp */
  1086. upstream = h264parse->dts = h264parse->ts_trn_nb +
  1087. (GstClockTime) gst_util_uint64_scale_int
  1088. (h264parse->sei_cpb_removal_delay * GST_SECOND,
  1089. sps->vui_parameters.num_units_in_tick,
  1090. sps->vui_parameters.time_scale);
  1091. }
  1092. } else {
  1093. GstClockTime dur;
  1094. GST_LOG_OBJECT (h264parse, "duration based ts");
  1095. /* naive method: no removal delay specified
  1096. * track upstream timestamp and provide best guess frame duration */
  1097. dur = gst_util_uint64_scale_int (duration * GST_SECOND,
  1098. sps->vui_parameters.num_units_in_tick, sps->vui_parameters.time_scale);
  1099. /* sanity check */
  1100. if (dur < GST_MSECOND) {
  1101. GST_DEBUG_OBJECT (h264parse, "discarding dur %" GST_TIME_FORMAT,
  1102. GST_TIME_ARGS (dur));
  1103. } else {
  1104. *out_dur = dur;
  1105. }
  1106. }
  1107. exit:
  1108. if (GST_CLOCK_TIME_IS_VALID (upstream))
  1109. *out_ts = h264parse->dts = upstream;
  1110. if (GST_CLOCK_TIME_IS_VALID (*out_dur) &&
  1111. GST_CLOCK_TIME_IS_VALID (h264parse->dts))
  1112. h264parse->dts += *out_dur;
  1113. }
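/* finish a parsed frame: update src caps, set timestamp/duration and the
 * delta-unit flag, and swap in the re-wrapped AVC data if any was collected */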
  1114. static GstFlowReturn
  1115. gst_h264_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
  1116. {
  1117. GstH264Parse *h264parse;
  1118. GstBuffer *buffer;
  1119. guint av;
  1120. h264parse = GST_H264_PARSE (parse);
  1121. buffer = frame->buffer;
  1122. gst_h264_parse_update_src_caps (h264parse, NULL);
  1123. /* don't mess with timestamps if provided by upstream,
  1124. * particularly since our ts are not good enough to handle seeking etc */
  1125. if (h264parse->do_ts)
  1126. gst_h264_parse_get_timestamp (h264parse,
  1127. &GST_BUFFER_TIMESTAMP (buffer), &GST_BUFFER_DURATION (buffer),
  1128. h264parse->frame_start);
  1129. if (h264parse->keyframe)
  1130. GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  1131. else
  1132. GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  1133. /* replace with transformed AVC output if applicable */
  1134. av = gst_adapter_available (h264parse->frame_out);
  1135. if (av) {
  1136. GstBuffer *buf;
  1137. buf = gst_adapter_take_buffer (h264parse->frame_out, av);
  1138. gst_buffer_copy_metadata (buf, buffer, GST_BUFFER_COPY_ALL);
  1139. gst_buffer_replace (&frame->buffer, buf);
  1140. gst_buffer_unref (buf);
  1141. }
  1142. return GST_FLOW_OK;
  1143. }
  1144. /* sends a codec NAL downstream, decorating and transforming as needed.
  1145. * No ownership is taken of @nal */
  1146. static GstFlowReturn
  1147. gst_h264_parse_push_codec_buffer (GstH264Parse * h264parse, GstBuffer * nal,
  1148. GstClockTime ts)
  1149. {
  1150. nal = gst_h264_parse_wrap_nal (h264parse, h264parse->format,
  1151. GST_BUFFER_DATA (nal), GST_BUFFER_SIZE (nal));
  1152. GST_BUFFER_TIMESTAMP (nal) = ts;
  1153. GST_BUFFER_DURATION (nal) = 0;
  1154. gst_buffer_set_caps (nal, GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (h264parse)));
  1155. return gst_pad_push (GST_BASE_PARSE_SRC_PAD (h264parse), nal);
  1156. }
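/* convert a pending upstream force-key-unit event into its downstream form
 * once the requested running time has been reached; returns NULL otherwise */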
  1157. static GstEvent *
  1158. check_pending_key_unit_event (GstEvent * pending_event, GstSegment * segment,
  1159. GstClockTime timestamp, guint flags, GstClockTime pending_key_unit_ts)
  1160. {
  1161. GstClockTime running_time, stream_time;
  1162. gboolean all_headers;
  1163. guint count;
  1164. GstEvent *event = NULL;
  1165. g_return_val_if_fail (segment != NULL, NULL);
  1166. if (pending_event == NULL)
  1167. goto out;
  1168. if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
  1169. timestamp == GST_CLOCK_TIME_NONE)
  1170. goto out;
  1171. running_time = gst_segment_to_running_time (segment,
  1172. GST_FORMAT_TIME, timestamp);
  1173. GST_INFO ("now %" GST_TIME_FORMAT " wanted %" GST_TIME_FORMAT,
  1174. GST_TIME_ARGS (running_time), GST_TIME_ARGS (pending_key_unit_ts));
  1175. if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
  1176. running_time < pending_key_unit_ts)
  1177. goto out;
  1178. #if 0
  1179. if (flags & GST_BUFFER_FLAG_DELTA_UNIT) {
  1180. GST_DEBUG ("pending force key unit, waiting for keyframe");
  1181. goto out;
  1182. }
  1183. #endif
  1184. stream_time = gst_segment_to_stream_time (segment,
  1185. GST_FORMAT_TIME, timestamp);
  1186. gst_video_event_parse_upstream_force_key_unit (pending_event,
  1187. NULL, &all_headers, &count);
  1188. event =
  1189. gst_video_event_new_downstream_force_key_unit (timestamp, stream_time,
  1190. running_time, all_headers, count);
  1191. gst_event_set_seqnum (event, gst_event_get_seqnum (pending_event));
  1192. out:
  1193. return event;
  1194. }
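/* push the downstream force-key-unit event and arrange for SPS/PPS to be
 * re-sent (push_codec) with the upcoming keyframe */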
  1195. static void
  1196. gst_h264_parse_prepare_key_unit (GstH264Parse * parse, GstEvent * event)
  1197. {
  1198. GstClockTime running_time;
  1199. guint count;
  1200. gboolean have_sps, have_pps;
  1201. gint i;
  1202. parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
  1203. gst_event_replace (&parse->force_key_unit_event, NULL);
  1204. gst_video_event_parse_downstream_force_key_unit (event,
  1205. NULL, NULL, &running_time, NULL, &count);
  1206. GST_INFO_OBJECT (parse, "pushing downstream force-key-unit event %d "
  1207. "%" GST_TIME_FORMAT " count %d", gst_event_get_seqnum (event),
  1208. GST_TIME_ARGS (running_time), count);
  1209. gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (parse), event);
  1210. have_sps = have_pps = FALSE;
  1211. for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
  1212. if (parse->sps_nals[i] != NULL) {
  1213. have_sps = TRUE;
  1214. break;
  1215. }
  1216. }
  1217. for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
  1218. if (parse->pps_nals[i] != NULL) {
  1219. have_pps = TRUE;
  1220. break;
  1221. }
  1222. }
  1223. GST_INFO_OBJECT (parse, "preparing key unit, have sps %d have pps %d",
  1224. have_sps, have_pps);
  1225. /* set push_codec to TRUE so that pre_push_frame sends SPS/PPS again */
  1226. parse->push_codec = TRUE;
  1227. }
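/* called just before pushing: handles pending force-key-unit requests and,
 * when config-interval has elapsed or push_codec is set, re-emits or splices
 * the stored SPS/PPS at the marked IDR position */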
  1228. static GstFlowReturn
  1229. gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
  1230. {
  1231. GstH264Parse *h264parse;
  1232. GstBuffer *buffer;
  1233. GstEvent *event;
  1234. h264parse = GST_H264_PARSE (parse);
  1235. buffer = frame->buffer;
  1236. if ((event = check_pending_key_unit_event (h264parse->force_key_unit_event,
  1237. &parse->segment, GST_BUFFER_TIMESTAMP (buffer),
  1238. GST_BUFFER_FLAGS (buffer), h264parse->pending_key_unit_ts))) {
  1239. gst_h264_parse_prepare_key_unit (h264parse, event);
  1240. }
  1241. /* periodic SPS/PPS sending */
  1242. if (h264parse->interval > 0 || h264parse->push_codec) {
  1243. GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
  1244. guint64 diff;
  1245. /* init */
  1246. if (!GST_CLOCK_TIME_IS_VALID (h264parse->last_report)) {
  1247. h264parse->last_report = timestamp;
  1248. }
  1249. if (h264parse->idr_pos >= 0) {
  1250. GST_LOG_OBJECT (h264parse, "IDR nal at offset %d", h264parse->idr_pos);
  1251. if (timestamp > h264parse->last_report)
  1252. diff = timestamp - h264parse->last_report;
  1253. else
  1254. diff = 0;
  1255. GST_LOG_OBJECT (h264parse,
  1256. "now %" GST_TIME_FORMAT ", last SPS/PPS %" GST_TIME_FORMAT,
  1257. GST_TIME_ARGS (timestamp), GST_TIME_ARGS (h264parse->last_report));
  1258. GST_DEBUG_OBJECT (h264parse,
  1259. "interval since last SPS/PPS %" GST_TIME_FORMAT,
  1260. GST_TIME_ARGS (diff));
  1261. if (GST_TIME_AS_SECONDS (diff) >= h264parse->interval ||
  1262. h264parse->push_codec) {
  1263. GstBuffer *codec_nal;
  1264. gint i;
  1265. GstClockTime new_ts;
  1266. /* avoid overwriting a perfectly fine timestamp */
  1267. new_ts = GST_CLOCK_TIME_IS_VALID (timestamp) ? timestamp :
  1268. h264parse->last_report;
  1269. if (h264parse->align == GST_H264_PARSE_ALIGN_NAL) {
  1270. /* send separate config NAL buffers */
  1271. GST_DEBUG_OBJECT (h264parse, "- sending SPS/PPS");
  1272. for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
  1273. if ((codec_nal = h264parse->sps_nals[i])) {
  1274. GST_DEBUG_OBJECT (h264parse, "sending SPS nal");
  1275. gst_h264_parse_push_codec_buffer (h264parse, codec_nal,
  1276. timestamp);
  1277. h264parse->last_report = new_ts;
  1278. }
  1279. }
  1280. for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
  1281. if ((codec_nal = h264parse->pps_nals[i])) {
  1282. GST_DEBUG_OBJECT (h264parse, "sending PPS nal");
  1283. gst_h264_parse_push_codec_buffer (h264parse, codec_nal,
  1284. timestamp);
  1285. h264parse->last_report = new_ts;
  1286. }
  1287. }
  1288. } else {
  1289. /* insert config NALs into AU */
  1290. GstByteWriter bw;
  1291. GstBuffer *new_buf;
  1292. const gboolean bs = h264parse->format == GST_H264_PARSE_FORMAT_BYTE;
  1293. gst_byte_writer_init_with_size (&bw, GST_BUFFER_SIZE (buffer), FALSE);
  1294. gst_byte_writer_put_data (&bw, GST_BUFFER_DATA (buffer),
  1295. h264parse->idr_pos);
  1296. GST_DEBUG_OBJECT (h264parse, "- inserting SPS/PPS");
  1297. for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
  1298. if ((codec_nal = h264parse->sps_nals[i])) {
  1299. GST_DEBUG_OBJECT (h264parse, "inserting SPS nal");
  1300. gst_byte_writer_put_uint32_be (&bw,
  1301. bs ? 1 : GST_BUFFER_SIZE (codec_nal));
  1302. gst_byte_writer_put_data (&bw, GST_BUFFER_DATA (codec_nal),
  1303. GST_BUFFER_SIZE (codec_nal));
  1304. h264parse->last_report = new_ts;
  1305. }
  1306. }
  1307. for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
  1308. if ((codec_nal = h264parse->pps_nals[i])) {
  1309. GST_DEBUG_OBJECT (h264parse, "inserting PPS nal");
  1310. gst_byte_writer_put_uint32_be (&bw,
  1311. bs ? 1 : GST_BUFFER_SIZE (codec_nal));
  1312. gst_byte_writer_put_data (&bw, GST_BUFFER_DATA (codec_nal),
  1313. GST_BUFFER_SIZE (codec_nal));
  1314. h264parse->last_report = new_ts;
  1315. }
  1316. }
  1317. gst_byte_writer_put_data (&bw,
  1318. GST_BUFFER_DATA (buffer) + h264parse->idr_pos,
  1319. GST_BUFFER_SIZE (buffer) - h264parse->idr_pos);
  1320. /* collect result and push */
  1321. new_buf = gst_byte_writer_reset_and_get_buffer (&bw);
  1322. gst_buffer_copy_metadata (new_buf, buffer, GST_BUFFER_COPY_ALL);
  1323. /* should already be keyframe/IDR, but it may not have been,
  1324. * so mark it as such to avoid being discarded by picky decoder */
  1325. GST_BUFFER_FLAG_UNSET (new_buf, GST_BUFFER_FLAG_DELTA_UNIT);
  1326. gst_buffer_replace (&frame->buffer, new_buf);
  1327. gst_buffer_unref (new_buf);
  1328. }
  1329. }
  1330. /* we pu…