/Play/nal.js

http://github.com/mbebenita/Broadway · JavaScript · 648 lines

var Video = (function() {
    function constructor() {
    }
    return constructor;
})();
/**
 * Represents a video decoder capturing all of its internal state.
 */
var Decoder = (function() {
    function constructor() {
        this.SequenceParameterSets = [];
        this.PictureParameterSets = [];
        this.Video = new Video();
    }
    constructor.prototype = {
        decode : function(ptr) {
            var reader = new AnnexBNALUnitReader(ptr);
            var i = 0;
            do {
                var nal = reader.readNALUnit();
                if (nal != null) {
                    traceln("+ NAL Unit " + (i++));
                    var unit = nal.createUnit();
                    traceln("| nal_size: " + (nal.rbsp.length + 1));
                    traceln("| nal_type: " + nal.nal_type);
                    traceln("| nal_ref_idc: " + nal.nal_ref_idc);
                    if (unit != null) {
                        unit.decode(new Bitstream(nal.rbsp));
                        // println(unit.toString());
                    }
                }
            } while (nal != null);
        }
    };
    return constructor;
})();
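/*
 * Minimal usage sketch. Assumptions (not part of this file): the surrounding
 * player assigns the global `decoder` that the slice parsing code below reads,
 * and the caller obtains the Annex B elementary stream as a Uint8Array (for
 * example over XHR). The helper name is hypothetical.
 */
function decodeAnnexBStreamSketch(annexBBytes) {
    decoder = new Decoder();      // deliberately global: SliceHeader.decode() reads `decoder`
    decoder.decode(annexBBytes);  // walks every NAL unit and parses SPS/PPS/slice headers
    return decoder;
}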
/*
 * Represents an Annex B byte stream that encodes NAL Units. In the Annex B
 * byte stream NAL Units are prefixed by a 3 byte start code prefix. The actual
 * NAL Unit payload data is interleaved with 'emulation prevention' bytes.
 */
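/*
 * Example layout (illustrative bytes only):
 *
 *   00 00 01 | 67 .. | 00 00 01 | 68 .. | 00 00 01 | 65 ..
 *
 * Each "00 00 01" is a start code prefix and the bytes up to the next start
 * code form one NAL unit (here an SPS, a PPS and an IDR slice). Inside a NAL
 * unit the pattern "00 00 03" carries an emulation prevention byte: the 0x03
 * is not payload and is removed later by convertEBSPToRBSP.
 */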
var AnnexBNALUnitReader = (function () {
    var ptr = null;
    function constructor(ptr) {
        this.ptr = ptr;
    }
    constructor.prototype = {
        readNALUnit : function() {
            if (this.ptr == null) {
                return null;
            }
            var res = this.findNextAnnexBNALUnit(this.ptr);
            this.ptr = res.next;
            return new NALUnit(res.nal);
        },
        /*
         * Finds the next NAL unit from an Annex B byte stream.
         */
        findNextAnnexBNALUnit : function findNextAnnexBNALUnit(ptr) {
            var i = 0, size = ptr.length;
            /* look for start_code_prefix */
            while (i < size && ptr[i] == 0) {
                i += 1;
            }
            if (i >= size) {
                error("cannot find any start_code_prefix");
            } else if (ptr[i] != 0x1) {
                /* start_code_prefix is not at the beginning, continue */
                i = -1;
            }
            i++;
            var j = i, start = i, end = start, foundStartCode = 0;
            while (!foundStartCode) {
                /* see 2 consecutive zero bytes */
                while ((j + 1 < size) && (ptr[j] != 0 || ptr[j + 1] != 0)) {
                    j += 1;
                }
                end = j; /* stop and check for start code */
                /* keep reading for zero byte */
                while (j + 2 < size && ptr[j + 2] == 0) {
                    j += 1;
                }
                if (j + 2 >= size) {
                    size -= start;
                    return {
                        start : start,
                        nal : ptr.subarray(start, start + size),
                        next : null
                    };
                }
                if (ptr[j + 2] == 0x1) {
                    foundStartCode = 1;
                } else {
                    /* could be emulation code 0x3 */
                    j += 2; /* continue the search */
                }
            }
            size = end - start;
            return {
                start : start,
                nal : ptr.subarray(start, start + size),
                next : ptr.subarray(end)
            };
        }
    };
    return constructor;
})();
/**
 * Indicates the type of the NAL Unit. The decoder needs to know how slices are coded. Some information can change from
 * slice to slice and can be encoded in each slice. Other information such as coding parameters, picture format, size,
 * type of entropy coder, bit rate, etc. does not change as often and does not need to be retransmitted as often.
 *
 * Combinations of coding parameters are stored on both the encoder and decoder side in various tables.
 *
 * There are two parameter sets: the Picture Parameter Set (PPS) contains information about the slices of one or more
 * pictures, and the Sequence Parameter Set (SPS) contains information about the sequence.
 *
 * An Instantaneous Decoding Refresh (IDR) picture contains only slices with I and SI slice types. IDR pictures
 * invalidate all reference pictures in the buffer prior to themselves and can thus confine drifting errors. IDR
 * pictures are always the first in a sequence of pictures.
 *
 * The Baseline and Main Profiles do not include NAL Unit Types 2, 3 and 4.
 */
NALU_TYPE = {
    SLICE : 1,
    DPA : 2,
    DPB : 3,
    DPC : 4,
    IDR : 5,
    SEI : 6,
    /**
     * Sequence Parameter Set
     */
    SPS : 7,
    /**
     * Picture Parameter Set
     */
    PPS : 8,
    /**
     * Access Unit Delimiter
     */
    AUD : 9,
    EOSEQ : 10,
    EOSTREAM : 11,
    FILL : 12,
    PREFIX : 14,
    SUB_SPS : 15,
    SLC_EXT : 20,
    VDRD : 24
};
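/*
 * For orientation: a typical Annex B stream opens with an SPS (7) and a PPS (8)
 * followed by an IDR slice (5), and then carries regular coded slices (1) until
 * the next parameter set or IDR refresh.
 */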
/**
 * Indicates the importance of the NAL Unit for the reconstruction process. The higher the value, the more important
 * the NAL Unit. For instance, a value of 0 indicates that the NAL Unit is not used as a reference by any other unit
 * and can be safely discarded.
 */
NALU_REF_IDC = {
    NALU_PRIORITY_HIGHEST : 3,
    NALU_PRIORITY_HIGH : 2,
    NALU_PRIORITY_LOW : 1,
    NALU_PRIORITY_DISPOSABLE : 0
};
SLICE_TYPE = {
    /**
     * Intra prediction (I) and/or prediction from one reference per macroblock partition (P).
     */
    P_SLICE : 0,
    /**
     * Intra prediction (I), prediction from one reference frame (P) or biprediction from two references (B).
     */
    B_SLICE : 1,
    /** Intra prediction only. */
    I_SLICE : 2,
    SP_SLICE : 3,
    SI_SLICE : 4,
    P_ALL_SLICE : 5,
    B_ALL_SLICE : 6,
    I_ALL_SLICE : 7,
    SP_ALL_SLICE : 8,
    SI_ALL_SLICE : 9
};
/**
 * Represents a NAL (Network Abstraction Layer) Unit
 *
 * NAL Unit Header Format:
 *
 * forbidden_zero_bit (F): Usually set to 0 at source, set to 1 to indicate
 * errors in the NAL Unit.
 *
 * nal_ref_idc (NRI): Indicates the importance of the NAL Unit, from 0 (low) to
 * 3 (high).
 *
 * nal_unit_type (TYPE): Indicates the type of the NAL Unit. Although this field
 * encodes 32 possible values, only 12 are used by H.264.
 *
 * Payload: A buffer that contains an encapsulated byte sequence payload (EBSP)
 * which needs to be decoded to a raw byte sequence payload (RBSP) before
 * further processing.
 *
 *  <1> <-2-> <-----5-----> <--------- ? --------->
 * +---+-----+-------------+-----------------------+
 * | F | NRI |    TYPE     |        Payload        |
 * +---+-----+-------------+-----------------------+
 */
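/*
 * Worked example: the header byte 0x67 = 0110 0111b splits into
 * forbidden_zero_bit = 0, nal_ref_idc = (0x67 & 0x60) >> 5 = 3 and
 * nal_unit_type = 0x67 & 0x1F = 7, i.e. a Sequence Parameter Set sent at the
 * highest priority.
 */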
var NALUnit = (function () {
    function constructor(ptr) {
        var forbidden_zero_bit;
        if (ptr.length == 0) {
            unexpected();
        } else {
            forbidden_zero_bit = ptr[0] >> 7;
            if (forbidden_zero_bit != 0) {
                unexpected();
            }
            this.nal_ref_idc = (ptr[0] & 0x60) >> 5;
            this.nal_type = ptr[0] & 0x1F;
            this.rbsp = ptr.subarray(1, 1 + convertEBSPToRBSP(ptr.subarray(1)));
            return;
        }
    }
    /**
     * Converts an encapsulated byte sequence payload (EBSP) to a raw byte sequence payload (RBSP).
     */
    function convertEBSPToRBSP(ptr) {
        var j = 0;
        var count = 0;
        var size = ptr.length;
        for (var i = 0; i < size; i++) {
            var x = ptr[i];
            if (count == 2 && x == 0x03) {
                /* drop the emulation prevention byte and copy the byte that follows it */
                i += 1;
                count = 0;
                if (i >= size) {
                    break;
                }
                x = ptr[i];
            }
            ptr[j++] = x;
            if (x == 0x00) {
                count += 1;
            } else {
                count = 0;
            }
        }
        return j;
    }
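    /*
     * Example: the EBSP bytes 00 00 03 01 convert to the RBSP bytes 00 00 01;
     * the 0x03 following two zero bytes is an emulation prevention byte and is
     * dropped, while every other byte is copied through unchanged.
     */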
    constructor.prototype = {
        toString : function toString() {
            return getProperties(this);
        },
        createUnit : function() {
            switch (this.nal_type) {
                case NALU_TYPE.SPS:
                    return new SPS();
                case NALU_TYPE.PPS:
                    return new PPS();
                case NALU_TYPE.SLICE:
                case NALU_TYPE.IDR:
                    return new Slice(this.nal_type);
                default:
                    return null;
                    // unexpected();
            }
        }
    };
    return constructor;
})();
var Slice = (function() {
    function constructor(nal_unit_type) {
        this.nal_unit_type = nal_unit_type;
        this.header = new SliceHeader();
        this.data = new SliceData(this.header);
    }
    constructor.prototype.decode = function (stream) {
        var header = this.header;
        var video = decoder.Video;
        header.decode(this.nal_unit_type, stream);
        if (this.nal_unit_type == NALU_TYPE.IDR) {
            // video.prevFrameNumber = 0;
        }
        // this.data.decode(stream);
    };
    constructor.prototype.toString = function () {
        return "Slice: " + getProperties(this, true);
    };
    return constructor;
})();
/**
 * Represents a Slice's Data
 */
var SliceData = (function() {
    function constructor(header) {
        this.header = header;
    }
    constructor.prototype.decode = function (stream) {
    };
    constructor.prototype.toString = function () {
        return "Slice Data: " + getProperties(this, true);
    };
    return constructor;
})();
/**
 * Represents a Slice Header
 *
 * Clause 7.4.3
 */
var SliceHeader = (function() {
    function constructor() { }
    constructor.prototype.decode = function (nal_unit_type, stream) {
        var video = decoder.Video;
        traceln("| + Slice Header");
        this.first_mb_in_slice = stream.uev();
        traceln("| | first_mb_in_slice: " + this.first_mb_in_slice);
        this.slice_type = stream.uev();
        traceln("| | slice_type: " + this.slice_type);
        if (this.first_mb_in_slice != 0) {
            notImplemented();
        }
        this.pic_parameter_set_id = stream.uev();
        traceln("| | pic_parameter_set_id: " + this.pic_parameter_set_id);
        assertRange(this.pic_parameter_set_id, 0, 255);
        var currentPPS = video.CurrentPPS = decoder.PictureParameterSets[this.pic_parameter_set_id];
        if (currentPPS == null) {
            unexpected("pic_parameter_set_id: " + this.pic_parameter_set_id);
        }
        var currentSPS = video.CurrentSPS = decoder.SequenceParameterSets[currentPPS.seq_parameter_set_id];
        if (currentSPS == null) {
            unexpected();
        }
        if (video.CurrentPPS !== currentPPS) {
            video.CurrentPPS = currentPPS;
            // notImplemented("currentPPS.seq_parameter_set_id: " + currentPPS.seq_parameter_set_id +
            //                " != decoder.Video.seq_parameter_set_id: " + decoder.Video.seq_parameter_set_id);
        }
        /* derived variables from SPS */
        video.MaxFrameNum = 1 << (currentSPS.log2_max_frame_num_minus4 + 4);
        // MC_OPTIMIZE
        video.PicWidthInMbs = currentSPS.pic_width_in_mbs_minus1 + 1;
        video.PicWidthInSamplesL = video.PicWidthInMbs * 16;
        video.PicWidthInSamplesC = video.PicWidthInMbs * 8;
        video.PicHeightInMapUnits = currentSPS.pic_height_in_map_units_minus1 + 1;
        video.PicSizeInMapUnits = video.PicWidthInMbs * video.PicHeightInMapUnits;
        video.FrameHeightInMbs = (2 - currentSPS.frame_mbs_only_flag) * video.PicHeightInMapUnits;
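        /*
         * Example with illustrative numbers: a 1280x720 stream encodes
         * pic_width_in_mbs_minus1 = 79 and pic_height_in_map_units_minus1 = 44,
         * so PicWidthInMbs = 80, PicWidthInSamplesL = 1280,
         * PicHeightInMapUnits = 45 and, with frame_mbs_only_flag = 1,
         * FrameHeightInMbs = 45 (i.e. 720 luma rows).
         */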
        /* derived from PPS */
        video.SliceGroupChangeRate = currentPPS.slice_group_change_rate_minus1 + 1;
        this.frame_num = stream.readBits(currentSPS.log2_max_frame_num_minus4 + 4);
        traceln("| | frame_num: " + this.frame_num);
        /* Book 5.3.4, if the frame_mbs_only_flag is set to zero, special coding of fields or interlaced video
         * is enabled. */
        if (!currentSPS.frame_mbs_only_flag) {
            /* Clause 7.4.3, a field_pic_flag set to zero indicates the slice is a coded frame, otherwise it's
             * a coded field. We don't support interlaced video. */
            this.field_pic_flag = stream.readBit();
            traceln("| | field_pic_flag: " + this.field_pic_flag);
            assertFalse(this.field_pic_flag);
        }
        /* derived variables from slice header */
        video.PicHeightInMbs = video.FrameHeightInMbs;
        video.PicHeightInSamplesL = video.PicHeightInMbs * 16;
        video.PicHeightInSamplesC = video.PicHeightInMbs * 8;
        video.PicSizeInMbs = video.PicWidthInMbs * video.PicHeightInMbs;
        if (this.first_mb_in_slice >= video.PicSizeInMbs) {
            unexpected();
        }
        video.MaxPicNum = video.MaxFrameNum;
        video.CurrPicNum = this.frame_num;
        if (nal_unit_type == NALU_TYPE.IDR) {
            if (this.frame_num != 0) {
                unexpected();
            }
            this.idr_pic_id = stream.uev();
            traceln("| | idr_pic_id: " + this.idr_pic_id);
        }
        this.delta_pic_order_cnt_bottom = 0; /* default value */
        this.delta_pic_order_cnt = [0, 0];
        if (currentSPS.pic_order_cnt_type == 0) {
            this.pic_order_cnt_lsb = stream.readBits(currentSPS.log2_max_pic_order_cnt_lsb_minus4 + 4);
            traceln("| | pic_order_cnt_lsb: " + this.pic_order_cnt_lsb);
            video.MaxPicOrderCntLsb = 1 << (currentSPS.log2_max_pic_order_cnt_lsb_minus4 + 4);
            if (this.pic_order_cnt_lsb > video.MaxPicOrderCntLsb - 1) {
                unexpected();
            }
            if (currentPPS.pic_order_present_flag) {
                notImplemented();
                this.delta_pic_order_cnt_bottom = stream.sev32();
                traceln("| | delta_pic_order_cnt_bottom: " + this.delta_pic_order_cnt_bottom);
            }
        }
        if (currentSPS.pic_order_cnt_type == 1 && !currentSPS.delta_pic_order_always_zero_flag) {
            this.delta_pic_order_cnt[0] = stream.sev32();
            traceln("| | delta_pic_order_cnt[0]: " + this.delta_pic_order_cnt[0]);
            if (currentPPS.pic_order_present_flag) {
                this.delta_pic_order_cnt[1] = stream.sev32();
                traceln("| | delta_pic_order_cnt[1]: " + this.delta_pic_order_cnt[1]);
            }
        }
        this.redundant_pic_cnt = 0; /* default value */
        if (currentPPS.redundant_pic_cnt_present_flag) {
            // MC_CHECK
            this.redundant_pic_cnt = stream.uev();
            if (this.redundant_pic_cnt > 127) { /* out of range */
                unexpected();
            }
            if (this.redundant_pic_cnt > 0) { /* redundant picture */
                unexpected(); /* not supported */
            }
        }
        this.num_ref_idx_l0_active_minus1 = currentPPS.num_ref_idx_l0_active_minus1;
        this.num_ref_idx_l1_active_minus1 = currentPPS.num_ref_idx_l1_active_minus1;
        if (this.slice_type == SLICE_TYPE.P_SLICE) {
            this.num_ref_idx_active_override_flag = stream.readBit();
            traceln("| | num_ref_idx_active_override_flag: " + this.num_ref_idx_active_override_flag);
            if (this.num_ref_idx_active_override_flag) {
                this.num_ref_idx_l0_active_minus1 = stream.uev();
                traceln("| | num_ref_idx_l0_active_minus1: " + this.num_ref_idx_l0_active_minus1);
            } else {
                /* the following condition is not allowed if the flag is zero */
                if ((this.slice_type == SLICE_TYPE.P_SLICE) && currentPPS.num_ref_idx_l0_active_minus1 > 15) {
                    unexpected(); /* not allowed */
                }
            }
        }
        if (this.num_ref_idx_l0_active_minus1 > 15 || this.num_ref_idx_l1_active_minus1 > 15) {
            unexpected(); /* not allowed */
        }
        /* if MbaffFrameFlag = 1,
           max value of index is num_ref_idx_l0_active_minus1 for frame MBs and
           2 * this.num_ref_idx_l0_active_minus1 + 1 for field MBs */
        this.ref_pic_list_reordering(video, stream);
        if (video.nal_ref_idc) {
            this.dec_ref_pic_marking(video, stream, this);
        }
        this.slice_qp_delta = stream.sev();
        traceln("| | slice_qp_delta: " + this.slice_qp_delta);
        video.QPy = 26 + currentPPS.pic_init_qp_minus26 + this.slice_qp_delta;
        if (video.QPy > 51 || video.QPy < 0) {
            video.QPy = clip(0, 51, video.QPy);
        }
        video.QPc = mapQPi2QPc[clip(0, 51, video.QPy + video.CurrentPPS.chroma_qp_index_offset)];
        video.QPy_div_6 = (video.QPy * 43) >>> 8;
        video.QPy_mod_6 = video.QPy - 6 * video.QPy_div_6;
        video.QPc_div_6 = (video.QPc * 43) >>> 8;
        video.QPc_mod_6 = video.QPc - 6 * video.QPc_div_6;
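        /*
         * Example: with pic_init_qp_minus26 = 0 and slice_qp_delta = 2 the slice
         * starts at QPy = 28; (28 * 43) >>> 8 = 4 reproduces Math.floor(28 / 6)
         * without a division, so QPy_div_6 = 4 and QPy_mod_6 = 28 - 6 * 4 = 4.
         */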
        this.slice_alpha_c0_offset_div2 = 0;
        this.slice_beta_offset_div_2 = 0;
        this.disable_deblocking_filter_idc = 0;
        video.FilterOffsetA = video.FilterOffsetB = 0;
        if (currentPPS.deblocking_filter_control_present_flag) {
            this.disable_deblocking_filter_idc = stream.uev();
            traceln("| | disable_deblocking_filter_idc: " + this.disable_deblocking_filter_idc);
            if (this.disable_deblocking_filter_idc > 2) {
                unexpected(); /* out of range */
            }
            if (this.disable_deblocking_filter_idc != 1) {
                this.slice_alpha_c0_offset_div2 = stream.sev();
                traceln("| | slice_alpha_c0_offset_div2: " + this.slice_alpha_c0_offset_div2);
                if (this.slice_alpha_c0_offset_div2 < -6 || this.slice_alpha_c0_offset_div2 > 6) {
                    unexpected();
                }
                video.FilterOffsetA = this.slice_alpha_c0_offset_div2 << 1;
                this.slice_beta_offset_div_2 = stream.sev();
                traceln("| | slice_beta_offset_div_2: " + this.slice_beta_offset_div_2);
                if (this.slice_beta_offset_div_2 < -6 || this.slice_beta_offset_div_2 > 6) {
                    unexpected();
                }
                video.FilterOffsetB = this.slice_beta_offset_div_2 << 1;
            }
        }
        if (currentPPS.num_slice_groups_minus1 > 0 &&
            currentPPS.slice_group_map_type >= 3 &&
            currentPPS.slice_group_map_type <= 5) {
            /* Ceil(Log2(PicSizeInMapUnits / (float)SliceGroupChangeRate + 1)) */
            var temp = video.PicSizeInMapUnits / video.SliceGroupChangeRate;
            if (video.PicSizeInMapUnits % video.SliceGroupChangeRate) {
                temp++;
            }
            var i = 0;
            temp++;
            while (temp) {
                temp >>= 1;
                i++;
            }
            this.slice_group_change_cycle = stream.readBits(i);
            traceln("| | slice_group_change_cycle: " + this.slice_group_change_cycle);
            video.MapUnitsInSliceGroup0 = min(this.slice_group_change_cycle * video.SliceGroupChangeRate, video.PicSizeInMapUnits);
        }
    };
    /**
     * Book 5.3.3.2
     * Clause 7.3.3.1
     *
     * The reference picture list order can be changed for the current slice only by using this command. The
     * ref_pic_list_reordering_flag indicates that such an operation should occur.
     */
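    /*
     * As parsed below, reordering_of_pic_nums_idc selects the command: 0 and 1
     * reorder a short-term picture addressed by abs_diff_pic_num_minus1
     * (subtracted from or added to the current picture number), 2 reorders a
     * long-term picture addressed by long_term_pic_num, and 3 terminates the
     * command list.
     */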
    constructor.prototype.ref_pic_list_reordering = function(video, stream) {
        if (this.slice_type != SLICE_TYPE.I_SLICE) {
            this.ref_pic_list_reordering_flag_l0 = stream.readBit();
            traceln("| | ref_pic_list_reordering_flag_l0: " + this.ref_pic_list_reordering_flag_l0);
            if (this.ref_pic_list_reordering_flag_l0) {
                traceln("| | + Reference Picture List Reordering Commands");
                var i = 0;
                this.reordering_of_pic_nums_idc_l0 = [];
                this.abs_diff_pic_num_minus1_l0 = [];
                this.long_term_pic_num_l0 = [];
                do {
                    var res = this.reordering_of_pic_nums_idc_l0[i] = stream.uev();
                    if (res == 0 || res == 1) {
                        this.abs_diff_pic_num_minus1_l0[i] = stream.uev();
                        traceln("| | abs_diff_pic_num_minus1_l0[" + i + "]: " + this.abs_diff_pic_num_minus1_l0[i]);
                        assertFalse(res == 0 && this.abs_diff_pic_num_minus1_l0[i] > video.MaxPicNum / 2 - 1);
                        assertFalse(res == 1 && this.abs_diff_pic_num_minus1_l0[i] > video.MaxPicNum / 2 - 2);
                    } else if (res == 2) {
                        this.long_term_pic_num_l0[i] = stream.uev();
                        traceln("| | long_term_pic_num_l0[" + i + "]: " + this.long_term_pic_num_l0[i]);
                    }
                    i++;
                } while (this.reordering_of_pic_nums_idc_l0[i - 1] != 3 && i <= this.num_ref_idx_l0_active_minus1 + 1);
            }
        }
    };
    /**
     * Clause 7.4.3.3
     */
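    /*
     * memory_management_control_operation values (clause 7.4.3.3): 0 ends the
     * list, 1 unmarks a short-term reference picture, 2 unmarks a long-term
     * reference picture, 3 converts a short-term picture into a long-term one,
     * 4 sets the maximum long-term frame index, 5 unmarks all reference
     * pictures and 6 marks the current picture as a long-term reference.
     */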
    constructor.prototype.dec_ref_pic_marking = function(video, stream) {
        traceln("| | + dec_ref_pic_marking");
        if (video.nal_unit_type == NALU_TYPE.IDR) {
            this.no_output_of_prior_pics_flag = stream.readBit();
            traceln("| | | no_output_of_prior_pics_flag: " + this.no_output_of_prior_pics_flag);
            this.long_term_reference_flag = stream.readBit();
            traceln("| | | long_term_reference_flag: " + this.long_term_reference_flag);
            if (this.long_term_reference_flag == 0) {
                video.MaxLongTermFrameIdx = -1;
            } else {
                video.MaxLongTermFrameIdx = 0;
                video.LongTermFrameIdx = 0;
            }
        } else {
            this.adaptive_ref_pic_marking_mode_flag = stream.readBit();
            traceln("| | | adaptive_ref_pic_marking_mode_flag: " + this.adaptive_ref_pic_marking_mode_flag);
            if (this.adaptive_ref_pic_marking_mode_flag) {
                this.memory_management_control_operation = [];
                this.difference_of_pic_nums_minus1 = [];
                this.long_term_pic_num = [];
                this.long_term_frame_idx = [];
                this.max_long_term_frame_idx_plus1 = [];
                var i = 0;
                do {
                    var res = this.memory_management_control_operation[i] = stream.uev();
                    traceln("| | | memory_management_control_operation[" + i + "]: " + this.memory_management_control_operation[i]);
                    if (res == 1 || res == 3) {
                        this.difference_of_pic_nums_minus1[i] = stream.uev();
                        traceln("| | | difference_of_pic_nums_minus1[" + i + "]: " + this.difference_of_pic_nums_minus1[i]);
                    }
                    if (res == 2) {
                        this.long_term_pic_num[i] = stream.uev();
                        traceln("| | | long_term_pic_num[" + i + "]: " + this.long_term_pic_num[i]);
                    }
                    if (res == 3 || res == 6) {
                        this.long_term_frame_idx[i] = stream.uev();
                        traceln("| | | long_term_frame_idx[" + i + "]: " + this.long_term_frame_idx[i]);
                    }
                    if (res == 4) {
                        this.max_long_term_frame_idx_plus1[i] = stream.uev();
                        traceln("| | | max_long_term_frame_idx_plus1[" + i + "]: " + this.max_long_term_frame_idx_plus1[i]);
                    }
                    i++;
                } while (this.memory_management_control_operation[i - 1] != 0 && i < MAX_DEC_REF_PIC_MARKING);
                assertFalse(i >= MAX_DEC_REF_PIC_MARKING);
            }
        }
    };
    constructor.prototype.toString = function () {
        return "Slice Header: " + getProperties(this, true);
    };
    return constructor;
})();