
/media/libvpx/vp8/encoder/bitstream.c

http://github.com/zpao/v8monkey
  1. /*
  2. * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
  3. *
  4. * Use of this source code is governed by a BSD-style license
  5. * that can be found in the LICENSE file in the root of the source
  6. * tree. An additional intellectual property rights grant can be found
  7. * in the file PATENTS. All contributing project authors may
  8. * be found in the AUTHORS file in the root of the source tree.
  9. */
  10. #include "vp8/common/header.h"
  11. #include "encodemv.h"
  12. #include "vp8/common/entropymode.h"
  13. #include "vp8/common/findnearmv.h"
  14. #include "mcomp.h"
  15. #include "vp8/common/systemdependent.h"
  16. #include <assert.h>
  17. #include <stdio.h>
  18. #include <limits.h>
  19. #include "vp8/common/pragmas.h"
  20. #include "vpx/vpx_encoder.h"
  21. #include "vpx_mem/vpx_mem.h"
  22. #include "bitstream.h"
  23. #include "vp8/common/defaultcoefcounts.h"
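/* Baseline probabilities for coding the "skip" (no non-zero coefficients) flag;
 * 128 entries, presumably indexed by the base quantizer index. */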
  24. const int vp8cx_base_skip_false_prob[128] =
  25. {
  26. 255, 255, 255, 255, 255, 255, 255, 255,
  27. 255, 255, 255, 255, 255, 255, 255, 255,
  28. 255, 255, 255, 255, 255, 255, 255, 255,
  29. 255, 255, 255, 255, 255, 255, 255, 255,
  30. 255, 255, 255, 255, 255, 255, 255, 255,
  31. 255, 255, 255, 255, 255, 255, 255, 255,
  32. 255, 255, 255, 255, 255, 255, 255, 255,
  33. 251, 248, 244, 240, 236, 232, 229, 225,
  34. 221, 217, 213, 208, 204, 199, 194, 190,
  35. 187, 183, 179, 175, 172, 168, 164, 160,
  36. 157, 153, 149, 145, 142, 138, 134, 130,
  37. 127, 124, 120, 117, 114, 110, 107, 104,
  38. 101, 98, 95, 92, 89, 86, 83, 80,
  39. 77, 74, 71, 68, 65, 62, 59, 56,
  40. 53, 50, 47, 44, 41, 38, 35, 32,
  41. 30, 28, 26, 24, 22, 20, 18, 16,
  42. };
  43. #if defined(SECTIONBITS_OUTPUT)
  44. unsigned __int64 Sectionbits[500];
  45. #endif
  46. #ifdef ENTROPY_STATS
  47. int intra_mode_stats[10][10][10];
  48. static unsigned int tree_update_hist [BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [ENTROPY_NODES] [2];
  49. extern unsigned int active_section;
  50. #endif
  51. #ifdef MODE_STATS
  52. int count_mb_seg[4] = { 0, 0, 0, 0 };
  53. #endif
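/* Compare the cost of coding this mode tree with freshly derived probabilities (Pnew)
 * against the current ones (Pcur); transmit the new set only when the estimated saving
 * outweighs the update overhead. */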
  54. static void update_mode(
  55. vp8_writer *const w,
  56. int n,
  57. vp8_token tok [/* n */],
  58. vp8_tree tree,
  59. vp8_prob Pnew [/* n-1 */],
  60. vp8_prob Pcur [/* n-1 */],
  61. unsigned int bct [/* n-1 */] [2],
  62. const unsigned int num_events[/* n */]
  63. )
  64. {
  65. unsigned int new_b = 0, old_b = 0;
  66. int i = 0;
  67. vp8_tree_probs_from_distribution(
  68. n--, tok, tree,
  69. Pnew, bct, num_events,
  70. 256, 1
  71. );
  72. do
  73. {
  74. new_b += vp8_cost_branch(bct[i], Pnew[i]);
  75. old_b += vp8_cost_branch(bct[i], Pcur[i]);
  76. }
  77. while (++i < n);
  78. if (new_b + (n << 8) < old_b)
  79. {
  80. int i = 0;
  81. vp8_write_bit(w, 1);
  82. do
  83. {
  84. const vp8_prob p = Pnew[i];
  85. vp8_write_literal(w, Pcur[i] = p ? p : 1, 8);
  86. }
  87. while (++i < n);
  88. }
  89. else
  90. vp8_write_bit(w, 0);
  91. }
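/* Send updated intra Y-mode and UV-mode probabilities when doing so saves bits. */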
  92. static void update_mbintra_mode_probs(VP8_COMP *cpi)
  93. {
  94. VP8_COMMON *const x = & cpi->common;
  95. vp8_writer *const w = & cpi->bc;
  96. {
  97. vp8_prob Pnew [VP8_YMODES-1];
  98. unsigned int bct [VP8_YMODES-1] [2];
  99. update_mode(
  100. w, VP8_YMODES, vp8_ymode_encodings, vp8_ymode_tree,
  101. Pnew, x->fc.ymode_prob, bct, (unsigned int *)cpi->ymode_count
  102. );
  103. }
  104. {
  105. vp8_prob Pnew [VP8_UV_MODES-1];
  106. unsigned int bct [VP8_UV_MODES-1] [2];
  107. update_mode(
  108. w, VP8_UV_MODES, vp8_uv_mode_encodings, vp8_uv_mode_tree,
  109. Pnew, x->fc.uv_mode_prob, bct, (unsigned int *)cpi->uv_mode_count
  110. );
  111. }
  112. }
  113. static void write_ymode(vp8_writer *bc, int m, const vp8_prob *p)
  114. {
  115. vp8_write_token(bc, vp8_ymode_tree, p, vp8_ymode_encodings + m);
  116. }
  117. static void kfwrite_ymode(vp8_writer *bc, int m, const vp8_prob *p)
  118. {
  119. vp8_write_token(bc, vp8_kf_ymode_tree, p, vp8_kf_ymode_encodings + m);
  120. }
  121. static void write_uv_mode(vp8_writer *bc, int m, const vp8_prob *p)
  122. {
  123. vp8_write_token(bc, vp8_uv_mode_tree, p, vp8_uv_mode_encodings + m);
  124. }
  125. static void write_bmode(vp8_writer *bc, int m, const vp8_prob *p)
  126. {
  127. vp8_write_token(bc, vp8_bmode_tree, p, vp8_bmode_encodings + m);
  128. }
  129. static void write_split(vp8_writer *bc, int x)
  130. {
  131. vp8_write_token(
  132. bc, vp8_mbsplit_tree, vp8_mbsplit_probs, vp8_mbsplit_encodings + x
  133. );
  134. }
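/* Write a list of coefficient tokens with the boolean arithmetic coder. The coder state
 * (lowvalue, range, count) is kept in locals and the renormalisation / carry-propagation
 * logic is inlined; extra bits and a sign bit are appended for tokens that carry them. */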
  135. static void pack_tokens_c(vp8_writer *w, const TOKENEXTRA *p, int xcount)
  136. {
  137. const TOKENEXTRA *const stop = p + xcount;
  138. unsigned int split;
  139. unsigned int shift;
  140. int count = w->count;
  141. unsigned int range = w->range;
  142. unsigned int lowvalue = w->lowvalue;
  143. while (p < stop)
  144. {
  145. const int t = p->Token;
  146. vp8_token *const a = vp8_coef_encodings + t;
  147. const vp8_extra_bit_struct *const b = vp8_extra_bits + t;
  148. int i = 0;
  149. const unsigned char *pp = p->context_tree;
  150. int v = a->value;
  151. int n = a->Len;
  152. if (p->skip_eob_node)
  153. {
  154. n--;
  155. i = 2;
  156. }
  157. do
  158. {
  159. const int bb = (v >> --n) & 1;
  160. split = 1 + (((range - 1) * pp[i>>1]) >> 8);
  161. i = vp8_coef_tree[i+bb];
  162. if (bb)
  163. {
  164. lowvalue += split;
  165. range = range - split;
  166. }
  167. else
  168. {
  169. range = split;
  170. }
  171. shift = vp8_norm[range];
  172. range <<= shift;
  173. count += shift;
  174. if (count >= 0)
  175. {
  176. int offset = shift - count;
  177. if ((lowvalue << (offset - 1)) & 0x80000000)
  178. {
  179. int x = w->pos - 1;
  180. while (x >= 0 && w->buffer[x] == 0xff)
  181. {
  182. w->buffer[x] = (unsigned char)0;
  183. x--;
  184. }
  185. w->buffer[x] += 1;
  186. }
  187. w->buffer[w->pos++] = (lowvalue >> (24 - offset));
  188. lowvalue <<= offset;
  189. shift = count;
  190. lowvalue &= 0xffffff;
  191. count -= 8 ;
  192. }
  193. lowvalue <<= shift;
  194. }
  195. while (n);
  196. if (b->base_val)
  197. {
  198. const int e = p->Extra, L = b->Len;
  199. if (L)
  200. {
  201. const unsigned char *pp = b->prob;
  202. int v = e >> 1;
  203. int n = L; /* number of bits in v, assumed nonzero */
  204. int i = 0;
  205. do
  206. {
  207. const int bb = (v >> --n) & 1;
  208. split = 1 + (((range - 1) * pp[i>>1]) >> 8);
  209. i = b->tree[i+bb];
  210. if (bb)
  211. {
  212. lowvalue += split;
  213. range = range - split;
  214. }
  215. else
  216. {
  217. range = split;
  218. }
  219. shift = vp8_norm[range];
  220. range <<= shift;
  221. count += shift;
  222. if (count >= 0)
  223. {
  224. int offset = shift - count;
  225. if ((lowvalue << (offset - 1)) & 0x80000000)
  226. {
  227. int x = w->pos - 1;
  228. while (x >= 0 && w->buffer[x] == 0xff)
  229. {
  230. w->buffer[x] = (unsigned char)0;
  231. x--;
  232. }
  233. w->buffer[x] += 1;
  234. }
  235. w->buffer[w->pos++] = (lowvalue >> (24 - offset));
  236. lowvalue <<= offset;
  237. shift = count;
  238. lowvalue &= 0xffffff;
  239. count -= 8 ;
  240. }
  241. lowvalue <<= shift;
  242. }
  243. while (n);
  244. }
  245. {
  246. split = (range + 1) >> 1;
  247. if (e & 1)
  248. {
  249. lowvalue += split;
  250. range = range - split;
  251. }
  252. else
  253. {
  254. range = split;
  255. }
  256. range <<= 1;
  257. if ((lowvalue & 0x80000000))
  258. {
  259. int x = w->pos - 1;
  260. while (x >= 0 && w->buffer[x] == 0xff)
  261. {
  262. w->buffer[x] = (unsigned char)0;
  263. x--;
  264. }
  265. w->buffer[x] += 1;
  266. }
  267. lowvalue <<= 1;
  268. if (!++count)
  269. {
  270. count = -8;
  271. w->buffer[w->pos++] = (lowvalue >> 24);
  272. lowvalue &= 0xffffff;
  273. }
  274. }
  275. }
  276. ++p;
  277. }
  278. w->count = count;
  279. w->lowvalue = lowvalue;
  280. w->range = range;
  281. }
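/* Store a partition size as a 3-byte little-endian value. */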
  282. static void write_partition_size(unsigned char *cx_data, int size)
  283. {
  284. signed char csize;
  285. csize = size & 0xff;
  286. *cx_data = csize;
  287. csize = (size >> 8) & 0xff;
  288. *(cx_data + 1) = csize;
  289. csize = (size >> 16) & 0xff;
  290. *(cx_data + 2) = csize;
  291. }
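/* Pack the tokens into num_part partitions: macroblock rows are interleaved across
 * partitions (row r goes to partition r % num_part), and each partition size except
 * the last is written as a 3-byte header at the start of the data. */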
  292. static void pack_tokens_into_partitions_c(VP8_COMP *cpi, unsigned char *cx_data, int num_part, int *size)
  293. {
  294. int i;
  295. unsigned char *ptr = cx_data;
  296. unsigned int shift;
  297. vp8_writer *w = &cpi->bc2;
  298. *size = 3 * (num_part - 1);
  299. cpi->partition_sz[0] += *size;
  300. ptr = cx_data + (*size);
  301. for (i = 0; i < num_part; i++)
  302. {
  303. vp8_start_encode(w, ptr);
  304. {
  305. unsigned int split;
  306. int count = w->count;
  307. unsigned int range = w->range;
  308. unsigned int lowvalue = w->lowvalue;
  309. int mb_row;
  310. for (mb_row = i; mb_row < cpi->common.mb_rows; mb_row += num_part)
  311. {
  312. TOKENEXTRA *p = cpi->tplist[mb_row].start;
  313. TOKENEXTRA *stop = cpi->tplist[mb_row].stop;
  314. while (p < stop)
  315. {
  316. const int t = p->Token;
  317. vp8_token *const a = vp8_coef_encodings + t;
  318. const vp8_extra_bit_struct *const b = vp8_extra_bits + t;
  319. int i = 0;
  320. const unsigned char *pp = p->context_tree;
  321. int v = a->value;
  322. int n = a->Len;
  323. if (p->skip_eob_node)
  324. {
  325. n--;
  326. i = 2;
  327. }
  328. do
  329. {
  330. const int bb = (v >> --n) & 1;
  331. split = 1 + (((range - 1) * pp[i>>1]) >> 8);
  332. i = vp8_coef_tree[i+bb];
  333. if (bb)
  334. {
  335. lowvalue += split;
  336. range = range - split;
  337. }
  338. else
  339. {
  340. range = split;
  341. }
  342. shift = vp8_norm[range];
  343. range <<= shift;
  344. count += shift;
  345. if (count >= 0)
  346. {
  347. int offset = shift - count;
  348. if ((lowvalue << (offset - 1)) & 0x80000000)
  349. {
  350. int x = w->pos - 1;
  351. while (x >= 0 && w->buffer[x] == 0xff)
  352. {
  353. w->buffer[x] = (unsigned char)0;
  354. x--;
  355. }
  356. w->buffer[x] += 1;
  357. }
  358. w->buffer[w->pos++] = (lowvalue >> (24 - offset));
  359. lowvalue <<= offset;
  360. shift = count;
  361. lowvalue &= 0xffffff;
  362. count -= 8 ;
  363. }
  364. lowvalue <<= shift;
  365. }
  366. while (n);
  367. if (b->base_val)
  368. {
  369. const int e = p->Extra, L = b->Len;
  370. if (L)
  371. {
  372. const unsigned char *pp = b->prob;
  373. int v = e >> 1;
  374. int n = L; /* number of bits in v, assumed nonzero */
  375. int i = 0;
  376. do
  377. {
  378. const int bb = (v >> --n) & 1;
  379. split = 1 + (((range - 1) * pp[i>>1]) >> 8);
  380. i = b->tree[i+bb];
  381. if (bb)
  382. {
  383. lowvalue += split;
  384. range = range - split;
  385. }
  386. else
  387. {
  388. range = split;
  389. }
  390. shift = vp8_norm[range];
  391. range <<= shift;
  392. count += shift;
  393. if (count >= 0)
  394. {
  395. int offset = shift - count;
  396. if ((lowvalue << (offset - 1)) & 0x80000000)
  397. {
  398. int x = w->pos - 1;
  399. while (x >= 0 && w->buffer[x] == 0xff)
  400. {
  401. w->buffer[x] = (unsigned char)0;
  402. x--;
  403. }
  404. w->buffer[x] += 1;
  405. }
  406. w->buffer[w->pos++] = (lowvalue >> (24 - offset));
  407. lowvalue <<= offset;
  408. shift = count;
  409. lowvalue &= 0xffffff;
  410. count -= 8 ;
  411. }
  412. lowvalue <<= shift;
  413. }
  414. while (n);
  415. }
  416. {
  417. split = (range + 1) >> 1;
  418. if (e & 1)
  419. {
  420. lowvalue += split;
  421. range = range - split;
  422. }
  423. else
  424. {
  425. range = split;
  426. }
  427. range <<= 1;
  428. if ((lowvalue & 0x80000000))
  429. {
  430. int x = w->pos - 1;
  431. while (x >= 0 && w->buffer[x] == 0xff)
  432. {
  433. w->buffer[x] = (unsigned char)0;
  434. x--;
  435. }
  436. w->buffer[x] += 1;
  437. }
  438. lowvalue <<= 1;
  439. if (!++count)
  440. {
  441. count = -8;
  442. w->buffer[w->pos++] = (lowvalue >> 24);
  443. lowvalue &= 0xffffff;
  444. }
  445. }
  446. }
  447. ++p;
  448. }
  449. }
  450. w->count = count;
  451. w->lowvalue = lowvalue;
  452. w->range = range;
  453. }
  454. vp8_stop_encode(w);
  455. *size += w->pos;
  456. /* The first partition size is set earlier */
  457. cpi->partition_sz[i + 1] = w->pos;
  458. if (i < (num_part - 1))
  459. {
  460. write_partition_size(cx_data, w->pos);
  461. cx_data += 3;
  462. ptr += w->pos;
  463. }
  464. }
  465. }
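/* Pack tokens one macroblock row at a time from cpi->tplist, using the same inlined
 * boolean coder as pack_tokens_c; used when rows were tokenized by separate threads. */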
  466. static void pack_mb_row_tokens_c(VP8_COMP *cpi, vp8_writer *w)
  467. {
  468. unsigned int split;
  469. int count = w->count;
  470. unsigned int range = w->range;
  471. unsigned int lowvalue = w->lowvalue;
  472. unsigned int shift;
  473. int mb_row;
  474. for (mb_row = 0; mb_row < cpi->common.mb_rows; mb_row++)
  475. {
  476. TOKENEXTRA *p = cpi->tplist[mb_row].start;
  477. TOKENEXTRA *stop = cpi->tplist[mb_row].stop;
  478. while (p < stop)
  479. {
  480. const int t = p->Token;
  481. vp8_token *const a = vp8_coef_encodings + t;
  482. const vp8_extra_bit_struct *const b = vp8_extra_bits + t;
  483. int i = 0;
  484. const unsigned char *pp = p->context_tree;
  485. int v = a->value;
  486. int n = a->Len;
  487. if (p->skip_eob_node)
  488. {
  489. n--;
  490. i = 2;
  491. }
  492. do
  493. {
  494. const int bb = (v >> --n) & 1;
  495. split = 1 + (((range - 1) * pp[i>>1]) >> 8);
  496. i = vp8_coef_tree[i+bb];
  497. if (bb)
  498. {
  499. lowvalue += split;
  500. range = range - split;
  501. }
  502. else
  503. {
  504. range = split;
  505. }
  506. shift = vp8_norm[range];
  507. range <<= shift;
  508. count += shift;
  509. if (count >= 0)
  510. {
  511. int offset = shift - count;
  512. if ((lowvalue << (offset - 1)) & 0x80000000)
  513. {
  514. int x = w->pos - 1;
  515. while (x >= 0 && w->buffer[x] == 0xff)
  516. {
  517. w->buffer[x] = (unsigned char)0;
  518. x--;
  519. }
  520. w->buffer[x] += 1;
  521. }
  522. w->buffer[w->pos++] = (lowvalue >> (24 - offset));
  523. lowvalue <<= offset;
  524. shift = count;
  525. lowvalue &= 0xffffff;
  526. count -= 8 ;
  527. }
  528. lowvalue <<= shift;
  529. }
  530. while (n);
  531. if (b->base_val)
  532. {
  533. const int e = p->Extra, L = b->Len;
  534. if (L)
  535. {
  536. const unsigned char *pp = b->prob;
  537. int v = e >> 1;
  538. int n = L; /* number of bits in v, assumed nonzero */
  539. int i = 0;
  540. do
  541. {
  542. const int bb = (v >> --n) & 1;
  543. split = 1 + (((range - 1) * pp[i>>1]) >> 8);
  544. i = b->tree[i+bb];
  545. if (bb)
  546. {
  547. lowvalue += split;
  548. range = range - split;
  549. }
  550. else
  551. {
  552. range = split;
  553. }
  554. shift = vp8_norm[range];
  555. range <<= shift;
  556. count += shift;
  557. if (count >= 0)
  558. {
  559. int offset = shift - count;
  560. if ((lowvalue << (offset - 1)) & 0x80000000)
  561. {
  562. int x = w->pos - 1;
  563. while (x >= 0 && w->buffer[x] == 0xff)
  564. {
  565. w->buffer[x] = (unsigned char)0;
  566. x--;
  567. }
  568. w->buffer[x] += 1;
  569. }
  570. w->buffer[w->pos++] = (lowvalue >> (24 - offset));
  571. lowvalue <<= offset;
  572. shift = count;
  573. lowvalue &= 0xffffff;
  574. count -= 8 ;
  575. }
  576. lowvalue <<= shift;
  577. }
  578. while (n);
  579. }
  580. {
  581. split = (range + 1) >> 1;
  582. if (e & 1)
  583. {
  584. lowvalue += split;
  585. range = range - split;
  586. }
  587. else
  588. {
  589. range = split;
  590. }
  591. range <<= 1;
  592. if ((lowvalue & 0x80000000))
  593. {
  594. int x = w->pos - 1;
  595. while (x >= 0 && w->buffer[x] == 0xff)
  596. {
  597. w->buffer[x] = (unsigned char)0;
  598. x--;
  599. }
  600. w->buffer[x] += 1;
  601. }
  602. lowvalue <<= 1;
  603. if (!++count)
  604. {
  605. count = -8;
  606. w->buffer[w->pos++] = (lowvalue >> 24);
  607. lowvalue &= 0xffffff;
  608. }
  609. }
  610. }
  611. ++p;
  612. }
  613. }
  614. w->count = count;
  615. w->lowvalue = lowvalue;
  616. w->range = range;
  617. }
  618. static void write_mv_ref
  619. (
  620. vp8_writer *w, MB_PREDICTION_MODE m, const vp8_prob *p
  621. )
  622. {
  623. #if CONFIG_DEBUG
  624. assert(NEARESTMV <= m && m <= SPLITMV);
  625. #endif
  626. vp8_write_token(w, vp8_mv_ref_tree, p,
  627. vp8_mv_ref_encoding_array - NEARESTMV + m);
  628. }
  629. static void write_sub_mv_ref
  630. (
  631. vp8_writer *w, B_PREDICTION_MODE m, const vp8_prob *p
  632. )
  633. {
  634. #if CONFIG_DEBUG
  635. assert(LEFT4X4 <= m && m <= NEW4X4);
  636. #endif
  637. vp8_write_token(w, vp8_sub_mv_ref_tree, p,
  638. vp8_sub_mv_ref_encoding_array - LEFT4X4 + m);
  639. }
  640. static void write_mv
  641. (
  642. vp8_writer *w, const MV *mv, const int_mv *ref, const MV_CONTEXT *mvc
  643. )
  644. {
  645. MV e;
  646. e.row = mv->row - ref->as_mv.row;
  647. e.col = mv->col - ref->as_mv.col;
  648. vp8_encode_motion_vector(w, &e, mvc);
  649. }
  650. static void write_mb_features(vp8_writer *w, const MB_MODE_INFO *mi, const MACROBLOCKD *x)
  651. {
  652. // Encode the MB segment id.
  653. if (x->segmentation_enabled && x->update_mb_segmentation_map)
  654. {
  655. switch (mi->segment_id)
  656. {
  657. case 0:
  658. vp8_write(w, 0, x->mb_segment_tree_probs[0]);
  659. vp8_write(w, 0, x->mb_segment_tree_probs[1]);
  660. break;
  661. case 1:
  662. vp8_write(w, 0, x->mb_segment_tree_probs[0]);
  663. vp8_write(w, 1, x->mb_segment_tree_probs[1]);
  664. break;
  665. case 2:
  666. vp8_write(w, 1, x->mb_segment_tree_probs[0]);
  667. vp8_write(w, 0, x->mb_segment_tree_probs[2]);
  668. break;
  669. case 3:
  670. vp8_write(w, 1, x->mb_segment_tree_probs[0]);
  671. vp8_write(w, 1, x->mb_segment_tree_probs[2]);
  672. break;
  673. // TRAP.. This should not happen
  674. default:
  675. vp8_write(w, 0, x->mb_segment_tree_probs[0]);
  676. vp8_write(w, 0, x->mb_segment_tree_probs[1]);
  677. break;
  678. }
  679. }
  680. }
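/* Write per-macroblock mode and motion information for an inter frame: skip flags,
 * reference frame choice, intra/inter modes and motion vectors (including per-block
 * vectors for SPLITMV). */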
  681. static void pack_inter_mode_mvs(VP8_COMP *const cpi)
  682. {
  683. VP8_COMMON *const pc = & cpi->common;
  684. vp8_writer *const w = & cpi->bc;
  685. const MV_CONTEXT *mvc = pc->fc.mvc;
  686. const int *const rfct = cpi->count_mb_ref_frame_usage;
  687. const int rf_intra = rfct[INTRA_FRAME];
  688. const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
  689. MODE_INFO *m = pc->mi, *ms;
  690. const int mis = pc->mode_info_stride;
  691. int mb_row = -1;
  692. int prob_last_coded;
  693. int prob_gf_coded;
  694. int prob_skip_false = 0;
  695. ms = pc->mi - 1;
  696. cpi->mb.partition_info = cpi->mb.pi;
  697. // Calculate the probabilities used to code the reference frame, based on actual usage this frame
  698. if (!(cpi->prob_intra_coded = rf_intra * 255 / (rf_intra + rf_inter)))
  699. cpi->prob_intra_coded = 1;
  700. prob_last_coded = rf_inter ? (rfct[LAST_FRAME] * 255) / rf_inter : 128;
  701. if (!prob_last_coded)
  702. prob_last_coded = 1;
  703. prob_gf_coded = (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME])
  704. ? (rfct[GOLDEN_FRAME] * 255) / (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME]) : 128;
  705. if (!prob_gf_coded)
  706. prob_gf_coded = 1;
  707. #ifdef ENTROPY_STATS
  708. active_section = 1;
  709. #endif
  710. if (pc->mb_no_coeff_skip)
  711. {
  712. prob_skip_false = cpi->skip_false_count * 256 / (cpi->skip_false_count + cpi->skip_true_count);
  713. if (prob_skip_false <= 1)
  714. prob_skip_false = 1;
  715. if (prob_skip_false > 255)
  716. prob_skip_false = 255;
  717. cpi->prob_skip_false = prob_skip_false;
  718. vp8_write_literal(w, prob_skip_false, 8);
  719. }
  720. vp8_write_literal(w, cpi->prob_intra_coded, 8);
  721. vp8_write_literal(w, prob_last_coded, 8);
  722. vp8_write_literal(w, prob_gf_coded, 8);
  723. update_mbintra_mode_probs(cpi);
  724. vp8_write_mvprobs(cpi);
  725. while (++mb_row < pc->mb_rows)
  726. {
  727. int mb_col = -1;
  728. while (++mb_col < pc->mb_cols)
  729. {
  730. const MB_MODE_INFO *const mi = & m->mbmi;
  731. const MV_REFERENCE_FRAME rf = mi->ref_frame;
  732. const MB_PREDICTION_MODE mode = mi->mode;
  733. MACROBLOCKD *xd = &cpi->mb.e_mbd;
  734. // Distance of Mb to the various image edges.
  735. // These are specified in 1/8th-pel units, as they are always compared to MV values in 1/8th pel
  736. xd->mb_to_left_edge = -((mb_col * 16) << 3);
  737. xd->mb_to_right_edge = ((pc->mb_cols - 1 - mb_col) * 16) << 3;
  738. xd->mb_to_top_edge = -((mb_row * 16)) << 3;
  739. xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3;
  740. #ifdef ENTROPY_STATS
  741. active_section = 9;
  742. #endif
  743. if (cpi->mb.e_mbd.update_mb_segmentation_map)
  744. write_mb_features(w, mi, &cpi->mb.e_mbd);
  745. if (pc->mb_no_coeff_skip)
  746. vp8_encode_bool(w, m->mbmi.mb_skip_coeff, prob_skip_false);
  747. if (rf == INTRA_FRAME)
  748. {
  749. vp8_write(w, 0, cpi->prob_intra_coded);
  750. #ifdef ENTROPY_STATS
  751. active_section = 6;
  752. #endif
  753. write_ymode(w, mode, pc->fc.ymode_prob);
  754. if (mode == B_PRED)
  755. {
  756. int j = 0;
  757. do
  758. write_bmode(w, m->bmi[j].as_mode, pc->fc.bmode_prob);
  759. while (++j < 16);
  760. }
  761. write_uv_mode(w, mi->uv_mode, pc->fc.uv_mode_prob);
  762. }
  763. else /* inter coded */
  764. {
  765. int_mv best_mv;
  766. vp8_prob mv_ref_p [VP8_MVREFS-1];
  767. vp8_write(w, 1, cpi->prob_intra_coded);
  768. if (rf == LAST_FRAME)
  769. vp8_write(w, 0, prob_last_coded);
  770. else
  771. {
  772. vp8_write(w, 1, prob_last_coded);
  773. vp8_write(w, (rf == GOLDEN_FRAME) ? 0 : 1, prob_gf_coded);
  774. }
  775. {
  776. int_mv n1, n2;
  777. int ct[4];
  778. vp8_find_near_mvs(xd, m, &n1, &n2, &best_mv, ct, rf, cpi->common.ref_frame_sign_bias);
  779. vp8_mv_ref_probs(mv_ref_p, ct);
  780. #ifdef ENTROPY_STATS
  781. accum_mv_refs(mode, ct);
  782. #endif
  783. }
  784. #ifdef ENTROPY_STATS
  785. active_section = 3;
  786. #endif
  787. write_mv_ref(w, mode, mv_ref_p);
  788. switch (mode) /* new, split require MVs */
  789. {
  790. case NEWMV:
  791. #ifdef ENTROPY_STATS
  792. active_section = 5;
  793. #endif
  794. write_mv(w, &mi->mv.as_mv, &best_mv, mvc);
  795. break;
  796. case SPLITMV:
  797. {
  798. int j = 0;
  799. #ifdef MODE_STATS
  800. ++count_mb_seg [mi->partitioning];
  801. #endif
  802. write_split(w, mi->partitioning);
  803. do
  804. {
  805. B_PREDICTION_MODE blockmode;
  806. int_mv blockmv;
  807. const int *const L = vp8_mbsplits [mi->partitioning];
  808. int k = -1; /* first block in subset j */
  809. int mv_contz;
  810. int_mv leftmv, abovemv;
  811. blockmode = cpi->mb.partition_info->bmi[j].mode;
  812. blockmv = cpi->mb.partition_info->bmi[j].mv;
  813. #if CONFIG_DEBUG
  814. while (j != L[++k])
  815. if (k >= 16)
  816. assert(0);
  817. #else
  818. while (j != L[++k]);
  819. #endif
  820. leftmv.as_int = left_block_mv(m, k);
  821. abovemv.as_int = above_block_mv(m, k, mis);
  822. mv_contz = vp8_mv_cont(&leftmv, &abovemv);
  823. write_sub_mv_ref(w, blockmode, vp8_sub_mv_ref_prob2 [mv_contz]);
  824. if (blockmode == NEW4X4)
  825. {
  826. #ifdef ENTROPY_STATS
  827. active_section = 11;
  828. #endif
  829. write_mv(w, &blockmv.as_mv, &best_mv, (const MV_CONTEXT *) mvc);
  830. }
  831. }
  832. while (++j < cpi->mb.partition_info->count);
  833. }
  834. break;
  835. default:
  836. break;
  837. }
  838. }
  839. ++m;
  840. cpi->mb.partition_info++;
  841. }
  842. ++m; /* skip L prediction border */
  843. cpi->mb.partition_info++;
  844. }
  845. }
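/* Write per-macroblock mode information for a key frame: optional segment ids and skip
 * flags, the luma mode (with 4x4 block modes for B_PRED) and the chroma mode. */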
  846. static void write_kfmodes(VP8_COMP *cpi)
  847. {
  848. vp8_writer *const bc = & cpi->bc;
  849. const VP8_COMMON *const c = & cpi->common;
  850. /* const */
  851. MODE_INFO *m = c->mi;
  852. int mb_row = -1;
  853. int prob_skip_false = 0;
  854. if (c->mb_no_coeff_skip)
  855. {
  856. prob_skip_false = cpi->skip_false_count * 256 / (cpi->skip_false_count + cpi->skip_true_count);
  857. if (prob_skip_false <= 1)
  858. prob_skip_false = 1;
  859. if (prob_skip_false >= 255)
  860. prob_skip_false = 255;
  861. cpi->prob_skip_false = prob_skip_false;
  862. vp8_write_literal(bc, prob_skip_false, 8);
  863. }
  864. while (++mb_row < c->mb_rows)
  865. {
  866. int mb_col = -1;
  867. while (++mb_col < c->mb_cols)
  868. {
  869. const int ym = m->mbmi.mode;
  870. if (cpi->mb.e_mbd.update_mb_segmentation_map)
  871. write_mb_features(bc, &m->mbmi, &cpi->mb.e_mbd);
  872. if (c->mb_no_coeff_skip)
  873. vp8_encode_bool(bc, m->mbmi.mb_skip_coeff, prob_skip_false);
  874. kfwrite_ymode(bc, ym, c->kf_ymode_prob);
  875. if (ym == B_PRED)
  876. {
  877. const int mis = c->mode_info_stride;
  878. int i = 0;
  879. do
  880. {
  881. const B_PREDICTION_MODE A = above_block_mode(m, i, mis);
  882. const B_PREDICTION_MODE L = left_block_mode(m, i);
  883. const int bm = m->bmi[i].as_mode;
  884. #ifdef ENTROPY_STATS
  885. ++intra_mode_stats [A] [L] [bm];
  886. #endif
  887. write_bmode(bc, bm, c->kf_bmode_prob [A] [L]);
  888. }
  889. while (++i < 16);
  890. }
  891. write_uv_mode(bc, (m++)->mbmi.uv_mode, c->kf_uv_mode_prob);
  892. }
  893. m++; // skip L prediction border
  894. }
  895. }
  896. /* This function is used for debugging probability trees. */
  897. static void print_prob_tree(vp8_prob
  898. coef_probs[BLOCK_TYPES][COEF_BANDS][PREV_COEF_CONTEXTS][ENTROPY_NODES])
  899. {
  900. /* print coef probability tree */
  901. int i,j,k,l;
  902. FILE* f = fopen("enc_tree_probs.txt", "a");
  903. fprintf(f, "{\n");
  904. for (i = 0; i < BLOCK_TYPES; i++)
  905. {
  906. fprintf(f, " {\n");
  907. for (j = 0; j < COEF_BANDS; j++)
  908. {
  909. fprintf(f, " {\n");
  910. for (k = 0; k < PREV_COEF_CONTEXTS; k++)
  911. {
  912. fprintf(f, " {");
  913. for (l = 0; l < ENTROPY_NODES; l++)
  914. {
  915. fprintf(f, "%3u, ",
  916. (unsigned int)(coef_probs [i][j][k][l]));
  917. }
  918. fprintf(f, " }\n");
  919. }
  920. fprintf(f, " }\n");
  921. }
  922. fprintf(f, " }\n");
  923. }
  924. fprintf(f, "}\n");
  925. fclose(f);
  926. }
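/* Sum token counts over the previous-coefficient contexts, saturating at UINT_MAX on overflow. */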
  927. static void sum_probs_over_prev_coef_context(
  928. const unsigned int probs[PREV_COEF_CONTEXTS][MAX_ENTROPY_TOKENS],
  929. unsigned int* out)
  930. {
  931. int i, j;
  932. for (i=0; i < MAX_ENTROPY_TOKENS; ++i)
  933. {
  934. for (j=0; j < PREV_COEF_CONTEXTS; ++j)
  935. {
  936. const int tmp = out[i];
  937. out[i] += probs[j][i];
  938. /* check for wrap */
  939. if (out[i] < tmp)
  940. out[i] = UINT_MAX;
  941. }
  942. }
  943. }
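/* Estimated bits saved by replacing oldp with newp for a branch with counts ct,
 * net of the cost of signalling the update. */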
  944. static int prob_update_savings(const unsigned int *ct,
  945. const vp8_prob oldp, const vp8_prob newp,
  946. const vp8_prob upd)
  947. {
  948. const int old_b = vp8_cost_branch(ct, oldp);
  949. const int new_b = vp8_cost_branch(ct, newp);
  950. const int update_b = 8 +
  951. ((vp8_cost_one(upd) - vp8_cost_zero(upd)) >> 8);
  952. return old_b - new_b - update_b;
  953. }
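/* Estimate coefficient-probability update savings under the constraint that the
 * probabilities are equal across the previous-coefficient contexts (used with
 * error-resilient partitions). */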
  954. static int independent_coef_context_savings(VP8_COMP *cpi)
  955. {
  956. int savings = 0;
  957. int i = 0;
  958. do
  959. {
  960. int j = 0;
  961. do
  962. {
  963. int k = 0;
  964. unsigned int prev_coef_count_sum[MAX_ENTROPY_TOKENS] = {0};
  965. int prev_coef_savings[MAX_ENTROPY_TOKENS] = {0};
  966. /* Calculate new probabilities given the constraint that
  967. * they must be equal over the prev coef contexts
  968. */
  969. if (cpi->common.frame_type == KEY_FRAME)
  970. {
  971. /* Reset to default probabilities at key frames */
  972. sum_probs_over_prev_coef_context(vp8_default_coef_counts[i][j],
  973. prev_coef_count_sum);
  974. }
  975. else
  976. {
  977. sum_probs_over_prev_coef_context(cpi->coef_counts[i][j],
  978. prev_coef_count_sum);
  979. }
  980. do
  981. {
  982. /* at every context */
  983. /* calc probs and branch cts for this frame only */
  984. //vp8_prob new_p [ENTROPY_NODES];
  985. //unsigned int branch_ct [ENTROPY_NODES] [2];
  986. int t = 0; /* token/prob index */
  987. vp8_tree_probs_from_distribution(
  988. MAX_ENTROPY_TOKENS, vp8_coef_encodings, vp8_coef_tree,
  989. cpi->frame_coef_probs[i][j][k],
  990. cpi->frame_branch_ct [i][j][k],
  991. prev_coef_count_sum,
  992. 256, 1);
  993. do
  994. {
  995. const unsigned int *ct = cpi->frame_branch_ct [i][j][k][t];
  996. const vp8_prob newp = cpi->frame_coef_probs [i][j][k][t];
  997. const vp8_prob oldp = cpi->common.fc.coef_probs [i][j][k][t];
  998. const vp8_prob upd = vp8_coef_update_probs [i][j][k][t];
  999. const int s = prob_update_savings(ct, oldp, newp, upd);
  1000. if (cpi->common.frame_type != KEY_FRAME ||
  1001. (cpi->common.frame_type == KEY_FRAME && newp != oldp))
  1002. prev_coef_savings[t] += s;
  1003. }
  1004. while (++t < ENTROPY_NODES);
  1005. }
  1006. while (++k < PREV_COEF_CONTEXTS);
  1007. k = 0;
  1008. do
  1009. {
  1010. /* We only update probabilities if we can save bits, except
  1011. * for key frames where we have to update all probabilities
  1012. * to get the equal probabilities across the prev coef
  1013. * contexts.
  1014. */
  1015. if (prev_coef_savings[k] > 0 ||
  1016. cpi->common.frame_type == KEY_FRAME)
  1017. savings += prev_coef_savings[k];
  1018. }
  1019. while (++k < ENTROPY_NODES);
  1020. }
  1021. while (++j < COEF_BANDS);
  1022. }
  1023. while (++i < BLOCK_TYPES);
  1024. return savings;
  1025. }
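/* Estimate coefficient-probability update savings with each previous-coefficient
 * context treated independently (the default path). */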
  1026. static int default_coef_context_savings(VP8_COMP *cpi)
  1027. {
  1028. int savings = 0;
  1029. int i = 0;
  1030. do
  1031. {
  1032. int j = 0;
  1033. do
  1034. {
  1035. int k = 0;
  1036. do
  1037. {
  1038. /* at every context */
  1039. /* calc probs and branch cts for this frame only */
  1040. //vp8_prob new_p [ENTROPY_NODES];
  1041. //unsigned int branch_ct [ENTROPY_NODES] [2];
  1042. int t = 0; /* token/prob index */
  1043. vp8_tree_probs_from_distribution(
  1044. MAX_ENTROPY_TOKENS, vp8_coef_encodings, vp8_coef_tree,
  1045. cpi->frame_coef_probs [i][j][k],
  1046. cpi->frame_branch_ct [i][j][k],
  1047. cpi->coef_counts [i][j][k],
  1048. 256, 1
  1049. );
  1050. do
  1051. {
  1052. const unsigned int *ct = cpi->frame_branch_ct [i][j][k][t];
  1053. const vp8_prob newp = cpi->frame_coef_probs [i][j][k][t];
  1054. const vp8_prob oldp = cpi->common.fc.coef_probs [i][j][k][t];
  1055. const vp8_prob upd = vp8_coef_update_probs [i][j][k][t];
  1056. const int s = prob_update_savings(ct, oldp, newp, upd);
  1057. if (s > 0)
  1058. {
  1059. savings += s;
  1060. }
  1061. }
  1062. while (++t < ENTROPY_NODES);
  1063. }
  1064. while (++k < PREV_COEF_CONTEXTS);
  1065. }
  1066. while (++j < COEF_BANDS);
  1067. }
  1068. while (++i < BLOCK_TYPES);
  1069. return savings;
  1070. }
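/* Estimate the total bits the entropy updates will save for this frame: reference-frame
 * probability changes (inter frames only) plus coefficient probability updates. */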
  1071. int vp8_estimate_entropy_savings(VP8_COMP *cpi)
  1072. {
  1073. int savings = 0;
  1074. const int *const rfct = cpi->count_mb_ref_frame_usage;
  1075. const int rf_intra = rfct[INTRA_FRAME];
  1076. const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
  1077. int new_intra, new_last, gf_last, oldtotal, newtotal;
  1078. int ref_frame_cost[MAX_REF_FRAMES];
  1079. vp8_clear_system_state(); //__asm emms;
  1080. if (cpi->common.frame_type != KEY_FRAME)
  1081. {
  1082. if (!(new_intra = rf_intra * 255 / (rf_intra + rf_inter)))
  1083. new_intra = 1;
  1084. new_last = rf_inter ? (rfct[LAST_FRAME] * 255) / rf_inter : 128;
  1085. gf_last = (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME])
  1086. ? (rfct[GOLDEN_FRAME] * 255) / (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME]) : 128;
  1087. // new costs
  1088. ref_frame_cost[INTRA_FRAME] = vp8_cost_zero(new_intra);
  1089. ref_frame_cost[LAST_FRAME] = vp8_cost_one(new_intra)
  1090. + vp8_cost_zero(new_last);
  1091. ref_frame_cost[GOLDEN_FRAME] = vp8_cost_one(new_intra)
  1092. + vp8_cost_one(new_last)
  1093. + vp8_cost_zero(gf_last);
  1094. ref_frame_cost[ALTREF_FRAME] = vp8_cost_one(new_intra)
  1095. + vp8_cost_one(new_last)
  1096. + vp8_cost_one(gf_last);
  1097. newtotal =
  1098. rfct[INTRA_FRAME] * ref_frame_cost[INTRA_FRAME] +
  1099. rfct[LAST_FRAME] * ref_frame_cost[LAST_FRAME] +
  1100. rfct[GOLDEN_FRAME] * ref_frame_cost[GOLDEN_FRAME] +
  1101. rfct[ALTREF_FRAME] * ref_frame_cost[ALTREF_FRAME];
  1102. // old costs
  1103. ref_frame_cost[INTRA_FRAME] = vp8_cost_zero(cpi->prob_intra_coded);
  1104. ref_frame_cost[LAST_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
  1105. + vp8_cost_zero(cpi->prob_last_coded);
  1106. ref_frame_cost[GOLDEN_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
  1107. + vp8_cost_one(cpi->prob_last_coded)
  1108. + vp8_cost_zero(cpi->prob_gf_coded);
  1109. ref_frame_cost[ALTREF_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
  1110. + vp8_cost_one(cpi->prob_last_coded)
  1111. + vp8_cost_one(cpi->prob_gf_coded);
  1112. oldtotal =
  1113. rfct[INTRA_FRAME] * ref_frame_cost[INTRA_FRAME] +
  1114. rfct[LAST_FRAME] * ref_frame_cost[LAST_FRAME] +
  1115. rfct[GOLDEN_FRAME] * ref_frame_cost[GOLDEN_FRAME] +
  1116. rfct[ALTREF_FRAME] * ref_frame_cost[ALTREF_FRAME];
  1117. savings += (oldtotal - newtotal) / 256;
  1118. }
  1119. if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
  1120. savings += independent_coef_context_savings(cpi);
  1121. else
  1122. savings += default_coef_context_savings(cpi);
  1123. return savings;
  1124. }
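/* Write the coefficient probability updates: for each node an update flag is coded
 * against vp8_coef_update_probs, and when set the new 8-bit probability follows and
 * replaces the value in the frame context. */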
  1125. static void update_coef_probs(VP8_COMP *cpi)
  1126. {
  1127. int i = 0;
  1128. vp8_writer *const w = & cpi->bc;
  1129. int savings = 0;
  1130. vp8_clear_system_state(); //__asm emms;
  1131. do
  1132. {
  1133. int j = 0;
  1134. do
  1135. {
  1136. int k = 0;
  1137. int prev_coef_savings[ENTROPY_NODES] = {0};
  1138. if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
  1139. {
  1140. for (k = 0; k < PREV_COEF_CONTEXTS; ++k)
  1141. {
  1142. int t; /* token/prob index */
  1143. for (t = 0; t < ENTROPY_NODES; ++t)
  1144. {
  1145. const unsigned int *ct = cpi->frame_branch_ct [i][j]
  1146. [k][t];
  1147. const vp8_prob newp = cpi->frame_coef_probs[i][j][k][t];
  1148. const vp8_prob oldp = cpi->common.fc.coef_probs[i][j]
  1149. [k][t];
  1150. const vp8_prob upd = vp8_coef_update_probs[i][j][k][t];
  1151. prev_coef_savings[t] +=
  1152. prob_update_savings(ct, oldp, newp, upd);
  1153. }
  1154. }
  1155. k = 0;
  1156. }
  1157. do
  1158. {
  1159. //note: use result from vp8_estimate_entropy_savings, so no need to call vp8_tree_probs_from_distribution here.
  1160. /* at every context */
  1161. /* calc probs and branch cts for this frame only */
  1162. //vp8_prob new_p [ENTROPY_NODES];
  1163. //unsigned int branch_ct [ENTROPY_NODES] [2];
  1164. int t = 0; /* token/prob index */
  1165. //vp8_tree_probs_from_distribution(
  1166. // MAX_ENTROPY_TOKENS, vp8_coef_encodings, vp8_coef_tree,
  1167. // new_p, branch_ct, (unsigned int *)cpi->coef_counts [i][j][k],
  1168. // 256, 1
  1169. // );
  1170. do
  1171. {
  1172. const vp8_prob newp = cpi->frame_coef_probs [i][j][k][t];
  1173. vp8_prob *Pold = cpi->common.fc.coef_probs [i][j][k] + t;
  1174. const vp8_prob upd = vp8_coef_update_probs [i][j][k][t];
  1175. int s = prev_coef_savings[t];
  1176. int u = 0;
  1177. if (!(cpi->oxcf.error_resilient_mode &
  1178. VPX_ERROR_RESILIENT_PARTITIONS))
  1179. {
  1180. s = prob_update_savings(
  1181. cpi->frame_branch_ct [i][j][k][t],
  1182. *Pold, newp, upd);
  1183. }
  1184. if (s > 0)
  1185. u = 1;
  1186. /* Force updates on key frames if the new is different,
  1187. * so that we can be sure we end up with equal probabilities
  1188. * over the prev coef contexts.
  1189. */
  1190. if ((cpi->oxcf.error_resilient_mode &
  1191. VPX_ERROR_RESILIENT_PARTITIONS) &&
  1192. cpi->common.frame_type == KEY_FRAME && newp != *Pold)
  1193. u = 1;
  1194. vp8_write(w, u, upd);
  1195. #ifdef ENTROPY_STATS
  1196. ++ tree_update_hist [i][j][k][t] [u];
  1197. #endif
  1198. if (u)
  1199. {
  1200. /* send/use new probability */
  1201. *Pold = newp;
  1202. vp8_write_literal(w, newp, 8);
  1203. savings += s;
  1204. }
  1205. }
  1206. while (++t < ENTROPY_NODES);
  1207. /* Accum token counts for generation of default statistics */
  1208. #ifdef ENTROPY_STATS
  1209. t = 0;
  1210. do
  1211. {
  1212. context_counters [i][j][k][t] += cpi->coef_counts [i][j][k][t];
  1213. }
  1214. while (++t < MAX_ENTROPY_TOKENS);
  1215. #endif
  1216. }
  1217. while (++k < PREV_COEF_CONTEXTS);
  1218. }
  1219. while (++j < COEF_BANDS);
  1220. }
  1221. while (++i < BLOCK_TYPES);
  1222. }
  1223. #ifdef PACKET_TESTING
  1224. FILE *vpxlogc = 0;
  1225. #endif
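/* Write an optional quantizer delta: a presence flag, then 4 magnitude bits and a sign bit when non-zero. */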
  1226. static void put_delta_q(vp8_writer *bc, int delta_q)
  1227. {
  1228. if (delta_q != 0)
  1229. {
  1230. vp8_write_bit(bc, 1);
  1231. vp8_write_literal(bc, abs(delta_q), 4);
  1232. if (delta_q < 0)
  1233. vp8_write_bit(bc, 1);
  1234. else
  1235. vp8_write_bit(bc, 0);
  1236. }
  1237. else
  1238. vp8_write_bit(bc, 0);
  1239. }
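/* Top-level bitstream packing: write the uncompressed frame tag (plus start code and
 * dimensions on key frames), the compressed header via the first boolean coder, then
 * the residual token partition(s). */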
  1240. void vp8_pack_bitstream(VP8_COMP *cpi, unsigned char *dest, unsigned long *size)
  1241. {
  1242. int i, j;
  1243. VP8_HEADER oh;
  1244. VP8_COMMON *const pc = & cpi->common;
  1245. vp8_writer *const bc = & cpi->bc;
  1246. MACROBLOCKD *const xd = & cpi->mb.e_mbd;
  1247. int extra_bytes_packed = 0;
  1248. unsigned char *cx_data = dest;
  1249. const int *mb_feature_data_bits;
  1250. oh.show_frame = (int) pc->show_frame;
  1251. oh.type = (int)pc->frame_type;
  1252. oh.version = pc->version;
  1253. oh.first_partition_length_in_bytes = 0;
  1254. mb_feature_data_bits = vp8_mb_feature_data_bits;
  1255. cx_data += 3;
  1256. #if defined(SECTIONBITS_OUTPUT)
  1257. Sectionbits[active_section = 1] += sizeof(VP8_HEADER) * 8 * 256;
  1258. #endif
  1259. // vp8_kf_default_bmode_probs() is called in vp8_setup_key_frame() once for each
  1260. // key frame before the frame is encoded. pc->kf_bmode_prob is not changed anywhere
  1261. // else, so there is no need to call it again here. --yw
  1262. //vp8_kf_default_bmode_probs( pc->kf_bmode_prob);
  1263. // Every key frame sends the start code, width, height, scale factors, clamp type and color type
  1264. if (oh.type == KEY_FRAME)
  1265. {
  1266. int v;
  1267. // Start / synch code
  1268. cx_data[0] = 0x9D;
  1269. cx_data[1] = 0x01;
  1270. cx_data[2] = 0x2a;
  1271. v = (pc->horiz_scale << 14) | pc->Width;
  1272. cx_data[3] = v;
  1273. cx_data[4] = v >> 8;
  1274. v = (pc->vert_scale << 14) | pc->Height;
  1275. cx_data[5] = v;
  1276. cx_data[6] = v >> 8;
  1277. extra_bytes_packed = 7;
  1278. cx_data += extra_bytes_packed ;
  1279. vp8_start_encode(bc, cx_data);
  1280. // signal clr type
  1281. vp8_write_bit(bc, pc->clr_type);
  1282. vp8_write_bit(bc, pc->clamp_type);
  1283. }
  1284. else
  1285. vp8_start_encode(bc, cx_data);
  1286. // Signal whether or not Segmentation is enabled
  1287. vp8_write_bit(bc, (xd->segmentation_enabled) ? 1 : 0);
  1288. // Indicate which features are enabled
  1289. if (xd->segmentation_enabled)
  1290. {
  1291. // Signal whether or not the segmentation map is being updated.
  1292. vp8_write_bit(bc, (xd->update_mb_segmentation_map) ? 1 : 0);
  1293. vp8_write_bit(bc, (xd->update_mb_segmentation_data) ? 1 : 0);
  1294. if (xd->update_mb_segmentation_data)
  1295. {
  1296. signed char Data;
  1297. vp8_write_bit(bc, (xd->mb_segement_abs_delta) ? 1 : 0);
  1298. // For each segmentation feature (Quant and loop filter level)
  1299. for (i = 0; i < MB_LVL_MAX; i++)
  1300. {
  1301. // For each of the segments
  1302. for (j = 0; j < MAX_MB_SEGMENTS; j++)
  1303. {
  1304. Data = xd->segment_feature_data[i][j];
  1305. // Frame level data
  1306. if (Data)
  1307. {
  1308. vp8_write_bit(bc, 1);
  1309. if (Data < 0)
  1310. {
  1311. Data = - Data;
  1312. vp8_write_literal(bc, Data, mb_feature_data_bits[i]);
  1313. vp8_write_bit(bc, 1);
  1314. }
  1315. else
  1316. {
  1317. vp8_write_literal(bc, Data, mb_feature_data_bits[i]);
  1318. vp8_write_bit(bc, 0);
  1319. }
  1320. }
  1321. else
  1322. vp8_write_bit(bc, 0);
  1323. }
  1324. }
  1325. }
  1326. if (xd->update_mb_segmentation_map)
  1327. {
  1328. // Write the probs used to decode the segment id for each macro block.
  1329. for (i = 0; i < MB_FEATURE_TREE_PROBS; i++)
  1330. {
  1331. int Data = xd->mb_segment_tree_probs[i];
  1332. if (Data != 255)
  1333. {
  1334. vp8_write_bit(bc, 1);
  1335. vp8_write_literal(bc, Data, 8);
  1336. }
  1337. else
  1338. vp8_write_bit(bc, 0);
  1339. }
  1340. }
  1341. }
  1342. // Encode the loop filter type, level and sharpness settings.
  1343. vp8_write_bit(bc, pc->filter_type);
  1344. vp8_write_literal(bc, pc->filter_level, 6);
  1345. vp8_write_literal(bc, pc->sharpness_level, 3);
  1346. // Write out loop filter deltas applied at the MB level based on mode or ref frame (if they are enabled).
  1347. vp8_write_bit(bc, (xd->mode_ref_lf_delta_enabled) ? 1 : 0);
  1348. if (xd->mode_ref_lf_delta_enabled)
  1349. {
  1350. // Do the deltas need to be updated
  1351. int send_update = xd->mode_ref_lf_delta_update
  1352. || cpi->oxcf.error_resilient_mode;
  1353. vp8_write_bit(bc, send_update);
  1354. if (send_update)
  1355. {
  1356. int Data;
  1357. // Send update
  1358. for (i = 0; i < MAX_REF_LF_DELTAS; i++)
  1359. {
  1360. Data = xd->ref_lf_deltas[i];
  1361. // Frame level data
  1362. if (xd->ref_lf_deltas[i] != xd->last_ref_lf_deltas[i]
  1363. || cpi->oxcf.error_resilient_mode)
  1364. {
  1365. xd->last_ref_lf_deltas[i] = xd->ref_lf_deltas[i];
  1366. vp8_write_bit(bc, 1);
  1367. if (Data > 0)
  1368. {
  1369. vp8_write_literal(bc, (Data & 0x3F), 6);
  1370. vp8_write_bit(bc, 0); // sign
  1371. }
  1372. else
  1373. {
  1374. Data = -Data;
  1375. vp8_write_literal(bc, (Data & 0x3F), 6);
  1376. vp8_write_bit(bc, 1); // sign
  1377. }
  1378. }
  1379. else
  1380. vp8_write_bit(bc, 0);
  1381. }
  1382. // Send update
  1383. for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
  1384. {
  1385. Data = xd->mode_lf_deltas[i];
  1386. if (xd->mode_lf_deltas[i] != xd->last_mode_lf_deltas[i]
  1387. || cpi->oxcf.error_resilient_mode)
  1388. {
  1389. xd->last_mode_lf_deltas[i] = xd->mode_lf_deltas[i];
  1390. vp8_write_bit(bc, 1);
  1391. if (Data > 0)
  1392. {
  1393. vp8_write_literal(bc, (Data & 0x3F), 6);
  1394. vp8_write_bit(bc, 0); // sign
  1395. }
  1396. else
  1397. {
  1398. Data = -Data;
  1399. vp8_write_literal(bc, (Data & 0x3F), 6);
  1400. vp8_write_bit(bc, 1); // sign
  1401. }
  1402. }
  1403. else
  1404. vp8_write_bit(bc, 0);
  1405. }
  1406. }
  1407. }
  1408. // Signal whether multiple token partitions are used (2-bit log2 of the partition count)
  1409. vp8_write_literal(bc, pc->multi_token_partition, 2);
  1410. // Frame base quantizer index (Q)
  1411. vp8_write_literal(bc, pc->base_qindex, 7);
  1412. // Transmit the DC, second-order and UV quantizer delta information
  1413. put_delta_q(bc, pc->y1dc_delta_q);
  1414. put_delta_q(bc, pc->y2dc_delta_q);
  1415. put_delta_q(bc, pc->y2ac_delta_q);
  1416. put_delta_q(bc, pc->uvdc_delta_q);
  1417. put_delta_q(bc, pc->uvac_delta_q);
  1418. // When there is a key frame all reference buffers are updated using the new key frame
  1419. if (pc->frame_type != KEY_FRAME)
  1420. {
  1421. // Should the GF or ARF be updated using the transmitted frame or buffer
  1422. vp8_write_bit(bc, pc->refresh_golden_frame);
  1423. vp8_write_bit(bc, pc->refresh_alt_ref_frame);
  1424. // If not being updated from the current frame, should the GF or ARF be updated from another buffer
  1425. if (!pc->refresh_golden_frame)
  1426. vp8_write_literal(bc, pc->copy_buffer_to_gf, 2);
  1427. if (!pc->refresh_alt_ref_frame)
  1428. vp8_write_literal(bc, pc->copy_buffer_to_arf, 2);
  1429. // Indicate reference frame sign bias for Golden and ARF frames (always 0 for last frame buffer)
  1430. vp8_write_bit(bc, pc->ref_frame_sign_bias[GOLDEN_FRAME]);
  1431. vp8_write_bit(bc, pc->ref_frame_sign_bias[ALTREF_FRAME]);
  1432. }
  1433. if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
  1434. {
  1435. if (pc->frame_type == KEY_FRAME)
  1436. pc->refresh_entropy_probs = 1;
  1437. else
  1438. pc->refresh_entropy_probs = 0;
  1439. }
  1440. vp8_write_bit(bc, pc->refresh_entropy_probs);
  1441. if (pc->frame_type != KEY_FRAME)
  1442. vp8_write_bit(bc, pc->refresh_last_frame);
  1443. #ifdef ENTROPY_STATS
  1444. if (pc->frame_type == INTER_FRAME)
  1445. active_section = 0;
  1446. else
  1447. active_section = 7;
  1448. #endif
  1449. vp8_clear_system_state(); //__asm emms;
  1450. //************************************************
  1451. // save a copy for later refresh
  1452. {
  1453. vpx_memcpy(&cpi->common.lfc, &cpi->common.fc, sizeof(cpi->common.fc));
  1454. }
  1455. update_coef_probs(cpi);
  1456. #ifdef ENTROPY_STATS
  1457. active_section = 2;
  1458. #endif
  1459. // Write out the mb_no_coeff_skip flag
  1460. vp8_write_bit(bc, pc->mb_no_coeff_skip);
  1461. if (pc->frame_type == KEY_FRAME)
  1462. {
  1463. write_kfmodes(cpi);
  1464. #ifdef ENTROPY_STATS
  1465. active_section = 8;
  1466. #endif
  1467. }
  1468. else
  1469. {
  1470. pack_inter_mode_mvs(cpi);
  1471. #ifdef ENTROPY_STATS
  1472. active_section = 1;
  1473. #endif
  1474. }
  1475. vp8_stop_encode(bc);
  1476. oh.first_partition_length_in_bytes = cpi->bc.pos;
  1477. /* update frame tag */
  1478. {
  1479. int v = (oh.first_partition_length_in_bytes << 5) |
  1480. (oh.show_frame << 4) |
  1481. (oh.version << 1) |
  1482. oh.type;
  1483. dest[0] = v;
  1484. dest[1] = v >> 8;
  1485. dest[2] = v >> 16;
  1486. }
  1487. *size = VP8_HEADER_SIZE + extra_bytes_packed + cpi->bc.pos;
  1488. cpi->partition_sz[0] = *size;
  1489. if (pc->multi_token_partition != ONE_PARTITION)
  1490. {
  1491. int num_part;
  1492. int asize;
  1493. num_part = 1 << pc->multi_token_partition;
  1494. pack_tokens_into_partitions(cpi, cx_data + bc->pos, num_part, &asize);
  1495. *size += asize;
  1496. }
  1497. else
  1498. {
  1499. vp8_start_encode(&cpi->bc2, cx_data + bc->pos);
  1500. #if CONFIG_MULTITHREAD
  1501. if (cpi->b_multi_threaded)
  1502. pack_mb_row_tokens(cpi, &cpi->bc2);
  1503. else
  1504. #endif
  1505. pack_tokens(&cpi->bc2, cpi->tok, cpi->tok_count);
  1506. vp8_stop_encode(&cpi->bc2);
  1507. *size += cpi->bc2.pos;
  1508. cpi->partition_sz[1] = cpi->bc2.pos;
  1509. }
  1510. }
  1511. #ifdef ENTROPY_STATS
  1512. void print_tree_update_probs()
  1513. {
  1514. int i, j, k, l;
  1515. FILE *f = fopen("context.c", "a");
  1516. int Sum;
  1517. fprintf(f, "\n/* Update probabilities for token entropy tree. */\n\n");
  1518. fprintf(f, "const vp8_prob tree_update_probs[BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [ENTROPY_NODES] = {\n");
  1519. for (i = 0; i < BLOCK_TYPES; i++)
  1520. {
  1521. fprintf(f, " { \n");
  1522. for (j = 0; j < COEF_BANDS; j++)
  1523. {
  1524. fprintf(f, " {\n");
  1525. for (k = 0; k < PREV_COEF_CONTEXTS; k++)
  1526. {
  1527. fprintf(f, " {");
  1528. for (l = 0; l < ENTROPY_NODES; l++)
  1529. {
  1530. Sum = tree_update_hist[i][j][k][l][0] + tree_update_hist[i][j][k][l][1];
  1531. if (Sum > 0)
  1532. {
  1533. if (((tree_update_hist[i][j][k][l][0] * 255) / Sum) > 0)
  1534. fprintf(f, "%3ld, ", (tree_update_hist[i][j][k][l][0] * 255) / Sum);
  1535. else
  1536. fprintf(f, "%3ld, ", 1);
  1537. }
  1538. else
  1539. fprintf(f, "%3ld, ", 128);
  1540. }
  1541. fprintf(f, "},\n");
  1542. }
  1543. fprintf(f, " },\n");
  1544. }
  1545. fprintf(f, " },\n");
  1546. }
  1547. fprintf(f, "};\n");
  1548. fclose(f);
  1549. }
  1550. #endif