
/media/libvpx/vp8/encoder/onyx_if.c

http://github.com/zpao/v8monkey
  1. /*
  2. * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
  3. *
  4. * Use of this source code is governed by a BSD-style license
  5. * that can be found in the LICENSE file in the root of the source
  6. * tree. An additional intellectual property rights grant can be found
  7. * in the file PATENTS. All contributing project authors may
  8. * be found in the AUTHORS file in the root of the source tree.
  9. */
  10. #include "vpx_config.h"
  11. #include "vp8/common/onyxc_int.h"
  12. #include "onyx_int.h"
  13. #include "vp8/common/systemdependent.h"
  14. #include "quantize.h"
  15. #include "vp8/common/alloccommon.h"
  16. #include "mcomp.h"
  17. #include "firstpass.h"
  18. #include "psnr.h"
  19. #include "vpx_scale/vpxscale.h"
  20. #include "vp8/common/extend.h"
  21. #include "ratectrl.h"
  22. #include "vp8/common/quant_common.h"
  23. #include "segmentation.h"
  24. #include "vp8/common/g_common.h"
  25. #include "vpx_scale/yv12extend.h"
  26. #if CONFIG_POSTPROC
  27. #include "vp8/common/postproc.h"
  28. #endif
  29. #include "vpx_mem/vpx_mem.h"
  30. #include "vp8/common/swapyv12buffer.h"
  31. #include "vp8/common/threading.h"
  32. #include "vpx_ports/vpx_timer.h"
  33. #include "temporal_filter.h"
  34. #if ARCH_ARM
  35. #include "vpx_ports/arm.h"
  36. #endif
  37. #include <math.h>
  38. #include <stdio.h>
  39. #include <limits.h>
  40. #if CONFIG_RUNTIME_CPU_DETECT
  41. #define IF_RTCD(x) (x)
  42. #define RTCD(x) &cpi->common.rtcd.x
  43. #else
  44. #define IF_RTCD(x) NULL
  45. #define RTCD(x) NULL
  46. #endif
  47. extern void vp8cx_pick_filter_level_fast(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
  48. extern void vp8cx_set_alt_lf_level(VP8_COMP *cpi, int filt_val);
  49. extern void vp8cx_pick_filter_level(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
  50. extern void vp8_dmachine_specific_config(VP8_COMP *cpi);
  51. extern void vp8_cmachine_specific_config(VP8_COMP *cpi);
  52. extern void vp8_deblock_frame(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *post, int filt_lvl, int low_var_thresh, int flag);
  53. extern void print_parms(VP8_CONFIG *ocf, char *filenam);
  54. extern unsigned int vp8_get_processor_freq();
  55. extern void print_tree_update_probs();
  56. extern void vp8cx_create_encoder_threads(VP8_COMP *cpi);
  57. extern void vp8cx_remove_encoder_threads(VP8_COMP *cpi);
  58. #if HAVE_ARMV7
  59. extern void vp8_yv12_copy_frame_func_neon(YV12_BUFFER_CONFIG *src_ybc, YV12_BUFFER_CONFIG *dst_ybc);
  60. extern void vp8_yv12_copy_src_frame_func_neon(YV12_BUFFER_CONFIG *src_ybc, YV12_BUFFER_CONFIG *dst_ybc);
  61. #endif
  62. int vp8_estimate_entropy_savings(VP8_COMP *cpi);
  63. int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest, const vp8_variance_rtcd_vtable_t *rtcd);
  64. extern void vp8_temporal_filter_prepare_c(VP8_COMP *cpi, int distance);
  65. static void set_default_lf_deltas(VP8_COMP *cpi);
  66. extern const int vp8_gf_interval_table[101];
  67. #if CONFIG_INTERNAL_STATS
  68. #include "math.h"
  69. extern double vp8_calc_ssim
  70. (
  71. YV12_BUFFER_CONFIG *source,
  72. YV12_BUFFER_CONFIG *dest,
  73. int lumamask,
  74. double *weight,
  75. const vp8_variance_rtcd_vtable_t *rtcd
  76. );
  77. extern double vp8_calc_ssimg
  78. (
  79. YV12_BUFFER_CONFIG *source,
  80. YV12_BUFFER_CONFIG *dest,
  81. double *ssim_y,
  82. double *ssim_u,
  83. double *ssim_v,
  84. const vp8_variance_rtcd_vtable_t *rtcd
  85. );
  86. #endif
  87. #ifdef OUTPUT_YUV_SRC
  88. FILE *yuv_file;
  89. #endif
  90. #if 0
  91. FILE *framepsnr;
  92. FILE *kf_list;
  93. FILE *keyfile;
  94. #endif
  95. #if 0
  96. extern int skip_true_count;
  97. extern int skip_false_count;
  98. #endif
  99. #ifdef ENTROPY_STATS
  100. extern int intra_mode_stats[10][10][10];
  101. #endif
  102. #ifdef SPEEDSTATS
  103. unsigned int frames_at_speed[16] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
  104. unsigned int tot_pm = 0;
  105. unsigned int cnt_pm = 0;
  106. unsigned int tot_ef = 0;
  107. unsigned int cnt_ef = 0;
  108. #endif
  109. #ifdef MODE_STATS
  110. extern unsigned __int64 Sectionbits[50];
  111. extern int y_modes[5] ;
  112. extern int uv_modes[4] ;
  113. extern int b_modes[10] ;
  114. extern int inter_y_modes[10] ;
  115. extern int inter_uv_modes[4] ;
  116. extern unsigned int inter_b_modes[15];
  117. #endif
  118. extern void (*vp8_short_fdct4x4)(short *input, short *output, int pitch);
  119. extern void (*vp8_short_fdct8x4)(short *input, short *output, int pitch);
  120. extern const int vp8_bits_per_mb[2][QINDEX_RANGE];
  121. extern const int qrounding_factors[129];
  122. extern const int qzbin_factors[129];
  123. extern void vp8cx_init_quantizer(VP8_COMP *cpi);
  124. extern const int vp8cx_base_skip_false_prob[128];
  125. // Tables relating active max Q to active min Q
  126. static const int kf_low_motion_minq[QINDEX_RANGE] =
  127. {
  128. 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
  129. 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
  130. 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
  131. 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
  132. 3,3,3,3,3,3,4,4,4,5,5,5,5,5,6,6,
  133. 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
  134. 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
  135. 16,16,17,17,18,18,18,18,19,20,20,21,21,22,23,23
  136. };
  137. static const int kf_high_motion_minq[QINDEX_RANGE] =
  138. {
  139. 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
  140. 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
  141. 1,1,1,1,1,1,1,1,2,2,2,2,3,3,3,3,
  142. 3,3,3,3,4,4,4,4,5,5,5,5,5,5,6,6,
  143. 6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
  144. 11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
  145. 16,16,17,17,18,18,18,18,19,19,20,20,20,20,21,21,
  146. 21,21,22,22,23,23,24,25,25,26,26,27,28,28,29,30
  147. };
  148. static const int gf_low_motion_minq[QINDEX_RANGE] =
  149. {
  150. 0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
  151. 3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,
  152. 7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,
  153. 11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,
  154. 19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,
  155. 27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34,
  156. 35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,
  157. 43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58
  158. };
  159. static const int gf_mid_motion_minq[QINDEX_RANGE] =
  160. {
  161. 0,0,0,0,1,1,1,1,1,1,2,2,3,3,3,4,
  162. 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
  163. 9,10,10,10,10,11,11,11,12,12,12,12,13,13,13,14,
  164. 14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,
  165. 22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,
  166. 30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,
  167. 38,39,39,40,40,41,41,42,42,43,43,44,45,46,47,48,
  168. 49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64
  169. };
  170. static const int gf_high_motion_minq[QINDEX_RANGE] =
  171. {
  172. 0,0,0,0,1,1,1,1,1,2,2,2,3,3,3,4,
  173. 4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
  174. 9,10,10,10,11,11,12,12,13,13,14,14,15,15,16,16,
  175. 17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,
  176. 25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,
  177. 33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,
  178. 41,41,42,42,43,44,45,46,47,48,49,50,51,52,53,54,
  179. 55,56,57,58,59,60,62,64,66,68,70,72,74,76,78,80
  180. };
  181. static const int inter_minq[QINDEX_RANGE] =
  182. {
  183. 0,0,1,1,2,3,3,4,4,5,6,6,7,8,8,9,
  184. 9,10,11,11,12,13,13,14,15,15,16,17,17,18,19,20,
  185. 20,21,22,22,23,24,24,25,26,27,27,28,29,30,30,31,
  186. 32,33,33,34,35,36,36,37,38,39,39,40,41,42,42,43,
  187. 44,45,46,46,47,48,49,50,50,51,52,53,54,55,55,56,
  188. 57,58,59,60,60,61,62,63,64,65,66,67,67,68,69,70,
  189. 71,72,73,74,75,75,76,77,78,79,80,81,82,83,84,85,
  190. 86,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100
  191. };
  192. void vp8_initialize()
  193. {
  194. static int init_done = 0;
  195. if (!init_done)
  196. {
  197. vp8_scale_machine_specific_config();
  198. vp8_initialize_common();
  199. //vp8_dmachine_specific_config();
  200. vp8_tokenize_initialize();
  201. init_done = 1;
  202. }
  203. }
  204. #ifdef PACKET_TESTING
  205. extern FILE *vpxlogc;
  206. #endif
  207. static void setup_features(VP8_COMP *cpi)
  208. {
  209. // Set up default state for MB feature flags
  210. cpi->mb.e_mbd.segmentation_enabled = 0;
  211. cpi->mb.e_mbd.update_mb_segmentation_map = 0;
  212. cpi->mb.e_mbd.update_mb_segmentation_data = 0;
  213. vpx_memset(cpi->mb.e_mbd.mb_segment_tree_probs, 255, sizeof(cpi->mb.e_mbd.mb_segment_tree_probs));
  214. vpx_memset(cpi->mb.e_mbd.segment_feature_data, 0, sizeof(cpi->mb.e_mbd.segment_feature_data));
  215. cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 0;
  216. cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
  217. vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
  218. vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
  219. vpx_memset(cpi->mb.e_mbd.last_ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
  220. vpx_memset(cpi->mb.e_mbd.last_mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
  221. set_default_lf_deltas(cpi);
  222. }
  223. static void dealloc_compressor_data(VP8_COMP *cpi)
  224. {
  225. vpx_free(cpi->tplist);
  226. cpi->tplist = NULL;
  227. // Delete last frame MV storage buffers
  228. vpx_free(cpi->lfmv);
  229. cpi->lfmv = 0;
  230. vpx_free(cpi->lf_ref_frame_sign_bias);
  231. cpi->lf_ref_frame_sign_bias = 0;
  232. vpx_free(cpi->lf_ref_frame);
  233. cpi->lf_ref_frame = 0;
  234. // Delete segmentation map
  235. vpx_free(cpi->segmentation_map);
  236. cpi->segmentation_map = 0;
  237. vpx_free(cpi->active_map);
  238. cpi->active_map = 0;
  239. vp8_de_alloc_frame_buffers(&cpi->common);
  240. vp8_yv12_de_alloc_frame_buffer(&cpi->last_frame_uf);
  241. vp8_yv12_de_alloc_frame_buffer(&cpi->scaled_source);
  242. #if VP8_TEMPORAL_ALT_REF
  243. vp8_yv12_de_alloc_frame_buffer(&cpi->alt_ref_buffer);
  244. #endif
  245. vp8_lookahead_destroy(cpi->lookahead);
  246. vpx_free(cpi->tok);
  247. cpi->tok = 0;
  248. // Structure used to monitor GF usage
  249. vpx_free(cpi->gf_active_flags);
  250. cpi->gf_active_flags = 0;
  251. // Activity mask based per mb zbin adjustments
  252. vpx_free(cpi->mb_activity_map);
  253. cpi->mb_activity_map = 0;
  254. vpx_free(cpi->mb_norm_activity_map);
  255. cpi->mb_norm_activity_map = 0;
  256. vpx_free(cpi->mb.pip);
  257. cpi->mb.pip = 0;
  258. #if !(CONFIG_REALTIME_ONLY)
  259. vpx_free(cpi->twopass.total_stats);
  260. cpi->twopass.total_stats = 0;
  261. vpx_free(cpi->twopass.this_frame_stats);
  262. cpi->twopass.this_frame_stats = 0;
  263. #endif
  264. }
  265. static void enable_segmentation(VP8_PTR ptr)
  266. {
  267. VP8_COMP *cpi = (VP8_COMP *)(ptr);
  268. // Set the appropriate feature bit
  269. cpi->mb.e_mbd.segmentation_enabled = 1;
  270. cpi->mb.e_mbd.update_mb_segmentation_map = 1;
  271. cpi->mb.e_mbd.update_mb_segmentation_data = 1;
  272. }
  273. static void disable_segmentation(VP8_PTR ptr)
  274. {
  275. VP8_COMP *cpi = (VP8_COMP *)(ptr);
  276. // Clear the appropriate feature bit
  277. cpi->mb.e_mbd.segmentation_enabled = 0;
  278. }
  279. // Valid values for a segment are 0 to 3
  280. // Segmentation map is arranged as [Rows][Columns]
  281. static void set_segmentation_map(VP8_PTR ptr, unsigned char *segmentation_map)
  282. {
  283. VP8_COMP *cpi = (VP8_COMP *)(ptr);
  284. // Copy in the new segmentation map
  285. vpx_memcpy(cpi->segmentation_map, segmentation_map, (cpi->common.mb_rows * cpi->common.mb_cols));
  286. // Signal that the map should be updated.
  287. cpi->mb.e_mbd.update_mb_segmentation_map = 1;
  288. cpi->mb.e_mbd.update_mb_segmentation_data = 1;
  289. }
  290. // The values given for each segment can be either deltas (from the default value chosen for the frame) or absolute values.
  291. //
  292. // Valid range for abs values is (0-127 for MB_LVL_ALT_Q) , (0-63 for SEGMENT_ALT_LF)
  293. // Valid range for delta values are (+/-127 for MB_LVL_ALT_Q) , (+/-63 for SEGMENT_ALT_LF)
  294. //
  295. // abs_delta = SEGMENT_DELTADATA (deltas) abs_delta = SEGMENT_ABSDATA (use the absolute values given).
  296. //
  297. //
  298. static void set_segment_data(VP8_PTR ptr, signed char *feature_data, unsigned char abs_delta)
  299. {
  300. VP8_COMP *cpi = (VP8_COMP *)(ptr);
  301. cpi->mb.e_mbd.mb_segement_abs_delta = abs_delta;
  302. vpx_memcpy(cpi->segment_feature_data, feature_data, sizeof(cpi->segment_feature_data));
  303. }
  304. static void segmentation_test_function(VP8_PTR ptr)
  305. {
  306. VP8_COMP *cpi = (VP8_COMP *)(ptr);
  307. unsigned char *seg_map;
  308. signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
  309. // Create a temporary map for segmentation data.
  310. CHECK_MEM_ERROR(seg_map, vpx_calloc(cpi->common.mb_rows * cpi->common.mb_cols, 1));
  311. // MB loop to set local segmentation map
  312. /*for ( i = 0; i < cpi->common.mb_rows; i++ )
  313. {
  314. for ( j = 0; j < cpi->common.mb_cols; j++ )
  315. {
  316. //seg_map[(i*cpi->common.mb_cols) + j] = (j % 2) + ((i%2)* 2);
  317. //if ( j < cpi->common.mb_cols/2 )
  318. // Segment 1 around the edge else 0
  319. if ( (i == 0) || (j == 0) || (i == (cpi->common.mb_rows-1)) || (j == (cpi->common.mb_cols-1)) )
  320. seg_map[(i*cpi->common.mb_cols) + j] = 1;
  321. //else if ( (i < 2) || (j < 2) || (i > (cpi->common.mb_rows-3)) || (j > (cpi->common.mb_cols-3)) )
  322. // seg_map[(i*cpi->common.mb_cols) + j] = 2;
  323. //else if ( (i < 5) || (j < 5) || (i > (cpi->common.mb_rows-6)) || (j > (cpi->common.mb_cols-6)) )
  324. // seg_map[(i*cpi->common.mb_cols) + j] = 3;
  325. else
  326. seg_map[(i*cpi->common.mb_cols) + j] = 0;
  327. }
  328. }*/
  329. // Set the segmentation Map
  330. set_segmentation_map(ptr, seg_map);
  331. // Activate segmentation.
  332. enable_segmentation(ptr);
  333. // Set up the quant segment data
  334. feature_data[MB_LVL_ALT_Q][0] = 0;
  335. feature_data[MB_LVL_ALT_Q][1] = 4;
  336. feature_data[MB_LVL_ALT_Q][2] = 0;
  337. feature_data[MB_LVL_ALT_Q][3] = 0;
  338. // Set up the loop segment data
  339. feature_data[MB_LVL_ALT_LF][0] = 0;
  340. feature_data[MB_LVL_ALT_LF][1] = 0;
  341. feature_data[MB_LVL_ALT_LF][2] = 0;
  342. feature_data[MB_LVL_ALT_LF][3] = 0;
  343. // Initialise the feature data structure
  344. // SEGMENT_DELTADATA 0, SEGMENT_ABSDATA 1
  345. set_segment_data(ptr, &feature_data[0][0], SEGMENT_DELTADATA);
  346. // Delete segmentation map
  347. vpx_free(seg_map);
  348. seg_map = 0;
  349. }
  350. // A simple function to cyclically refresh the background at a lower Q
  351. static void cyclic_background_refresh(VP8_COMP *cpi, int Q, int lf_adjustment)
  352. {
  353. unsigned char *seg_map;
  354. signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
  355. int i;
  356. int block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
  357. int mbs_in_frame = cpi->common.mb_rows * cpi->common.mb_cols;
  358. // Create a temporary map for segmentation data.
  359. CHECK_MEM_ERROR(seg_map, vpx_calloc(cpi->common.mb_rows * cpi->common.mb_cols, 1));
  360. cpi->cyclic_refresh_q = Q;
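  // Search down from Q for the highest quantizer index whose estimated bits-per-MB cost
  // reaches (Q + 128) / 64 times the per-MB cost at Q; segment 1 (refreshed) MBs are then
  // coded with the resulting delta (cyclic_refresh_q - Q), i.e. a bounded quality boost.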
  361. for (i = Q; i > 0; i--)
  362. {
  363. if (vp8_bits_per_mb[cpi->common.frame_type][i] >= ((vp8_bits_per_mb[cpi->common.frame_type][Q]*(Q + 128)) / 64))
  364. //if ( vp8_bits_per_mb[cpi->common.frame_type][i] >= ((vp8_bits_per_mb[cpi->common.frame_type][Q]*((2*Q)+96))/64) )
  365. {
  366. break;
  367. }
  368. }
  369. cpi->cyclic_refresh_q = i;
  370. // Only update for inter frames
  371. if (cpi->common.frame_type != KEY_FRAME)
  372. {
  373. // Cycle through the macroblock rows
  374. // MB loop to set local segmentation map
  375. for (i = cpi->cyclic_refresh_mode_index; i < mbs_in_frame; i++)
  376. {
  377. // If the MB is a candidate for clean-up then mark it for possible boost/refresh (segment 1).
  378. // The segment id may get reset to 0 later if the MB gets coded as anything other than last frame (0,0),
  379. // as only (last frame 0,0) MBs are eligible for refresh: that is to say, MBs likely to be background blocks.
  380. if (cpi->cyclic_refresh_map[i] == 0)
  381. {
  382. seg_map[i] = 1;
  383. }
  384. else
  385. {
  386. seg_map[i] = 0;
  387. // Skip blocks that have been refreshed recently anyway.
  388. if (cpi->cyclic_refresh_map[i] < 0)
  389. //cpi->cyclic_refresh_map[i] = cpi->cyclic_refresh_map[i] / 16;
  390. cpi->cyclic_refresh_map[i]++;
  391. }
  392. if (block_count > 0)
  393. block_count--;
  394. else
  395. break;
  396. }
  397. // If we have gone through the whole frame, reset to the start
  398. cpi->cyclic_refresh_mode_index = i;
  399. if (cpi->cyclic_refresh_mode_index >= mbs_in_frame)
  400. cpi->cyclic_refresh_mode_index = 0;
  401. }
  402. // Set the segmentation Map
  403. set_segmentation_map((VP8_PTR)cpi, seg_map);
  404. // Activate segmentation.
  405. enable_segmentation((VP8_PTR)cpi);
  406. // Set up the quant segment data
  407. feature_data[MB_LVL_ALT_Q][0] = 0;
  408. feature_data[MB_LVL_ALT_Q][1] = (cpi->cyclic_refresh_q - Q);
  409. feature_data[MB_LVL_ALT_Q][2] = 0;
  410. feature_data[MB_LVL_ALT_Q][3] = 0;
  411. // Set up the loop segment data
  412. feature_data[MB_LVL_ALT_LF][0] = 0;
  413. feature_data[MB_LVL_ALT_LF][1] = lf_adjustment;
  414. feature_data[MB_LVL_ALT_LF][2] = 0;
  415. feature_data[MB_LVL_ALT_LF][3] = 0;
  416. // Initialise the feature data structure
  417. // SEGMENT_DELTADATA 0, SEGMENT_ABSDATA 1
  418. set_segment_data((VP8_PTR)cpi, &feature_data[0][0], SEGMENT_DELTADATA);
  419. // Delete segmentation map
  420. vpx_free(seg_map);
  421. seg_map = 0;
  422. }
  423. static void set_default_lf_deltas(VP8_COMP *cpi)
  424. {
  425. cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 1;
  426. cpi->mb.e_mbd.mode_ref_lf_delta_update = 1;
  427. vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
  428. vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
  429. // Test of ref frame deltas
  430. cpi->mb.e_mbd.ref_lf_deltas[INTRA_FRAME] = 2;
  431. cpi->mb.e_mbd.ref_lf_deltas[LAST_FRAME] = 0;
  432. cpi->mb.e_mbd.ref_lf_deltas[GOLDEN_FRAME] = -2;
  433. cpi->mb.e_mbd.ref_lf_deltas[ALTREF_FRAME] = -2;
  434. cpi->mb.e_mbd.mode_lf_deltas[0] = 4; // BPRED
  435. cpi->mb.e_mbd.mode_lf_deltas[1] = -2; // Zero
  436. cpi->mb.e_mbd.mode_lf_deltas[2] = 2; // New mv
  437. cpi->mb.e_mbd.mode_lf_deltas[3] = 4; // Split mv
  438. }
  439. void vp8_set_speed_features(VP8_COMP *cpi)
  440. {
  441. SPEED_FEATURES *sf = &cpi->sf;
  442. int Mode = cpi->compressor_speed;
  443. int Speed = cpi->Speed;
  444. int i;
  445. VP8_COMMON *cm = &cpi->common;
  446. int last_improved_quant = sf->improved_quant;
  447. // Initialise default mode frequency sampling variables
  448. for (i = 0; i < MAX_MODES; i ++)
  449. {
  450. cpi->mode_check_freq[i] = 0;
  451. cpi->mode_test_hit_counts[i] = 0;
  452. cpi->mode_chosen_counts[i] = 0;
  453. }
  454. cpi->mbs_tested_so_far = 0;
  455. // best quality defaults
  456. sf->RD = 1;
  457. sf->search_method = NSTEP;
  458. sf->improved_quant = 1;
  459. sf->improved_dct = 1;
  460. sf->auto_filter = 1;
  461. sf->recode_loop = 1;
  462. sf->quarter_pixel_search = 1;
  463. sf->half_pixel_search = 1;
  464. sf->iterative_sub_pixel = 1;
  465. sf->optimize_coefficients = 1;
  466. sf->use_fastquant_for_pick = 0;
  467. sf->no_skip_block4x4_search = 1;
  468. sf->first_step = 0;
  469. sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
  470. sf->improved_mv_pred = 1;
  471. // default thresholds to 0
  472. for (i = 0; i < MAX_MODES; i++)
  473. sf->thresh_mult[i] = 0;
  474. switch (Mode)
  475. {
  476. #if !(CONFIG_REALTIME_ONLY)
  477. case 0: // best quality mode
  478. sf->thresh_mult[THR_ZEROMV ] = 0;
  479. sf->thresh_mult[THR_ZEROG ] = 0;
  480. sf->thresh_mult[THR_ZEROA ] = 0;
  481. sf->thresh_mult[THR_NEARESTMV] = 0;
  482. sf->thresh_mult[THR_NEARESTG ] = 0;
  483. sf->thresh_mult[THR_NEARESTA ] = 0;
  484. sf->thresh_mult[THR_NEARMV ] = 0;
  485. sf->thresh_mult[THR_NEARG ] = 0;
  486. sf->thresh_mult[THR_NEARA ] = 0;
  487. sf->thresh_mult[THR_DC ] = 0;
  488. sf->thresh_mult[THR_V_PRED ] = 1000;
  489. sf->thresh_mult[THR_H_PRED ] = 1000;
  490. sf->thresh_mult[THR_B_PRED ] = 2000;
  491. sf->thresh_mult[THR_TM ] = 1000;
  492. sf->thresh_mult[THR_NEWMV ] = 1000;
  493. sf->thresh_mult[THR_NEWG ] = 1000;
  494. sf->thresh_mult[THR_NEWA ] = 1000;
  495. sf->thresh_mult[THR_SPLITMV ] = 2500;
  496. sf->thresh_mult[THR_SPLITG ] = 5000;
  497. sf->thresh_mult[THR_SPLITA ] = 5000;
  498. sf->first_step = 0;
  499. sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
  500. break;
  501. case 1:
  502. case 3:
  503. sf->thresh_mult[THR_NEARESTMV] = 0;
  504. sf->thresh_mult[THR_ZEROMV ] = 0;
  505. sf->thresh_mult[THR_DC ] = 0;
  506. sf->thresh_mult[THR_NEARMV ] = 0;
  507. sf->thresh_mult[THR_V_PRED ] = 1000;
  508. sf->thresh_mult[THR_H_PRED ] = 1000;
  509. sf->thresh_mult[THR_B_PRED ] = 2500;
  510. sf->thresh_mult[THR_TM ] = 1000;
  511. sf->thresh_mult[THR_NEARESTG ] = 1000;
  512. sf->thresh_mult[THR_NEARESTA ] = 1000;
  513. sf->thresh_mult[THR_ZEROG ] = 1000;
  514. sf->thresh_mult[THR_ZEROA ] = 1000;
  515. sf->thresh_mult[THR_NEARG ] = 1000;
  516. sf->thresh_mult[THR_NEARA ] = 1000;
  517. #if 1
  518. sf->thresh_mult[THR_ZEROMV ] = 0;
  519. sf->thresh_mult[THR_ZEROG ] = 0;
  520. sf->thresh_mult[THR_ZEROA ] = 0;
  521. sf->thresh_mult[THR_NEARESTMV] = 0;
  522. sf->thresh_mult[THR_NEARESTG ] = 0;
  523. sf->thresh_mult[THR_NEARESTA ] = 0;
  524. sf->thresh_mult[THR_NEARMV ] = 0;
  525. sf->thresh_mult[THR_NEARG ] = 0;
  526. sf->thresh_mult[THR_NEARA ] = 0;
  527. // sf->thresh_mult[THR_DC ] = 0;
  528. // sf->thresh_mult[THR_V_PRED ] = 1000;
  529. // sf->thresh_mult[THR_H_PRED ] = 1000;
  530. // sf->thresh_mult[THR_B_PRED ] = 2000;
  531. // sf->thresh_mult[THR_TM ] = 1000;
  532. sf->thresh_mult[THR_NEWMV ] = 1000;
  533. sf->thresh_mult[THR_NEWG ] = 1000;
  534. sf->thresh_mult[THR_NEWA ] = 1000;
  535. sf->thresh_mult[THR_SPLITMV ] = 1700;
  536. sf->thresh_mult[THR_SPLITG ] = 4500;
  537. sf->thresh_mult[THR_SPLITA ] = 4500;
  538. #else
  539. sf->thresh_mult[THR_NEWMV ] = 1500;
  540. sf->thresh_mult[THR_NEWG ] = 1500;
  541. sf->thresh_mult[THR_NEWA ] = 1500;
  542. sf->thresh_mult[THR_SPLITMV ] = 5000;
  543. sf->thresh_mult[THR_SPLITG ] = 10000;
  544. sf->thresh_mult[THR_SPLITA ] = 10000;
  545. #endif
  546. if (Speed > 0)
  547. {
  548. /* Disable coefficient optimization above speed 0 */
  549. sf->optimize_coefficients = 0;
  550. sf->use_fastquant_for_pick = 1;
  551. sf->no_skip_block4x4_search = 0;
  552. sf->first_step = 1;
  553. cpi->mode_check_freq[THR_SPLITG] = 2;
  554. cpi->mode_check_freq[THR_SPLITA] = 2;
  555. cpi->mode_check_freq[THR_SPLITMV] = 0;
  556. }
  557. if (Speed > 1)
  558. {
  559. cpi->mode_check_freq[THR_SPLITG] = 4;
  560. cpi->mode_check_freq[THR_SPLITA] = 4;
  561. cpi->mode_check_freq[THR_SPLITMV] = 2;
  562. sf->thresh_mult[THR_TM ] = 1500;
  563. sf->thresh_mult[THR_V_PRED ] = 1500;
  564. sf->thresh_mult[THR_H_PRED ] = 1500;
  565. sf->thresh_mult[THR_B_PRED ] = 5000;
  566. if (cpi->ref_frame_flags & VP8_LAST_FLAG)
  567. {
  568. sf->thresh_mult[THR_NEWMV ] = 2000;
  569. sf->thresh_mult[THR_SPLITMV ] = 10000;
  570. }
  571. if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
  572. {
  573. sf->thresh_mult[THR_NEARESTG ] = 1500;
  574. sf->thresh_mult[THR_ZEROG ] = 1500;
  575. sf->thresh_mult[THR_NEARG ] = 1500;
  576. sf->thresh_mult[THR_NEWG ] = 2000;
  577. sf->thresh_mult[THR_SPLITG ] = 20000;
  578. }
  579. if (cpi->ref_frame_flags & VP8_ALT_FLAG)
  580. {
  581. sf->thresh_mult[THR_NEARESTA ] = 1500;
  582. sf->thresh_mult[THR_ZEROA ] = 1500;
  583. sf->thresh_mult[THR_NEARA ] = 1500;
  584. sf->thresh_mult[THR_NEWA ] = 2000;
  585. sf->thresh_mult[THR_SPLITA ] = 20000;
  586. }
  587. }
  588. if (Speed > 2)
  589. {
  590. cpi->mode_check_freq[THR_SPLITG] = 15;
  591. cpi->mode_check_freq[THR_SPLITA] = 15;
  592. cpi->mode_check_freq[THR_SPLITMV] = 7;
  593. sf->thresh_mult[THR_TM ] = 2000;
  594. sf->thresh_mult[THR_V_PRED ] = 2000;
  595. sf->thresh_mult[THR_H_PRED ] = 2000;
  596. sf->thresh_mult[THR_B_PRED ] = 7500;
  597. if (cpi->ref_frame_flags & VP8_LAST_FLAG)
  598. {
  599. sf->thresh_mult[THR_NEWMV ] = 2000;
  600. sf->thresh_mult[THR_SPLITMV ] = 25000;
  601. }
  602. if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
  603. {
  604. sf->thresh_mult[THR_NEARESTG ] = 2000;
  605. sf->thresh_mult[THR_ZEROG ] = 2000;
  606. sf->thresh_mult[THR_NEARG ] = 2000;
  607. sf->thresh_mult[THR_NEWG ] = 2500;
  608. sf->thresh_mult[THR_SPLITG ] = 50000;
  609. }
  610. if (cpi->ref_frame_flags & VP8_ALT_FLAG)
  611. {
  612. sf->thresh_mult[THR_NEARESTA ] = 2000;
  613. sf->thresh_mult[THR_ZEROA ] = 2000;
  614. sf->thresh_mult[THR_NEARA ] = 2000;
  615. sf->thresh_mult[THR_NEWA ] = 2500;
  616. sf->thresh_mult[THR_SPLITA ] = 50000;
  617. }
  618. sf->improved_quant = 0;
  619. sf->improved_dct = 0;
  620. // Only do recode loop on key frames, golden frames and
  621. // alt ref frames
  622. sf->recode_loop = 2;
  623. }
  624. if (Speed > 3)
  625. {
  626. sf->thresh_mult[THR_SPLITA ] = INT_MAX;
  627. sf->thresh_mult[THR_SPLITG ] = INT_MAX;
  628. sf->thresh_mult[THR_SPLITMV ] = INT_MAX;
  629. cpi->mode_check_freq[THR_V_PRED] = 0;
  630. cpi->mode_check_freq[THR_H_PRED] = 0;
  631. cpi->mode_check_freq[THR_B_PRED] = 0;
  632. cpi->mode_check_freq[THR_NEARG] = 0;
  633. cpi->mode_check_freq[THR_NEWG] = 0;
  634. cpi->mode_check_freq[THR_NEARA] = 0;
  635. cpi->mode_check_freq[THR_NEWA] = 0;
  636. sf->auto_filter = 1;
  637. sf->recode_loop = 0; // recode loop off
  638. sf->RD = 0; // Turn rd off
  639. }
  640. if (Speed > 4)
  641. {
  642. sf->auto_filter = 0; // Faster selection of loop filter
  643. cpi->mode_check_freq[THR_V_PRED] = 2;
  644. cpi->mode_check_freq[THR_H_PRED] = 2;
  645. cpi->mode_check_freq[THR_B_PRED] = 2;
  646. if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
  647. {
  648. cpi->mode_check_freq[THR_NEARG] = 2;
  649. cpi->mode_check_freq[THR_NEWG] = 4;
  650. }
  651. if (cpi->ref_frame_flags & VP8_ALT_FLAG)
  652. {
  653. cpi->mode_check_freq[THR_NEARA] = 2;
  654. cpi->mode_check_freq[THR_NEWA] = 4;
  655. }
  656. if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
  657. {
  658. sf->thresh_mult[THR_NEARESTG ] = 2000;
  659. sf->thresh_mult[THR_ZEROG ] = 2000;
  660. sf->thresh_mult[THR_NEARG ] = 2000;
  661. sf->thresh_mult[THR_NEWG ] = 4000;
  662. }
  663. if (cpi->ref_frame_flags & VP8_ALT_FLAG)
  664. {
  665. sf->thresh_mult[THR_NEARESTA ] = 2000;
  666. sf->thresh_mult[THR_ZEROA ] = 2000;
  667. sf->thresh_mult[THR_NEARA ] = 2000;
  668. sf->thresh_mult[THR_NEWA ] = 4000;
  669. }
  670. }
  671. break;
  672. #endif
  673. case 2:
  674. sf->optimize_coefficients = 0;
  675. sf->recode_loop = 0;
  676. sf->auto_filter = 1;
  677. sf->iterative_sub_pixel = 1;
  678. sf->thresh_mult[THR_NEARESTMV] = 0;
  679. sf->thresh_mult[THR_ZEROMV ] = 0;
  680. sf->thresh_mult[THR_DC ] = 0;
  681. sf->thresh_mult[THR_TM ] = 0;
  682. sf->thresh_mult[THR_NEARMV ] = 0;
  683. sf->thresh_mult[THR_V_PRED ] = 1000;
  684. sf->thresh_mult[THR_H_PRED ] = 1000;
  685. sf->thresh_mult[THR_B_PRED ] = 2500;
  686. sf->thresh_mult[THR_NEARESTG ] = 1000;
  687. sf->thresh_mult[THR_ZEROG ] = 1000;
  688. sf->thresh_mult[THR_NEARG ] = 1000;
  689. sf->thresh_mult[THR_NEARESTA ] = 1000;
  690. sf->thresh_mult[THR_ZEROA ] = 1000;
  691. sf->thresh_mult[THR_NEARA ] = 1000;
  692. sf->thresh_mult[THR_NEWMV ] = 2000;
  693. sf->thresh_mult[THR_NEWG ] = 2000;
  694. sf->thresh_mult[THR_NEWA ] = 2000;
  695. sf->thresh_mult[THR_SPLITMV ] = 5000;
  696. sf->thresh_mult[THR_SPLITG ] = 10000;
  697. sf->thresh_mult[THR_SPLITA ] = 10000;
  698. sf->search_method = NSTEP;
  699. if (Speed > 0)
  700. {
  701. cpi->mode_check_freq[THR_SPLITG] = 4;
  702. cpi->mode_check_freq[THR_SPLITA] = 4;
  703. cpi->mode_check_freq[THR_SPLITMV] = 2;
  704. sf->thresh_mult[THR_DC ] = 0;
  705. sf->thresh_mult[THR_TM ] = 1000;
  706. sf->thresh_mult[THR_V_PRED ] = 2000;
  707. sf->thresh_mult[THR_H_PRED ] = 2000;
  708. sf->thresh_mult[THR_B_PRED ] = 5000;
  709. if (cpi->ref_frame_flags & VP8_LAST_FLAG)
  710. {
  711. sf->thresh_mult[THR_NEARESTMV] = 0;
  712. sf->thresh_mult[THR_ZEROMV ] = 0;
  713. sf->thresh_mult[THR_NEARMV ] = 0;
  714. sf->thresh_mult[THR_NEWMV ] = 2000;
  715. sf->thresh_mult[THR_SPLITMV ] = 10000;
  716. }
  717. if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
  718. {
  719. sf->thresh_mult[THR_NEARESTG ] = 1000;
  720. sf->thresh_mult[THR_ZEROG ] = 1000;
  721. sf->thresh_mult[THR_NEARG ] = 1000;
  722. sf->thresh_mult[THR_NEWG ] = 2000;
  723. sf->thresh_mult[THR_SPLITG ] = 20000;
  724. }
  725. if (cpi->ref_frame_flags & VP8_ALT_FLAG)
  726. {
  727. sf->thresh_mult[THR_NEARESTA ] = 1000;
  728. sf->thresh_mult[THR_ZEROA ] = 1000;
  729. sf->thresh_mult[THR_NEARA ] = 1000;
  730. sf->thresh_mult[THR_NEWA ] = 2000;
  731. sf->thresh_mult[THR_SPLITA ] = 20000;
  732. }
  733. sf->improved_quant = 0;
  734. sf->improved_dct = 0;
  735. sf->use_fastquant_for_pick = 1;
  736. sf->no_skip_block4x4_search = 0;
  737. sf->first_step = 1;
  738. }
  739. if (Speed > 1)
  740. {
  741. cpi->mode_check_freq[THR_SPLITMV] = 7;
  742. cpi->mode_check_freq[THR_SPLITG] = 15;
  743. cpi->mode_check_freq[THR_SPLITA] = 15;
  744. sf->thresh_mult[THR_TM ] = 2000;
  745. sf->thresh_mult[THR_V_PRED ] = 2000;
  746. sf->thresh_mult[THR_H_PRED ] = 2000;
  747. sf->thresh_mult[THR_B_PRED ] = 5000;
  748. if (cpi->ref_frame_flags & VP8_LAST_FLAG)
  749. {
  750. sf->thresh_mult[THR_NEWMV ] = 2000;
  751. sf->thresh_mult[THR_SPLITMV ] = 25000;
  752. }
  753. if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
  754. {
  755. sf->thresh_mult[THR_NEARESTG ] = 2000;
  756. sf->thresh_mult[THR_ZEROG ] = 2000;
  757. sf->thresh_mult[THR_NEARG ] = 2000;
  758. sf->thresh_mult[THR_NEWG ] = 2500;
  759. sf->thresh_mult[THR_SPLITG ] = 50000;
  760. }
  761. if (cpi->ref_frame_flags & VP8_ALT_FLAG)
  762. {
  763. sf->thresh_mult[THR_NEARESTA ] = 2000;
  764. sf->thresh_mult[THR_ZEROA ] = 2000;
  765. sf->thresh_mult[THR_NEARA ] = 2000;
  766. sf->thresh_mult[THR_NEWA ] = 2500;
  767. sf->thresh_mult[THR_SPLITA ] = 50000;
  768. }
  769. }
  770. if (Speed > 2)
  771. {
  772. sf->auto_filter = 0; // Faster selection of loop filter
  773. cpi->mode_check_freq[THR_V_PRED] = 2;
  774. cpi->mode_check_freq[THR_H_PRED] = 2;
  775. cpi->mode_check_freq[THR_B_PRED] = 2;
  776. if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
  777. {
  778. cpi->mode_check_freq[THR_NEARG] = 2;
  779. cpi->mode_check_freq[THR_NEWG] = 4;
  780. }
  781. if (cpi->ref_frame_flags & VP8_ALT_FLAG)
  782. {
  783. cpi->mode_check_freq[THR_NEARA] = 2;
  784. cpi->mode_check_freq[THR_NEWA] = 4;
  785. }
  786. sf->thresh_mult[THR_SPLITMV ] = INT_MAX;
  787. sf->thresh_mult[THR_SPLITG ] = INT_MAX;
  788. sf->thresh_mult[THR_SPLITA ] = INT_MAX;
  789. }
  790. if (Speed > 3)
  791. {
  792. sf->RD = 0;
  793. sf->auto_filter = 1;
  794. }
  795. if (Speed > 4)
  796. {
  797. sf->auto_filter = 0; // Faster selection of loop filter
  798. sf->search_method = HEX;
  799. //sf->search_method = DIAMOND;
  800. sf->iterative_sub_pixel = 0;
  801. cpi->mode_check_freq[THR_V_PRED] = 4;
  802. cpi->mode_check_freq[THR_H_PRED] = 4;
  803. cpi->mode_check_freq[THR_B_PRED] = 4;
  804. if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
  805. {
  806. cpi->mode_check_freq[THR_NEARG] = 2;
  807. cpi->mode_check_freq[THR_NEWG] = 4;
  808. }
  809. if (cpi->ref_frame_flags & VP8_ALT_FLAG)
  810. {
  811. cpi->mode_check_freq[THR_NEARA] = 2;
  812. cpi->mode_check_freq[THR_NEWA] = 4;
  813. }
  814. sf->thresh_mult[THR_TM ] = 2000;
  815. sf->thresh_mult[THR_B_PRED ] = 5000;
  816. if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
  817. {
  818. sf->thresh_mult[THR_NEARESTG ] = 2000;
  819. sf->thresh_mult[THR_ZEROG ] = 2000;
  820. sf->thresh_mult[THR_NEARG ] = 2000;
  821. sf->thresh_mult[THR_NEWG ] = 4000;
  822. }
  823. if (cpi->ref_frame_flags & VP8_ALT_FLAG)
  824. {
  825. sf->thresh_mult[THR_NEARESTA ] = 2000;
  826. sf->thresh_mult[THR_ZEROA ] = 2000;
  827. sf->thresh_mult[THR_NEARA ] = 2000;
  828. sf->thresh_mult[THR_NEWA ] = 4000;
  829. }
  830. }
  831. if (Speed > 5)
  832. {
  833. // Disable split MB intra prediction mode
  834. sf->thresh_mult[THR_B_PRED] = INT_MAX;
  835. }
  836. if (Speed > 6)
  837. {
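  // Derive adaptive mode-check thresholds from the previous frame's per-MB error
  // histogram (error_bins, one bin per 128 units of error): skip past the MBs already
  // below the encode_breakout level, then find the error level that covers roughly
  // (Speed - 6) tenths of the remaining MBs and use it as the base threshold below.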
  838. unsigned int i, sum = 0;
  839. unsigned int total_mbs = cm->MBs;
  840. int thresh;
  841. int total_skip;
  842. int min = 2000;
  843. if (cpi->oxcf.encode_breakout > 2000)
  844. min = cpi->oxcf.encode_breakout;
  845. min >>= 7;
  846. for (i = 0; i < min; i++)
  847. {
  848. sum += cpi->error_bins[i];
  849. }
  850. total_skip = sum;
  851. sum = 0;
  852. // i starts from 2 to make sure thresh started from 2048
  853. for (; i < 1024; i++)
  854. {
  855. sum += cpi->error_bins[i];
  856. if (10 * sum >= (unsigned int)(cpi->Speed - 6)*(total_mbs - total_skip))
  857. break;
  858. }
  859. i--;
  860. thresh = (i << 7);
  861. if (thresh < 2000)
  862. thresh = 2000;
  863. if (cpi->ref_frame_flags & VP8_LAST_FLAG)
  864. {
  865. sf->thresh_mult[THR_NEWMV] = thresh;
  866. sf->thresh_mult[THR_NEARESTMV ] = thresh >> 1;
  867. sf->thresh_mult[THR_NEARMV ] = thresh >> 1;
  868. }
  869. if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
  870. {
  871. sf->thresh_mult[THR_NEWG] = thresh << 1;
  872. sf->thresh_mult[THR_NEARESTG ] = thresh;
  873. sf->thresh_mult[THR_NEARG ] = thresh;
  874. }
  875. if (cpi->ref_frame_flags & VP8_ALT_FLAG)
  876. {
  877. sf->thresh_mult[THR_NEWA] = thresh << 1;
  878. sf->thresh_mult[THR_NEARESTA ] = thresh;
  879. sf->thresh_mult[THR_NEARA ] = thresh;
  880. }
  881. // Disable other intra prediction modes
  882. sf->thresh_mult[THR_TM] = INT_MAX;
  883. sf->thresh_mult[THR_V_PRED] = INT_MAX;
  884. sf->thresh_mult[THR_H_PRED] = INT_MAX;
  885. sf->improved_mv_pred = 0;
  886. }
  887. if (Speed > 8)
  888. {
  889. sf->quarter_pixel_search = 0;
  890. }
  891. if (Speed > 9)
  892. {
  893. int Tmp = cpi->Speed - 8;
  894. if (Tmp > 4)
  895. Tmp = 4;
  896. if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
  897. {
  898. cpi->mode_check_freq[THR_ZEROG] = 1 << (Tmp - 1);
  899. cpi->mode_check_freq[THR_NEARESTG] = 1 << (Tmp - 1);
  900. cpi->mode_check_freq[THR_NEARG] = 1 << Tmp;
  901. cpi->mode_check_freq[THR_NEWG] = 1 << (Tmp + 1);
  902. }
  903. if (cpi->ref_frame_flags & VP8_ALT_FLAG)
  904. {
  905. cpi->mode_check_freq[THR_ZEROA] = 1 << (Tmp - 1);
  906. cpi->mode_check_freq[THR_NEARESTA] = 1 << (Tmp - 1);
  907. cpi->mode_check_freq[THR_NEARA] = 1 << Tmp;
  908. cpi->mode_check_freq[THR_NEWA] = 1 << (Tmp + 1);
  909. }
  910. cpi->mode_check_freq[THR_NEWMV] = 1 << (Tmp - 1);
  911. }
  912. cm->filter_type = NORMAL_LOOPFILTER;
  913. if (Speed >= 14)
  914. cm->filter_type = SIMPLE_LOOPFILTER;
  915. if (Speed >= 15)
  916. {
  917. sf->half_pixel_search = 0; // This has a big hit on quality. Last resort
  918. }
  919. vpx_memset(cpi->error_bins, 0, sizeof(cpi->error_bins));
  920. }; /* switch */
  921. /* disable frame modes if flags not set */
  922. if (!(cpi->ref_frame_flags & VP8_LAST_FLAG))
  923. {
  924. sf->thresh_mult[THR_NEWMV ] = INT_MAX;
  925. sf->thresh_mult[THR_NEARESTMV] = INT_MAX;
  926. sf->thresh_mult[THR_ZEROMV ] = INT_MAX;
  927. sf->thresh_mult[THR_NEARMV ] = INT_MAX;
  928. sf->thresh_mult[THR_SPLITMV ] = INT_MAX;
  929. }
  930. if (!(cpi->ref_frame_flags & VP8_GOLD_FLAG))
  931. {
  932. sf->thresh_mult[THR_NEARESTG ] = INT_MAX;
  933. sf->thresh_mult[THR_ZEROG ] = INT_MAX;
  934. sf->thresh_mult[THR_NEARG ] = INT_MAX;
  935. sf->thresh_mult[THR_NEWG ] = INT_MAX;
  936. sf->thresh_mult[THR_SPLITG ] = INT_MAX;
  937. }
  938. if (!(cpi->ref_frame_flags & VP8_ALT_FLAG))
  939. {
  940. sf->thresh_mult[THR_NEARESTA ] = INT_MAX;
  941. sf->thresh_mult[THR_ZEROA ] = INT_MAX;
  942. sf->thresh_mult[THR_NEARA ] = INT_MAX;
  943. sf->thresh_mult[THR_NEWA ] = INT_MAX;
  944. sf->thresh_mult[THR_SPLITA ] = INT_MAX;
  945. }
  946. // Slow quant, dct and trellis not worthwhile for first pass
  947. // so make sure they are always turned off.
  948. if ( cpi->pass == 1 )
  949. {
  950. sf->improved_quant = 0;
  951. sf->optimize_coefficients = 0;
  952. sf->improved_dct = 0;
  953. }
  954. if (cpi->sf.search_method == NSTEP)
  955. {
  956. vp8_init3smotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
  957. }
  958. else if (cpi->sf.search_method == DIAMOND)
  959. {
  960. vp8_init_dsmotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
  961. }
  962. if (cpi->sf.improved_dct)
  963. {
  964. cpi->mb.vp8_short_fdct8x4 = FDCT_INVOKE(&cpi->rtcd.fdct, short8x4);
  965. cpi->mb.vp8_short_fdct4x4 = FDCT_INVOKE(&cpi->rtcd.fdct, short4x4);
  966. }
  967. else
  968. {
  969. cpi->mb.vp8_short_fdct8x4 = FDCT_INVOKE(&cpi->rtcd.fdct, fast8x4);
  970. cpi->mb.vp8_short_fdct4x4 = FDCT_INVOKE(&cpi->rtcd.fdct, fast4x4);
  971. }
  972. cpi->mb.short_walsh4x4 = FDCT_INVOKE(&cpi->rtcd.fdct, walsh_short4x4);
  973. if (cpi->sf.improved_quant)
  974. {
  975. cpi->mb.quantize_b = QUANTIZE_INVOKE(&cpi->rtcd.quantize,
  976. quantb);
  977. cpi->mb.quantize_b_pair = QUANTIZE_INVOKE(&cpi->rtcd.quantize,
  978. quantb_pair);
  979. }
  980. else
  981. {
  982. cpi->mb.quantize_b = QUANTIZE_INVOKE(&cpi->rtcd.quantize,
  983. fastquantb);
  984. cpi->mb.quantize_b_pair = QUANTIZE_INVOKE(&cpi->rtcd.quantize,
  985. fastquantb_pair);
  986. }
  987. if (cpi->sf.improved_quant != last_improved_quant)
  988. vp8cx_init_quantizer(cpi);
  989. #if CONFIG_RUNTIME_CPU_DETECT
  990. cpi->mb.e_mbd.rtcd = &cpi->common.rtcd;
  991. #endif
  992. if (cpi->sf.iterative_sub_pixel == 1)
  993. {
  994. cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step_iteratively;
  995. }
  996. else if (cpi->sf.quarter_pixel_search)
  997. {
  998. cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step;
  999. }
  1000. else if (cpi->sf.half_pixel_search)
  1001. {
  1002. cpi->find_fractional_mv_step = vp8_find_best_half_pixel_step;
  1003. }
  1004. else
  1005. {
  1006. cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
  1007. }
  1008. if (cpi->sf.optimize_coefficients == 1 && cpi->pass!=1)
  1009. cpi->mb.optimize = 1;
  1010. else
  1011. cpi->mb.optimize = 0;
  1012. if (cpi->common.full_pixel)
  1013. cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
  1014. #ifdef SPEEDSTATS
  1015. frames_at_speed[cpi->Speed]++;
  1016. #endif
  1017. }
  1018. static void alloc_raw_frame_buffers(VP8_COMP *cpi)
  1019. {
  1020. int width = (cpi->oxcf.Width + 15) & ~15;
  1021. int height = (cpi->oxcf.Height + 15) & ~15;
  1022. cpi->lookahead = vp8_lookahead_init(cpi->oxcf.Width, cpi->oxcf.Height,
  1023. cpi->oxcf.lag_in_frames);
  1024. if(!cpi->lookahead)
  1025. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1026. "Failed to allocate lag buffers");
  1027. #if VP8_TEMPORAL_ALT_REF
  1028. if (vp8_yv12_alloc_frame_buffer(&cpi->alt_ref_buffer,
  1029. width, height, VP8BORDERINPIXELS))
  1030. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1031. "Failed to allocate altref buffer");
  1032. #endif
  1033. }
  1034. static int vp8_alloc_partition_data(VP8_COMP *cpi)
  1035. {
  1036. vpx_free(cpi->mb.pip);
  1037. cpi->mb.pip = vpx_calloc((cpi->common.mb_cols + 1) *
  1038. (cpi->common.mb_rows + 1),
  1039. sizeof(PARTITION_INFO));
  1040. if(!cpi->mb.pip)
  1041. return 1;
  1042. cpi->mb.pi = cpi->mb.pip + cpi->common.mode_info_stride + 1;
  1043. return 0;
  1044. }
  1045. void vp8_alloc_compressor_data(VP8_COMP *cpi)
  1046. {
  1047. VP8_COMMON *cm = & cpi->common;
  1048. int width = cm->Width;
  1049. int height = cm->Height;
  1050. if (vp8_alloc_frame_buffers(cm, width, height))
  1051. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1052. "Failed to allocate frame buffers");
  1053. if (vp8_alloc_partition_data(cpi))
  1054. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1055. "Failed to allocate partition data");
  1056. if ((width & 0xf) != 0)
  1057. width += 16 - (width & 0xf);
  1058. if ((height & 0xf) != 0)
  1059. height += 16 - (height & 0xf);
  1060. if (vp8_yv12_alloc_frame_buffer(&cpi->last_frame_uf,
  1061. width, height, VP8BORDERINPIXELS))
  1062. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1063. "Failed to allocate last frame buffer");
  1064. if (vp8_yv12_alloc_frame_buffer(&cpi->scaled_source,
  1065. width, height, VP8BORDERINPIXELS))
  1066. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1067. "Failed to allocate scaled source buffer");
  1068. vpx_free(cpi->tok);
  1069. {
  1070. unsigned int tokens = cm->mb_rows * cm->mb_cols * 24 * 16;
  1071. CHECK_MEM_ERROR(cpi->tok, vpx_calloc(tokens, sizeof(*cpi->tok)));
  1072. }
  1073. // Data used for real time vc mode to see if gf needs refreshing
  1074. cpi->inter_zz_count = 0;
  1075. cpi->gf_bad_count = 0;
  1076. cpi->gf_update_recommended = 0;
  1077. // Structures used to monitor GF usage
  1078. vpx_free(cpi->gf_active_flags);
  1079. CHECK_MEM_ERROR(cpi->gf_active_flags,
  1080. vpx_calloc(1, cm->mb_rows * cm->mb_cols));
  1081. cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
  1082. vpx_free(cpi->mb_activity_map);
  1083. CHECK_MEM_ERROR(cpi->mb_activity_map,
  1084. vpx_calloc(sizeof(unsigned int),
  1085. cm->mb_rows * cm->mb_cols));
  1086. vpx_free(cpi->mb_norm_activity_map);
  1087. CHECK_MEM_ERROR(cpi->mb_norm_activity_map,
  1088. vpx_calloc(sizeof(unsigned int),
  1089. cm->mb_rows * cm->mb_cols));
  1090. #if !(CONFIG_REALTIME_ONLY)
  1091. vpx_free(cpi->twopass.total_stats);
  1092. cpi->twopass.total_stats = vpx_calloc(1, sizeof(FIRSTPASS_STATS));
  1093. vpx_free(cpi->twopass.this_frame_stats);
  1094. cpi->twopass.this_frame_stats = vpx_calloc(1, sizeof(FIRSTPASS_STATS));
  1095. if(!cpi->twopass.total_stats || !cpi->twopass.this_frame_stats)
  1096. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1097. "Failed to allocate firstpass stats");
  1098. #endif
  1099. #if CONFIG_MULTITHREAD
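  // mt_sync_range sets how many macroblocks apart neighbouring rows may drift when
  // encoder threads synchronize; wider frames use a coarser range to reduce sync overhead.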
  1100. if (width < 640)
  1101. cpi->mt_sync_range = 1;
  1102. else if (width <= 1280)
  1103. cpi->mt_sync_range = 4;
  1104. else if (width <= 2560)
  1105. cpi->mt_sync_range = 8;
  1106. else
  1107. cpi->mt_sync_range = 16;
  1108. #endif
  1109. vpx_free(cpi->tplist);
  1110. CHECK_MEM_ERROR(cpi->tplist, vpx_malloc(sizeof(TOKENLIST) * cpi->common.mb_rows));
  1111. }
  1112. // Quant MOD
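  // q_trans[] maps the external 0..63 quantizer setting onto the internal 0..127
  // quantizer index; vp8_reverse_trans() below performs the inverse lookup.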
  1113. static const int q_trans[] =
  1114. {
  1115. 0, 1, 2, 3, 4, 5, 7, 8,
  1116. 9, 10, 12, 13, 15, 17, 18, 19,
  1117. 20, 21, 23, 24, 25, 26, 27, 28,
  1118. 29, 30, 31, 33, 35, 37, 39, 41,
  1119. 43, 45, 47, 49, 51, 53, 55, 57,
  1120. 59, 61, 64, 67, 70, 73, 76, 79,
  1121. 82, 85, 88, 91, 94, 97, 100, 103,
  1122. 106, 109, 112, 115, 118, 121, 124, 127,
  1123. };
  1124. int vp8_reverse_trans(int x)
  1125. {
  1126. int i;
  1127. for (i = 0; i < 64; i++)
  1128. if (q_trans[i] >= x)
  1129. return i;
  1130. return 63;
  1131. };
  1132. void vp8_new_frame_rate(VP8_COMP *cpi, double framerate)
  1133. {
  1134. if(framerate < .1)
  1135. framerate = 30;
  1136. cpi->oxcf.frame_rate = framerate;
  1137. cpi->output_frame_rate = cpi->oxcf.frame_rate;
  1138. cpi->per_frame_bandwidth = (int)(cpi->oxcf.target_bandwidth / cpi->output_frame_rate);
  1139. cpi->av_per_frame_bandwidth = (int)(cpi->oxcf.target_bandwidth / cpi->output_frame_rate);
  1140. cpi->min_frame_bandwidth = (int)(cpi->av_per_frame_bandwidth * cpi->oxcf.two_pass_vbrmin_section / 100);
  1141. // Set Maximum gf/arf interval
  1142. cpi->max_gf_interval = ((int)(cpi->output_frame_rate / 2.0) + 2);
  1143. if(cpi->max_gf_interval < 12)
  1144. cpi->max_gf_interval = 12;
  1145. // Extended interval for genuinely static scenes
  1146. cpi->twopass.static_scene_max_gf_interval = cpi->key_frame_frequency >> 1;
  1147. // Special conditions when alt ref frame is enabled in lagged compress mode
  1148. if (cpi->oxcf.play_alternate && cpi->oxcf.lag_in_frames)
  1149. {
  1150. if (cpi->max_gf_interval > cpi->oxcf.lag_in_frames - 1)
  1151. cpi->max_gf_interval = cpi->oxcf.lag_in_frames - 1;
  1152. if (cpi->twopass.static_scene_max_gf_interval > cpi->oxcf.lag_in_frames - 1)
  1153. cpi->twopass.static_scene_max_gf_interval = cpi->oxcf.lag_in_frames - 1;
  1154. }
  1155. if ( cpi->max_gf_interval > cpi->twopass.static_scene_max_gf_interval )
  1156. cpi->max_gf_interval = cpi->twopass.static_scene_max_gf_interval;
  1157. }
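  // Scale val by the ratio num/denom using 64-bit intermediates so the buffer-level
  // settings can be multiplied by the target bandwidth without 32-bit overflow.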
  1158. static int
  1159. rescale(int val, int num, int denom)
  1160. {
  1161. int64_t llnum = num;
  1162. int64_t llden = denom;
  1163. int64_t llval = val;
  1164. return llval * llnum / llden;
  1165. }
  1166. static void init_config(VP8_PTR ptr, VP8_CONFIG *oxcf)
  1167. {
  1168. VP8_COMP *cpi = (VP8_COMP *)(ptr);
  1169. VP8_COMMON *cm = &cpi->common;
  1170. cpi->oxcf = *oxcf;
  1171. cpi->auto_gold = 1;
  1172. cpi->auto_adjust_gold_quantizer = 1;
  1173. cpi->goldfreq = 7;
  1174. cm->version = oxcf->Version;
  1175. vp8_setup_version(cm);
  1176. // change includes all joint functionality
  1177. vp8_change_config(ptr, oxcf);
  1178. // Initialize active best and worst q and average q values.
  1179. cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
  1180. cpi->active_best_quality = cpi->oxcf.best_allowed_q;
  1181. cpi->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
  1182. // Initialise the starting buffer levels
  1183. cpi->buffer_level = cpi->oxcf.starting_buffer_level;
  1184. cpi->bits_off_target = cpi->oxcf.starting_buffer_level;
  1185. cpi->rolling_target_bits = cpi->av_per_frame_bandwidth;
  1186. cpi->rolling_actual_bits = cpi->av_per_frame_bandwidth;
  1187. cpi->long_rolling_target_bits = cpi->av_per_frame_bandwidth;
  1188. cpi->long_rolling_actual_bits = cpi->av_per_frame_bandwidth;
  1189. cpi->total_actual_bits = 0;
  1190. cpi->total_target_vs_actual = 0;
  1191. #if VP8_TEMPORAL_ALT_REF
  1192. {
  1193. int i;
  1194. cpi->fixed_divide[0] = 0;
  1195. for (i = 1; i < 512; i++)
  1196. cpi->fixed_divide[i] = 0x80000 / i;
  1197. }
  1198. #endif
  1199. }
  1200. void vp8_change_config(VP8_PTR ptr, VP8_CONFIG *oxcf)
  1201. {
  1202. VP8_COMP *cpi = (VP8_COMP *)(ptr);
  1203. VP8_COMMON *cm = &cpi->common;
  1204. if (!cpi)
  1205. return;
  1206. if (!oxcf)
  1207. return;
  1208. if (cm->version != oxcf->Version)
  1209. {
  1210. cm->version = oxcf->Version;
  1211. vp8_setup_version(cm);
  1212. }
  1213. cpi->oxcf = *oxcf;
  1214. switch (cpi->oxcf.Mode)
  1215. {
  1216. case MODE_REALTIME:
  1217. cpi->pass = 0;
  1218. cpi->compressor_speed = 2;
  1219. if (cpi->oxcf.cpu_used < -16)
  1220. {
  1221. cpi->oxcf.cpu_used = -16;
  1222. }
  1223. if (cpi->oxcf.cpu_used > 16)
  1224. cpi->oxcf.cpu_used = 16;
  1225. break;
  1226. case MODE_GOODQUALITY:
  1227. cpi->pass = 0;
  1228. cpi->compressor_speed = 1;
  1229. if (cpi->oxcf.cpu_used < -5)
  1230. {
  1231. cpi->oxcf.cpu_used = -5;
  1232. }
  1233. if (cpi->oxcf.cpu_used > 5)
  1234. cpi->oxcf.cpu_used = 5;
  1235. break;
  1236. case MODE_BESTQUALITY:
  1237. cpi->pass = 0;
  1238. cpi->compressor_speed = 0;
  1239. break;
  1240. case MODE_FIRSTPASS:
  1241. cpi->pass = 1;
  1242. cpi->compressor_speed = 1;
  1243. break;
  1244. case MODE_SECONDPASS:
  1245. cpi->pass = 2;
  1246. cpi->compressor_speed = 1;
  1247. if (cpi->oxcf.cpu_used < -5)
  1248. {
  1249. cpi->oxcf.cpu_used = -5;
  1250. }
  1251. if (cpi->oxcf.cpu_used > 5)
  1252. cpi->oxcf.cpu_used = 5;
  1253. break;
  1254. case MODE_SECONDPASS_BEST:
  1255. cpi->pass = 2;
  1256. cpi->compressor_speed = 0;
  1257. break;
  1258. }
  1259. if (cpi->pass == 0)
  1260. cpi->auto_worst_q = 1;
  1261. cpi->oxcf.worst_allowed_q = q_trans[oxcf->worst_allowed_q];
  1262. cpi->oxcf.best_allowed_q = q_trans[oxcf->best_allowed_q];
  1263. cpi->oxcf.cq_level = q_trans[cpi->oxcf.cq_level];
  1264. if (oxcf->fixed_q >= 0)
  1265. {
  1266. if (oxcf->worst_allowed_q < 0)
  1267. cpi->oxcf.fixed_q = q_trans[0];
  1268. else
  1269. cpi->oxcf.fixed_q = q_trans[oxcf->worst_allowed_q];
  1270. if (oxcf->alt_q < 0)
  1271. cpi->oxcf.alt_q = q_trans[0];
  1272. else
  1273. cpi->oxcf.alt_q = q_trans[oxcf->alt_q];
  1274. if (oxcf->key_q < 0)
  1275. cpi->oxcf.key_q = q_trans[0];
  1276. else
  1277. cpi->oxcf.key_q = q_trans[oxcf->key_q];
  1278. if (oxcf->gold_q < 0)
  1279. cpi->oxcf.gold_q = q_trans[0];
  1280. else
  1281. cpi->oxcf.gold_q = q_trans[oxcf->gold_q];
  1282. }
  1283. cpi->baseline_gf_interval =
  1284. cpi->oxcf.alt_freq ? cpi->oxcf.alt_freq : DEFAULT_GF_INTERVAL;
  1285. cpi->ref_frame_flags = VP8_ALT_FLAG | VP8_GOLD_FLAG | VP8_LAST_FLAG;
  1286. //cpi->use_golden_frame_only = 0;
  1287. //cpi->use_last_frame_only = 0;
  1288. cm->refresh_golden_frame = 0;
  1289. cm->refresh_last_frame = 1;
  1290. cm->refresh_entropy_probs = 1;
  1291. if (cpi->oxcf.token_partitions >= 0 && cpi->oxcf.token_partitions <= 3)
  1292. cm->multi_token_partition =
  1293. (TOKEN_PARTITION) cpi->oxcf.token_partitions;
  1294. setup_features(cpi);
  1295. {
  1296. int i;
  1297. for (i = 0; i < MAX_MB_SEGMENTS; i++)
  1298. cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;
  1299. }
  1300. // At the moment the first order values may not be > MAXQ
  1301. if (cpi->oxcf.fixed_q > MAXQ)
  1302. cpi->oxcf.fixed_q = MAXQ;
  1303. // local file playback mode == really big buffer
  1304. if (cpi->oxcf.end_usage == USAGE_LOCAL_FILE_PLAYBACK)
  1305. {
  1306. cpi->oxcf.starting_buffer_level = 60000;
  1307. cpi->oxcf.optimal_buffer_level = 60000;
  1308. cpi->oxcf.maximum_buffer_size = 240000;
  1309. }
  1310. // Convert target bandwidth from Kbit/s to Bit/s
  1311. cpi->oxcf.target_bandwidth *= 1000;
  1312. cpi->oxcf.starting_buffer_level =
  1313. rescale(cpi->oxcf.starting_buffer_level,
  1314. cpi->oxcf.target_bandwidth, 1000);
  1315. // Set or reset optimal and maximum buffer levels.
  1316. if (cpi->oxcf.optimal_buffer_level == 0)
  1317. cpi->oxcf.optimal_buffer_level = cpi->oxcf.target_bandwidth / 8;
  1318. else
  1319. cpi->oxcf.optimal_buffer_level =
  1320. rescale(cpi->oxcf.optimal_buffer_level,
  1321. cpi->oxcf.target_bandwidth, 1000);
  1322. if (cpi->oxcf.maximum_buffer_size == 0)
  1323. cpi->oxcf.maximum_buffer_size = cpi->oxcf.target_bandwidth / 8;
  1324. else
  1325. cpi->oxcf.maximum_buffer_size =
  1326. rescale(cpi->oxcf.maximum_buffer_size,
  1327. cpi->oxcf.target_bandwidth, 1000);
  1328. // Set up frame rate and related parameters rate control values.
  1329. vp8_new_frame_rate(cpi, cpi->oxcf.frame_rate);
  1330. // Set absolute upper and lower quality limits
  1331. cpi->worst_quality = cpi->oxcf.worst_allowed_q;
  1332. cpi->best_quality = cpi->oxcf.best_allowed_q;
  1333. // active values should only be modified if out of new range
  1334. if (cpi->active_worst_quality > cpi->oxcf.worst_allowed_q)
  1335. {
  1336. cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
  1337. }
  1338. // less likely
  1339. else if (cpi->active_worst_quality < cpi->oxcf.best_allowed_q)
  1340. {
  1341. cpi->active_worst_quality = cpi->oxcf.best_allowed_q;
  1342. }
  1343. if (cpi->active_best_quality < cpi->oxcf.best_allowed_q)
  1344. {
  1345. cpi->active_best_quality = cpi->oxcf.best_allowed_q;
  1346. }
  1347. // less likely
  1348. else if (cpi->active_best_quality > cpi->oxcf.worst_allowed_q)
  1349. {
  1350. cpi->active_best_quality = cpi->oxcf.worst_allowed_q;
  1351. }
  1352. cpi->buffered_mode = (cpi->oxcf.optimal_buffer_level > 0) ? TRUE : FALSE;
  1353. cpi->cq_target_quality = cpi->oxcf.cq_level;
  1354. // Only allow dropped frames in buffered mode
  1355. cpi->drop_frames_allowed = cpi->oxcf.allow_df && cpi->buffered_mode;
  1356. if (!cm->use_bilinear_mc_filter)
  1357. cm->mcomp_filter_type = SIXTAP;
  1358. else
  1359. cm->mcomp_filter_type = BILINEAR;
  1360. cpi->target_bandwidth = cpi->oxcf.target_bandwidth;
  1361. cm->Width = cpi->oxcf.Width ;
  1362. cm->Height = cpi->oxcf.Height ;
  1363. cm->horiz_scale = cpi->horiz_scale;
  1364. cm->vert_scale = cpi->vert_scale ;
  1365. // VP8 sharpness level mapping 0-7 (vs 0-10 in general VPx dialogs)
  1366. if (cpi->oxcf.Sharpness > 7)
  1367. cpi->oxcf.Sharpness = 7;
  1368. cm->sharpness_level = cpi->oxcf.Sharpness;
  1369. if (cm->horiz_scale != NORMAL || cm->vert_scale != NORMAL)
  1370. {
  1371. int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
  1372. int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
  1373. Scale2Ratio(cm->horiz_scale, &hr, &hs);
  1374. Scale2Ratio(cm->vert_scale, &vr, &vs);
  1375. // always go to the next whole number
  1376. cm->Width = (hs - 1 + cpi->oxcf.Width * hr) / hs;
  1377. cm->Height = (vs - 1 + cpi->oxcf.Height * vr) / vs;
  1378. }
  1379. if (((cm->Width + 15) & 0xfffffff0) !=
  1380. cm->yv12_fb[cm->lst_fb_idx].y_width ||
  1381. ((cm->Height + 15) & 0xfffffff0) !=
  1382. cm->yv12_fb[cm->lst_fb_idx].y_height ||
  1383. cm->yv12_fb[cm->lst_fb_idx].y_width == 0)
  1384. {
  1385. alloc_raw_frame_buffers(cpi);
  1386. vp8_alloc_compressor_data(cpi);
  1387. }
  1388. if (cpi->oxcf.fixed_q >= 0)
  1389. {
  1390. cpi->last_q[0] = cpi->oxcf.fixed_q;
  1391. cpi->last_q[1] = cpi->oxcf.fixed_q;
  1392. }
  1393. cpi->Speed = cpi->oxcf.cpu_used;
  1394. // force allow_lag to 0 if lag_in_frames is 0
  1395. if (cpi->oxcf.lag_in_frames == 0)
  1396. {
  1397. cpi->oxcf.allow_lag = 0;
  1398. }
  1399. // Limit on lag buffers as these are not currently dynamically allocated
  1400. else if (cpi->oxcf.lag_in_frames > MAX_LAG_BUFFERS)
  1401. cpi->oxcf.lag_in_frames = MAX_LAG_BUFFERS;
  1402. // YX Temp
  1403. cpi->alt_ref_source = NULL;
  1404. cpi->is_src_frame_alt_ref = 0;
  1405. #if 0
  1406. // Experimental RD Code
  1407. cpi->frame_distortion = 0;
  1408. cpi->last_frame_distortion = 0;
  1409. #endif
  1410. }
  1411. #define M_LOG2_E 0.693147180559945309417
  1412. #define log2f(x) (log (x) / (float) M_LOG2_E)
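  // Fill the two per-component motion-vector SAD cost tables symmetrically about zero
  // with values proportional to log2 of the component magnitude; the motion search uses
  // these to approximate the bit cost of signalling a given MV offset.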
  1413. static void cal_mvsadcosts(int *mvsadcost[2])
  1414. {
  1415. int i = 1;
  1416. mvsadcost [0] [0] = 300;
  1417. mvsadcost [1] [0] = 300;
  1418. do
  1419. {
  1420. double z = 256 * (2 * (log2f(8 * i) + .6));
  1421. mvsadcost [0][i] = (int) z;
  1422. mvsadcost [1][i] = (int) z;
  1423. mvsadcost [0][-i] = (int) z;
  1424. mvsadcost [1][-i] = (int) z;
  1425. }
  1426. while (++i <= mvfp_max);
  1427. }
  1428. VP8_PTR vp8_create_compressor(VP8_CONFIG *oxcf)
  1429. {
  1430. int i;
  1431. volatile union
  1432. {
  1433. VP8_COMP *cpi;
  1434. VP8_PTR ptr;
  1435. } ctx;
  1436. VP8_COMP *cpi;
  1437. VP8_COMMON *cm;
  1438. cpi = ctx.cpi = vpx_memalign(32, sizeof(VP8_COMP));
  1439. // Check that the CPI instance is valid
  1440. if (!cpi)
  1441. return 0;
  1442. cm = &cpi->common;
  1443. vpx_memset(cpi, 0, sizeof(VP8_COMP));
  1444. if (setjmp(cm->error.jmp))
  1445. {
  1446. VP8_PTR ptr = ctx.ptr;
  1447. ctx.cpi->common.error.setjmp = 0;
  1448. vp8_remove_compressor(&ptr);
  1449. return 0;
  1450. }
  1451. cpi->common.error.setjmp = 1;
  1452. CHECK_MEM_ERROR(cpi->mb.ss, vpx_calloc(sizeof(search_site), (MAX_MVSEARCH_STEPS * 8) + 1));
  1453. vp8_create_common(&cpi->common);
  1454. vp8_cmachine_specific_config(cpi);
  1455. init_config((VP8_PTR)cpi, oxcf);
  1456. memcpy(cpi->base_skip_false_prob, vp8cx_base_skip_false_prob, sizeof(vp8cx_base_skip_false_prob));
  1457. cpi->common.current_video_frame = 0;
  1458. cpi->kf_overspend_bits = 0;
  1459. cpi->kf_bitrate_adjustment = 0;
  1460. cpi->frames_till_gf_update_due = 0;
  1461. cpi->gf_overspend_bits = 0;
  1462. cpi->non_gf_bitrate_adjustment = 0;
  1463. cpi->prob_last_coded = 128;
  1464. cpi->prob_gf_coded = 128;
  1465. cpi->prob_intra_coded = 63;
  1466. // Prime the recent reference frame usage counters.
  1467. // Hereafter they will be maintained as a sort of moving average
  1468. cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
  1469. cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
  1470. cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
  1471. cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
  1472. // Set reference frame sign bias for ALTREF frame to 1 (for now)
  1473. cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
  1474. cpi->twopass.gf_decay_rate = 0;
  1475. cpi->baseline_gf_interval = DEFAULT_GF_INTERVAL;
  1476. cpi->gold_is_last = 0 ;
  1477. cpi->alt_is_last = 0 ;
  1478. cpi->gold_is_alt = 0 ;
  1479. // allocate memory for storing last frame's MVs for MV prediction.
  1480. CHECK_MEM_ERROR(cpi->lfmv, vpx_calloc((cpi->common.mb_rows+2) * (cpi->common.mb_cols+2), sizeof(int_mv)));
  1481. CHECK_MEM_ERROR(cpi->lf_ref_frame_sign_bias, vpx_calloc((cpi->common.mb_rows+2) * (cpi->common.mb_cols+2), sizeof(int)));
  1482. CHECK_MEM_ERROR(cpi->lf_ref_frame, vpx_calloc((cpi->common.mb_rows+2) * (cpi->common.mb_cols+2), sizeof(int)));
  1483. // Create the encoder segmentation map and set all entries to 0
  1484. CHECK_MEM_ERROR(cpi->segmentation_map, vpx_calloc(cpi->common.mb_rows * cpi->common.mb_cols, 1));
  1485. CHECK_MEM_ERROR(cpi->active_map, vpx_calloc(cpi->common.mb_rows * cpi->common.mb_cols, 1));
  1486. vpx_memset(cpi->active_map , 1, (cpi->common.mb_rows * cpi->common.mb_cols));
  1487. cpi->active_map_enabled = 0;
  1488. #if 0
  1489. // Experimental code for lagged and one pass
  1490. // Initialise one_pass GF frames stats
  1491. // Update stats used for GF selection
  1492. if (cpi->pass == 0)
  1493. {
  1494. cpi->one_pass_frame_index = 0;
  1495. for (i = 0; i < MAX_LAG_BUFFERS; i++)
  1496. {
  1497. cpi->one_pass_frame_stats[i].frames_so_far = 0;
  1498. cpi->one_pass_frame_stats[i].frame_intra_error = 0.0;
  1499. cpi->one_pass_frame_stats[i].frame_coded_error = 0.0;
  1500. cpi->one_pass_frame_stats[i].frame_pcnt_inter = 0.0;
  1501. cpi->one_pass_frame_stats[i].frame_pcnt_motion = 0.0;
  1502. cpi->one_pass_frame_stats[i].frame_mvr = 0.0;
  1503. cpi->one_pass_frame_stats[i].frame_mvr_abs = 0.0;
  1504. cpi->one_pass_frame_stats[i].frame_mvc = 0.0;
  1505. cpi->one_pass_frame_stats[i].frame_mvc_abs = 0.0;
  1506. }
  1507. }
  1508. #endif
  1509. // Should we use the cyclic refresh method?
  1510. // Currently this is tied to error resilient mode.
  1511. cpi->cyclic_refresh_mode_enabled = cpi->oxcf.error_resilient_mode;
  1512. cpi->cyclic_refresh_mode_max_mbs_perframe = (cpi->common.mb_rows * cpi->common.mb_cols) / 40;
  1513. cpi->cyclic_refresh_mode_index = 0;
  1514. cpi->cyclic_refresh_q = 32;
  1515. if (cpi->cyclic_refresh_mode_enabled)
  1516. {
  1517. CHECK_MEM_ERROR(cpi->cyclic_refresh_map, vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
  1518. }
  1519. else
  1520. cpi->cyclic_refresh_map = (signed char *) NULL;
  1521. // Test function for segmentation
  1522. //segmentation_test_function((VP8_PTR) cpi);
  1523. #ifdef ENTROPY_STATS
  1524. init_context_counters();
  1525. #endif
  1526. /*Initialize the feed-forward activity masking.*/
  1527. cpi->activity_avg = 90<<12;
  1528. cpi->frames_since_key = 8; // Give a sensible default for the first frame.
  1529. cpi->key_frame_frequency = cpi->oxcf.key_freq;
  1530. cpi->this_key_frame_forced = FALSE;
  1531. cpi->next_key_frame_forced = FALSE;
  1532. cpi->source_alt_ref_pending = FALSE;
  1533. cpi->source_alt_ref_active = FALSE;
  1534. cpi->common.refresh_alt_ref_frame = 0;
  1535. cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
  1536. #if CONFIG_INTERNAL_STATS
  1537. cpi->b_calculate_ssimg = 0;
  1538. cpi->count = 0;
  1539. cpi->bytes = 0;
  1540. if (cpi->b_calculate_psnr)
  1541. {
  1542. cpi->total_sq_error = 0.0;
  1543. cpi->total_sq_error2 = 0.0;
  1544. cpi->total_y = 0.0;
  1545. cpi->total_u = 0.0;
  1546. cpi->total_v = 0.0;
  1547. cpi->total = 0.0;
  1548. cpi->totalp_y = 0.0;
  1549. cpi->totalp_u = 0.0;
  1550. cpi->totalp_v = 0.0;
  1551. cpi->totalp = 0.0;
  1552. cpi->tot_recode_hits = 0;
  1553. cpi->summed_quality = 0;
  1554. cpi->summed_weights = 0;
  1555. }
  1556. if (cpi->b_calculate_ssimg)
  1557. {
  1558. cpi->total_ssimg_y = 0;
  1559. cpi->total_ssimg_u = 0;
  1560. cpi->total_ssimg_v = 0;
  1561. cpi->total_ssimg_all = 0;
  1562. }
  1563. #endif
  1564. #ifndef LLONG_MAX
  1565. #define LLONG_MAX 9223372036854775807LL
  1566. #endif
  1567. cpi->first_time_stamp_ever = LLONG_MAX;
  1568. cpi->frames_till_gf_update_due = 0;
  1569. cpi->key_frame_count = 1;
  1570. cpi->ni_av_qi = cpi->oxcf.worst_allowed_q;
  1571. cpi->ni_tot_qi = 0;
  1572. cpi->ni_frames = 0;
  1573. cpi->total_byte_count = 0;
  1574. cpi->drop_frame = 0;
  1575. cpi->drop_count = 0;
  1576. cpi->max_drop_count = 0;
  1577. cpi->max_consec_dropped_frames = 4;
  1578. cpi->rate_correction_factor = 1.0;
  1579. cpi->key_frame_rate_correction_factor = 1.0;
  1580. cpi->gf_rate_correction_factor = 1.0;
  1581. cpi->twopass.est_max_qcorrection_factor = 1.0;
  1582. cpi->mb.mvcost[0] = &cpi->mb.mvcosts[0][mv_max+1];
  1583. cpi->mb.mvcost[1] = &cpi->mb.mvcosts[1][mv_max+1];
  1584. cpi->mb.mvsadcost[0] = &cpi->mb.mvsadcosts[0][mvfp_max+1];
  1585. cpi->mb.mvsadcost[1] = &cpi->mb.mvsadcosts[1][mvfp_max+1];
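// Note: the cost pointers above are aimed at the midpoints of the underlying arrays
// (offsets mv_max+1 and mvfp_max+1) so they can be indexed directly with signed MV
// components, as cal_mvsadcosts() does with mvsadcost[0][-i].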
  1586. cal_mvsadcosts(cpi->mb.mvsadcost);
  1587. for (i = 0; i < KEY_FRAME_CONTEXT; i++)
  1588. {
  1589. cpi->prior_key_frame_distance[i] = (int)cpi->output_frame_rate;
  1590. }
  1591. #ifdef OUTPUT_YUV_SRC
  1592. yuv_file = fopen("bd.yuv", "ab");
  1593. #endif
  1594. #if 0
  1595. framepsnr = fopen("framepsnr.stt", "a");
  1596. kf_list = fopen("kf_list.stt", "w");
  1597. #endif
  1598. cpi->output_pkt_list = oxcf->output_pkt_list;
  1599. #if !(CONFIG_REALTIME_ONLY)
  1600. if (cpi->pass == 1)
  1601. {
  1602. vp8_init_first_pass(cpi);
  1603. }
  1604. else if (cpi->pass == 2)
  1605. {
  1606. size_t packet_sz = sizeof(FIRSTPASS_STATS);
  1607. int packets = oxcf->two_pass_stats_in.sz / packet_sz;
  1608. cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
  1609. cpi->twopass.stats_in = cpi->twopass.stats_in_start;
  1610. cpi->twopass.stats_in_end = (void*)((char *)cpi->twopass.stats_in
  1611. + (packets - 1) * packet_sz);
  1612. vp8_init_second_pass(cpi);
  1613. }
  1614. #endif
  1615. if (cpi->compressor_speed == 2)
  1616. {
  1617. cpi->cpu_freq = 0; //vp8_get_processor_freq();
  1618. cpi->avg_encode_time = 0;
  1619. cpi->avg_pick_mode_time = 0;
  1620. }
  1621. vp8_set_speed_features(cpi);
  1622. // Set starting values of RD threshold multipliers (128 = *1)
  1623. for (i = 0; i < MAX_MODES; i++)
  1624. {
  1625. cpi->rd_thresh_mult[i] = 128;
  1626. }
  1627. #ifdef ENTROPY_STATS
  1628. init_mv_ref_counts();
  1629. #endif
  1630. #if CONFIG_MULTITHREAD
  1631. vp8cx_create_encoder_threads(cpi);
  1632. #endif
  1633. cpi->fn_ptr[BLOCK_16X16].sdf = VARIANCE_INVOKE(&cpi->rtcd.variance, sad16x16);
  1634. cpi->fn_ptr[BLOCK_16X16].vf = VARIANCE_INVOKE(&cpi->rtcd.variance, var16x16);
  1635. cpi->fn_ptr[BLOCK_16X16].svf = VARIANCE_INVOKE(&cpi->rtcd.variance, subpixvar16x16);
  1636. cpi->fn_ptr[BLOCK_16X16].svf_halfpix_h = VARIANCE_INVOKE(&cpi->rtcd.variance, halfpixvar16x16_h);
  1637. cpi->fn_ptr[BLOCK_16X16].svf_halfpix_v = VARIANCE_INVOKE(&cpi->rtcd.variance, halfpixvar16x16_v);
  1638. cpi->fn_ptr[BLOCK_16X16].svf_halfpix_hv = VARIANCE_INVOKE(&cpi->rtcd.variance, halfpixvar16x16_hv);
  1639. cpi->fn_ptr[BLOCK_16X16].sdx3f = VARIANCE_INVOKE(&cpi->rtcd.variance, sad16x16x3);
  1640. cpi->fn_ptr[BLOCK_16X16].sdx8f = VARIANCE_INVOKE(&cpi->rtcd.variance, sad16x16x8);
  1641. cpi->fn_ptr[BLOCK_16X16].sdx4df = VARIANCE_INVOKE(&cpi->rtcd.variance, sad16x16x4d);
  1642. cpi->fn_ptr[BLOCK_16X8].sdf = VARIANCE_INVOKE(&cpi->rtcd.variance, sad16x8);
  1643. cpi->fn_ptr[BLOCK_16X8].vf = VARIANCE_INVOKE(&cpi->rtcd.variance, var16x8);
  1644. cpi->fn_ptr[BLOCK_16X8].svf = VARIANCE_INVOKE(&cpi->rtcd.variance, subpixvar16x8);
  1645. cpi->fn_ptr[BLOCK_16X8].svf_halfpix_h = NULL;
  1646. cpi->fn_ptr[BLOCK_16X8].svf_halfpix_v = NULL;
  1647. cpi->fn_ptr[BLOCK_16X8].svf_halfpix_hv = NULL;
  1648. cpi->fn_ptr[BLOCK_16X8].sdx3f = VARIANCE_INVOKE(&cpi->rtcd.variance, sad16x8x3);
  1649. cpi->fn_ptr[BLOCK_16X8].sdx8f = VARIANCE_INVOKE(&cpi->rtcd.variance, sad16x8x8);
  1650. cpi->fn_ptr[BLOCK_16X8].sdx4df = VARIANCE_INVOKE(&cpi->rtcd.variance, sad16x8x4d);
  1651. cpi->fn_ptr[BLOCK_8X16].sdf = VARIANCE_INVOKE(&cpi->rtcd.variance, sad8x16);
  1652. cpi->fn_ptr[BLOCK_8X16].vf = VARIANCE_INVOKE(&cpi->rtcd.variance, var8x16);
  1653. cpi->fn_ptr[BLOCK_8X16].svf = VARIANCE_INVOKE(&cpi->rtcd.variance, subpixvar8x16);
  1654. cpi->fn_ptr[BLOCK_8X16].svf_halfpix_h = NULL;
  1655. cpi->fn_ptr[BLOCK_8X16].svf_halfpix_v = NULL;
  1656. cpi->fn_ptr[BLOCK_8X16].svf_halfpix_hv = NULL;
  1657. cpi->fn_ptr[BLOCK_8X16].sdx3f = VARIANCE_INVOKE(&cpi->rtcd.variance, sad8x16x3);
  1658. cpi->fn_ptr[BLOCK_8X16].sdx8f = VARIANCE_INVOKE(&cpi->rtcd.variance, sad8x16x8);
  1659. cpi->fn_ptr[BLOCK_8X16].sdx4df = VARIANCE_INVOKE(&cpi->rtcd.variance, sad8x16x4d);
  1660. cpi->fn_ptr[BLOCK_8X8].sdf = VARIANCE_INVOKE(&cpi->rtcd.variance, sad8x8);
  1661. cpi->fn_ptr[BLOCK_8X8].vf = VARIANCE_INVOKE(&cpi->rtcd.variance, var8x8);
  1662. cpi->fn_ptr[BLOCK_8X8].svf = VARIANCE_INVOKE(&cpi->rtcd.variance, subpixvar8x8);
  1663. cpi->fn_ptr[BLOCK_8X8].svf_halfpix_h = NULL;
  1664. cpi->fn_ptr[BLOCK_8X8].svf_halfpix_v = NULL;
  1665. cpi->fn_ptr[BLOCK_8X8].svf_halfpix_hv = NULL;
  1666. cpi->fn_ptr[BLOCK_8X8].sdx3f = VARIANCE_INVOKE(&cpi->rtcd.variance, sad8x8x3);
  1667. cpi->fn_ptr[BLOCK_8X8].sdx8f = VARIANCE_INVOKE(&cpi->rtcd.variance, sad8x8x8);
  1668. cpi->fn_ptr[BLOCK_8X8].sdx4df = VARIANCE_INVOKE(&cpi->rtcd.variance, sad8x8x4d);
  1669. cpi->fn_ptr[BLOCK_4X4].sdf = VARIANCE_INVOKE(&cpi->rtcd.variance, sad4x4);
  1670. cpi->fn_ptr[BLOCK_4X4].vf = VARIANCE_INVOKE(&cpi->rtcd.variance, var4x4);
  1671. cpi->fn_ptr[BLOCK_4X4].svf = VARIANCE_INVOKE(&cpi->rtcd.variance, subpixvar4x4);
  1672. cpi->fn_ptr[BLOCK_4X4].svf_halfpix_h = NULL;
  1673. cpi->fn_ptr[BLOCK_4X4].svf_halfpix_v = NULL;
  1674. cpi->fn_ptr[BLOCK_4X4].svf_halfpix_hv = NULL;
  1675. cpi->fn_ptr[BLOCK_4X4].sdx3f = VARIANCE_INVOKE(&cpi->rtcd.variance, sad4x4x3);
  1676. cpi->fn_ptr[BLOCK_4X4].sdx8f = VARIANCE_INVOKE(&cpi->rtcd.variance, sad4x4x8);
  1677. cpi->fn_ptr[BLOCK_4X4].sdx4df = VARIANCE_INVOKE(&cpi->rtcd.variance, sad4x4x4d);
  1678. #if ARCH_X86 || ARCH_X86_64
  1679. cpi->fn_ptr[BLOCK_16X16].copymem = VARIANCE_INVOKE(&cpi->rtcd.variance, copy32xn);
  1680. cpi->fn_ptr[BLOCK_16X8].copymem = VARIANCE_INVOKE(&cpi->rtcd.variance, copy32xn);
  1681. cpi->fn_ptr[BLOCK_8X16].copymem = VARIANCE_INVOKE(&cpi->rtcd.variance, copy32xn);
  1682. cpi->fn_ptr[BLOCK_8X8].copymem = VARIANCE_INVOKE(&cpi->rtcd.variance, copy32xn);
  1683. cpi->fn_ptr[BLOCK_4X4].copymem = VARIANCE_INVOKE(&cpi->rtcd.variance, copy32xn);
  1684. #endif
  1685. cpi->full_search_sad = SEARCH_INVOKE(&cpi->rtcd.search, full_search);
  1686. cpi->diamond_search_sad = SEARCH_INVOKE(&cpi->rtcd.search, diamond_search);
  1687. cpi->refining_search_sad = SEARCH_INVOKE(&cpi->rtcd.search, refining_search);
  1688. // make sure frame 1 is okay
  1689. cpi->error_bins[0] = cpi->common.MBs;
  1690. // vp8cx_init_quantizer() is first called here. Add a check in vp8cx_frame_init_quantizer() so that vp8cx_init_quantizer() is only called later
  1691. // when needed. This avoids unnecessary calls of vp8cx_init_quantizer() on every frame.
  1692. vp8cx_init_quantizer(cpi);
  1693. vp8_loop_filter_init(cm);
  1694. cpi->common.error.setjmp = 0;
  1695. return (VP8_PTR) cpi;
  1696. }
  1697. void vp8_remove_compressor(VP8_PTR *ptr)
  1698. {
  1699. VP8_COMP *cpi = (VP8_COMP *)(*ptr);
  1700. if (!cpi)
  1701. return;
  1702. if (cpi && (cpi->common.current_video_frame > 0))
  1703. {
  1704. #if !(CONFIG_REALTIME_ONLY)
  1705. if (cpi->pass == 2)
  1706. {
  1707. vp8_end_second_pass(cpi);
  1708. }
  1709. #endif
  1710. #ifdef ENTROPY_STATS
  1711. print_context_counters();
  1712. print_tree_update_probs();
  1713. print_mode_context();
  1714. #endif
  1715. #if CONFIG_INTERNAL_STATS
  1716. if (cpi->pass != 1)
  1717. {
  1718. FILE *f = fopen("opsnr.stt", "a");
  1719. double time_encoded = (cpi->last_end_time_stamp_seen
  1720. - cpi->first_time_stamp_ever) / 10000000.000;
  1721. double total_encode_time = (cpi->time_receive_data + cpi->time_compress_data) / 1000.000;
  1722. double dr = (double)cpi->bytes * (double) 8 / (double)1000 / time_encoded;
  1723. if (cpi->b_calculate_psnr)
  1724. {
  1725. YV12_BUFFER_CONFIG *lst_yv12 = &cpi->common.yv12_fb[cpi->common.lst_fb_idx];
  1726. double samples = 3.0 / 2 * cpi->count * lst_yv12->y_width * lst_yv12->y_height;
  1727. double total_psnr = vp8_mse2psnr(samples, 255.0, cpi->total_sq_error);
  1728. double total_psnr2 = vp8_mse2psnr(samples, 255.0, cpi->total_sq_error2);
  1729. double total_ssim = 100 * pow(cpi->summed_quality / cpi->summed_weights, 8.0);
  1730. fprintf(f, "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\tGLPsnrP\tVPXSSIM\t Time(us)\n");
  1731. fprintf(f, "%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t%8.0f\n",
  1732. dr, cpi->total / cpi->count, total_psnr, cpi->totalp / cpi->count, total_psnr2, total_ssim,
  1733. total_encode_time);
  1734. }
  1735. if (cpi->b_calculate_ssimg)
  1736. {
  1737. fprintf(f, "BitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t Time(us)\n");
  1738. fprintf(f, "%7.3f\t%6.4f\t%6.4f\t%6.4f\t%6.4f\t%8.0f\n", dr,
  1739. cpi->total_ssimg_y / cpi->count, cpi->total_ssimg_u / cpi->count,
  1740. cpi->total_ssimg_v / cpi->count, cpi->total_ssimg_all / cpi->count, total_encode_time);
  1741. }
  1742. fclose(f);
  1743. #if 0
  1744. f = fopen("qskip.stt", "a");
  1745. fprintf(f, "minq:%d -maxq:%d skiptrue:skipfalse = %d:%d\n", cpi->oxcf.best_allowed_q, cpi->oxcf.worst_allowed_q, skiptruecount, skipfalsecount);
  1746. fclose(f);
  1747. #endif
  1748. }
  1749. #endif
  1750. #ifdef SPEEDSTATS
  1751. if (cpi->compressor_speed == 2)
  1752. {
  1753. int i;
  1754. FILE *f = fopen("cxspeed.stt", "a");
  1755. cnt_pm /= cpi->common.MBs;
  1756. for (i = 0; i < 16; i++)
  1757. fprintf(f, "%5d", frames_at_speed[i]);
  1758. fprintf(f, "\n");
  1759. //fprintf(f, "%10d PM %10d %10d %10d EF %10d %10d %10d\n", cpi->Speed, cpi->avg_pick_mode_time, (tot_pm/cnt_pm), cnt_pm, cpi->avg_encode_time, 0, 0);
  1760. fclose(f);
  1761. }
  1762. #endif
  1763. #ifdef MODE_STATS
  1764. {
  1765. extern int count_mb_seg[4];
  1766. FILE *f = fopen("modes.stt", "a");
  1767. double dr = (double)cpi->oxcf.frame_rate * (double)bytes * (double)8 / (double)count / (double)1000 ;
  1768. fprintf(f, "intra_mode in Intra Frames:\n");
  1769. fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d\n", y_modes[0], y_modes[1], y_modes[2], y_modes[3], y_modes[4]);
  1770. fprintf(f, "UV:%8d, %8d, %8d, %8d\n", uv_modes[0], uv_modes[1], uv_modes[2], uv_modes[3]);
  1771. fprintf(f, "B: ");
  1772. {
  1773. int i;
  1774. for (i = 0; i < 10; i++)
  1775. fprintf(f, "%8d, ", b_modes[i]);
  1776. fprintf(f, "\n");
  1777. }
  1778. fprintf(f, "Modes in Inter Frames:\n");
  1779. fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d\n",
  1780. inter_y_modes[0], inter_y_modes[1], inter_y_modes[2], inter_y_modes[3], inter_y_modes[4],
  1781. inter_y_modes[5], inter_y_modes[6], inter_y_modes[7], inter_y_modes[8], inter_y_modes[9]);
  1782. fprintf(f, "UV:%8d, %8d, %8d, %8d\n", inter_uv_modes[0], inter_uv_modes[1], inter_uv_modes[2], inter_uv_modes[3]);
  1783. fprintf(f, "B: ");
  1784. {
  1785. int i;
  1786. for (i = 0; i < 15; i++)
  1787. fprintf(f, "%8d, ", inter_b_modes[i]);
  1788. fprintf(f, "\n");
  1789. }
  1790. fprintf(f, "P:%8d, %8d, %8d, %8d\n", count_mb_seg[0], count_mb_seg[1], count_mb_seg[2], count_mb_seg[3]);
  1791. fprintf(f, "PB:%8d, %8d, %8d, %8d\n", inter_b_modes[LEFT4X4], inter_b_modes[ABOVE4X4], inter_b_modes[ZERO4X4], inter_b_modes[NEW4X4]);
  1792. fclose(f);
  1793. }
  1794. #endif
  1795. #ifdef ENTROPY_STATS
  1796. {
  1797. int i, j, k;
  1798. FILE *fmode = fopen("modecontext.c", "w");
  1799. fprintf(fmode, "\n#include \"entropymode.h\"\n\n");
  1800. fprintf(fmode, "const unsigned int vp8_kf_default_bmode_counts ");
  1801. fprintf(fmode, "[VP8_BINTRAMODES] [VP8_BINTRAMODES] [VP8_BINTRAMODES] =\n{\n");
  1802. for (i = 0; i < 10; i++)
  1803. {
  1804. fprintf(fmode, " { //Above Mode : %d\n", i);
  1805. for (j = 0; j < 10; j++)
  1806. {
  1807. fprintf(fmode, " {");
  1808. for (k = 0; k < 10; k++)
  1809. {
  1810. if (!intra_mode_stats[i][j][k])
  1811. fprintf(fmode, " %5d, ", 1);
  1812. else
  1813. fprintf(fmode, " %5d, ", intra_mode_stats[i][j][k]);
  1814. }
  1815. fprintf(fmode, "}, // left_mode %d\n", j);
  1816. }
  1817. fprintf(fmode, " },\n");
  1818. }
  1819. fprintf(fmode, "};\n");
  1820. fclose(fmode);
  1821. }
  1822. #endif
  1823. #if defined(SECTIONBITS_OUTPUT)
  1824. if (0)
  1825. {
  1826. int i;
  1827. FILE *f = fopen("tokenbits.stt", "a");
  1828. for (i = 0; i < 28; i++)
  1829. fprintf(f, "%8d", (int)(Sectionbits[i] / 256));
  1830. fprintf(f, "\n");
  1831. fclose(f);
  1832. }
  1833. #endif
  1834. #if 0
  1835. {
  1836. printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000);
  1837. printf("\n_frames receive_data encode_mb_row compress_frame Total\n");
  1838. printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame, cpi->time_receive_data / 1000, cpi->time_encode_mb_row / 1000, cpi->time_compress_data / 1000, (cpi->time_receive_data + cpi->time_compress_data) / 1000);
  1839. }
  1840. #endif
  1841. }
  1842. #if CONFIG_MULTITHREAD
  1843. vp8cx_remove_encoder_threads(cpi);
  1844. #endif
  1845. dealloc_compressor_data(cpi);
  1846. vpx_free(cpi->mb.ss);
  1847. vpx_free(cpi->tok);
  1848. vpx_free(cpi->cyclic_refresh_map);
  1849. vp8_remove_common(&cpi->common);
  1850. vpx_free(cpi);
  1851. *ptr = 0;
  1852. #ifdef OUTPUT_YUV_SRC
  1853. fclose(yuv_file);
  1854. #endif
  1855. #if 0
  1856. if (keyfile)
  1857. fclose(keyfile);
  1858. if (framepsnr)
  1859. fclose(framepsnr);
  1860. if (kf_list)
  1861. fclose(kf_list);
  1862. #endif
  1863. }
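// calc_plane_error(): sums the squared error between an original and a reconstructed
// plane. Full 16x16 blocks go through the optimised mse16x16 kernel from the variance
// RTCD table; any right-edge or bottom-edge pixels left over when cols or rows are not
// multiples of 16 are accumulated with plain scalar loops.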
  1864. static uint64_t calc_plane_error(unsigned char *orig, int orig_stride,
  1865. unsigned char *recon, int recon_stride,
  1866. unsigned int cols, unsigned int rows,
  1867. vp8_variance_rtcd_vtable_t *rtcd)
  1868. {
  1869. unsigned int row, col;
  1870. uint64_t total_sse = 0;
  1871. int diff;
  1872. for (row = 0; row + 16 <= rows; row += 16)
  1873. {
  1874. for (col = 0; col + 16 <= cols; col += 16)
  1875. {
  1876. unsigned int sse;
  1877. VARIANCE_INVOKE(rtcd, mse16x16)(orig + col, orig_stride,
  1878. recon + col, recon_stride,
  1879. &sse);
  1880. total_sse += sse;
  1881. }
  1882. /* Handle odd-sized width */
  1883. if (col < cols)
  1884. {
  1885. unsigned int border_row, border_col;
  1886. unsigned char *border_orig = orig;
  1887. unsigned char *border_recon = recon;
  1888. for (border_row = 0; border_row < 16; border_row++)
  1889. {
  1890. for (border_col = col; border_col < cols; border_col++)
  1891. {
  1892. diff = border_orig[border_col] - border_recon[border_col];
  1893. total_sse += diff * diff;
  1894. }
  1895. border_orig += orig_stride;
  1896. border_recon += recon_stride;
  1897. }
  1898. }
  1899. orig += orig_stride * 16;
  1900. recon += recon_stride * 16;
  1901. }
  1902. /* Handle odd-sized height */
  1903. for (; row < rows; row++)
  1904. {
  1905. for (col = 0; col < cols; col++)
  1906. {
  1907. diff = orig[col] - recon[col];
  1908. total_sse += diff * diff;
  1909. }
  1910. orig += orig_stride;
  1911. recon += recon_stride;
  1912. }
  1913. return total_sse;
  1914. }
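// generate_psnr_packet(): sse[0]/samples[0] accumulate the combined Y+U+V totals, while
// sse[1..3] and samples[1..3] hold the Y, U and V planes individually; the chroma planes
// use (width+1)/2 by (height+1)/2 samples. Each entry is then converted to PSNR against
// a 255 peak with vp8_mse2psnr().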
  1915. static void generate_psnr_packet(VP8_COMP *cpi)
  1916. {
  1917. YV12_BUFFER_CONFIG *orig = cpi->Source;
  1918. YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
  1919. struct vpx_codec_cx_pkt pkt;
  1920. uint64_t sse;
  1921. int i;
  1922. unsigned int width = cpi->common.Width;
  1923. unsigned int height = cpi->common.Height;
  1924. pkt.kind = VPX_CODEC_PSNR_PKT;
  1925. sse = calc_plane_error(orig->y_buffer, orig->y_stride,
  1926. recon->y_buffer, recon->y_stride,
  1927. width, height,
  1928. IF_RTCD(&cpi->rtcd.variance));
  1929. pkt.data.psnr.sse[0] = sse;
  1930. pkt.data.psnr.sse[1] = sse;
  1931. pkt.data.psnr.samples[0] = width * height;
  1932. pkt.data.psnr.samples[1] = width * height;
  1933. width = (width + 1) / 2;
  1934. height = (height + 1) / 2;
  1935. sse = calc_plane_error(orig->u_buffer, orig->uv_stride,
  1936. recon->u_buffer, recon->uv_stride,
  1937. width, height,
  1938. IF_RTCD(&cpi->rtcd.variance));
  1939. pkt.data.psnr.sse[0] += sse;
  1940. pkt.data.psnr.sse[2] = sse;
  1941. pkt.data.psnr.samples[0] += width * height;
  1942. pkt.data.psnr.samples[2] = width * height;
  1943. sse = calc_plane_error(orig->v_buffer, orig->uv_stride,
  1944. recon->v_buffer, recon->uv_stride,
  1945. width, height,
  1946. IF_RTCD(&cpi->rtcd.variance));
  1947. pkt.data.psnr.sse[0] += sse;
  1948. pkt.data.psnr.sse[3] = sse;
  1949. pkt.data.psnr.samples[0] += width * height;
  1950. pkt.data.psnr.samples[3] = width * height;
  1951. for (i = 0; i < 4; i++)
  1952. pkt.data.psnr.psnr[i] = vp8_mse2psnr(pkt.data.psnr.samples[i], 255.0,
  1953. pkt.data.psnr.sse[i]);
  1954. vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
  1955. }
  1956. int vp8_use_as_reference(VP8_PTR ptr, int ref_frame_flags)
  1957. {
  1958. VP8_COMP *cpi = (VP8_COMP *)(ptr);
  1959. if (ref_frame_flags > 7)
  1960. return -1 ;
  1961. cpi->ref_frame_flags = ref_frame_flags;
  1962. return 0;
  1963. }
  1964. int vp8_update_reference(VP8_PTR ptr, int ref_frame_flags)
  1965. {
  1966. VP8_COMP *cpi = (VP8_COMP *)(ptr);
  1967. if (ref_frame_flags > 7)
  1968. return -1 ;
  1969. cpi->common.refresh_golden_frame = 0;
  1970. cpi->common.refresh_alt_ref_frame = 0;
  1971. cpi->common.refresh_last_frame = 0;
  1972. if (ref_frame_flags & VP8_LAST_FLAG)
  1973. cpi->common.refresh_last_frame = 1;
  1974. if (ref_frame_flags & VP8_GOLD_FLAG)
  1975. cpi->common.refresh_golden_frame = 1;
  1976. if (ref_frame_flags & VP8_ALT_FLAG)
  1977. cpi->common.refresh_alt_ref_frame = 1;
  1978. return 0;
  1979. }
  1980. int vp8_get_reference(VP8_PTR ptr, VP8_REFFRAME ref_frame_flag, YV12_BUFFER_CONFIG *sd)
  1981. {
  1982. VP8_COMP *cpi = (VP8_COMP *)(ptr);
  1983. VP8_COMMON *cm = &cpi->common;
  1984. int ref_fb_idx;
  1985. if (ref_frame_flag == VP8_LAST_FLAG)
  1986. ref_fb_idx = cm->lst_fb_idx;
  1987. else if (ref_frame_flag == VP8_GOLD_FLAG)
  1988. ref_fb_idx = cm->gld_fb_idx;
  1989. else if (ref_frame_flag == VP8_ALT_FLAG)
  1990. ref_fb_idx = cm->alt_fb_idx;
  1991. else
  1992. return -1;
  1993. vp8_yv12_copy_frame_ptr(&cm->yv12_fb[ref_fb_idx], sd);
  1994. return 0;
  1995. }
  1996. int vp8_set_reference(VP8_PTR ptr, VP8_REFFRAME ref_frame_flag, YV12_BUFFER_CONFIG *sd)
  1997. {
  1998. VP8_COMP *cpi = (VP8_COMP *)(ptr);
  1999. VP8_COMMON *cm = &cpi->common;
  2000. int ref_fb_idx;
  2001. if (ref_frame_flag == VP8_LAST_FLAG)
  2002. ref_fb_idx = cm->lst_fb_idx;
  2003. else if (ref_frame_flag == VP8_GOLD_FLAG)
  2004. ref_fb_idx = cm->gld_fb_idx;
  2005. else if (ref_frame_flag == VP8_ALT_FLAG)
  2006. ref_fb_idx = cm->alt_fb_idx;
  2007. else
  2008. return -1;
  2009. vp8_yv12_copy_frame_ptr(sd, &cm->yv12_fb[ref_fb_idx]);
  2010. return 0;
  2011. }
  2012. int vp8_update_entropy(VP8_PTR comp, int update)
  2013. {
  2014. VP8_COMP *cpi = (VP8_COMP *) comp;
  2015. VP8_COMMON *cm = &cpi->common;
  2016. cm->refresh_entropy_probs = update;
  2017. return 0;
  2018. }
  2019. #if OUTPUT_YUV_SRC
  2020. void vp8_write_yuv_frame(const char *name, YV12_BUFFER_CONFIG *s)
  2021. {
  2022. FILE *yuv_file = fopen(name, "ab");
  2023. unsigned char *src = s->y_buffer;
  2024. int h = s->y_height;
  2025. do
  2026. {
  2027. fwrite(src, s->y_width, 1, yuv_file);
  2028. src += s->y_stride;
  2029. }
  2030. while (--h);
  2031. src = s->u_buffer;
  2032. h = s->uv_height;
  2033. do
  2034. {
  2035. fwrite(src, s->uv_width, 1, yuv_file);
  2036. src += s->uv_stride;
  2037. }
  2038. while (--h);
  2039. src = s->v_buffer;
  2040. h = s->uv_height;
  2041. do
  2042. {
  2043. fwrite(src, s->uv_width, 1, yuv_file);
  2044. src += s->uv_stride;
  2045. }
  2046. while (--h);
  2047. fclose(yuv_file);
  2048. }
  2049. #endif
  2050. static void scale_and_extend_source(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi)
  2051. {
  2052. VP8_COMMON *cm = &cpi->common;
  2053. // are we resizing the image
  2054. if (cm->horiz_scale != 0 || cm->vert_scale != 0)
  2055. {
  2056. #if CONFIG_SPATIAL_RESAMPLING
  2057. int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
  2058. int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
  2059. int tmp_height;
  2060. if (cm->vert_scale == 3)
  2061. tmp_height = 9;
  2062. else
  2063. tmp_height = 11;
  2064. Scale2Ratio(cm->horiz_scale, &hr, &hs);
  2065. Scale2Ratio(cm->vert_scale, &vr, &vs);
  2066. vp8_scale_frame(sd, &cpi->scaled_source, cm->temp_scale_frame.y_buffer,
  2067. tmp_height, hs, hr, vs, vr, 0);
  2068. vp8_yv12_extend_frame_borders(&cpi->scaled_source);
  2069. cpi->Source = &cpi->scaled_source;
  2070. #endif
  2071. }
  2072. else
  2073. cpi->Source = sd;
  2074. }
  2075. static void resize_key_frame(VP8_COMP *cpi)
  2076. {
  2077. #if CONFIG_SPATIAL_RESAMPLING
  2078. VP8_COMMON *cm = &cpi->common;
  2079. // Do we need to apply resampling for one pass CBR?
  2080. // In one pass this is more limited than in two pass CBR.
  2081. // The test and any change is only made once per key frame sequence.
  2082. if (cpi->oxcf.allow_spatial_resampling && (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER))
  2083. {
  2084. int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
  2085. int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
  2086. int new_width, new_height;
  2087. // If we are below the resample DOWN watermark then scale down a notch.
  2088. if (cpi->buffer_level < (cpi->oxcf.resample_down_water_mark * cpi->oxcf.optimal_buffer_level / 100))
  2089. {
  2090. cm->horiz_scale = (cm->horiz_scale < ONETWO) ? cm->horiz_scale + 1 : ONETWO;
  2091. cm->vert_scale = (cm->vert_scale < ONETWO) ? cm->vert_scale + 1 : ONETWO;
  2092. }
  2093. // Should we now start scaling back up
  2094. else if (cpi->buffer_level > (cpi->oxcf.resample_up_water_mark * cpi->oxcf.optimal_buffer_level / 100))
  2095. {
  2096. cm->horiz_scale = (cm->horiz_scale > NORMAL) ? cm->horiz_scale - 1 : NORMAL;
  2097. cm->vert_scale = (cm->vert_scale > NORMAL) ? cm->vert_scale - 1 : NORMAL;
  2098. }
  2099. // Get the new height and width
  2100. Scale2Ratio(cm->horiz_scale, &hr, &hs);
  2101. Scale2Ratio(cm->vert_scale, &vr, &vs);
  2102. new_width = ((hs - 1) + (cpi->oxcf.Width * hr)) / hs;
  2103. new_height = ((vs - 1) + (cpi->oxcf.Height * vr)) / vs;
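// The expressions above round the scaled dimension up: new_width = ceil(Width * hr / hs).
// For illustration only (hypothetical ratio): hr = 4, hs = 5 and Width = 640 gives
// new_width = (4 + 640 * 4) / 5 = 512.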
  2104. // If the image size has changed we need to reallocate the buffers
  2105. // and resample the source image
  2106. if ((cm->Width != new_width) || (cm->Height != new_height))
  2107. {
  2108. cm->Width = new_width;
  2109. cm->Height = new_height;
  2110. vp8_alloc_compressor_data(cpi);
  2111. scale_and_extend_source(cpi->un_scaled_source, cpi);
  2112. }
  2113. }
  2114. #endif
  2115. }
  2116. static void update_alt_ref_frame_stats(VP8_COMP *cpi)
  2117. {
  2118. VP8_COMMON *cm = &cpi->common;
  2119. // Select an interval before next GF or altref
  2120. if (!cpi->auto_gold)
  2121. cpi->frames_till_gf_update_due = cpi->goldfreq;
  2122. if ((cpi->pass != 2) && cpi->frames_till_gf_update_due)
  2123. {
  2124. cpi->current_gf_interval = cpi->frames_till_gf_update_due;
  2125. // Set the bits per frame that we should try and recover in subsequent inter frames
  2126. // to account for the extra GF spend... note that this does not apply for GF updates
  2127. // that occur coincident with a key frame as the extra cost of key frames is dealt
  2128. // with elsewhere.
  2129. cpi->gf_overspend_bits += cpi->projected_frame_size;
  2130. cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
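// For illustration: if the accumulated ARF overspend is 24000 bits and the next GF
// update is 12 frames away, each intervening inter frame is asked to recover 2000 bits.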
  2131. }
  2132. // Update data structure that monitors level of reference to last GF
  2133. vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
  2134. cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
  2135. // This frame is a refresh; following frames are not unless explicitly specified by the user
  2136. cpi->common.frames_since_golden = 0;
  2137. // Clear the alternate reference update pending flag.
  2138. cpi->source_alt_ref_pending = FALSE;
  2139. // Set the alternate reference frame active flag
  2140. cpi->source_alt_ref_active = TRUE;
  2141. }
  2142. static void update_golden_frame_stats(VP8_COMP *cpi)
  2143. {
  2144. VP8_COMMON *cm = &cpi->common;
  2145. // Update the Golden frame usage counts.
  2146. if (cm->refresh_golden_frame)
  2147. {
  2148. // Select an interval before next GF
  2149. if (!cpi->auto_gold)
  2150. cpi->frames_till_gf_update_due = cpi->goldfreq;
  2151. if ((cpi->pass != 2) && (cpi->frames_till_gf_update_due > 0))
  2152. {
  2153. cpi->current_gf_interval = cpi->frames_till_gf_update_due;
  2154. // Set the bits per frame that we should try and recover in subsequent inter frames
  2155. // to account for the extra GF spend... note that this does not apply for GF updates
  2156. // that occur coincident with a key frame as the extra cost of key frames is dealt
  2157. // with elsewhere.
  2158. if ((cm->frame_type != KEY_FRAME) && !cpi->source_alt_ref_active)
  2159. {
  2160. // Calculate GF bits to be recovered:
  2161. // projected size minus the average per-frame bits available for inter frames over the clip as a whole
  2162. cpi->gf_overspend_bits += (cpi->projected_frame_size - cpi->inter_frame_target);
  2163. }
  2164. cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
  2165. }
  2166. // Update data structure that monitors level of reference to last GF
  2167. vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
  2168. cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
  2169. // This frame is a refresh; following frames are not unless explicitly specified by the user
  2170. cm->refresh_golden_frame = 0;
  2171. cpi->common.frames_since_golden = 0;
  2172. //if ( cm->frame_type == KEY_FRAME )
  2173. //{
  2174. cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
  2175. cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
  2176. cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
  2177. cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
  2178. //}
  2179. //else
  2180. //{
  2181. // // Carry a portion of the count over to the beginning of the next gf sequence
  2182. // cpi->recent_ref_frame_usage[INTRA_FRAME] >>= 5;
  2183. // cpi->recent_ref_frame_usage[LAST_FRAME] >>= 5;
  2184. // cpi->recent_ref_frame_usage[GOLDEN_FRAME] >>= 5;
  2185. // cpi->recent_ref_frame_usage[ALTREF_FRAME] >>= 5;
  2186. //}
  2187. // ******** Fixed Q test code only ************
  2188. // If we are going to use the ALT reference for the next group of frames set a flag to say so.
  2189. if (cpi->oxcf.fixed_q >= 0 &&
  2190. cpi->oxcf.play_alternate && !cpi->common.refresh_alt_ref_frame)
  2191. {
  2192. cpi->source_alt_ref_pending = TRUE;
  2193. cpi->frames_till_gf_update_due = cpi->baseline_gf_interval;
  2194. }
  2195. if (!cpi->source_alt_ref_pending)
  2196. cpi->source_alt_ref_active = FALSE;
  2197. // Decrement count down till next gf
  2198. if (cpi->frames_till_gf_update_due > 0)
  2199. cpi->frames_till_gf_update_due--;
  2200. }
  2201. else if (!cpi->common.refresh_alt_ref_frame)
  2202. {
  2203. // Decrement count down till next gf
  2204. if (cpi->frames_till_gf_update_due > 0)
  2205. cpi->frames_till_gf_update_due--;
  2206. if (cpi->common.frames_till_alt_ref_frame)
  2207. cpi->common.frames_till_alt_ref_frame --;
  2208. cpi->common.frames_since_golden ++;
  2209. if (cpi->common.frames_since_golden > 1)
  2210. {
  2211. cpi->recent_ref_frame_usage[INTRA_FRAME] += cpi->count_mb_ref_frame_usage[INTRA_FRAME];
  2212. cpi->recent_ref_frame_usage[LAST_FRAME] += cpi->count_mb_ref_frame_usage[LAST_FRAME];
  2213. cpi->recent_ref_frame_usage[GOLDEN_FRAME] += cpi->count_mb_ref_frame_usage[GOLDEN_FRAME];
  2214. cpi->recent_ref_frame_usage[ALTREF_FRAME] += cpi->count_mb_ref_frame_usage[ALTREF_FRAME];
  2215. }
  2216. }
  2217. }
  2218. // This function updates the reference frame probability estimates that
  2219. // will be used during mode selection
  2220. static void update_rd_ref_frame_probs(VP8_COMP *cpi)
  2221. {
  2222. VP8_COMMON *cm = &cpi->common;
  2223. #if 0
  2224. const int *const rfct = cpi->recent_ref_frame_usage;
  2225. const int rf_intra = rfct[INTRA_FRAME];
  2226. const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
  2227. if (cm->frame_type == KEY_FRAME)
  2228. {
  2229. cpi->prob_intra_coded = 255;
  2230. cpi->prob_last_coded = 128;
  2231. cpi->prob_gf_coded = 128;
  2232. }
  2233. else if (!(rf_intra + rf_inter))
  2234. {
  2235. // This is a trap in case this function is called with cpi->recent_ref_frame_usage[] blank.
  2236. cpi->prob_intra_coded = 63;
  2237. cpi->prob_last_coded = 128;
  2238. cpi->prob_gf_coded = 128;
  2239. }
  2240. else
  2241. {
  2242. cpi->prob_intra_coded = (rf_intra * 255) / (rf_intra + rf_inter);
  2243. if (cpi->prob_intra_coded < 1)
  2244. cpi->prob_intra_coded = 1;
  2245. if ((cm->frames_since_golden > 0) || cpi->source_alt_ref_active)
  2246. {
  2247. cpi->prob_last_coded = rf_inter ? (rfct[LAST_FRAME] * 255) / rf_inter : 128;
  2248. if (cpi->prob_last_coded < 1)
  2249. cpi->prob_last_coded = 1;
  2250. cpi->prob_gf_coded = (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME])
  2251. ? (rfct[GOLDEN_FRAME] * 255) / (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME]) : 128;
  2252. if (cpi->prob_gf_coded < 1)
  2253. cpi->prob_gf_coded = 1;
  2254. }
  2255. }
  2256. #else
  2257. const int *const rfct = cpi->count_mb_ref_frame_usage;
  2258. const int rf_intra = rfct[INTRA_FRAME];
  2259. const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
  2260. if (cm->frame_type == KEY_FRAME)
  2261. {
  2262. cpi->prob_intra_coded = 255;
  2263. cpi->prob_last_coded = 128;
  2264. cpi->prob_gf_coded = 128;
  2265. }
  2266. else if (!(rf_intra + rf_inter))
  2267. {
  2268. // This is a trap in case this function is called with cpi->count_mb_ref_frame_usage[] blank.
  2269. cpi->prob_intra_coded = 63;
  2270. cpi->prob_last_coded = 128;
  2271. cpi->prob_gf_coded = 128;
  2272. }
  2273. else
  2274. {
  2275. cpi->prob_intra_coded = (rf_intra * 255) / (rf_intra + rf_inter);
  2276. if (cpi->prob_intra_coded < 1)
  2277. cpi->prob_intra_coded = 1;
  2278. cpi->prob_last_coded = rf_inter ? (rfct[LAST_FRAME] * 255) / rf_inter : 128;
  2279. if (cpi->prob_last_coded < 1)
  2280. cpi->prob_last_coded = 1;
  2281. cpi->prob_gf_coded = (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME])
  2282. ? (rfct[GOLDEN_FRAME] * 255) / (rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME]) : 128;
  2283. if (cpi->prob_gf_coded < 1)
  2284. cpi->prob_gf_coded = 1;
  2285. }
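// For illustration: if 30 of 300 macroblocks were intra coded, prob_intra_coded =
// 30 * 255 / 300 = 25; if 180 of the remaining 270 referenced the last frame,
// prob_last_coded = 180 * 255 / 270 = 170.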
  2286. // update reference frame costs since we can do better than what we got last frame.
  2287. if (cpi->common.refresh_alt_ref_frame)
  2288. {
  2289. cpi->prob_intra_coded += 40;
  2290. cpi->prob_last_coded = 200;
  2291. cpi->prob_gf_coded = 1;
  2292. }
  2293. else if (cpi->common.frames_since_golden == 0)
  2294. {
  2295. cpi->prob_last_coded = 214;
  2296. cpi->prob_gf_coded = 1;
  2297. }
  2298. else if (cpi->common.frames_since_golden == 1)
  2299. {
  2300. cpi->prob_last_coded = 192;
  2301. cpi->prob_gf_coded = 220;
  2302. }
  2303. else if (cpi->source_alt_ref_active)
  2304. {
  2305. //int dist = cpi->common.frames_till_alt_ref_frame + cpi->common.frames_since_golden;
  2306. cpi->prob_gf_coded -= 20;
  2307. if (cpi->prob_gf_coded < 10)
  2308. cpi->prob_gf_coded = 10;
  2309. }
  2310. #endif
  2311. }
  2312. // 1 = key, 0 = inter
  2313. static int decide_key_frame(VP8_COMP *cpi)
  2314. {
  2315. VP8_COMMON *cm = &cpi->common;
  2316. int code_key_frame = FALSE;
  2317. cpi->kf_boost = 0;
  2318. if (cpi->Speed > 11)
  2319. return FALSE;
  2320. // Clear down mmx registers
  2321. vp8_clear_system_state(); //__asm emms;
  2322. if ((cpi->compressor_speed == 2) && (cpi->Speed >= 5) && (cpi->sf.RD == 0))
  2323. {
  2324. double change = 1.0 * abs((int)(cpi->intra_error - cpi->last_intra_error)) / (1 + cpi->last_intra_error);
  2325. double change2 = 1.0 * abs((int)(cpi->prediction_error - cpi->last_prediction_error)) / (1 + cpi->last_prediction_error);
  2326. double minerror = cm->MBs * 256;
  2327. #if 0
  2328. if (10 * cpi->intra_error / (1 + cpi->prediction_error) < 15
  2329. && cpi->prediction_error > minerror
  2330. && (change > .25 || change2 > .25))
  2331. {
  2332. FILE *f = fopen("intra_inter.stt", "a");
  2333. if (cpi->prediction_error <= 0)
  2334. cpi->prediction_error = 1;
  2335. fprintf(f, "%d %d %d %d %14.4f\n",
  2336. cm->current_video_frame,
  2337. (int) cpi->prediction_error,
  2338. (int) cpi->intra_error,
  2339. (int)((10 * cpi->intra_error) / cpi->prediction_error),
  2340. change);
  2341. fclose(f);
  2342. }
  2343. #endif
  2344. cpi->last_intra_error = cpi->intra_error;
  2345. cpi->last_prediction_error = cpi->prediction_error;
  2346. if (10 * cpi->intra_error / (1 + cpi->prediction_error) < 15
  2347. && cpi->prediction_error > minerror
  2348. && (change > .25 || change2 > .25))
  2349. {
  2350. /*(change > 1.4 || change < .75)&& cpi->this_frame_percent_intra > cpi->last_frame_percent_intra + 3*/
  2351. return TRUE;
  2352. }
  2353. return FALSE;
  2354. }
  2355. // If the following are true we might as well code a key frame
  2356. if (((cpi->this_frame_percent_intra == 100) &&
  2357. (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 2))) ||
  2358. ((cpi->this_frame_percent_intra > 95) &&
  2359. (cpi->this_frame_percent_intra >= (cpi->last_frame_percent_intra + 5))))
  2360. {
  2361. code_key_frame = TRUE;
  2362. }
  2363. // In addition, if the following are true and this is not a golden frame then code a key frame.
  2364. // Note that on golden frames there often seems to be a pop in intra usage anyway, hence this
  2365. // restriction is designed to prevent spurious key frames. The intra pop needs to be investigated.
  2366. else if (((cpi->this_frame_percent_intra > 60) &&
  2367. (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 2))) ||
  2368. ((cpi->this_frame_percent_intra > 75) &&
  2369. (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 3 / 2))) ||
  2370. ((cpi->this_frame_percent_intra > 90) &&
  2371. (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 10))))
  2372. {
  2373. if (!cm->refresh_golden_frame)
  2374. code_key_frame = TRUE;
  2375. }
  2376. return code_key_frame;
  2377. }
  2378. #if !(CONFIG_REALTIME_ONLY)
  2379. static void Pass1Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned int *frame_flags)
  2380. {
  2381. (void) size;
  2382. (void) dest;
  2383. (void) frame_flags;
  2384. vp8_set_quantizer(cpi, 26);
  2385. scale_and_extend_source(cpi->un_scaled_source, cpi);
  2386. vp8_first_pass(cpi);
  2387. }
  2388. #endif
  2389. #if 0
  2390. void write_cx_frame_to_file(YV12_BUFFER_CONFIG *frame, int this_frame)
  2391. {
  2392. // write the frame
  2393. FILE *yframe;
  2394. int i;
  2395. char filename[255];
  2396. sprintf(filename, "cx\\y%04d.raw", this_frame);
  2397. yframe = fopen(filename, "wb");
  2398. for (i = 0; i < frame->y_height; i++)
  2399. fwrite(frame->y_buffer + i * frame->y_stride, frame->y_width, 1, yframe);
  2400. fclose(yframe);
  2401. sprintf(filename, "cx\\u%04d.raw", this_frame);
  2402. yframe = fopen(filename, "wb");
  2403. for (i = 0; i < frame->uv_height; i++)
  2404. fwrite(frame->u_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
  2405. fclose(yframe);
  2406. sprintf(filename, "cx\\v%04d.raw", this_frame);
  2407. yframe = fopen(filename, "wb");
  2408. for (i = 0; i < frame->uv_height; i++)
  2409. fwrite(frame->v_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
  2410. fclose(yframe);
  2411. }
  2412. #endif
  2413. // return of 0 means drop frame
  2414. // Function to test for conditions that indicate we should loop
  2415. // back and recode a frame.
  2416. static BOOL recode_loop_test( VP8_COMP *cpi,
  2417. int high_limit, int low_limit,
  2418. int q, int maxq, int minq )
  2419. {
  2420. BOOL force_recode = FALSE;
  2421. VP8_COMMON *cm = &cpi->common;
  2422. // Is frame recode allowed at all?
  2423. // Yes if either recode mode 1 is selected, or mode 2 is selected
  2424. // and the frame is a key frame, golden frame or alt_ref_frame.
  2425. if ( (cpi->sf.recode_loop == 1) ||
  2426. ( (cpi->sf.recode_loop == 2) &&
  2427. ( (cm->frame_type == KEY_FRAME) ||
  2428. cm->refresh_golden_frame ||
  2429. cm->refresh_alt_ref_frame ) ) )
  2430. {
  2431. // General over and under shoot tests
  2432. if ( ((cpi->projected_frame_size > high_limit) && (q < maxq)) ||
  2433. ((cpi->projected_frame_size < low_limit) && (q > minq)) )
  2434. {
  2435. force_recode = TRUE;
  2436. }
  2437. // Special Constrained quality tests
  2438. else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
  2439. {
  2440. // Undershoot and below auto cq level
  2441. if ( (q > cpi->cq_target_quality) &&
  2442. (cpi->projected_frame_size <
  2443. ((cpi->this_frame_target * 7) >> 3)))
  2444. {
  2445. force_recode = TRUE;
  2446. }
  2447. // Severe undershoot and between auto and user cq level
  2448. else if ( (q > cpi->oxcf.cq_level) &&
  2449. (cpi->projected_frame_size < cpi->min_frame_bandwidth) &&
  2450. (cpi->active_best_quality > cpi->oxcf.cq_level))
  2451. {
  2452. force_recode = TRUE;
  2453. cpi->active_best_quality = cpi->oxcf.cq_level;
  2454. }
  2455. }
  2456. }
  2457. return force_recode;
  2458. }
  2459. void update_reference_frames(VP8_COMMON *cm)
  2460. {
  2461. YV12_BUFFER_CONFIG *yv12_fb = cm->yv12_fb;
  2462. // At this point the new frame has been encoded.
  2463. // If any buffer copy / swapping is signaled it should be done here.
  2464. if (cm->frame_type == KEY_FRAME)
  2465. {
  2466. yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FLAG | VP8_ALT_FLAG ;
  2467. yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FLAG;
  2468. yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALT_FLAG;
  2469. cm->alt_fb_idx = cm->gld_fb_idx = cm->new_fb_idx;
  2470. }
  2471. else /* For non key frames */
  2472. {
  2473. if (cm->refresh_alt_ref_frame)
  2474. {
  2475. assert(!cm->copy_buffer_to_arf);
  2476. cm->yv12_fb[cm->new_fb_idx].flags |= VP8_ALT_FLAG;
  2477. cm->yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALT_FLAG;
  2478. cm->alt_fb_idx = cm->new_fb_idx;
  2479. }
  2480. else if (cm->copy_buffer_to_arf)
  2481. {
  2482. assert(!(cm->copy_buffer_to_arf & ~0x3));
  2483. if (cm->copy_buffer_to_arf == 1)
  2484. {
  2485. if(cm->alt_fb_idx != cm->lst_fb_idx)
  2486. {
  2487. yv12_fb[cm->lst_fb_idx].flags |= VP8_ALT_FLAG;
  2488. yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALT_FLAG;
  2489. cm->alt_fb_idx = cm->lst_fb_idx;
  2490. }
  2491. }
  2492. else /* if (cm->copy_buffer_to_arf == 2) */
  2493. {
  2494. if(cm->alt_fb_idx != cm->gld_fb_idx)
  2495. {
  2496. yv12_fb[cm->gld_fb_idx].flags |= VP8_ALT_FLAG;
  2497. yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALT_FLAG;
  2498. cm->alt_fb_idx = cm->gld_fb_idx;
  2499. }
  2500. }
  2501. }
  2502. if (cm->refresh_golden_frame)
  2503. {
  2504. assert(!cm->copy_buffer_to_gf);
  2505. cm->yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FLAG;
  2506. cm->yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FLAG;
  2507. cm->gld_fb_idx = cm->new_fb_idx;
  2508. }
  2509. else if (cm->copy_buffer_to_gf)
  2510. {
  2511. assert(!(cm->copy_buffer_to_gf & ~0x3));
  2512. if (cm->copy_buffer_to_gf == 1)
  2513. {
  2514. if(cm->gld_fb_idx != cm->lst_fb_idx)
  2515. {
  2516. yv12_fb[cm->lst_fb_idx].flags |= VP8_GOLD_FLAG;
  2517. yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FLAG;
  2518. cm->gld_fb_idx = cm->lst_fb_idx;
  2519. }
  2520. }
  2521. else /* if (cm->copy_buffer_to_gf == 2) */
  2522. {
  2523. if(cm->alt_fb_idx != cm->gld_fb_idx)
  2524. {
  2525. yv12_fb[cm->alt_fb_idx].flags |= VP8_GOLD_FLAG;
  2526. yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FLAG;
  2527. cm->gld_fb_idx = cm->alt_fb_idx;
  2528. }
  2529. }
  2530. }
  2531. }
  2532. if (cm->refresh_last_frame)
  2533. {
  2534. cm->yv12_fb[cm->new_fb_idx].flags |= VP8_LAST_FLAG;
  2535. cm->yv12_fb[cm->lst_fb_idx].flags &= ~VP8_LAST_FLAG;
  2536. cm->lst_fb_idx = cm->new_fb_idx;
  2537. }
  2538. }
  2539. void loopfilter_frame(VP8_COMP *cpi, VP8_COMMON *cm)
  2540. {
  2541. if (cm->no_lpf)
  2542. {
  2543. cm->filter_level = 0;
  2544. }
  2545. else
  2546. {
  2547. struct vpx_usec_timer timer;
  2548. vp8_clear_system_state();
  2549. vpx_usec_timer_start(&timer);
  2550. if (cpi->sf.auto_filter == 0)
  2551. vp8cx_pick_filter_level_fast(cpi->Source, cpi);
  2552. else
  2553. vp8cx_pick_filter_level(cpi->Source, cpi);
  2554. vpx_usec_timer_mark(&timer);
  2555. cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer);
  2556. }
  2557. #if CONFIG_MULTITHREAD
  2558. if (cpi->b_multi_threaded)
  2559. sem_post(&cpi->h_event_end_lpf); /* signal that we have set filter_level */
  2560. #endif
  2561. if (cm->filter_level > 0)
  2562. {
  2563. vp8cx_set_alt_lf_level(cpi, cm->filter_level);
  2564. vp8_loop_filter_frame(cm, &cpi->mb.e_mbd);
  2565. }
  2566. vp8_yv12_extend_frame_borders_ptr(cm->frame_to_show);
  2567. }
  2568. static void encode_frame_to_data_rate
  2569. (
  2570. VP8_COMP *cpi,
  2571. unsigned long *size,
  2572. unsigned char *dest,
  2573. unsigned int *frame_flags
  2574. )
  2575. {
  2576. int Q;
  2577. int frame_over_shoot_limit;
  2578. int frame_under_shoot_limit;
  2579. int Loop = FALSE;
  2580. int loop_count;
  2581. int this_q;
  2582. int last_zbin_oq;
  2583. int q_low;
  2584. int q_high;
  2585. int zbin_oq_high;
  2586. int zbin_oq_low = 0;
  2587. int top_index;
  2588. int bottom_index;
  2589. VP8_COMMON *cm = &cpi->common;
  2590. int active_worst_qchanged = FALSE;
  2591. int overshoot_seen = FALSE;
  2592. int undershoot_seen = FALSE;
  2593. int drop_mark = cpi->oxcf.drop_frames_water_mark * cpi->oxcf.optimal_buffer_level / 100;
  2594. int drop_mark75 = drop_mark * 2 / 3;
  2595. int drop_mark50 = drop_mark / 4;
  2596. int drop_mark25 = drop_mark / 8;
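// Note that despite the names, drop_mark75, drop_mark50 and drop_mark25 sit at 2/3,
// 1/4 and 1/8 of drop_mark. For illustration: optimal_buffer_level = 8000 and
// drop_frames_water_mark = 70 give drop_mark = 5600, drop_mark75 = 3733,
// drop_mark50 = 1400 and drop_mark25 = 700.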
  2597. // Clear down mmx registers to allow floating point in what follows
  2598. vp8_clear_system_state();
  2599. // Test code for segmentation of gf/arf (0,0)
  2600. //segmentation_test_function((VP8_PTR) cpi);
  2601. #if CONFIG_REALTIME_ONLY
  2602. if(cpi->oxcf.auto_key && cm->frame_type != KEY_FRAME)
  2603. {
  2604. if(cpi->force_next_frame_intra)
  2605. {
  2606. cm->frame_type = KEY_FRAME; /* delayed intra frame */
  2607. }
  2608. }
  2609. cpi->force_next_frame_intra = 0;
  2610. #endif
  2611. // For an alt ref frame in 2 pass we skip the call to the second pass function that sets the target bandwidth
  2612. #if !(CONFIG_REALTIME_ONLY)
  2613. if (cpi->pass == 2)
  2614. {
  2615. if (cpi->common.refresh_alt_ref_frame)
  2616. {
  2617. cpi->per_frame_bandwidth = cpi->twopass.gf_bits; // Per frame bit target for the alt ref frame
  2618. cpi->target_bandwidth = cpi->twopass.gf_bits * cpi->output_frame_rate; // per second target bitrate
  2619. }
  2620. }
  2621. else
  2622. #endif
  2623. cpi->per_frame_bandwidth = (int)(cpi->target_bandwidth / cpi->output_frame_rate);
  2624. // By default, turn off buffer to buffer copying
  2625. cm->copy_buffer_to_gf = 0;
  2626. cm->copy_buffer_to_arf = 0;
  2627. // Clear zbin over-quant value and mode boost values.
  2628. cpi->zbin_over_quant = 0;
  2629. cpi->zbin_mode_boost = 0;
  2630. // Enable or disable mode based tweaking of the zbin.
  2631. // For 2 pass this is only used where GF/ARF prediction quality
  2632. // is above a threshold.
  2633. cpi->zbin_mode_boost = 0;
  2634. cpi->zbin_mode_boost_enabled = TRUE;
  2635. if (cpi->pass == 2)
  2636. {
  2637. if ( cpi->gfu_boost <= 400 )
  2638. {
  2639. cpi->zbin_mode_boost_enabled = FALSE;
  2640. }
  2641. }
  2642. // Current default encoder behaviour for the altref sign bias
  2643. if (cpi->source_alt_ref_active)
  2644. cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
  2645. else
  2646. cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 0;
  2647. // Check to see if a key frame is signalled
  2648. // For two pass with auto key frame enabled cm->frame_type may already be set, but not for one pass.
  2649. if ((cm->current_video_frame == 0) ||
  2650. (cm->frame_flags & FRAMEFLAGS_KEY) ||
  2651. (cpi->oxcf.auto_key && (cpi->frames_since_key % cpi->key_frame_frequency == 0)))
  2652. {
  2653. // Key frame from VFW/auto-keyframe/first frame
  2654. cm->frame_type = KEY_FRAME;
  2655. }
  2656. // Set default state for segment and mode based loop filter update flags
  2657. cpi->mb.e_mbd.update_mb_segmentation_map = 0;
  2658. cpi->mb.e_mbd.update_mb_segmentation_data = 0;
  2659. cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
  2660. // Set various flags etc to special state if it is a key frame
  2661. if (cm->frame_type == KEY_FRAME)
  2662. {
  2663. int i;
  2664. // Reset the loop filter deltas and segmentation map
  2665. setup_features(cpi);
  2666. // If segmentation is enabled force a map update for key frames
  2667. if (cpi->mb.e_mbd.segmentation_enabled)
  2668. {
  2669. cpi->mb.e_mbd.update_mb_segmentation_map = 1;
  2670. cpi->mb.e_mbd.update_mb_segmentation_data = 1;
  2671. }
  2672. // The alternate reference frame cannot be active for a key frame
  2673. cpi->source_alt_ref_active = FALSE;
  2674. // Reset the RD threshold multipliers to default of * 1 (128)
  2675. for (i = 0; i < MAX_MODES; i++)
  2676. {
  2677. cpi->rd_thresh_mult[i] = 128;
  2678. }
  2679. }
  2680. // Test code for segmentation
  2681. //if ( (cm->frame_type == KEY_FRAME) || ((cm->current_video_frame % 2) == 0))
  2682. //if ( (cm->current_video_frame % 2) == 0 )
  2683. // enable_segmentation((VP8_PTR)cpi);
  2684. //else
  2685. // disable_segmentation((VP8_PTR)cpi);
  2686. #if 0
  2687. // Experimental code for lagged compress and one pass
  2688. // Initialise one_pass GF frames stats
  2689. // Update stats used for GF selection
  2690. //if ( cpi->pass == 0 )
  2691. {
  2692. cpi->one_pass_frame_index = cm->current_video_frame % MAX_LAG_BUFFERS;
  2693. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frames_so_far = 0;
  2694. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_intra_error = 0.0;
  2695. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_coded_error = 0.0;
  2696. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_inter = 0.0;
  2697. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_motion = 0.0;
  2698. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr = 0.0;
  2699. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr_abs = 0.0;
  2700. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc = 0.0;
  2701. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc_abs = 0.0;
  2702. }
  2703. #endif
  2704. update_rd_ref_frame_probs(cpi);
  2705. if (cpi->drop_frames_allowed)
  2706. {
  2707. // The reset to decimation 0 is only done here for one pass.
  2708. // Once it is set two pass leaves decimation on till the next kf.
  2709. if ((cpi->buffer_level > drop_mark) && (cpi->decimation_factor > 0))
  2710. cpi->decimation_factor --;
  2711. if (cpi->buffer_level > drop_mark75 && cpi->decimation_factor > 0)
  2712. cpi->decimation_factor = 1;
  2713. else if (cpi->buffer_level < drop_mark25 && (cpi->decimation_factor == 2 || cpi->decimation_factor == 3))
  2714. {
  2715. cpi->decimation_factor = 3;
  2716. }
  2717. else if (cpi->buffer_level < drop_mark50 && (cpi->decimation_factor == 1 || cpi->decimation_factor == 2))
  2718. {
  2719. cpi->decimation_factor = 2;
  2720. }
  2721. else if (cpi->buffer_level < drop_mark75 && (cpi->decimation_factor == 0 || cpi->decimation_factor == 1))
  2722. {
  2723. cpi->decimation_factor = 1;
  2724. }
  2725. //vpx_log("Encoder: Decimation Factor: %d \n",cpi->decimation_factor);
  2726. }
  2727. // The following decimates the frame rate according to a regular pattern (i.e. to 1/2 or 2/3 frame rate)
  2728. // This can be used to help prevent buffer under-run in CBR mode. Alternatively it might be desirable in
  2729. // some situations to drop frame rate but throw more bits at each frame.
  2730. //
  2731. // Note that dropping a key frame can be problematic if spatial resampling is also active
  2732. if (cpi->decimation_factor > 0)
  2733. {
  2734. switch (cpi->decimation_factor)
  2735. {
  2736. case 1:
  2737. cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 3 / 2;
  2738. break;
  2739. case 2:
  2740. cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
  2741. break;
  2742. case 3:
  2743. cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
  2744. break;
  2745. }
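// When decimation is active the per-frame bit target is inflated (3/2 of nominal for
// factor 1, 5/4 for factors 2 and 3) so that some of the bits freed by dropped frames
// are spent on the frames that are actually coded.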
  2746. // Note that we should not throw out a key frame (especially when spatial resampling is enabled).
  2747. if ((cm->frame_type == KEY_FRAME)) // && cpi->oxcf.allow_spatial_resampling )
  2748. {
  2749. cpi->decimation_count = cpi->decimation_factor;
  2750. }
  2751. else if (cpi->decimation_count > 0)
  2752. {
  2753. cpi->decimation_count --;
  2754. cpi->bits_off_target += cpi->av_per_frame_bandwidth;
  2755. cm->current_video_frame++;
  2756. cpi->frames_since_key++;
  2757. #if CONFIG_INTERNAL_STATS
  2758. cpi->count ++;
  2759. #endif
  2760. cpi->buffer_level = cpi->bits_off_target;
  2761. return;
  2762. }
  2763. else
  2764. cpi->decimation_count = cpi->decimation_factor;
  2765. }
  2766. // Decide how big to make the frame
  2767. if (!vp8_pick_frame_size(cpi))
  2768. {
  2769. cm->current_video_frame++;
  2770. cpi->frames_since_key++;
  2771. return;
  2772. }
  2773. // Reduce active_worst_allowed_q for CBR if our buffer is getting too full.
  2774. // This has a knock on effect on active best quality as well.
  2775. // For CBR if the buffer reaches its maximum level then we can no longer
  2776. // save up bits for later frames so we might as well use them up
  2777. // on the current frame.
  2778. if ((cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) &&
  2779. (cpi->buffer_level >= cpi->oxcf.optimal_buffer_level) && cpi->buffered_mode)
  2780. {
  2781. int Adjustment = cpi->active_worst_quality / 4; // Max adjustment is 1/4
  2782. if (Adjustment)
  2783. {
  2784. int buff_lvl_step;
  2785. if (cpi->buffer_level < cpi->oxcf.maximum_buffer_size)
  2786. {
  2787. buff_lvl_step = (cpi->oxcf.maximum_buffer_size - cpi->oxcf.optimal_buffer_level) / Adjustment;
  2788. if (buff_lvl_step)
  2789. Adjustment = (cpi->buffer_level - cpi->oxcf.optimal_buffer_level) / buff_lvl_step;
  2790. else
  2791. Adjustment = 0;
  2792. }
  2793. cpi->active_worst_quality -= Adjustment;
  2794. if(cpi->active_worst_quality < cpi->active_best_quality)
  2795. cpi->active_worst_quality = cpi->active_best_quality;
  2796. }
  2797. }
  2798. // Set an active best quality and if necessary active worst quality
  2799. // There is some odd behaviour for one pass here that needs attention.
  2800. if ( (cpi->pass == 2) || (cpi->ni_frames > 150))
  2801. {
  2802. vp8_clear_system_state();
  2803. Q = cpi->active_worst_quality;
  2804. if ( cm->frame_type == KEY_FRAME )
  2805. {
  2806. if ( cpi->pass == 2 )
  2807. {
  2808. if (cpi->gfu_boost > 600)
  2809. cpi->active_best_quality = kf_low_motion_minq[Q];
  2810. else
  2811. cpi->active_best_quality = kf_high_motion_minq[Q];
  2812. // Special case for key frames forced because we have reached
  2813. // the maximum key frame interval. Here force the Q to a range
  2814. // based on the ambient Q to reduce the risk of popping
  2815. if ( cpi->this_key_frame_forced )
  2816. {
  2817. if ( cpi->active_best_quality > cpi->avg_frame_qindex * 7/8)
  2818. cpi->active_best_quality = cpi->avg_frame_qindex * 7/8;
  2819. else if ( cpi->active_best_quality < cpi->avg_frame_qindex >> 2 )
  2820. cpi->active_best_quality = cpi->avg_frame_qindex >> 2;
  2821. }
  2822. }
  2823. // One pass more conservative
  2824. else
  2825. cpi->active_best_quality = kf_high_motion_minq[Q];
  2826. }
  2827. else if (cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame)
  2828. {
  2829. // Use the lower of cpi->active_worst_quality and recent
  2830. // average Q as basis for GF/ARF Q limit unless last frame was
  2831. // a key frame.
  2832. if ( (cpi->frames_since_key > 1) &&
  2833. (cpi->avg_frame_qindex < cpi->active_worst_quality) )
  2834. {
  2835. Q = cpi->avg_frame_qindex;
  2836. if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
  2837. (Q < cpi->oxcf.cq_level) )
  2838. {
  2839. Q = cpi->oxcf.cq_level;
  2840. }
  2841. }
  2842. if ( cpi->pass == 2 )
  2843. {
  2844. if ( cpi->gfu_boost > 1000 )
  2845. cpi->active_best_quality = gf_low_motion_minq[Q];
  2846. else if ( cpi->gfu_boost < 400 )
  2847. cpi->active_best_quality = gf_high_motion_minq[Q];
  2848. else
  2849. cpi->active_best_quality = gf_mid_motion_minq[Q];
  2850. }
  2851. // One pass more conservative
  2852. else
  2853. cpi->active_best_quality = gf_high_motion_minq[Q];
  2854. }
  2855. else
  2856. {
  2857. cpi->active_best_quality = inter_minq[Q];
  2858. // For the constant/constrained quality mode we don't want
  2859. // the quality to rise above the cq level.
  2860. if ((cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
  2861. (cpi->active_best_quality < cpi->cq_target_quality) )
  2862. {
  2863. // If we are strongly undershooting the target rate in the last
  2864. // few frames then use the user-supplied cq value rather than the
  2865. // auto cq value.
  2866. if ( cpi->rolling_actual_bits < cpi->min_frame_bandwidth )
  2867. cpi->active_best_quality = cpi->oxcf.cq_level;
  2868. else
  2869. cpi->active_best_quality = cpi->cq_target_quality;
  2870. }
  2871. }
  2872. // If CBR and the buffer is getting full then it is reasonable to allow
  2873. // higher quality on the frames to prevent bits just going to waste.
  2874. if (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)
  2875. {
  2876. // Note that the use of >= here eliminates the risk of a divide-
  2877. // by-zero error in the else if clause.
  2878. if (cpi->buffer_level >= cpi->oxcf.maximum_buffer_size)
  2879. cpi->active_best_quality = cpi->best_quality;
  2880. else if (cpi->buffer_level > cpi->oxcf.optimal_buffer_level)
  2881. {
  2882. int Fraction = ((cpi->buffer_level - cpi->oxcf.optimal_buffer_level) * 128) / (cpi->oxcf.maximum_buffer_size - cpi->oxcf.optimal_buffer_level);
  2883. int min_qadjustment = ((cpi->active_best_quality - cpi->best_quality) * Fraction) / 128;
  2884. cpi->active_best_quality -= min_qadjustment;
  2885. }
  2886. }
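// Worked example (illustrative numbers only): with optimal_buffer_level = 4000,
// maximum_buffer_size = 6000 and buffer_level = 5000, Fraction = (1000 * 128) / 2000 = 64;
// with active_best_quality = 36 and best_quality = 4, min_qadjustment = (32 * 64) / 128 = 16,
// so active_best_quality falls to 20, i.e. half way towards best_quality for a half-full margin.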
  2887. }
  2888. // Make sure constrained quality mode limits are adhered to for the first
  2889. // few frames of one pass encodes
  2890. else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
  2891. {
  2892. if ( (cm->frame_type == KEY_FRAME) ||
  2893. cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame )
  2894. {
  2895. cpi->active_best_quality = cpi->best_quality;
  2896. }
  2897. else if (cpi->active_best_quality < cpi->cq_target_quality)
  2898. {
  2899. cpi->active_best_quality = cpi->cq_target_quality;
  2900. }
  2901. }
  2902. // Clip the active best and worst quality values to limits
  2903. if (cpi->active_worst_quality > cpi->worst_quality)
  2904. cpi->active_worst_quality = cpi->worst_quality;
  2905. if (cpi->active_best_quality < cpi->best_quality)
  2906. cpi->active_best_quality = cpi->best_quality;
  2907. else if (cpi->active_best_quality > cpi->active_worst_quality)
  2908. cpi->active_best_quality = cpi->active_worst_quality;
  2909. // Determine initial Q to try
  2910. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  2911. last_zbin_oq = cpi->zbin_over_quant;
  2912. // Set highest allowed value for Zbin over quant
  2913. if (cm->frame_type == KEY_FRAME)
  2914. zbin_oq_high = 0; //ZBIN_OQ_MAX/16
  2915. else if (cm->refresh_alt_ref_frame || (cm->refresh_golden_frame && !cpi->source_alt_ref_active))
  2916. zbin_oq_high = 16;
  2917. else
  2918. zbin_oq_high = ZBIN_OQ_MAX;
  2919. // Set up background Q adjustment for error resilient mode
  2920. if (cpi->cyclic_refresh_mode_enabled)
  2921. cyclic_background_refresh(cpi, Q, 0);
  2922. vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
  2923. // Limit Q range for the adaptive loop.
  2924. bottom_index = cpi->active_best_quality;
  2925. top_index = cpi->active_worst_quality;
  2926. q_low = cpi->active_best_quality;
  2927. q_high = cpi->active_worst_quality;
  2928. vp8_save_coding_context(cpi);
  2929. loop_count = 0;
  2930. scale_and_extend_source(cpi->un_scaled_source, cpi);
  2931. #if !(CONFIG_REALTIME_ONLY) && CONFIG_POSTPROC
  2932. if (cpi->oxcf.noise_sensitivity > 0)
  2933. {
  2934. unsigned char *src;
  2935. int l = 0;
  2936. switch (cpi->oxcf.noise_sensitivity)
  2937. {
  2938. case 1:
  2939. l = 20;
  2940. break;
  2941. case 2:
  2942. l = 40;
  2943. break;
  2944. case 3:
  2945. l = 60;
  2946. break;
  2947. case 4:
  2948. l = 80;
  2949. break;
  2950. case 5:
  2951. l = 100;
  2952. break;
  2953. case 6:
  2954. l = 150;
  2955. break;
  2956. }
  2957. if (cm->frame_type == KEY_FRAME)
  2958. {
  2959. vp8_de_noise(cpi->Source, cpi->Source, l , 1, 0, RTCD(postproc));
  2960. }
  2961. else
  2962. {
  2963. vp8_de_noise(cpi->Source, cpi->Source, l , 1, 0, RTCD(postproc));
  2964. src = cpi->Source->y_buffer;
  2965. if (cpi->Source->y_stride < 0)
  2966. {
  2967. src += cpi->Source->y_stride * (cpi->Source->y_height - 1);
  2968. }
  2969. }
  2970. }
  2971. #endif
  2972. #ifdef OUTPUT_YUV_SRC
  2973. vp8_write_yuv_frame(cpi->Source);
  2974. #endif
  2975. do
  2976. {
  2977. vp8_clear_system_state(); //__asm emms;
  2978. /*
  2979. if(cpi->is_src_frame_alt_ref)
  2980. Q = 127;
  2981. */
  2982. vp8_set_quantizer(cpi, Q);
  2983. this_q = Q;
  2984. // set up the skip probability for costing in the mode/mv decision
  2985. if (cpi->common.mb_no_coeff_skip)
  2986. {
  2987. cpi->prob_skip_false = cpi->base_skip_false_prob[Q];
  2988. if (cm->frame_type != KEY_FRAME)
  2989. {
  2990. if (cpi->common.refresh_alt_ref_frame)
  2991. {
  2992. if (cpi->last_skip_false_probs[2] != 0)
  2993. cpi->prob_skip_false = cpi->last_skip_false_probs[2];
  2994. /*
  2995. if(cpi->last_skip_false_probs[2]!=0 && abs(Q- cpi->last_skip_probs_q[2])<=16 )
  2996. cpi->prob_skip_false = cpi->last_skip_false_probs[2];
  2997. else if (cpi->last_skip_false_probs[2]!=0)
  2998. cpi->prob_skip_false = (cpi->last_skip_false_probs[2] + cpi->prob_skip_false ) / 2;
  2999. */
  3000. }
  3001. else if (cpi->common.refresh_golden_frame)
  3002. {
  3003. if (cpi->last_skip_false_probs[1] != 0)
  3004. cpi->prob_skip_false = cpi->last_skip_false_probs[1];
  3005. /*
  3006. if(cpi->last_skip_false_probs[1]!=0 && abs(Q- cpi->last_skip_probs_q[1])<=16 )
  3007. cpi->prob_skip_false = cpi->last_skip_false_probs[1];
  3008. else if (cpi->last_skip_false_probs[1]!=0)
  3009. cpi->prob_skip_false = (cpi->last_skip_false_probs[1] + cpi->prob_skip_false ) / 2;
  3010. */
  3011. }
  3012. else
  3013. {
  3014. if (cpi->last_skip_false_probs[0] != 0)
  3015. cpi->prob_skip_false = cpi->last_skip_false_probs[0];
  3016. /*
  3017. if(cpi->last_skip_false_probs[0]!=0 && abs(Q- cpi->last_skip_probs_q[0])<=16 )
  3018. cpi->prob_skip_false = cpi->last_skip_false_probs[0];
  3019. else if(cpi->last_skip_false_probs[0]!=0)
  3020. cpi->prob_skip_false = (cpi->last_skip_false_probs[0] + cpi->prob_skip_false ) / 2;
  3021. */
  3022. }
  3023. // As this is only a cost estimate, make sure it does not go to extremes either way
  3024. if (cpi->prob_skip_false < 5)
  3025. cpi->prob_skip_false = 5;
  3026. if (cpi->prob_skip_false > 250)
  3027. cpi->prob_skip_false = 250;
  3028. if (cpi->is_src_frame_alt_ref)
  3029. cpi->prob_skip_false = 1;
  3030. }
  3031. #if 0
  3032. if (cpi->pass != 1)
  3033. {
  3034. FILE *f = fopen("skip.stt", "a");
  3035. fprintf(f, "%d, %d, %4d ", cpi->common.refresh_golden_frame, cpi->common.refresh_alt_ref_frame, cpi->prob_skip_false);
  3036. fclose(f);
  3037. }
  3038. #endif
  3039. }
  3040. if (cm->frame_type == KEY_FRAME)
  3041. {
  3042. resize_key_frame(cpi);
  3043. vp8_setup_key_frame(cpi);
  3044. }
  3045. // transform / motion compensation: build the reconstruction frame
  3046. vp8_encode_frame(cpi);
  3047. cpi->projected_frame_size -= vp8_estimate_entropy_savings(cpi);
  3048. cpi->projected_frame_size = (cpi->projected_frame_size > 0) ? cpi->projected_frame_size : 0;
  3049. vp8_clear_system_state(); //__asm emms;
  3050. // Test to see if the stats generated for this frame indicate that we should have coded a key frame
  3051. // (assuming that we didn't)!
  3052. if (cpi->pass != 2 && cpi->oxcf.auto_key && cm->frame_type != KEY_FRAME)
  3053. {
  3054. #if CONFIG_REALTIME_ONLY
  3055. {
  3056. /* we don't do re-encoding in realtime mode
  3057. * if a key frame is decided then we force it on the next frame */
  3058. cpi->force_next_frame_intra = decide_key_frame(cpi);
  3059. }
  3060. #else
  3061. if (decide_key_frame(cpi))
  3062. {
  3063. // Reset all our sizing numbers and recode
  3064. cm->frame_type = KEY_FRAME;
  3065. vp8_pick_frame_size(cpi);
  3066. // Clear the Alt reference frame active flag when we have a key frame
  3067. cpi->source_alt_ref_active = FALSE;
  3068. // Reset the loop filter deltas and segmentation map
  3069. setup_features(cpi);
  3070. // If segmentation is enabled force a map update for key frames
  3071. if (cpi->mb.e_mbd.segmentation_enabled)
  3072. {
  3073. cpi->mb.e_mbd.update_mb_segmentation_map = 1;
  3074. cpi->mb.e_mbd.update_mb_segmentation_data = 1;
  3075. }
  3076. vp8_restore_coding_context(cpi);
  3077. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3078. vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
  3079. // Limit Q range for the adaptive loop.
  3080. bottom_index = cpi->active_best_quality;
  3081. top_index = cpi->active_worst_quality;
  3082. q_low = cpi->active_best_quality;
  3083. q_high = cpi->active_worst_quality;
  3084. loop_count++;
  3085. Loop = TRUE;
  3086. continue;
  3087. }
  3088. #endif
  3089. }
  3090. vp8_clear_system_state();
  3091. if (frame_over_shoot_limit == 0)
  3092. frame_over_shoot_limit = 1;
  3093. // Are we overshooting and up against the limit of active max Q?
  3094. if (((cpi->pass != 2) || (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)) &&
  3095. (Q == cpi->active_worst_quality) &&
  3096. (cpi->active_worst_quality < cpi->worst_quality) &&
  3097. (cpi->projected_frame_size > frame_over_shoot_limit))
  3098. {
  3099. int over_size_percent = ((cpi->projected_frame_size - frame_over_shoot_limit) * 100) / frame_over_shoot_limit;
  3100. // If so, is there any scope for relaxing it?
  3101. while ((cpi->active_worst_quality < cpi->worst_quality) && (over_size_percent > 0))
  3102. {
  3103. cpi->active_worst_quality++;
  3104. top_index = cpi->active_worst_quality;
  3105. over_size_percent = (int)(over_size_percent * 0.96); // Assume 1 qstep = about 4% on frame size.
  3106. }
  3107. // If we have updated the active max Q do not call vp8_update_rate_correction_factors() this loop.
  3108. active_worst_qchanged = TRUE;
  3109. }
  3110. else
  3111. active_worst_qchanged = FALSE;
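// Worked example (illustrative numbers only): a 10% estimated overshoot
// (over_size_percent = 10) relaxes active_worst_quality by up to 10 steps
// (10 -> 9 -> 8 ... -> 0 under the 0.96 scaling), stopping earlier if
// worst_quality is reached.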
  3112. #if !(CONFIG_REALTIME_ONLY)
  3113. // Special case handling for forced key frames
  3114. if ( (cm->frame_type == KEY_FRAME) && cpi->this_key_frame_forced )
  3115. {
  3116. int last_q = Q;
  3117. int kf_err = vp8_calc_ss_err(cpi->Source,
  3118. &cm->yv12_fb[cm->new_fb_idx],
  3119. IF_RTCD(&cpi->rtcd.variance));
  3120. // The key frame is not good enough
  3121. if ( kf_err > ((cpi->ambient_err * 7) >> 3) )
  3122. {
  3123. // Lower q_high
  3124. q_high = (Q > q_low) ? (Q - 1) : q_low;
  3125. // Adjust Q
  3126. Q = (q_high + q_low) >> 1;
  3127. }
  3128. // The key frame is much better than the previous frame
  3129. else if ( kf_err < (cpi->ambient_err >> 1) )
  3130. {
  3131. // Raise q_low
  3132. q_low = (Q < q_high) ? (Q + 1) : q_high;
  3133. // Adjust Q
  3134. Q = (q_high + q_low + 1) >> 1;
  3135. }
  3136. // Clamp Q to upper and lower limits:
  3137. if (Q > q_high)
  3138. Q = q_high;
  3139. else if (Q < q_low)
  3140. Q = q_low;
  3141. Loop = ((Q != last_q)) ? TRUE : FALSE;
  3142. }
  3143. // Is the projected frame size out of range, and are we allowed to attempt a recode?
  3144. else if ( recode_loop_test( cpi,
  3145. frame_over_shoot_limit, frame_under_shoot_limit,
  3146. Q, top_index, bottom_index ) )
  3147. {
  3148. int last_q = Q;
  3149. int Retries = 0;
  3150. // Frame size out of permitted range:
  3151. // Update correction factor & compute new Q to try...
  3152. // Frame is too large
  3153. if (cpi->projected_frame_size > cpi->this_frame_target)
  3154. {
  3155. //if ( cpi->zbin_over_quant == 0 )
  3156. q_low = (Q < q_high) ? (Q + 1) : q_high; // Raise q_low to just above the current Q (capped at q_high)
  3157. if (cpi->zbin_over_quant > 0) // If we are using over quant do the same for zbin_oq_low
  3158. zbin_oq_low = (cpi->zbin_over_quant < zbin_oq_high) ? (cpi->zbin_over_quant + 1) : zbin_oq_high;
  3159. //if ( undershoot_seen || (Q == MAXQ) )
  3160. if (undershoot_seen)
  3161. {
  3162. // Update rate_correction_factor unless cpi->active_worst_quality has changed.
  3163. if (!active_worst_qchanged)
  3164. vp8_update_rate_correction_factors(cpi, 1);
  3165. Q = (q_high + q_low + 1) / 2;
  3166. // Adjust cpi->zbin_over_quant (only allowed when Q is max)
  3167. if (Q < MAXQ)
  3168. cpi->zbin_over_quant = 0;
  3169. else
  3170. {
  3171. zbin_oq_low = (cpi->zbin_over_quant < zbin_oq_high) ? (cpi->zbin_over_quant + 1) : zbin_oq_high;
  3172. cpi->zbin_over_quant = (zbin_oq_high + zbin_oq_low) / 2;
  3173. }
  3174. }
  3175. else
  3176. {
  3177. // Update rate_correction_factor unless cpi->active_worst_quality has changed.
  3178. if (!active_worst_qchanged)
  3179. vp8_update_rate_correction_factors(cpi, 0);
  3180. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3181. while (((Q < q_low) || (cpi->zbin_over_quant < zbin_oq_low)) && (Retries < 10))
  3182. {
  3183. vp8_update_rate_correction_factors(cpi, 0);
  3184. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3185. Retries ++;
  3186. }
  3187. }
  3188. overshoot_seen = TRUE;
  3189. }
  3190. // Frame is too small
  3191. else
  3192. {
  3193. if (cpi->zbin_over_quant == 0)
  3194. q_high = (Q > q_low) ? (Q - 1) : q_low; // Lower q_high if not using over quant
  3195. else // else lower zbin_oq_high
  3196. zbin_oq_high = (cpi->zbin_over_quant > zbin_oq_low) ? (cpi->zbin_over_quant - 1) : zbin_oq_low;
  3197. if (overshoot_seen)
  3198. {
  3199. // Update rate_correction_factor unless cpi->active_worst_quality has changed.
  3200. if (!active_worst_qchanged)
  3201. vp8_update_rate_correction_factors(cpi, 1);
  3202. Q = (q_high + q_low) / 2;
  3203. // Adjust cpi->zbin_over_quant (only allowed when Q is max)
  3204. if (Q < MAXQ)
  3205. cpi->zbin_over_quant = 0;
  3206. else
  3207. cpi->zbin_over_quant = (zbin_oq_high + zbin_oq_low) / 2;
  3208. }
  3209. else
  3210. {
  3211. // Update rate_correction_factor unless cpi->active_worst_quality has changed.
  3212. if (!active_worst_qchanged)
  3213. vp8_update_rate_correction_factors(cpi, 0);
  3214. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3215. // Special case reset of q_low for constrained quality.
  3216. // This should only trigger where there is very substantial
  3217. // undershoot on a frame and the auto cq level is above
  3218. // the user passed in value.
  3219. if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
  3220. (Q < q_low) )
  3221. {
  3222. q_low = Q;
  3223. }
  3224. while (((Q > q_high) || (cpi->zbin_over_quant > zbin_oq_high)) && (Retries < 10))
  3225. {
  3226. vp8_update_rate_correction_factors(cpi, 0);
  3227. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3228. Retries ++;
  3229. }
  3230. }
  3231. undershoot_seen = TRUE;
  3232. }
  3233. // Clamp Q to upper and lower limits:
  3234. if (Q > q_high)
  3235. Q = q_high;
  3236. else if (Q < q_low)
  3237. Q = q_low;
  3238. // Clamp cpi->zbin_over_quant
  3239. cpi->zbin_over_quant = (cpi->zbin_over_quant < zbin_oq_low) ? zbin_oq_low : (cpi->zbin_over_quant > zbin_oq_high) ? zbin_oq_high : cpi->zbin_over_quant;
  3240. //Loop = ((Q != last_q) || (last_zbin_oq != cpi->zbin_over_quant)) ? TRUE : FALSE;
  3241. Loop = ((Q != last_q)) ? TRUE : FALSE;
  3242. last_zbin_oq = cpi->zbin_over_quant;
  3243. }
  3244. else
  3245. #endif
  3246. Loop = FALSE;
  3247. if (cpi->is_src_frame_alt_ref)
  3248. Loop = FALSE;
  3249. if (Loop == TRUE)
  3250. {
  3251. vp8_restore_coding_context(cpi);
  3252. loop_count++;
  3253. #if CONFIG_INTERNAL_STATS
  3254. cpi->tot_recode_hits++;
  3255. #endif
  3256. }
  3257. }
  3258. while (Loop == TRUE);
  3259. #if 0
  3260. // Experimental code for lagged and one pass
  3261. // Update stats used for one pass GF selection
  3262. {
  3263. /*
  3264. int frames_so_far;
  3265. double frame_intra_error;
  3266. double frame_coded_error;
  3267. double frame_pcnt_inter;
  3268. double frame_pcnt_motion;
  3269. double frame_mvr;
  3270. double frame_mvr_abs;
  3271. double frame_mvc;
  3272. double frame_mvc_abs;
  3273. */
  3274. cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_coded_error = (double)cpi->prediction_error;
  3275. cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_intra_error = (double)cpi->intra_error;
  3276. cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_pcnt_inter = (double)(100 - cpi->this_frame_percent_intra) / 100.0;
  3277. }
  3278. #endif
  3279. // Special case code to reduce pulsing when key frames are forced at a
  3280. // fixed interval. Note the reconstruction error if it is the frame before
  3281. // the forced key frame
  3282. if ( cpi->next_key_frame_forced && (cpi->twopass.frames_to_key == 0) )
  3283. {
  3284. cpi->ambient_err = vp8_calc_ss_err(cpi->Source,
  3285. &cm->yv12_fb[cm->new_fb_idx],
  3286. IF_RTCD(&cpi->rtcd.variance));
  3287. }
  3288. // This frame's MVs are saved and will be used in the next frame's MV prediction.
  3289. // The last frame has one more line (added at the bottom) and one more column (added at the right) than cm->mip. The edge elements are initialized to 0.
  3290. if(cm->show_frame) //do not save for altref frame
  3291. {
  3292. int mb_row;
  3293. int mb_col;
  3294. MODE_INFO *tmp = cm->mip; //point to beginning of allocated MODE_INFO arrays.
  3295. if(cm->frame_type != KEY_FRAME)
  3296. {
  3297. for (mb_row = 0; mb_row < cm->mb_rows+1; mb_row ++)
  3298. {
  3299. for (mb_col = 0; mb_col < cm->mb_cols+1; mb_col ++)
  3300. {
  3301. if(tmp->mbmi.ref_frame != INTRA_FRAME)
  3302. cpi->lfmv[mb_col + mb_row*(cm->mode_info_stride+1)].as_int = tmp->mbmi.mv.as_int;
  3303. cpi->lf_ref_frame_sign_bias[mb_col + mb_row*(cm->mode_info_stride+1)] = cm->ref_frame_sign_bias[tmp->mbmi.ref_frame];
  3304. cpi->lf_ref_frame[mb_col + mb_row*(cm->mode_info_stride+1)] = tmp->mbmi.ref_frame;
  3305. tmp++;
  3306. }
  3307. }
  3308. }
  3309. }
  3310. // Update the GF usage maps.
  3311. // This is done after completing the compression of a frame, when all modes etc. are finalized, but before the loop filter.
  3313. vp8_update_gf_useage_maps(cpi, cm, &cpi->mb);
  3314. if (cm->frame_type == KEY_FRAME)
  3315. cm->refresh_last_frame = 1;
  3316. #if 0
  3317. {
  3318. FILE *f = fopen("gfactive.stt", "a");
  3319. fprintf(f, "%8d %8d %8d %8d %8d\n", cm->current_video_frame, (100 * cpi->gf_active_count) / (cpi->common.mb_rows * cpi->common.mb_cols), cpi->this_iiratio, cpi->next_iiratio, cm->refresh_golden_frame);
  3320. fclose(f);
  3321. }
  3322. #endif
  3323. // For inter frames the current default behavior is that when
  3324. // cm->refresh_golden_frame is set we copy the old GF over to the ARF buffer
  3325. // This is purely an encoder decision at present.
  3326. if (!cpi->oxcf.error_resilient_mode && cm->refresh_golden_frame)
  3327. cm->copy_buffer_to_arf = 2;
  3328. else
  3329. cm->copy_buffer_to_arf = 0;
  3330. cm->frame_to_show = &cm->yv12_fb[cm->new_fb_idx];
  3331. #if CONFIG_MULTITHREAD
  3332. if (cpi->b_multi_threaded)
  3333. {
  3334. sem_post(&cpi->h_event_start_lpf); /* start loopfilter in separate thread */
  3335. }
  3336. else
  3337. #endif
  3338. {
  3339. loopfilter_frame(cpi, cm);
  3340. }
  3341. update_reference_frames(cm);
  3342. if (cpi->oxcf.error_resilient_mode)
  3343. {
  3344. cm->refresh_entropy_probs = 0;
  3345. }
  3346. #if CONFIG_MULTITHREAD
  3347. /* wait until the filter_level is picked so that we can continue with stream packing */
  3348. if (cpi->b_multi_threaded)
  3349. sem_wait(&cpi->h_event_end_lpf);
  3350. #endif
  3351. // build the bitstream
  3352. vp8_pack_bitstream(cpi, dest, size);
  3353. #if CONFIG_MULTITHREAD
  3354. /* wait for the loopfilter thread to finish */
  3355. if (cpi->b_multi_threaded)
  3356. {
  3357. sem_wait(&cpi->h_event_end_lpf);
  3358. }
  3359. #endif
  3360. /* Move storing frame_type out of the above loop since it is also
  3361. * needed in motion search besides loopfilter */
  3362. cm->last_frame_type = cm->frame_type;
  3363. // Update rate control heuristics
  3364. cpi->total_byte_count += (*size);
  3365. cpi->projected_frame_size = (*size) << 3;
  3366. if (!active_worst_qchanged)
  3367. vp8_update_rate_correction_factors(cpi, 2);
  3368. cpi->last_q[cm->frame_type] = cm->base_qindex;
  3369. if (cm->frame_type == KEY_FRAME)
  3370. {
  3371. vp8_adjust_key_frame_context(cpi);
  3372. }
  3373. // Keep a record of ambient average Q.
  3374. if (cm->frame_type != KEY_FRAME)
  3375. cpi->avg_frame_qindex = (2 + 3 * cpi->avg_frame_qindex + cm->base_qindex) >> 2;
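// Worked example (illustrative numbers only): with avg_frame_qindex = 40 and
// base_qindex = 60, the new average is (2 + 120 + 60) >> 2 = 45, i.e. the ambient
// Q moves a quarter of the way towards the current frame's Q, with rounding.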
  3376. // Keep a record from which we can calculate the average Q excluding GF updates and key frames
  3377. if ((cm->frame_type != KEY_FRAME) && !cm->refresh_golden_frame && !cm->refresh_alt_ref_frame)
  3378. {
  3379. cpi->ni_frames++;
  3380. // Calculate the average Q for normal inter frames (not key or GFU
  3381. // frames).
  3382. if ( cpi->pass == 2 )
  3383. {
  3384. cpi->ni_tot_qi += Q;
  3385. cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
  3386. }
  3387. else
  3388. {
  3389. // Damp value for first few frames
  3390. if (cpi->ni_frames > 150 )
  3391. {
  3392. cpi->ni_tot_qi += Q;
  3393. cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
  3394. }
  3395. // For one pass, early in the clip ... average the current frame Q
  3396. // value with the worstq entered by the user as a dampening measure
  3397. else
  3398. {
  3399. cpi->ni_tot_qi += Q;
  3400. cpi->ni_av_qi = ((cpi->ni_tot_qi / cpi->ni_frames) + cpi->worst_quality + 1) / 2;
  3401. }
  3402. // If the average Q is higher than what was used in the last frame
  3403. // (after going through the recode loop to keep the frame size within range)
  3404. // then use the last frame value - 1.
  3405. // The -1 is designed to stop Q, and hence the data rate, from progressively
  3406. // falling away during difficult sections, while at the same time reducing the number of
  3407. // iterations around the recode loop.
  3408. if (Q > cpi->ni_av_qi)
  3409. cpi->ni_av_qi = Q - 1;
  3410. }
  3411. }
  3412. #if 0
  3413. // If the frame was massively oversize and we are below optimal buffer level drop next frame
  3414. if ((cpi->drop_frames_allowed) &&
  3415. (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) &&
  3416. (cpi->buffer_level < cpi->oxcf.drop_frames_water_mark * cpi->oxcf.optimal_buffer_level / 100) &&
  3417. (cpi->projected_frame_size > (4 * cpi->this_frame_target)))
  3418. {
  3419. cpi->drop_frame = TRUE;
  3420. }
  3421. #endif
  3422. // Set the count for the maximum consecutive dropped frames based upon the ratio of
  3423. // this frame size to the target average per-frame bandwidth.
  3424. // (cpi->av_per_frame_bandwidth > 0) is just a sanity check to prevent a divide by 0.
  3425. if (cpi->drop_frames_allowed && (cpi->av_per_frame_bandwidth > 0))
  3426. {
  3427. cpi->max_drop_count = cpi->projected_frame_size / cpi->av_per_frame_bandwidth;
  3428. if (cpi->max_drop_count > cpi->max_consec_dropped_frames)
  3429. cpi->max_drop_count = cpi->max_consec_dropped_frames;
  3430. }
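// e.g. (illustrative) a 30000 bit frame against a 10000 bit per-frame budget
// allows at most 3 consecutive drops, subject to the max_consec_dropped_frames cap.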
  3431. // Update the buffer level variable.
  3432. // Non-viewable frames are a special case and are treated as pure overhead.
  3433. if ( !cm->show_frame )
  3434. cpi->bits_off_target -= cpi->projected_frame_size;
  3435. else
  3436. cpi->bits_off_target += cpi->av_per_frame_bandwidth - cpi->projected_frame_size;
  3437. // Rolling monitors of whether we are over- or under-spending, used to help regulate min and max Q in two pass.
  3438. cpi->rolling_target_bits = ((cpi->rolling_target_bits * 3) + cpi->this_frame_target + 2) / 4;
  3439. cpi->rolling_actual_bits = ((cpi->rolling_actual_bits * 3) + cpi->projected_frame_size + 2) / 4;
  3440. cpi->long_rolling_target_bits = ((cpi->long_rolling_target_bits * 31) + cpi->this_frame_target + 16) / 32;
  3441. cpi->long_rolling_actual_bits = ((cpi->long_rolling_actual_bits * 31) + cpi->projected_frame_size + 16) / 32;
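// Worked example (illustrative numbers only): with rolling_actual_bits = 20000 and
// projected_frame_size = 28000, the short monitor becomes (60000 + 28000 + 2) / 4 = 22000
// (3/4 old, 1/4 new); the long monitors weight the history 31/32 and so react much more slowly.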
  3442. // Actual bits spent
  3443. cpi->total_actual_bits += cpi->projected_frame_size;
  3444. // Debug stats
  3445. cpi->total_target_vs_actual += (cpi->this_frame_target - cpi->projected_frame_size);
  3446. cpi->buffer_level = cpi->bits_off_target;
  3447. // Update bits left to the kf and gf groups to account for overshoot or undershoot on these frames
  3448. if (cm->frame_type == KEY_FRAME)
  3449. {
  3450. cpi->twopass.kf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
  3451. if (cpi->twopass.kf_group_bits < 0)
  3452. cpi->twopass.kf_group_bits = 0 ;
  3453. }
  3454. else if (cm->refresh_golden_frame || cm->refresh_alt_ref_frame)
  3455. {
  3456. cpi->twopass.gf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
  3457. if (cpi->twopass.gf_group_bits < 0)
  3458. cpi->twopass.gf_group_bits = 0 ;
  3459. }
  3460. if (cm->frame_type != KEY_FRAME)
  3461. {
  3462. if (cpi->common.refresh_alt_ref_frame)
  3463. {
  3464. cpi->last_skip_false_probs[2] = cpi->prob_skip_false;
  3465. cpi->last_skip_probs_q[2] = cm->base_qindex;
  3466. }
  3467. else if (cpi->common.refresh_golden_frame)
  3468. {
  3469. cpi->last_skip_false_probs[1] = cpi->prob_skip_false;
  3470. cpi->last_skip_probs_q[1] = cm->base_qindex;
  3471. }
  3472. else
  3473. {
  3474. cpi->last_skip_false_probs[0] = cpi->prob_skip_false;
  3475. cpi->last_skip_probs_q[0] = cm->base_qindex;
  3476. //update the baseline
  3477. cpi->base_skip_false_prob[cm->base_qindex] = cpi->prob_skip_false;
  3478. }
  3479. }
  3480. #if 0 && CONFIG_INTERNAL_STATS
  3481. {
  3482. FILE *f = fopen("tmp.stt", "a");
  3483. vp8_clear_system_state(); //__asm emms;
  3484. if (cpi->twopass.total_coded_error_left != 0.0)
  3485. fprintf(f, "%10d %10d %10d %10d %10d %10d %10d %10d %6d %6d"
  3486. "%6d %6d %6d %5d %5d %5d %8d %8.2f %10d %10.3f"
  3487. "%10.3f %8d\n",
  3488. cpi->common.current_video_frame, cpi->this_frame_target,
  3489. cpi->projected_frame_size,
  3490. (cpi->projected_frame_size - cpi->this_frame_target),
  3491. (int)cpi->total_target_vs_actual,
  3492. (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
  3493. (int)cpi->total_actual_bits, cm->base_qindex,
  3494. cpi->active_best_quality, cpi->active_worst_quality,
  3495. cpi->ni_av_qi, cpi->cq_target_quality, cpi->zbin_over_quant,
  3496. //cpi->avg_frame_qindex, cpi->zbin_over_quant,
  3497. cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
  3498. cm->frame_type, cpi->gfu_boost,
  3499. cpi->twopass.est_max_qcorrection_factor, (int)cpi->twopass.bits_left,
  3500. cpi->twopass.total_coded_error_left,
  3501. (double)cpi->twopass.bits_left / cpi->twopass.total_coded_error_left,
  3502. cpi->tot_recode_hits);
  3503. else
  3504. fprintf(f, "%10d %10d %10d %10d %10d %10d %10d %10d %6d %6d"
  3505. "%6d %6d %6d %5d %5d %5d %8d %8.2f %10d %10.3f"
  3506. "%8d\n",
  3507. cpi->common.current_video_frame,
  3508. cpi->this_frame_target, cpi->projected_frame_size,
  3509. (cpi->projected_frame_size - cpi->this_frame_target),
  3510. (int)cpi->total_target_vs_actual,
  3511. (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
  3512. (int)cpi->total_actual_bits, cm->base_qindex,
  3513. cpi->active_best_quality, cpi->active_worst_quality,
  3514. cpi->ni_av_qi, cpi->cq_target_quality, cpi->zbin_over_quant,
  3515. //cpi->avg_frame_qindex, cpi->zbin_over_quant,
  3516. cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
  3517. cm->frame_type, cpi->gfu_boost,
  3518. cpi->twopass.est_max_qcorrection_factor, (int)cpi->twopass.bits_left,
  3519. cpi->twopass.total_coded_error_left, cpi->tot_recode_hits);
  3520. fclose(f);
  3521. {
  3522. FILE *fmodes = fopen("Modes.stt", "a");
  3523. int i;
  3524. fprintf(fmodes, "%6d:%1d:%1d:%1d ",
  3525. cpi->common.current_video_frame,
  3526. cm->frame_type, cm->refresh_golden_frame,
  3527. cm->refresh_alt_ref_frame);
  3528. for (i = 0; i < MAX_MODES; i++)
  3529. fprintf(fmodes, "%5d ", cpi->mode_chosen_counts[i]);
  3530. fprintf(fmodes, "\n");
  3531. fclose(fmodes);
  3532. }
  3533. }
  3534. #endif
  3535. // If this was a kf or gf, note the Q
  3536. if ((cm->frame_type == KEY_FRAME) || cm->refresh_golden_frame || cm->refresh_alt_ref_frame)
  3537. cm->last_kf_gf_q = cm->base_qindex;
  3538. if (cm->refresh_golden_frame == 1)
  3539. cm->frame_flags = cm->frame_flags | FRAMEFLAGS_GOLDEN;
  3540. else
  3541. cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_GOLDEN;
  3542. if (cm->refresh_alt_ref_frame == 1)
  3543. cm->frame_flags = cm->frame_flags | FRAMEFLAGS_ALTREF;
  3544. else
  3545. cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_ALTREF;
  3546. if (cm->refresh_last_frame & cm->refresh_golden_frame) // both refreshed
  3547. cpi->gold_is_last = 1;
  3548. else if (cm->refresh_last_frame ^ cm->refresh_golden_frame) // 1 refreshed but not the other
  3549. cpi->gold_is_last = 0;
  3550. if (cm->refresh_last_frame & cm->refresh_alt_ref_frame) // both refreshed
  3551. cpi->alt_is_last = 1;
  3552. else if (cm->refresh_last_frame ^ cm->refresh_alt_ref_frame) // 1 refreshed but not the other
  3553. cpi->alt_is_last = 0;
  3554. if (cm->refresh_alt_ref_frame & cm->refresh_golden_frame) // both refreshed
  3555. cpi->gold_is_alt = 1;
  3556. else if (cm->refresh_alt_ref_frame ^ cm->refresh_golden_frame) // 1 refreshed but not the other
  3557. cpi->gold_is_alt = 0;
  3558. cpi->ref_frame_flags = VP8_ALT_FLAG | VP8_GOLD_FLAG | VP8_LAST_FLAG;
  3559. if (cpi->gold_is_last)
  3560. cpi->ref_frame_flags &= ~VP8_GOLD_FLAG;
  3561. if (cpi->alt_is_last)
  3562. cpi->ref_frame_flags &= ~VP8_ALT_FLAG;
  3563. if (cpi->gold_is_alt)
  3564. cpi->ref_frame_flags &= ~VP8_ALT_FLAG;
  3565. if (!cpi->oxcf.error_resilient_mode)
  3566. {
  3567. if (cpi->oxcf.play_alternate && cm->refresh_alt_ref_frame && (cm->frame_type != KEY_FRAME))
  3568. // Update the alternate reference frame stats as appropriate.
  3569. update_alt_ref_frame_stats(cpi);
  3570. else
  3571. // Update the Golden frame stats as appropriate.
  3572. update_golden_frame_stats(cpi);
  3573. }
  3574. if (cm->frame_type == KEY_FRAME)
  3575. {
  3576. // Tell the caller that the frame was coded as a key frame
  3577. *frame_flags = cm->frame_flags | FRAMEFLAGS_KEY;
  3578. // As this frame is a key frame the next defaults to an inter frame.
  3579. cm->frame_type = INTER_FRAME;
  3580. cpi->last_frame_percent_intra = 100;
  3581. }
  3582. else
  3583. {
  3584. *frame_flags = cm->frame_flags&~FRAMEFLAGS_KEY;
  3585. cpi->last_frame_percent_intra = cpi->this_frame_percent_intra;
  3586. }
  3587. // Clear the one shot update flags for segmentation map and mode/ref loop filter deltas.
  3588. cpi->mb.e_mbd.update_mb_segmentation_map = 0;
  3589. cpi->mb.e_mbd.update_mb_segmentation_data = 0;
  3590. cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
  3591. // Don't increment frame counters if this was an altref buffer update, not a real frame
  3592. if (cm->show_frame)
  3593. {
  3594. cm->current_video_frame++;
  3595. cpi->frames_since_key++;
  3596. }
  3597. // reset to normal state now that we are done.
  3598. #if 0
  3599. {
  3600. char filename[512];
  3601. FILE *recon_file;
  3602. sprintf(filename, "enc%04d.yuv", (int) cm->current_video_frame);
  3603. recon_file = fopen(filename, "wb");
  3604. fwrite(cm->yv12_fb[cm->lst_fb_idx].buffer_alloc,
  3605. cm->yv12_fb[cm->lst_fb_idx].frame_size, 1, recon_file);
  3606. fclose(recon_file);
  3607. }
  3608. #endif
  3609. // DEBUG
  3610. //vp8_write_yuv_frame("encoder_recon.yuv", cm->frame_to_show);
  3611. }
  3612. static void check_gf_quality(VP8_COMP *cpi)
  3613. {
  3614. VP8_COMMON *cm = &cpi->common;
  3615. int gf_active_pct = (100 * cpi->gf_active_count) / (cm->mb_rows * cm->mb_cols);
  3616. int gf_ref_usage_pct = (cpi->count_mb_ref_frame_usage[GOLDEN_FRAME] * 100) / (cm->mb_rows * cm->mb_cols);
  3617. int last_ref_zz_useage = (cpi->inter_zz_count * 100) / (cm->mb_rows * cm->mb_cols);
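// Worked example (illustrative numbers only): for a 640x480 clip (40x30 = 1200 MBs),
// gf_active_count = 90 gives gf_active_pct = 7, which is below the "low use" threshold of 10.
// A gf update is then only recommended once frames_since_golden > 7 and gf_bad_count has
// reached 8 consecutive frames with last_ref_zz_useage >= 25.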
  3618. // Gf refresh is not currently being signalled
  3619. if (cpi->gf_update_recommended == 0)
  3620. {
  3621. if (cpi->common.frames_since_golden > 7)
  3622. {
  3623. // Low use of gf
  3624. if ((gf_active_pct < 10) || ((gf_active_pct + gf_ref_usage_pct) < 15))
  3625. {
  3626. // ...but the last frame's zero-zero usage is reasonable so a new gf might be appropriate
  3627. if (last_ref_zz_useage >= 25)
  3628. {
  3629. cpi->gf_bad_count ++;
  3630. if (cpi->gf_bad_count >= 8) // Check that the condition is stable
  3631. {
  3632. cpi->gf_update_recommended = 1;
  3633. cpi->gf_bad_count = 0;
  3634. }
  3635. }
  3636. else
  3637. cpi->gf_bad_count = 0; // Restart count as the background is not stable enough
  3638. }
  3639. else
  3640. cpi->gf_bad_count = 0; // Gf usage has picked up so reset count
  3641. }
  3642. }
  3643. // If the signal is set but has not been read, should we cancel it?
  3644. else if (last_ref_zz_useage < 15)
  3645. {
  3646. cpi->gf_update_recommended = 0;
  3647. cpi->gf_bad_count = 0;
  3648. }
  3649. #if 0
  3650. {
  3651. FILE *f = fopen("gfneeded.stt", "a");
  3652. fprintf(f, "%10d %10d %10d %10d %10ld \n",
  3653. cm->current_video_frame,
  3654. cpi->common.frames_since_golden,
  3655. gf_active_pct, gf_ref_usage_pct,
  3656. cpi->gf_update_recommended);
  3657. fclose(f);
  3658. }
  3659. #endif
  3660. }
  3661. #if !(CONFIG_REALTIME_ONLY)
  3662. static void Pass2Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned int *frame_flags)
  3663. {
  3664. if (!cpi->common.refresh_alt_ref_frame)
  3665. vp8_second_pass(cpi);
  3666. encode_frame_to_data_rate(cpi, size, dest, frame_flags);
  3667. cpi->twopass.bits_left -= 8 * *size;
  3668. if (!cpi->common.refresh_alt_ref_frame)
  3669. {
  3670. double two_pass_min_rate = (double)(cpi->oxcf.target_bandwidth
  3671. *cpi->oxcf.two_pass_vbrmin_section / 100);
  3672. cpi->twopass.bits_left += (int64_t)(two_pass_min_rate / cpi->oxcf.frame_rate);
  3673. }
  3674. }
  3675. #endif
  3676. // For ARM NEON, d8-d15 are callee-saved registers and need to be saved by us.
  3677. #if HAVE_ARMV7
  3678. extern void vp8_push_neon(int64_t *store);
  3679. extern void vp8_pop_neon(int64_t *store);
  3680. #endif
  3681. int vp8_receive_raw_frame(VP8_PTR ptr, unsigned int frame_flags, YV12_BUFFER_CONFIG *sd, int64_t time_stamp, int64_t end_time)
  3682. {
  3683. #if HAVE_ARMV7
  3684. int64_t store_reg[8];
  3685. #endif
  3686. VP8_COMP *cpi = (VP8_COMP *) ptr;
  3687. VP8_COMMON *cm = &cpi->common;
  3688. struct vpx_usec_timer timer;
  3689. int res = 0;
  3690. #if HAVE_ARMV7
  3691. #if CONFIG_RUNTIME_CPU_DETECT
  3692. if (cm->rtcd.flags & HAS_NEON)
  3693. #endif
  3694. {
  3695. vp8_push_neon(store_reg);
  3696. }
  3697. #endif
  3698. vpx_usec_timer_start(&timer);
  3699. if(vp8_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
  3700. frame_flags))
  3701. res = -1;
  3702. cm->clr_type = sd->clrtype;
  3703. vpx_usec_timer_mark(&timer);
  3704. cpi->time_receive_data += vpx_usec_timer_elapsed(&timer);
  3705. #if HAVE_ARMV7
  3706. #if CONFIG_RUNTIME_CPU_DETECT
  3707. if (cm->rtcd.flags & HAS_NEON)
  3708. #endif
  3709. {
  3710. vp8_pop_neon(store_reg);
  3711. }
  3712. #endif
  3713. return res;
  3714. }
  3715. static int frame_is_reference(const VP8_COMP *cpi)
  3716. {
  3717. const VP8_COMMON *cm = &cpi->common;
  3718. const MACROBLOCKD *xd = &cpi->mb.e_mbd;
  3719. return cm->frame_type == KEY_FRAME || cm->refresh_last_frame
  3720. || cm->refresh_golden_frame || cm->refresh_alt_ref_frame
  3721. || cm->copy_buffer_to_gf || cm->copy_buffer_to_arf
  3722. || cm->refresh_entropy_probs
  3723. || xd->mode_ref_lf_delta_update
  3724. || xd->update_mb_segmentation_map || xd->update_mb_segmentation_data;
  3725. }
  3726. int vp8_get_compressed_data(VP8_PTR ptr, unsigned int *frame_flags, unsigned long *size, unsigned char *dest, int64_t *time_stamp, int64_t *time_end, int flush)
  3727. {
  3728. #if HAVE_ARMV7
  3729. int64_t store_reg[8];
  3730. #endif
  3731. VP8_COMP *cpi = (VP8_COMP *) ptr;
  3732. VP8_COMMON *cm = &cpi->common;
  3733. struct vpx_usec_timer tsctimer;
  3734. struct vpx_usec_timer ticktimer;
  3735. struct vpx_usec_timer cmptimer;
  3736. YV12_BUFFER_CONFIG *force_src_buffer = NULL;
  3737. if (!cpi)
  3738. return -1;
  3739. #if HAVE_ARMV7
  3740. #if CONFIG_RUNTIME_CPU_DETECT
  3741. if (cm->rtcd.flags & HAS_NEON)
  3742. #endif
  3743. {
  3744. vp8_push_neon(store_reg);
  3745. }
  3746. #endif
  3747. vpx_usec_timer_start(&cmptimer);
  3748. cpi->source = NULL;
  3749. #if !(CONFIG_REALTIME_ONLY)
  3750. // Should we code an alternate reference frame
  3751. if (cpi->oxcf.error_resilient_mode == 0 &&
  3752. cpi->oxcf.play_alternate &&
  3753. cpi->source_alt_ref_pending)
  3754. {
  3755. if ((cpi->source = vp8_lookahead_peek(cpi->lookahead,
  3756. cpi->frames_till_gf_update_due)))
  3757. {
  3758. cpi->alt_ref_source = cpi->source;
  3759. if (cpi->oxcf.arnr_max_frames > 0)
  3760. {
  3761. vp8_temporal_filter_prepare_c(cpi,
  3762. cpi->frames_till_gf_update_due);
  3763. force_src_buffer = &cpi->alt_ref_buffer;
  3764. }
  3765. cm->frames_till_alt_ref_frame = cpi->frames_till_gf_update_due;
  3766. cm->refresh_alt_ref_frame = 1;
  3767. cm->refresh_golden_frame = 0;
  3768. cm->refresh_last_frame = 0;
  3769. cm->show_frame = 0;
  3770. cpi->source_alt_ref_pending = FALSE; // Clear pending alt ref flag.
  3771. cpi->is_src_frame_alt_ref = 0;
  3772. }
  3773. }
  3774. #endif
  3775. if (!cpi->source)
  3776. {
  3777. if ((cpi->source = vp8_lookahead_pop(cpi->lookahead, flush)))
  3778. {
  3779. cm->show_frame = 1;
  3780. cpi->is_src_frame_alt_ref = cpi->alt_ref_source
  3781. && (cpi->source == cpi->alt_ref_source);
  3782. if(cpi->is_src_frame_alt_ref)
  3783. cpi->alt_ref_source = NULL;
  3784. }
  3785. }
  3786. if (cpi->source)
  3787. {
  3788. cpi->un_scaled_source =
  3789. cpi->Source = force_src_buffer ? force_src_buffer : &cpi->source->img;
  3790. *time_stamp = cpi->source->ts_start;
  3791. *time_end = cpi->source->ts_end;
  3792. *frame_flags = cpi->source->flags;
  3793. }
  3794. else
  3795. {
  3796. *size = 0;
  3797. #if !(CONFIG_REALTIME_ONLY)
  3798. if (flush && cpi->pass == 1 && !cpi->twopass.first_pass_done)
  3799. {
  3800. vp8_end_first_pass(cpi); /* get last stats packet */
  3801. cpi->twopass.first_pass_done = 1;
  3802. }
  3803. #endif
  3804. #if HAVE_ARMV7
  3805. #if CONFIG_RUNTIME_CPU_DETECT
  3806. if (cm->rtcd.flags & HAS_NEON)
  3807. #endif
  3808. {
  3809. vp8_pop_neon(store_reg);
  3810. }
  3811. #endif
  3812. return -1;
  3813. }
  3814. if (cpi->source->ts_start < cpi->first_time_stamp_ever)
  3815. {
  3816. cpi->first_time_stamp_ever = cpi->source->ts_start;
  3817. cpi->last_end_time_stamp_seen = cpi->source->ts_start;
  3818. }
  3819. // adjust frame rates based on timestamps given
  3820. if (!cm->refresh_alt_ref_frame)
  3821. {
  3822. int64_t this_duration;
  3823. int step = 0;
  3824. if (cpi->source->ts_start == cpi->first_time_stamp_ever)
  3825. {
  3826. this_duration = cpi->source->ts_end - cpi->source->ts_start;
  3827. step = 1;
  3828. }
  3829. else
  3830. {
  3831. int64_t last_duration;
  3832. this_duration = cpi->source->ts_end - cpi->last_end_time_stamp_seen;
  3833. last_duration = cpi->last_end_time_stamp_seen
  3834. - cpi->last_time_stamp_seen;
  3835. // do a step update if the duration changes by 10%
  3836. if (last_duration)
  3837. step = ((this_duration - last_duration) * 10 / last_duration);
  3838. }
  3839. if (this_duration)
  3840. {
  3841. if (step)
  3842. vp8_new_frame_rate(cpi, 10000000.0 / this_duration);
  3843. else
  3844. {
  3845. double avg_duration, interval;
  3846. /* Average this frame's rate into the last second's average
  3847. * frame rate. If we haven't seen 1 second yet, then average
  3848. * over the whole interval seen.
  3849. */
  3850. interval = cpi->source->ts_end - cpi->first_time_stamp_ever;
  3851. if(interval > 10000000.0)
  3852. interval = 10000000;
  3853. avg_duration = 10000000.0 / cpi->oxcf.frame_rate;
  3854. avg_duration *= (interval - avg_duration + this_duration);
  3855. avg_duration /= interval;
  3856. vp8_new_frame_rate(cpi, 10000000.0 / avg_duration);
  3857. }
  3858. }
  3859. cpi->last_time_stamp_seen = cpi->source->ts_start;
  3860. cpi->last_end_time_stamp_seen = cpi->source->ts_end;
  3861. }
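// Worked example (illustrative numbers only, timestamps in 1/10000000 second units):
// with oxcf.frame_rate = 25, avg_duration starts at 400000; over a full one second
// interval a single 500000 tick frame gives 400000 * (10000000 - 400000 + 500000) / 10000000
// = 404000, so the reported rate eases from 25 to about 10000000 / 404000 = 24.75 fps.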
  3862. if (cpi->compressor_speed == 2)
  3863. {
  3864. check_gf_quality(cpi);
  3865. vpx_usec_timer_start(&tsctimer);
  3866. vpx_usec_timer_start(&ticktimer);
  3867. }
  3868. // start with a 0 size frame
  3869. *size = 0;
  3870. // Clear down mmx registers
  3871. vp8_clear_system_state(); //__asm emms;
  3872. cm->frame_type = INTER_FRAME;
  3873. cm->frame_flags = *frame_flags;
  3874. #if 0
  3875. if (cm->refresh_alt_ref_frame)
  3876. {
  3877. //cm->refresh_golden_frame = 1;
  3878. cm->refresh_golden_frame = 0;
  3879. cm->refresh_last_frame = 0;
  3880. }
  3881. else
  3882. {
  3883. cm->refresh_golden_frame = 0;
  3884. cm->refresh_last_frame = 1;
  3885. }
  3886. #endif
  3887. /* find a free buffer for the new frame */
  3888. {
  3889. int i = 0;
  3890. for(; i < NUM_YV12_BUFFERS; i++)
  3891. {
  3892. if(!cm->yv12_fb[i].flags)
  3893. {
  3894. cm->new_fb_idx = i;
  3895. break;
  3896. }
  3897. }
  3898. assert(i < NUM_YV12_BUFFERS );
  3899. }
  3900. #if !(CONFIG_REALTIME_ONLY)
  3901. if (cpi->pass == 1)
  3902. {
  3903. Pass1Encode(cpi, size, dest, frame_flags);
  3904. }
  3905. else if (cpi->pass == 2)
  3906. {
  3907. Pass2Encode(cpi, size, dest, frame_flags);
  3908. }
  3909. else
  3910. #endif
  3911. encode_frame_to_data_rate(cpi, size, dest, frame_flags);
  3912. if (cpi->compressor_speed == 2)
  3913. {
  3914. unsigned int duration, duration2;
  3915. vpx_usec_timer_mark(&tsctimer);
  3916. vpx_usec_timer_mark(&ticktimer);
  3917. duration = vpx_usec_timer_elapsed(&ticktimer);
  3918. duration2 = (unsigned int)((double)duration / 2);
  3919. if (cm->frame_type != KEY_FRAME)
  3920. {
  3921. if (cpi->avg_encode_time == 0)
  3922. cpi->avg_encode_time = duration;
  3923. else
  3924. cpi->avg_encode_time = (7 * cpi->avg_encode_time + duration) >> 3;
  3925. }
  3926. if (duration2)
  3927. {
  3928. //if(*frame_flags!=1)
  3929. {
  3930. if (cpi->avg_pick_mode_time == 0)
  3931. cpi->avg_pick_mode_time = duration2;
  3932. else
  3933. cpi->avg_pick_mode_time = (7 * cpi->avg_pick_mode_time + duration2) >> 3;
  3934. }
  3935. }
  3936. }
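// Worked example (illustrative numbers only): with avg_encode_time = 8000 us and a new
// duration of 16000 us, the average becomes (7 * 8000 + 16000) >> 3 = 9000 us; duration2
// (half the tick time) feeds avg_pick_mode_time with the same 7/8 : 1/8 weighting.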
  3937. if (cm->refresh_entropy_probs == 0)
  3938. {
  3939. vpx_memcpy(&cm->fc, &cm->lfc, sizeof(cm->fc));
  3940. }
  3941. // if it was a dropped frame, keep the refresh requests so they are honored on subsequent frames
  3942. if (*size > 0)
  3943. {
  3944. cpi->droppable = !frame_is_reference(cpi);
  3945. // return to normal state
  3946. cm->refresh_entropy_probs = 1;
  3947. cm->refresh_alt_ref_frame = 0;
  3948. cm->refresh_golden_frame = 0;
  3949. cm->refresh_last_frame = 1;
  3950. cm->frame_type = INTER_FRAME;
  3951. }
  3952. vpx_usec_timer_mark(&cmptimer);
  3953. cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer);
  3954. if (cpi->b_calculate_psnr && cpi->pass != 1 && cm->show_frame)
  3955. {
  3956. generate_psnr_packet(cpi);
  3957. }
  3958. #if CONFIG_INTERNAL_STATS
  3959. if (cpi->pass != 1)
  3960. {
  3961. cpi->bytes += *size;
  3962. if (cm->show_frame)
  3963. {
  3964. cpi->count ++;
  3965. if (cpi->b_calculate_psnr)
  3966. {
  3967. double ye,ue,ve;
  3968. double frame_psnr;
  3969. YV12_BUFFER_CONFIG *orig = cpi->Source;
  3970. YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
  3971. YV12_BUFFER_CONFIG *pp = &cm->post_proc_buffer;
  3972. int y_samples = orig->y_height * orig->y_width ;
  3973. int uv_samples = orig->uv_height * orig->uv_width ;
  3974. int t_samples = y_samples + 2 * uv_samples;
  3975. int64_t sq_error;
  3976. ye = calc_plane_error(orig->y_buffer, orig->y_stride,
  3977. recon->y_buffer, recon->y_stride, orig->y_width, orig->y_height,
  3978. IF_RTCD(&cpi->rtcd.variance));
  3979. ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
  3980. recon->u_buffer, recon->uv_stride, orig->uv_width, orig->uv_height,
  3981. IF_RTCD(&cpi->rtcd.variance));
  3982. ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
  3983. recon->v_buffer, recon->uv_stride, orig->uv_width, orig->uv_height,
  3984. IF_RTCD(&cpi->rtcd.variance));
  3985. sq_error = ye + ue + ve;
  3986. frame_psnr = vp8_mse2psnr(t_samples, 255.0, sq_error);
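// Worked example (illustrative numbers, and assuming vp8_mse2psnr() implements the usual
// 10 * log10(peak^2 * samples / sse) conversion): a 640x480 frame has t_samples = 460800;
// a summed squared error of 4608000 is a mean squared error of 10, giving a frame PSNR of
// about 10 * log10(65025 / 10) = 38.1 dB.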
  3987. cpi->total_y += vp8_mse2psnr(y_samples, 255.0, ye);
  3988. cpi->total_u += vp8_mse2psnr(uv_samples, 255.0, ue);
  3989. cpi->total_v += vp8_mse2psnr(uv_samples, 255.0, ve);
  3990. cpi->total_sq_error += sq_error;
  3991. cpi->total += frame_psnr;
  3992. {
  3993. double frame_psnr2, frame_ssim2 = 0;
  3994. double weight = 0;
  3995. vp8_deblock(cm->frame_to_show, &cm->post_proc_buffer, cm->filter_level * 10 / 6, 1, 0, IF_RTCD(&cm->rtcd.postproc));
  3996. vp8_clear_system_state();
  3997. ye = calc_plane_error(orig->y_buffer, orig->y_stride,
  3998. pp->y_buffer, pp->y_stride, orig->y_width, orig->y_height,
  3999. IF_RTCD(&cpi->rtcd.variance));
  4000. ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
  4001. pp->u_buffer, pp->uv_stride, orig->uv_width, orig->uv_height,
  4002. IF_RTCD(&cpi->rtcd.variance));
  4003. ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
  4004. pp->v_buffer, pp->uv_stride, orig->uv_width, orig->uv_height,
  4005. IF_RTCD(&cpi->rtcd.variance));
  4006. sq_error = ye + ue + ve;
  4007. frame_psnr2 = vp8_mse2psnr(t_samples, 255.0, sq_error);
  4008. cpi->totalp_y += vp8_mse2psnr(y_samples, 255.0, ye);
  4009. cpi->totalp_u += vp8_mse2psnr(uv_samples, 255.0, ue);
  4010. cpi->totalp_v += vp8_mse2psnr(uv_samples, 255.0, ve);
  4011. cpi->total_sq_error2 += sq_error;
  4012. cpi->totalp += frame_psnr2;
  4013. frame_ssim2 = vp8_calc_ssim(cpi->Source,
  4014. &cm->post_proc_buffer, 1, &weight,
  4015. IF_RTCD(&cpi->rtcd.variance));
  4016. cpi->summed_quality += frame_ssim2 * weight;
  4017. cpi->summed_weights += weight;
  4018. }
  4019. }
  4020. if (cpi->b_calculate_ssimg)
  4021. {
  4022. double y, u, v, frame_all;
  4023. frame_all = vp8_calc_ssimg(cpi->Source, cm->frame_to_show,
  4024. &y, &u, &v, IF_RTCD(&cpi->rtcd.variance));
  4025. cpi->total_ssimg_y += y;
  4026. cpi->total_ssimg_u += u;
  4027. cpi->total_ssimg_v += v;
  4028. cpi->total_ssimg_all += frame_all;
  4029. }
  4030. }
  4031. }
  4032. #if 0
  4033. if (cpi->common.frame_type != 0 && cpi->common.base_qindex == cpi->oxcf.worst_allowed_q)
  4034. {
  4035. skiptruecount += cpi->skip_true_count;
  4036. skipfalsecount += cpi->skip_false_count;
  4037. }
  4038. #endif
  4039. #if 0
  4040. if (cpi->pass != 1)
  4041. {
  4042. FILE *f = fopen("skip.stt", "a");
  4043. fprintf(f, "frame:%4d flags:%4x Q:%4d P:%4d Size:%5d\n", cpi->common.current_video_frame, *frame_flags, cpi->common.base_qindex, cpi->prob_skip_false, *size);
  4044. if (cpi->is_src_frame_alt_ref == 1)
  4045. fprintf(f, "skipcount: %4d framesize: %d\n", cpi->skip_true_count , *size);
  4046. fclose(f);
  4047. }
  4048. #endif
  4049. #endif
  4050. #if HAVE_ARMV7
  4051. #if CONFIG_RUNTIME_CPU_DETECT
  4052. if (cm->rtcd.flags & HAS_NEON)
  4053. #endif
  4054. {
  4055. vp8_pop_neon(store_reg);
  4056. }
  4057. #endif
  4058. return 0;
  4059. }
  4060. int vp8_get_preview_raw_frame(VP8_PTR comp, YV12_BUFFER_CONFIG *dest, vp8_ppflags_t *flags)
  4061. {
  4062. VP8_COMP *cpi = (VP8_COMP *) comp;
  4063. if (cpi->common.refresh_alt_ref_frame)
  4064. return -1;
  4065. else
  4066. {
  4067. int ret;
  4068. #if CONFIG_POSTPROC
  4069. ret = vp8_post_proc_frame(&cpi->common, dest, flags);
  4070. #else
  4071. if (cpi->common.frame_to_show)
  4072. {
  4073. *dest = *cpi->common.frame_to_show;
  4074. dest->y_width = cpi->common.Width;
  4075. dest->y_height = cpi->common.Height;
  4076. dest->uv_height = cpi->common.Height / 2;
  4077. ret = 0;
  4078. }
  4079. else
  4080. {
  4081. ret = -1;
  4082. }
  4083. #endif //!CONFIG_POSTPROC
  4084. vp8_clear_system_state();
  4085. return ret;
  4086. }
  4087. }
  4088. int vp8_set_roimap(VP8_PTR comp, unsigned char *map, unsigned int rows, unsigned int cols, int delta_q[4], int delta_lf[4], unsigned int threshold[4])
  4089. {
  4090. VP8_COMP *cpi = (VP8_COMP *) comp;
  4091. signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
  4092. if (cpi->common.mb_rows != rows || cpi->common.mb_cols != cols)
  4093. return -1;
  4094. if (!map)
  4095. {
  4096. disable_segmentation((VP8_PTR)cpi);
  4097. return 0;
  4098. }
  4099. // Set the segmentation map
  4100. set_segmentation_map((VP8_PTR)cpi, map);
  4101. // Activate segmentation.
  4102. enable_segmentation((VP8_PTR)cpi);
  4103. // Set up the quant segment data
  4104. feature_data[MB_LVL_ALT_Q][0] = delta_q[0];
  4105. feature_data[MB_LVL_ALT_Q][1] = delta_q[1];
  4106. feature_data[MB_LVL_ALT_Q][2] = delta_q[2];
  4107. feature_data[MB_LVL_ALT_Q][3] = delta_q[3];
  4108. // Set up the loop filter segment data
  4109. feature_data[MB_LVL_ALT_LF][0] = delta_lf[0];
  4110. feature_data[MB_LVL_ALT_LF][1] = delta_lf[1];
  4111. feature_data[MB_LVL_ALT_LF][2] = delta_lf[2];
  4112. feature_data[MB_LVL_ALT_LF][3] = delta_lf[3];
  4113. cpi->segment_encode_breakout[0] = threshold[0];
  4114. cpi->segment_encode_breakout[1] = threshold[1];
  4115. cpi->segment_encode_breakout[2] = threshold[2];
  4116. cpi->segment_encode_breakout[3] = threshold[3];
  4117. // Initialise the feature data structure
  4118. // SEGMENT_DELTADATA 0, SEGMENT_ABSDATA 1
  4119. set_segment_data((VP8_PTR)cpi, &feature_data[0][0], SEGMENT_DELTADATA);
  4120. return 0;
  4121. }
  4122. int vp8_set_active_map(VP8_PTR comp, unsigned char *map, unsigned int rows, unsigned int cols)
  4123. {
  4124. VP8_COMP *cpi = (VP8_COMP *) comp;
  4125. if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols)
  4126. {
  4127. if (map)
  4128. {
  4129. vpx_memcpy(cpi->active_map, map, rows * cols);
  4130. cpi->active_map_enabled = 1;
  4131. }
  4132. else
  4133. cpi->active_map_enabled = 0;
  4134. return 0;
  4135. }
  4136. else
  4137. {
  4138. //cpi->active_map_enabled = 0;
  4139. return -1 ;
  4140. }
  4141. }
  4142. int vp8_set_internal_size(VP8_PTR comp, VPX_SCALING horiz_mode, VPX_SCALING vert_mode)
  4143. {
  4144. VP8_COMP *cpi = (VP8_COMP *) comp;
  4145. if (horiz_mode <= ONETWO)
  4146. cpi->common.horiz_scale = horiz_mode;
  4147. else
  4148. return -1;
  4149. if (vert_mode <= ONETWO)
  4150. cpi->common.vert_scale = vert_mode;
  4151. else
  4152. return -1;
  4153. return 0;
  4154. }
  4155. int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest, const vp8_variance_rtcd_vtable_t *rtcd)
  4156. {
  4157. int i, j;
  4158. int Total = 0;
  4159. unsigned char *src = source->y_buffer;
  4160. unsigned char *dst = dest->y_buffer;
  4161. (void)rtcd;
  4162. // Loop through the Y plane of the raw and reconstruction data, summing the squared differences
  4163. for (i = 0; i < source->y_height; i += 16)
  4164. {
  4165. for (j = 0; j < source->y_width; j += 16)
  4166. {
  4167. unsigned int sse;
  4168. Total += VARIANCE_INVOKE(rtcd, mse16x16)(src + j, source->y_stride, dst + j, dest->y_stride, &sse);
  4169. }
  4170. src += 16 * source->y_stride;
  4171. dst += 16 * dest->y_stride;
  4172. }
  4173. return Total;
  4174. }
  4175. int vp8_get_quantizer(VP8_PTR c)
  4176. {
  4177. VP8_COMP *cpi = (VP8_COMP *) c;
  4178. return cpi->common.base_qindex;
  4179. }