
/content/common/gpu/media/video_encode_accelerator_unittest.cc

https://gitlab.com/jonnialva90/iridium-browser
  1. // Copyright 2013 The Chromium Authors. All rights reserved.
  2. // Use of this source code is governed by a BSD-style license that can be
  3. // found in the LICENSE file.
  4. #include <inttypes.h>
  5. #include <algorithm>
  6. #include <queue>
  7. #include <string>
  8. #include "base/at_exit.h"
  9. #include "base/bind.h"
  10. #include "base/command_line.h"
  11. #include "base/files/file_util.h"
  12. #include "base/files/memory_mapped_file.h"
  13. #include "base/memory/scoped_vector.h"
  14. #include "base/message_loop/message_loop.h"
  15. #include "base/numerics/safe_conversions.h"
  16. #include "base/process/process_handle.h"
  17. #include "base/strings/string_number_conversions.h"
  18. #include "base/strings/string_split.h"
  19. #include "base/strings/stringprintf.h"
  20. #include "base/threading/thread.h"
  21. #include "base/threading/thread_checker.h"
  22. #include "base/time/time.h"
  23. #include "base/timer/timer.h"
  24. #include "content/common/gpu/media/video_accelerator_unittest_helpers.h"
  25. #include "media/base/bind_to_current_loop.h"
  26. #include "media/base/bitstream_buffer.h"
  27. #include "media/base/decoder_buffer.h"
  28. #include "media/base/test_data_util.h"
  29. #include "media/base/video_decoder.h"
  30. #include "media/base/video_frame.h"
  31. #include "media/filters/ffmpeg_glue.h"
  32. #include "media/filters/ffmpeg_video_decoder.h"
  33. #include "media/filters/h264_parser.h"
  34. #include "media/filters/ivf_parser.h"
  35. #include "media/video/fake_video_encode_accelerator.h"
  36. #include "media/video/video_encode_accelerator.h"
  37. #include "testing/gtest/include/gtest/gtest.h"
  38. #if defined(OS_CHROMEOS)
  39. #if defined(ARCH_CPU_ARMEL) || (defined(USE_OZONE) && defined(USE_V4L2_CODEC))
  40. #include "content/common/gpu/media/v4l2_video_encode_accelerator.h"
  41. #endif
  42. #if defined(ARCH_CPU_X86_FAMILY)
  43. #include "content/common/gpu/media/vaapi_video_encode_accelerator.h"
  44. #include "content/common/gpu/media/vaapi_wrapper.h"
  45. // Status has been defined as int in Xlib.h.
  46. #undef Status
  47. #endif // defined(ARCH_CPU_X86_FAMILY)
  48. #else
  49. #error The VideoEncodeAcceleratorUnittest is not supported on this platform.
  50. #endif
  51. using media::VideoEncodeAccelerator;
  52. namespace content {
  53. namespace {
  54. const media::VideoPixelFormat kInputFormat = media::PIXEL_FORMAT_I420;
  55. // The absolute difference between the original frame and the decoded frame usually
  56. // ranges around 1 ~ 7. So we pick 10 as an extreme value to detect abnormal
  57. // decoded frames.
  58. const double kDecodeSimilarityThreshold = 10.0;
  59. // Arbitrarily chosen to add some depth to the pipeline.
  60. const unsigned int kNumOutputBuffers = 4;
  61. const unsigned int kNumExtraInputFrames = 4;
  62. // Maximum delay between requesting a keyframe and receiving one, in frames.
  63. // Arbitrarily chosen as a reasonable requirement.
  64. const unsigned int kMaxKeyframeDelay = 4;
  65. // Default initial bitrate.
  66. const uint32 kDefaultBitrate = 2000000;
  67. // Default ratio of requested_subsequent_bitrate to initial_bitrate
  68. // (see test parameters below) if one is not provided.
  69. const double kDefaultSubsequentBitrateRatio = 2.0;
  70. // Default initial framerate.
  71. const uint32 kDefaultFramerate = 30;
  72. // Default ratio of requested_subsequent_framerate to initial_framerate
  73. // (see test parameters below) if one is not provided.
  74. const double kDefaultSubsequentFramerateRatio = 0.1;
  75. // Tolerance factor for how encoded bitrate can differ from requested bitrate.
  76. const double kBitrateTolerance = 0.1;
  77. // Minimum required FPS throughput for the basic performance test.
  78. const uint32 kMinPerfFPS = 30;
  79. // Minimum (arbitrary) number of frames over which to enforce bitrate requirements.
  80. // Streams shorter than this may be too short to realistically require an encoder
  81. // to be able to converge to the requested bitrate.
  82. // The input stream will be looped as many times as needed in bitrate tests
  83. // to reach at least this number of frames before calculating final bitrate.
  84. const unsigned int kMinFramesForBitrateTests = 300;
  85. // The percentiles to measure for encode latency.
  86. const unsigned int kLoggedLatencyPercentiles[] = {50, 75, 95};
  87. // The syntax of multiple test streams is:
  88. // test-stream1;test-stream2;test-stream3
  89. // The syntax of each test stream is:
  90. // "in_filename:width:height:out_filename:requested_bitrate:requested_framerate
  91. // :requested_subsequent_bitrate:requested_subsequent_framerate"
  92. // - |in_filename| must be an I420 (YUV planar) raw stream
  93. // (see http://www.fourcc.org/yuv.php#IYUV).
  94. // - |width| and |height| are in pixels.
  95. // - |profile| to encode into (values of media::VideoCodecProfile).
  96. // - |out_filename| filename to save the encoded stream to (optional). The
  97. // format for H264 is an Annex-B byte stream. The format for VP8 is IVF. The output
  98. // stream is saved for the simple encode test only. The H264 raw stream and the IVF
  99. // file can be used as input to the VDA unittest. The H264 raw stream can be played
  100. // with "mplayer -fps 25 out.h264"; the IVF file can be played by mplayer directly.
  101. // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF
  102. // Further parameters are optional (need to provide preceding positional
  103. // parameters if a specific subsequent parameter is required):
  104. // - |requested_bitrate| requested bitrate in bits per second.
  105. // - |requested_framerate| requested initial framerate.
  106. // - |requested_subsequent_bitrate| bitrate to switch to in the middle of the
  107. // stream.
  108. // - |requested_subsequent_framerate| framerate to switch to in the middle
  109. // of the stream.
  110. // Bitrate is only forced for tests that test bitrate.
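// Hypothetical example of a full stream spec (the trailing framerate and
// subsequent-bitrate values are made up for illustration):
// "bear_320x192_40frames.yuv:320:192:1:out.h264:200000:30:400000"
// encodes the 320x192 input with profile value 1 at an initial 200 kbps and
// 30 fps and, when a mid-stream bitrate switch test is run, switches to
// 400 kbps partway through the stream.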
  111. const char* g_default_in_filename = "bear_320x192_40frames.yuv";
  112. const char* g_default_in_parameters = ":320:192:1:out.h264:200000";
  113. // Enabled by including a --fake_encoder flag to the command line invoking the
  114. // test.
  115. bool g_fake_encoder = false;
  116. // Environment to store test stream data for all test cases.
  117. class VideoEncodeAcceleratorTestEnvironment;
  118. VideoEncodeAcceleratorTestEnvironment* g_env;
  119. // The number of frames to be encoded. This variable is set by the switch
  120. // "--num_frames_to_encode". Ignored if 0.
  121. int g_num_frames_to_encode = 0;
  122. struct TestStream {
  123. TestStream()
  124. : num_frames(0),
  125. aligned_buffer_size(0),
  126. requested_bitrate(0),
  127. requested_framerate(0),
  128. requested_subsequent_bitrate(0),
  129. requested_subsequent_framerate(0) {}
  130. ~TestStream() {}
  131. gfx::Size visible_size;
  132. gfx::Size coded_size;
  133. unsigned int num_frames;
  134. // Original unaligned input file name provided as an argument to the test.
  135. // The file must be an I420 (YUV planar) raw stream.
  136. std::string in_filename;
  137. // A temporary file used to prepare aligned input buffers of |in_filename|.
  138. // The file ensures that the starting addresses of the YUV planes are 64 byte-aligned.
  139. base::FilePath aligned_in_file;
  140. // The memory mapping of |aligned_in_file|.
  141. base::MemoryMappedFile mapped_aligned_in_file;
  142. // Byte size of a frame of |aligned_in_file|.
  143. size_t aligned_buffer_size;
  144. // Byte size for each aligned plane of a frame.
  145. std::vector<size_t> aligned_plane_size;
  146. std::string out_filename;
  147. media::VideoCodecProfile requested_profile;
  148. unsigned int requested_bitrate;
  149. unsigned int requested_framerate;
  150. unsigned int requested_subsequent_bitrate;
  151. unsigned int requested_subsequent_framerate;
  152. };
  153. inline static size_t Align64Bytes(size_t value) {
  154. return (value + 63) & ~63;
  155. }
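// For reference (illustrative values): Align64Bytes(1) == 64,
// Align64Bytes(64) == 64, Align64Bytes(65) == 128; i.e. round up to the next
// multiple of 64 bytes.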
  156. // Write |data| of |size| bytes at |offset| bytes into |file|.
  157. static bool WriteFile(base::File* file,
  158. const off_t offset,
  159. const uint8* data,
  160. size_t size) {
  161. size_t written_bytes = 0;
  162. while (written_bytes < size) {
  163. int bytes = file->Write(offset + written_bytes,
  164. reinterpret_cast<const char*>(data + written_bytes),
  165. size - written_bytes);
  166. if (bytes <= 0)
  167. return false;
  168. written_bytes += bytes;
  169. }
  170. return true;
  171. }
  172. // Return the |percentile| from a sorted vector.
  173. static base::TimeDelta Percentile(
  174. const std::vector<base::TimeDelta>& sorted_values,
  175. unsigned int percentile) {
  176. size_t size = sorted_values.size();
  177. LOG_ASSERT(size > 0UL);
  178. LOG_ASSERT(percentile <= 100UL);
  179. // Use Nearest Rank method in http://en.wikipedia.org/wiki/Percentile.
  180. int index =
  181. std::max(static_cast<int>(ceil(0.01f * percentile * size)) - 1, 0);
  182. return sorted_values[index];
  183. }
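// Worked example (illustrative): for a sorted vector of 10 latencies, the
// 95th percentile maps to index ceil(0.01 * 95 * 10) - 1 == 9 (the largest
// value), while the 50th percentile maps to index ceil(0.01 * 50 * 10) - 1 == 4.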
  184. static bool IsH264(media::VideoCodecProfile profile) {
  185. return profile >= media::H264PROFILE_MIN && profile <= media::H264PROFILE_MAX;
  186. }
  187. static bool IsVP8(media::VideoCodecProfile profile) {
  188. return profile >= media::VP8PROFILE_MIN && profile <= media::VP8PROFILE_MAX;
  189. }
  190. // ARM performs CPU cache management with CPU cache line granularity. We thus
  191. // need to ensure our buffers are CPU cache line-aligned (64 byte-aligned).
  192. // Otherwise newer kernels will refuse to accept them, and on older kernels
  193. // we'll be treating ourselves to random corruption.
  194. // Since we are just mapping and passing chunks of the input file directly to
  195. // the VEA as input frames to avoid copying large chunks of raw data on each
  196. // frame and thus affecting performance measurements, we have to prepare a
  197. // temporary file with all planes aligned to 64-byte boundaries beforehand.
  198. static void CreateAlignedInputStreamFile(const gfx::Size& coded_size,
  199. TestStream* test_stream) {
  200. // A test case may have many encoders, and the memory should be prepared once.
  201. if (test_stream->coded_size == coded_size &&
  202. test_stream->mapped_aligned_in_file.IsValid())
  203. return;
  204. // All encoders in a multiple-encoder test reuse the same test_stream; make
  205. // sure they requested the same coded_size.
  206. ASSERT_TRUE(!test_stream->mapped_aligned_in_file.IsValid() ||
  207. coded_size == test_stream->coded_size);
  208. test_stream->coded_size = coded_size;
  209. size_t num_planes = media::VideoFrame::NumPlanes(kInputFormat);
  210. std::vector<size_t> padding_sizes(num_planes);
  211. std::vector<size_t> coded_bpl(num_planes);
  212. std::vector<size_t> visible_bpl(num_planes);
  213. std::vector<size_t> visible_plane_rows(num_planes);
  214. // Calculate padding in bytes to be added after each plane required to keep
  215. // starting addresses of all planes at a 64 byte boundary. This padding will
  216. // be added after each plane when copying to the temporary file.
  217. // At the same time we also need to take into account coded_size requested by
  218. // the VEA; each row of visible_bpl bytes in the original file needs to be
  219. // copied into a row of coded_bpl bytes in the aligned file.
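// Hypothetical example (values chosen for illustration; the actual coded size
// is reported by the encoder): for a 320x192 visible size and a requested
// coded size of 320x208, the Y plane gets padding_rows = 16 and
// coded_bpl = 320, i.e. 5120 bytes of padding, plus any rounding needed to
// reach the next 64 byte boundary (zero here, since 320x208 is already a
// multiple of 64 bytes).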
  220. for (size_t i = 0; i < num_planes; i++) {
  221. const size_t size =
  222. media::VideoFrame::PlaneSize(kInputFormat, i, coded_size).GetArea();
  223. test_stream->aligned_plane_size.push_back(Align64Bytes(size));
  224. test_stream->aligned_buffer_size += test_stream->aligned_plane_size.back();
  225. coded_bpl[i] =
  226. media::VideoFrame::RowBytes(i, kInputFormat, coded_size.width());
  227. visible_bpl[i] = media::VideoFrame::RowBytes(
  228. i, kInputFormat, test_stream->visible_size.width());
  229. visible_plane_rows[i] = media::VideoFrame::Rows(
  230. i, kInputFormat, test_stream->visible_size.height());
  231. const size_t padding_rows =
  232. media::VideoFrame::Rows(i, kInputFormat, coded_size.height()) -
  233. visible_plane_rows[i];
  234. padding_sizes[i] = padding_rows * coded_bpl[i] + Align64Bytes(size) - size;
  235. }
  236. base::MemoryMappedFile src_file;
  237. LOG_ASSERT(src_file.Initialize(base::FilePath(test_stream->in_filename)));
  238. LOG_ASSERT(base::CreateTemporaryFile(&test_stream->aligned_in_file));
  239. size_t visible_buffer_size = media::VideoFrame::AllocationSize(
  240. kInputFormat, test_stream->visible_size);
  241. LOG_ASSERT(src_file.length() % visible_buffer_size == 0U)
  242. << "Stream byte size is not a product of calculated frame byte size";
  243. test_stream->num_frames = src_file.length() / visible_buffer_size;
  244. uint32 flags = base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE |
  245. base::File::FLAG_READ;
  246. // Create a temporary file large enough to hold all frames at the aligned coded size.
  247. base::File dest_file(test_stream->aligned_in_file, flags);
  248. LOG_ASSERT(test_stream->aligned_buffer_size > 0UL);
  249. dest_file.SetLength(test_stream->aligned_buffer_size *
  250. test_stream->num_frames);
  251. const uint8* src = src_file.data();
  252. off_t dest_offset = 0;
  253. for (size_t frame = 0; frame < test_stream->num_frames; frame++) {
  254. for (size_t i = 0; i < num_planes; i++) {
  255. // Assert that each plane of the frame starts at a 64 byte boundary.
  256. ASSERT_EQ(dest_offset & 63, 0)
  257. << "Planes of frame should be mapped at a 64 byte boundary";
  258. for (size_t j = 0; j < visible_plane_rows[i]; j++) {
  259. LOG_ASSERT(WriteFile(&dest_file, dest_offset, src, visible_bpl[i]));
  260. src += visible_bpl[i];
  261. dest_offset += coded_bpl[i];
  262. }
  263. dest_offset += padding_sizes[i];
  264. }
  265. }
  266. LOG_ASSERT(test_stream->mapped_aligned_in_file.Initialize(dest_file.Pass()));
  267. // Assert that the memory-mapped file starts at a 64 byte boundary, so each
  268. // plane of every frame also starts at a 64 byte boundary.
  269. ASSERT_EQ(
  270. reinterpret_cast<off_t>(test_stream->mapped_aligned_in_file.data()) & 63,
  271. 0)
  272. << "File should be mapped at a 64 byte boundary";
  273. LOG_ASSERT(test_stream->mapped_aligned_in_file.length() %
  274. test_stream->aligned_buffer_size == 0U)
  275. << "Stream byte size is not a product of calculated frame byte size";
  276. LOG_ASSERT(test_stream->num_frames > 0UL);
  277. }
  278. // Parse |data| into its constituent parts, set the various output fields
  279. // accordingly, read in the video streams, and store them in |test_streams|.
  280. static void ParseAndReadTestStreamData(const base::FilePath::StringType& data,
  281. ScopedVector<TestStream>* test_streams) {
  282. // Split the string to individual test stream data.
  283. std::vector<base::FilePath::StringType> test_streams_data = base::SplitString(
  284. data, base::FilePath::StringType(1, ';'),
  285. base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
  286. LOG_ASSERT(test_streams_data.size() >= 1U) << data;
  287. // Parse each test stream data and read the input file.
  288. for (size_t index = 0; index < test_streams_data.size(); ++index) {
  289. std::vector<base::FilePath::StringType> fields = base::SplitString(
  290. test_streams_data[index], base::FilePath::StringType(1, ':'),
  291. base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
  292. LOG_ASSERT(fields.size() >= 4U) << data;
  293. LOG_ASSERT(fields.size() <= 9U) << data;
  294. TestStream* test_stream = new TestStream();
  295. test_stream->in_filename = fields[0];
  296. int width, height;
  297. bool result = base::StringToInt(fields[1], &width);
  298. LOG_ASSERT(result);
  299. result = base::StringToInt(fields[2], &height);
  300. LOG_ASSERT(result);
  301. test_stream->visible_size = gfx::Size(width, height);
  302. LOG_ASSERT(!test_stream->visible_size.IsEmpty());
  303. int profile;
  304. result = base::StringToInt(fields[3], &profile);
  305. LOG_ASSERT(result);
  306. LOG_ASSERT(profile > media::VIDEO_CODEC_PROFILE_UNKNOWN);
  307. LOG_ASSERT(profile <= media::VIDEO_CODEC_PROFILE_MAX);
  308. test_stream->requested_profile =
  309. static_cast<media::VideoCodecProfile>(profile);
  310. if (fields.size() >= 5 && !fields[4].empty())
  311. test_stream->out_filename = fields[4];
  312. if (fields.size() >= 6 && !fields[5].empty())
  313. LOG_ASSERT(base::StringToUint(fields[5],
  314. &test_stream->requested_bitrate));
  315. if (fields.size() >= 7 && !fields[6].empty())
  316. LOG_ASSERT(base::StringToUint(fields[6],
  317. &test_stream->requested_framerate));
  318. if (fields.size() >= 8 && !fields[7].empty()) {
  319. LOG_ASSERT(base::StringToUint(fields[7],
  320. &test_stream->requested_subsequent_bitrate));
  321. }
  322. if (fields.size() >= 9 && !fields[8].empty()) {
  323. LOG_ASSERT(base::StringToUint(fields[8],
  324. &test_stream->requested_subsequent_framerate));
  325. }
  326. test_streams->push_back(test_stream);
  327. }
  328. }
  329. // Basic test environment shared across multiple test cases. We only need to
  330. // set it up once for all test cases.
  331. // It helps
  332. // - maintain test stream data and other test settings.
  333. // - clean up temporary aligned files.
  334. // - output log to file.
  335. class VideoEncodeAcceleratorTestEnvironment : public ::testing::Environment {
  336. public:
  337. VideoEncodeAcceleratorTestEnvironment(
  338. scoped_ptr<base::FilePath::StringType> data,
  339. const base::FilePath& log_path,
  340. bool run_at_fps,
  341. bool needs_encode_latency,
  342. bool verify_all_output)
  343. : test_stream_data_(data.Pass()),
  344. log_path_(log_path),
  345. run_at_fps_(run_at_fps),
  346. needs_encode_latency_(needs_encode_latency),
  347. verify_all_output_(verify_all_output) {}
  348. virtual void SetUp() {
  349. if (!log_path_.empty()) {
  350. log_file_.reset(new base::File(
  351. log_path_, base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE));
  352. LOG_ASSERT(log_file_->IsValid());
  353. }
  354. ParseAndReadTestStreamData(*test_stream_data_, &test_streams_);
  355. }
  356. virtual void TearDown() {
  357. for (size_t i = 0; i < test_streams_.size(); i++) {
  358. base::DeleteFile(test_streams_[i]->aligned_in_file, false);
  359. }
  360. log_file_.reset();
  361. }
  362. // Log one entry of machine-readable data to file and LOG(INFO).
  363. // The log has one data entry per line in the format of "<key>: <value>".
  364. // Note that Chrome OS video_VEAPerf autotest parses the output key and value
  365. // pairs. Be sure to keep the autotest in sync.
  366. void LogToFile(const std::string& key, const std::string& value) {
  367. std::string s = base::StringPrintf("%s: %s\n", key.c_str(), value.c_str());
  368. LOG(INFO) << s;
  369. if (log_file_) {
  370. log_file_->WriteAtCurrentPos(s.data(), s.length());
  371. }
  372. }
  373. // If true, feed the encoder with the input buffers at the requested framerate;
  374. // if false, feed as fast as possible. This is set by the command line switch
  375. // "--run_at_fps".
  376. bool run_at_fps() const { return run_at_fps_; }
  377. // Whether to measure encode latency. This is set by the command line switch
  378. // "--measure_latency".
  379. bool needs_encode_latency() const { return needs_encode_latency_; }
  380. // Verify the encoder output of all test cases. This is set by the command line
  381. // switch "--verify_all_output".
  382. bool verify_all_output() const { return verify_all_output_; }
  383. ScopedVector<TestStream> test_streams_;
  384. private:
  385. scoped_ptr<base::FilePath::StringType> test_stream_data_;
  386. base::FilePath log_path_;
  387. scoped_ptr<base::File> log_file_;
  388. bool run_at_fps_;
  389. bool needs_encode_latency_;
  390. bool verify_all_output_;
  391. };
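// State progression of a client during a successful run, as driven by
// SetState() below (sketch; CS_VALIDATED is only reached when output
// verification is enabled):
// CS_CREATED -> CS_ENCODER_SET -> CS_INITIALIZED -> CS_ENCODING ->
// CS_FINISHED -> CS_VALIDATED.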
  392. enum ClientState {
  393. CS_CREATED,
  394. CS_ENCODER_SET,
  395. CS_INITIALIZED,
  396. CS_ENCODING,
  397. // Encoding has finished.
  398. CS_FINISHED,
  399. // Encoded frame quality has been validated.
  400. CS_VALIDATED,
  401. CS_ERROR,
  402. };
  403. // Performs basic, codec-specific sanity checks on the stream buffers passed
  404. // to ProcessStreamBuffer(): whether we've seen keyframes before non-keyframes,
  405. // correct sequences of H.264 NALUs (SPS before PPS and before slices), etc.
  406. // Calls given FrameFoundCallback when a complete frame is found while
  407. // processing.
  408. class StreamValidator {
  409. public:
  410. // To be called when a complete frame is found while processing a stream
  411. // buffer, passing true if the frame is a keyframe. Returns false if we
  412. // are not interested in more frames and further processing should be aborted.
  413. typedef base::Callback<bool(bool)> FrameFoundCallback;
  414. virtual ~StreamValidator() {}
  415. // Provide a StreamValidator instance for the given |profile|.
  416. static scoped_ptr<StreamValidator> Create(media::VideoCodecProfile profile,
  417. const FrameFoundCallback& frame_cb);
  418. // Process and verify contents of a bitstream buffer.
  419. virtual void ProcessStreamBuffer(const uint8* stream, size_t size) = 0;
  420. protected:
  421. explicit StreamValidator(const FrameFoundCallback& frame_cb)
  422. : frame_cb_(frame_cb) {}
  423. FrameFoundCallback frame_cb_;
  424. };
  425. class H264Validator : public StreamValidator {
  426. public:
  427. explicit H264Validator(const FrameFoundCallback& frame_cb)
  428. : StreamValidator(frame_cb),
  429. seen_sps_(false),
  430. seen_pps_(false),
  431. seen_idr_(false) {}
  432. void ProcessStreamBuffer(const uint8* stream, size_t size) override;
  433. private:
  434. // Set to true when encoder provides us with the corresponding NALU type.
  435. bool seen_sps_;
  436. bool seen_pps_;
  437. bool seen_idr_;
  438. media::H264Parser h264_parser_;
  439. };
  440. void H264Validator::ProcessStreamBuffer(const uint8* stream, size_t size) {
  441. h264_parser_.SetStream(stream, size);
  442. while (1) {
  443. media::H264NALU nalu;
  444. media::H264Parser::Result result;
  445. result = h264_parser_.AdvanceToNextNALU(&nalu);
  446. if (result == media::H264Parser::kEOStream)
  447. break;
  448. ASSERT_EQ(media::H264Parser::kOk, result);
  449. bool keyframe = false;
  450. switch (nalu.nal_unit_type) {
  451. case media::H264NALU::kIDRSlice:
  452. ASSERT_TRUE(seen_sps_);
  453. ASSERT_TRUE(seen_pps_);
  454. seen_idr_ = true;
  455. keyframe = true;
  456. // fallthrough
  457. case media::H264NALU::kNonIDRSlice: {
  458. ASSERT_TRUE(seen_idr_);
  459. if (!frame_cb_.Run(keyframe))
  460. return;
  461. break;
  462. }
  463. case media::H264NALU::kSPS: {
  464. int sps_id;
  465. ASSERT_EQ(media::H264Parser::kOk, h264_parser_.ParseSPS(&sps_id));
  466. seen_sps_ = true;
  467. break;
  468. }
  469. case media::H264NALU::kPPS: {
  470. ASSERT_TRUE(seen_sps_);
  471. int pps_id;
  472. ASSERT_EQ(media::H264Parser::kOk, h264_parser_.ParsePPS(&pps_id));
  473. seen_pps_ = true;
  474. break;
  475. }
  476. default:
  477. break;
  478. }
  479. }
  480. }
  481. class VP8Validator : public StreamValidator {
  482. public:
  483. explicit VP8Validator(const FrameFoundCallback& frame_cb)
  484. : StreamValidator(frame_cb),
  485. seen_keyframe_(false) {}
  486. void ProcessStreamBuffer(const uint8* stream, size_t size) override;
  487. private:
  488. // Have we already got a keyframe in the stream?
  489. bool seen_keyframe_;
  490. };
  491. void VP8Validator::ProcessStreamBuffer(const uint8* stream, size_t size) {
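// The least significant bit of the first byte of a VP8 frame tag is the frame
// type flag: 0 indicates a key frame (see RFC 6386), hence the check below.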
  492. bool keyframe = !(stream[0] & 0x01);
  493. if (keyframe)
  494. seen_keyframe_ = true;
  495. EXPECT_TRUE(seen_keyframe_);
  496. frame_cb_.Run(keyframe);
  497. // TODO(posciak): We could be getting more frames in the buffer, but there is
  498. // no simple way to detect this. We'd need to parse the frames and go through
  499. // partition numbers/sizes. For now assume one frame per buffer.
  500. }
  501. // static
  502. scoped_ptr<StreamValidator> StreamValidator::Create(
  503. media::VideoCodecProfile profile,
  504. const FrameFoundCallback& frame_cb) {
  505. scoped_ptr<StreamValidator> validator;
  506. if (IsH264(profile)) {
  507. validator.reset(new H264Validator(frame_cb));
  508. } else if (IsVP8(profile)) {
  509. validator.reset(new VP8Validator(frame_cb));
  510. } else {
  511. LOG(FATAL) << "Unsupported profile: " << profile;
  512. }
  513. return validator.Pass();
  514. }
  515. class VideoFrameQualityValidator {
  516. public:
  517. VideoFrameQualityValidator(const media::VideoCodecProfile profile,
  518. const base::Closure& flush_complete_cb,
  519. const base::Closure& decode_error_cb);
  520. void Initialize(const gfx::Size& coded_size, const gfx::Rect& visible_size);
  521. // Save original YUV frame to compare it with the decoded frame later.
  522. void AddOriginalFrame(scoped_refptr<media::VideoFrame> frame);
  523. void AddDecodeBuffer(const scoped_refptr<media::DecoderBuffer>& buffer);
  524. // Flush the decoder.
  525. void Flush();
  526. private:
  527. void InitializeCB(bool success);
  528. void DecodeDone(media::VideoDecoder::Status status);
  529. void FlushDone(media::VideoDecoder::Status status);
  530. void VerifyOutputFrame(const scoped_refptr<media::VideoFrame>& output_frame);
  531. void Decode();
  532. enum State { UNINITIALIZED, INITIALIZED, DECODING, ERROR };
  533. const media::VideoCodecProfile profile_;
  534. scoped_ptr<media::FFmpegVideoDecoder> decoder_;
  535. media::VideoDecoder::DecodeCB decode_cb_;
  536. // Decode callback of an EOS buffer.
  537. media::VideoDecoder::DecodeCB eos_decode_cb_;
  538. // Callback of Flush(). Called after all frames are decoded.
  539. const base::Closure flush_complete_cb_;
  540. const base::Closure decode_error_cb_;
  541. State decoder_state_;
  542. std::queue<scoped_refptr<media::VideoFrame>> original_frames_;
  543. std::queue<scoped_refptr<media::DecoderBuffer>> decode_buffers_;
  544. };
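// Sketch of how the validator is used (based on the code below): encoded
// bitstream buffers are fed to an FFmpegVideoDecoder via AddDecodeBuffer(),
// and each decoded frame is compared against the corresponding original frame
// in VerifyOutputFrame(); the mean absolute per-pixel difference must stay
// within kDecodeSimilarityThreshold.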
  545. VideoFrameQualityValidator::VideoFrameQualityValidator(
  546. const media::VideoCodecProfile profile,
  547. const base::Closure& flush_complete_cb,
  548. const base::Closure& decode_error_cb)
  549. : profile_(profile),
  550. decoder_(new media::FFmpegVideoDecoder(base::MessageLoop::current()
  551. ->task_runner())),
  552. decode_cb_(base::Bind(&VideoFrameQualityValidator::DecodeDone,
  553. base::Unretained(this))),
  554. eos_decode_cb_(base::Bind(&VideoFrameQualityValidator::FlushDone,
  555. base::Unretained(this))),
  556. flush_complete_cb_(flush_complete_cb),
  557. decode_error_cb_(decode_error_cb),
  558. decoder_state_(UNINITIALIZED) {
  559. // Allow decoding of individual NALU. Entire frames are required by default.
  560. decoder_->set_decode_nalus(true);
  561. }
  562. void VideoFrameQualityValidator::Initialize(const gfx::Size& coded_size,
  563. const gfx::Rect& visible_size) {
  564. media::FFmpegGlue::InitializeFFmpeg();
  565. gfx::Size natural_size(visible_size.size());
  566. // The default output format of ffmpeg video decoder is YV12.
  567. media::VideoDecoderConfig config;
  568. if (IsVP8(profile_))
  569. config.Initialize(media::kCodecVP8, media::VP8PROFILE_ANY, kInputFormat,
  570. media::COLOR_SPACE_UNSPECIFIED, coded_size, visible_size,
  571. natural_size, NULL, 0, false);
  572. else if (IsH264(profile_))
  573. config.Initialize(media::kCodecH264, media::H264PROFILE_MAIN, kInputFormat,
  574. media::COLOR_SPACE_UNSPECIFIED, coded_size, visible_size,
  575. natural_size, NULL, 0, false);
  576. else
  577. LOG_ASSERT(0) << "Invalid profile " << profile_;
  578. decoder_->Initialize(
  579. config, false, base::Bind(&VideoFrameQualityValidator::InitializeCB,
  580. base::Unretained(this)),
  581. base::Bind(&VideoFrameQualityValidator::VerifyOutputFrame,
  582. base::Unretained(this)));
  583. }
  584. void VideoFrameQualityValidator::InitializeCB(bool success) {
  585. if (success) {
  586. decoder_state_ = INITIALIZED;
  587. Decode();
  588. } else {
  589. decoder_state_ = ERROR;
  590. if (IsH264(profile_))
  591. LOG(ERROR) << "Chromium does not support H264 decode. Try Chrome.";
  592. FAIL() << "Decoder initialization error";
  593. decode_error_cb_.Run();
  594. }
  595. }
  596. void VideoFrameQualityValidator::AddOriginalFrame(
  597. scoped_refptr<media::VideoFrame> frame) {
  598. original_frames_.push(frame);
  599. }
  600. void VideoFrameQualityValidator::DecodeDone(
  601. media::VideoDecoder::Status status) {
  602. if (status == media::VideoDecoder::kOk) {
  603. decoder_state_ = INITIALIZED;
  604. Decode();
  605. } else {
  606. decoder_state_ = ERROR;
  607. FAIL() << "Unexpected decode status = " << status << ". Stop decoding.";
  608. decode_error_cb_.Run();
  609. }
  610. }
  611. void VideoFrameQualityValidator::FlushDone(media::VideoDecoder::Status status) {
  612. flush_complete_cb_.Run();
  613. }
  614. void VideoFrameQualityValidator::Flush() {
  615. if (decoder_state_ != ERROR) {
  616. decode_buffers_.push(media::DecoderBuffer::CreateEOSBuffer());
  617. Decode();
  618. }
  619. }
  620. void VideoFrameQualityValidator::AddDecodeBuffer(
  621. const scoped_refptr<media::DecoderBuffer>& buffer) {
  622. if (decoder_state_ != ERROR) {
  623. decode_buffers_.push(buffer);
  624. Decode();
  625. }
  626. }
  627. void VideoFrameQualityValidator::Decode() {
  628. if (decoder_state_ == INITIALIZED && !decode_buffers_.empty()) {
  629. scoped_refptr<media::DecoderBuffer> next_buffer = decode_buffers_.front();
  630. decode_buffers_.pop();
  631. decoder_state_ = DECODING;
  632. if (next_buffer->end_of_stream())
  633. decoder_->Decode(next_buffer, eos_decode_cb_);
  634. else
  635. decoder_->Decode(next_buffer, decode_cb_);
  636. }
  637. }
  638. void VideoFrameQualityValidator::VerifyOutputFrame(
  639. const scoped_refptr<media::VideoFrame>& output_frame) {
  640. scoped_refptr<media::VideoFrame> original_frame = original_frames_.front();
  641. original_frames_.pop();
  642. gfx::Size visible_size = original_frame->visible_rect().size();
  643. int planes[] = {media::VideoFrame::kYPlane, media::VideoFrame::kUPlane,
  644. media::VideoFrame::kVPlane};
  645. double difference = 0;
  646. for (int plane : planes) {
  647. uint8_t* original_plane = original_frame->data(plane);
  648. uint8_t* output_plane = output_frame->data(plane);
  649. size_t rows =
  650. media::VideoFrame::Rows(plane, kInputFormat, visible_size.height());
  651. size_t columns =
  652. media::VideoFrame::Columns(plane, kInputFormat, visible_size.width());
  653. size_t stride = original_frame->stride(plane);
  654. for (size_t i = 0; i < rows; i++)
  655. for (size_t j = 0; j < columns; j++)
  656. difference += std::abs(original_plane[stride * i + j] -
  657. output_plane[stride * i + j]);
  658. }
  659. // Divide the difference by the size of frame.
  660. difference /= media::VideoFrame::AllocationSize(kInputFormat, visible_size);
  661. EXPECT_TRUE(difference <= kDecodeSimilarityThreshold)
  662. << "differrence = " << difference << " > decode similarity threshold";
  663. }
  664. class VEAClient : public VideoEncodeAccelerator::Client {
  665. public:
  666. VEAClient(TestStream* test_stream,
  667. ClientStateNotification<ClientState>* note,
  668. bool save_to_file,
  669. unsigned int keyframe_period,
  670. bool force_bitrate,
  671. bool test_perf,
  672. bool mid_stream_bitrate_switch,
  673. bool mid_stream_framerate_switch,
  674. bool verify_output);
  675. ~VEAClient() override;
  676. void CreateEncoder();
  677. void DestroyEncoder();
  678. // VideoEncodeAccelerator::Client implementation.
  679. void RequireBitstreamBuffers(unsigned int input_count,
  680. const gfx::Size& input_coded_size,
  681. size_t output_buffer_size) override;
  682. void BitstreamBufferReady(int32 bitstream_buffer_id,
  683. size_t payload_size,
  684. bool key_frame) override;
  685. void NotifyError(VideoEncodeAccelerator::Error error) override;
  686. private:
  687. bool has_encoder() { return encoder_.get(); }
  688. // Return the number of encoded frames per second.
  689. double frames_per_second();
  690. scoped_ptr<media::VideoEncodeAccelerator> CreateFakeVEA();
  691. scoped_ptr<media::VideoEncodeAccelerator> CreateV4L2VEA();
  692. scoped_ptr<media::VideoEncodeAccelerator> CreateVaapiVEA();
  693. void SetState(ClientState new_state);
  694. // Set current stream parameters to given |bitrate| at |framerate|.
  695. void SetStreamParameters(unsigned int bitrate, unsigned int framerate);
  696. // Called when encoder is done with a VideoFrame.
  697. void InputNoLongerNeededCallback(int32 input_id);
  698. // Feed the encoder with one input frame.
  699. void FeedEncoderWithOneInput();
  700. // Provide the encoder with a new output buffer.
  701. void FeedEncoderWithOutput(base::SharedMemory* shm);
  702. // Called on finding a complete frame (with |keyframe| set to true for
  703. // keyframes) in the stream, to perform codec-independent, per-frame checks
  704. // and accounting. Returns false once we have collected all frames we needed.
  705. bool HandleEncodedFrame(bool keyframe);
  706. // Verify the minimum FPS requirement.
  707. void VerifyMinFPS();
  708. // Verify that stream bitrate has been close to current_requested_bitrate_,
  709. // assuming current_framerate_ since the last time VerifyStreamProperties()
  710. // was called. Fail the test if |force_bitrate_| is true and the bitrate
  711. // is not within kBitrateTolerance.
  712. void VerifyStreamProperties();
  713. // Log the performance data.
  714. void LogPerf();
  715. // Write IVF file header to test_stream_->out_filename.
  716. void WriteIvfFileHeader();
  717. // Write an IVF frame header to test_stream_->out_filename.
  718. void WriteIvfFrameHeader(int frame_index, size_t frame_size);
  719. // Create and return a VideoFrame wrapping the data at |position| bytes in the
  720. // input stream.
  721. scoped_refptr<media::VideoFrame> CreateFrame(off_t position);
  722. // Prepare and return a frame wrapping the data at |position| bytes in the
  723. // input stream, ready to be sent to encoder.
  724. // The input frame id is returned in |input_id|.
  725. scoped_refptr<media::VideoFrame> PrepareInputFrame(off_t position,
  726. int32* input_id);
  727. // Update the parameters according to |mid_stream_bitrate_switch| and
  728. // |mid_stream_framerate_switch|.
  729. void UpdateTestStreamData(bool mid_stream_bitrate_switch,
  730. bool mid_stream_framerate_switch);
  731. // Callback function of the |input_timer_|.
  732. void OnInputTimer();
  733. // Called when the quality validator has decoded all the frames.
  734. void DecodeCompleted();
  735. // Called when the quality validator fails to decode a frame.
  736. void DecodeFailed();
  737. ClientState state_;
  738. scoped_ptr<VideoEncodeAccelerator> encoder_;
  739. TestStream* test_stream_;
  740. // Used to notify another thread about the state. VEAClient does not own this.
  741. ClientStateNotification<ClientState>* note_;
  742. // Ids assigned to VideoFrames.
  743. std::set<int32> inputs_at_client_;
  744. int32 next_input_id_;
  745. // Encode start time of all encoded frames. The position in the vector is the
  746. // frame input id.
  747. std::vector<base::TimeTicks> encode_start_time_;
  748. // The encode latencies of all encoded frames. We define encode latency as the
  749. // time delay from input of each VideoFrame (VEA::Encode()) to output of the
  750. // corresponding BitstreamBuffer (VEA::Client::BitstreamBufferReady()).
  751. std::vector<base::TimeDelta> encode_latencies_;
  752. // Ids for output BitstreamBuffers.
  753. typedef std::map<int32, base::SharedMemory*> IdToSHM;
  754. ScopedVector<base::SharedMemory> output_shms_;
  755. IdToSHM output_buffers_at_client_;
  756. int32 next_output_buffer_id_;
  757. // Current offset into input stream.
  758. off_t pos_in_input_stream_;
  759. gfx::Size input_coded_size_;
  760. // Requested by encoder.
  761. unsigned int num_required_input_buffers_;
  762. size_t output_buffer_size_;
  763. // Number of frames to encode. This may differ from the number of frames in
  764. // stream if we need more frames for bitrate tests.
  765. unsigned int num_frames_to_encode_;
  766. // Number of encoded frames we've got from the encoder thus far.
  767. unsigned int num_encoded_frames_;
  768. // Frames since last bitrate verification.
  769. unsigned int num_frames_since_last_check_;
  770. // True if received a keyframe while processing current bitstream buffer.
  771. bool seen_keyframe_in_this_buffer_;
  772. // True if we are to save the encoded stream to a file.
  773. bool save_to_file_;
  774. // Request a keyframe every keyframe_period_ frames.
  775. const unsigned int keyframe_period_;
  776. // Number of keyframes requested by now.
  777. unsigned int num_keyframes_requested_;
  778. // Next keyframe expected before next_keyframe_at_ + kMaxKeyframeDelay.
  779. unsigned int next_keyframe_at_;
  780. // True if we are asking encoder for a particular bitrate.
  781. bool force_bitrate_;
  782. // Current requested bitrate.
  783. unsigned int current_requested_bitrate_;
  784. // Current expected framerate.
  785. unsigned int current_framerate_;
  786. // Byte size of the encoded stream (for bitrate calculation) since last
  787. // time we checked bitrate.
  788. size_t encoded_stream_size_since_last_check_;
  789. // If true, verify performance at the end of the test.
  790. bool test_perf_;
  791. // Check the output frame quality of the encoder.
  792. bool verify_output_;
  793. // Used to perform codec-specific sanity checks on the stream.
  794. scoped_ptr<StreamValidator> stream_validator_;
  795. // Used to validate the encoded frame quality.
  796. scoped_ptr<VideoFrameQualityValidator> quality_validator_;
  797. // The time when the first frame is submitted for encode.
  798. base::TimeTicks first_frame_start_time_;
  799. // The time when the last encoded frame is ready.
  800. base::TimeTicks last_frame_ready_time_;
  801. // All methods of this class should be run on the same thread.
  802. base::ThreadChecker thread_checker_;
  803. // Requested bitrate in bits per second.
  804. unsigned int requested_bitrate_;
  805. // Requested initial framerate.
  806. unsigned int requested_framerate_;
  807. // Bitrate to switch to in the middle of the stream.
  808. unsigned int requested_subsequent_bitrate_;
  809. // Framerate to switch to in the middle of the stream.
  810. unsigned int requested_subsequent_framerate_;
  811. // The timer used to feed the encoder with the input frames.
  812. scoped_ptr<base::RepeatingTimer> input_timer_;
  813. };
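// Rough control flow of a VEAClient (sketch, as implemented below):
// CreateEncoder() picks the first available VEA (fake, V4L2 or VA-API) and
// initializes it; RequireBitstreamBuffers() maps the aligned input file and
// hands output buffers to the encoder; frames are then fed via
// FeedEncoderWithOneInput() and results arrive in BitstreamBufferReady(),
// where they are validated and optionally written to |out_filename|.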
  814. VEAClient::VEAClient(TestStream* test_stream,
  815. ClientStateNotification<ClientState>* note,
  816. bool save_to_file,
  817. unsigned int keyframe_period,
  818. bool force_bitrate,
  819. bool test_perf,
  820. bool mid_stream_bitrate_switch,
  821. bool mid_stream_framerate_switch,
  822. bool verify_output)
  823. : state_(CS_CREATED),
  824. test_stream_(test_stream),
  825. note_(note),
  826. next_input_id_(0),
  827. next_output_buffer_id_(0),
  828. pos_in_input_stream_(0),
  829. num_required_input_buffers_(0),
  830. output_buffer_size_(0),
  831. num_frames_to_encode_(0),
  832. num_encoded_frames_(0),
  833. num_frames_since_last_check_(0),
  834. seen_keyframe_in_this_buffer_(false),
  835. save_to_file_(save_to_file),
  836. keyframe_period_(keyframe_period),
  837. num_keyframes_requested_(0),
  838. next_keyframe_at_(0),
  839. force_bitrate_(force_bitrate),
  840. current_requested_bitrate_(0),
  841. current_framerate_(0),
  842. encoded_stream_size_since_last_check_(0),
  843. test_perf_(test_perf),
  844. verify_output_(verify_output),
  845. requested_bitrate_(0),
  846. requested_framerate_(0),
  847. requested_subsequent_bitrate_(0),
  848. requested_subsequent_framerate_(0) {
  849. if (keyframe_period_)
  850. LOG_ASSERT(kMaxKeyframeDelay < keyframe_period_);
  851. // Fake encoder produces an invalid stream, so skip validating it.
  852. if (!g_fake_encoder) {
  853. stream_validator_ = StreamValidator::Create(
  854. test_stream_->requested_profile,
  855. base::Bind(&VEAClient::HandleEncodedFrame, base::Unretained(this)));
  856. CHECK(stream_validator_);
  857. }
  858. if (save_to_file_) {
  859. LOG_ASSERT(!test_stream_->out_filename.empty());
  860. base::FilePath out_filename(test_stream_->out_filename);
  861. // This creates or truncates out_filename.
  862. // Without it, AppendToFile() will not work.
  863. EXPECT_EQ(0, base::WriteFile(out_filename, NULL, 0));
  864. }
  865. // Initialize the parameters of the test streams.
  866. UpdateTestStreamData(mid_stream_bitrate_switch, mid_stream_framerate_switch);
  867. thread_checker_.DetachFromThread();
  868. }
  869. VEAClient::~VEAClient() { LOG_ASSERT(!has_encoder()); }
  870. scoped_ptr<media::VideoEncodeAccelerator> VEAClient::CreateFakeVEA() {
  871. scoped_ptr<media::VideoEncodeAccelerator> encoder;
  872. if (g_fake_encoder) {
  873. encoder.reset(new media::FakeVideoEncodeAccelerator(
  874. scoped_refptr<base::SingleThreadTaskRunner>(
  875. base::ThreadTaskRunnerHandle::Get())));
  876. }
  877. return encoder.Pass();
  878. }
  879. scoped_ptr<media::VideoEncodeAccelerator> VEAClient::CreateV4L2VEA() {
  880. scoped_ptr<media::VideoEncodeAccelerator> encoder;
  881. #if defined(OS_CHROMEOS) && (defined(ARCH_CPU_ARMEL) || \
  882. (defined(USE_OZONE) && defined(USE_V4L2_CODEC)))
  883. scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kEncoder);
  884. if (device)
  885. encoder.reset(new V4L2VideoEncodeAccelerator(device));
  886. #endif
  887. return encoder.Pass();
  888. }
  889. scoped_ptr<media::VideoEncodeAccelerator> VEAClient::CreateVaapiVEA() {
  890. scoped_ptr<media::VideoEncodeAccelerator> encoder;
  891. #if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
  892. encoder.reset(new VaapiVideoEncodeAccelerator());
  893. #endif
  894. return encoder.Pass();
  895. }
  896. void VEAClient::CreateEncoder() {
  897. DCHECK(thread_checker_.CalledOnValidThread());
  898. LOG_ASSERT(!has_encoder());
  899. scoped_ptr<media::VideoEncodeAccelerator> encoders[] = {
  900. CreateFakeVEA(),
  901. CreateV4L2VEA(),
  902. CreateVaapiVEA()
  903. };
  904. DVLOG(1) << "Profile: " << test_stream_->requested_profile
  905. << ", initial bitrate: " << requested_bitrate_;
  906. for (size_t i = 0; i < arraysize(encoders); ++i) {
  907. if (!encoders[i])
  908. continue;
  909. encoder_ = encoders[i].Pass();
  910. SetState(CS_ENCODER_SET);
  911. if (encoder_->Initialize(kInputFormat,
  912. test_stream_->visible_size,
  913. test_stream_->requested_profile,
  914. requested_bitrate_,
  915. this)) {
  916. SetStreamParameters(requested_bitrate_, requested_framerate_);
  917. SetState(CS_INITIALIZED);
  918. if (verify_output_ && !g_fake_encoder)
  919. quality_validator_.reset(new VideoFrameQualityValidator(
  920. test_stream_->requested_profile,
  921. base::Bind(&VEAClient::DecodeCompleted, base::Unretained(this)),
  922. base::Bind(&VEAClient::DecodeFailed, base::Unretained(this))));
  923. return;
  924. }
  925. }
  926. encoder_.reset();
  927. LOG(ERROR) << "VideoEncodeAccelerator::Initialize() failed";
  928. SetState(CS_ERROR);
  929. }
  930. void VEAClient::DecodeCompleted() {
  931. SetState(CS_VALIDATED);
  932. }
  933. void VEAClient::DecodeFailed() {
  934. SetState(CS_ERROR);
  935. }
  936. void VEAClient::DestroyEncoder() {
  937. DCHECK(thread_checker_.CalledOnValidThread());
  938. if (!has_encoder())
  939. return;
  940. // Clear the objects that should be destroyed on the same thread as creation.
  941. encoder_.reset();
  942. input_timer_.reset();
  943. quality_validator_.reset();
  944. }
  945. void VEAClient::UpdateTestStreamData(bool mid_stream_bitrate_switch,
  946. bool mid_stream_framerate_switch) {
  947. // Use defaults for bitrate/framerate if they are not provided.
  948. if (test_stream_->requested_bitrate == 0)
  949. requested_bitrate_ = kDefaultBitrate;
  950. else
  951. requested_bitrate_ = test_stream_->requested_bitrate;
  952. if (test_stream_->requested_framerate == 0)
  953. requested_framerate_ = kDefaultFramerate;
  954. else
  955. requested_framerate_ = test_stream_->requested_framerate;
  956. // If bitrate/framerate switch is requested, use the subsequent values if
  957. // provided, or, if not, calculate them from their initial values using
  958. // the default ratios.
  959. // Otherwise, if a switch is not requested, keep the initial values.
  960. if (mid_stream_bitrate_switch) {
  961. if (test_stream_->requested_subsequent_bitrate == 0)
  962. requested_subsequent_bitrate_ =
  963. requested_bitrate_ * kDefaultSubsequentBitrateRatio;
  964. else
  965. requested_subsequent_bitrate_ =
  966. test_stream_->requested_subsequent_bitrate;
  967. } else {
  968. requested_subsequent_bitrate_ = requested_bitrate_;
  969. }
  970. if (requested_subsequent_bitrate_ == 0)
  971. requested_subsequent_bitrate_ = 1;
  972. if (mid_stream_framerate_switch) {
  973. if (test_stream_->requested_subsequent_framerate == 0)
  974. requested_subsequent_framerate_ =
  975. requested_framerate_ * kDefaultSubsequentFramerateRatio;
  976. else
  977. requested_subsequent_framerate_ =
  978. test_stream_->requested_subsequent_framerate;
  979. } else {
  980. requested_subsequent_framerate_ = requested_framerate_;
  981. }
  982. if (requested_subsequent_framerate_ == 0)
  983. requested_subsequent_framerate_ = 1;
  984. }
  985. double VEAClient::frames_per_second() {
  986. LOG_ASSERT(num_encoded_frames_ != 0UL);
  987. base::TimeDelta duration = last_frame_ready_time_ - first_frame_start_time_;
  988. return num_encoded_frames_ / duration.InSecondsF();
  989. }
  990. void VEAClient::RequireBitstreamBuffers(unsigned int input_count,
  991. const gfx::Size& input_coded_size,
  992. size_t output_size) {
  993. DCHECK(thread_checker_.CalledOnValidThread());
  994. ASSERT_EQ(state_, CS_INITIALIZED);
  995. SetState(CS_ENCODING);
  996. if (quality_validator_)
  997. quality_validator_->Initialize(input_coded_size,
  998. gfx::Rect(test_stream_->visible_size));
  999. CreateAlignedInputStreamFile(input_coded_size, test_stream_);
  1000. num_frames_to_encode_ = test_stream_->num_frames;
  1001. if (g_num_frames_to_encode > 0)
  1002. num_frames_to_encode_ = g_num_frames_to_encode;
  1003. // We may need to loop over the stream more than once if more frames than
  1004. // provided are required for bitrate tests.
  1005. if (force_bitrate_ && num_frames_to_encode_ < kMinFramesForBitrateTests) {
  1006. DVLOG(1) << "Stream too short for bitrate test ("
  1007. << test_stream_->num_frames << " frames), will loop it to reach "
  1008. << kMinFramesForBitrateTests << " frames";
  1009. num_frames_to_encode_ = kMinFramesForBitrateTests;
  1010. }
  1011. if (save_to_file_ && IsVP8(test_stream_->requested_profile))
  1012. WriteIvfFileHeader();
  1013. input_coded_size_ = input_coded_size;
  1014. num_required_input_buffers_ = input_count;
  1015. ASSERT_GT(num_required_input_buffers_, 0UL);
  1016. output_buffer_size_ = output_size;
  1017. ASSERT_GT(output_buffer_size_, 0UL);
  1018. for (unsigned int i = 0; i < kNumOutputBuffers; ++i) {
  1019. base::SharedMemory* shm = new base::SharedMemory();
  1020. LOG_ASSERT(shm->CreateAndMapAnonymous(output_buffer_size_));
  1021. output_shms_.push_back(shm);
  1022. FeedEncoderWithOutput(shm);
  1023. }
  1024. if (g_env->run_at_fps()) {
  1025. input_timer_.reset(new base::RepeatingTimer());
  1026. input_timer_->Start(
  1027. FROM_HERE, base::TimeDelta::FromSeconds(1) / current_framerate_,
  1028. base::Bind(&VEAClient::OnInputTimer, base::Unretained(this)));
  1029. } else {
  1030. while (inputs_at_client_.size() <
  1031. num_required_input_buffers_ + kNumExtraInputFrames)
  1032. FeedEncoderWithOneInput();
  1033. }
  1034. }
  1035. void VEAClient::BitstreamBufferReady(int32 bitstream_buffer_id,
  1036. size_t payload_size,
  1037. bool key_frame) {
  1038. DCHECK(thread_checker_.CalledOnValidThread());
  1039. ASSERT_LE(payload_size, output_buffer_size_);
  1040. IdToSHM::iterator it = output_buffers_at_client_.find(bitstream_buffer_id);
  1041. ASSERT_NE(it, output_buffers_at_client_.end());
  1042. base::SharedMemory* shm = it->second;
  1043. output_buffers_at_client_.erase(it);
  1044. if (state_ == CS_FINISHED || state_ == CS_VALIDATED)
  1045. return;
  1046. encoded_stream_size_since_last_check_ += payload_size;
  1047. const uint8* stream_ptr = static_cast<const uint8*>(shm->memory());
  1048. if (payload_size > 0) {
  1049. if (stream_validator_) {
  1050. stream_validator_->ProcessStreamBuffer(stream_ptr, payload_size);
  1051. } else {
  1052. HandleEncodedFrame(key_frame);
  1053. }
  1054. if (quality_validator_) {
  1055. scoped_refptr<media::DecoderBuffer> buffer(media::DecoderBuffer::CopyFrom(
  1056. reinterpret_cast<const uint8*>(shm->memory()),
  1057. static_cast<int>(payload_size)));
  1058. quality_validator_->AddDecodeBuffer(buffer);
  1059. // Insert EOS buffer to flush the decoder.
  1060. if (num_encoded_frames_ == num_frames_to_encode_)
  1061. quality_validator_->Flush();
  1062. }
  1063. if (save_to_file_) {
  1064. if (IsVP8(test_stream_->requested_profile))
  1065. WriteIvfFrameHeader(num_encoded_frames_ - 1, payload_size);
  1066. EXPECT_TRUE(base::AppendToFile(
  1067. base::FilePath::FromUTF8Unsafe(test_stream_->out_filename),
  1068. static_cast<char*>(shm->memory()),
  1069. base::checked_cast<int>(payload_size)));
  1070. }
  1071. }
  1072. EXPECT_EQ(key_frame, seen_keyframe_in_this_buffer_);
  1073. seen_keyframe_in_this_buffer_ = false;
  1074. FeedEncoderWithOutput(shm);
  1075. }
  1076. void VEAClient::NotifyError(VideoEncodeAccelerator::Error error) {
  1077. DCHECK(thread_checker_.CalledOnValidThread());
  1078. SetState(CS_ERROR);
  1079. }
  1080. void VEAClient::SetState(ClientState new_state) {
  1081. DVLOG(4) << "Changing state " << state_ << "->" << new_state;
  1082. note_->Notify(new_state);
  1083. state_ = new_state;
  1084. }
  1085. void VEAClient::SetStreamParameters(unsigned int bitrate,
  1086. unsigned int framerate) {
  1087. current_requested_bitrate_ = bitrate;
  1088. current_framerate_ = framerate;
  1089. LOG_ASSERT(current_requested_bitrate_ > 0UL);
  1090. LOG_ASSERT(current_framerate_ > 0UL);
  1091. encoder_->RequestEncodingParametersChange(current_requested_bitrate_,
  1092. current_framerate_);
  1093. DVLOG(1) << "Switched parameters to " << current_requested_bitrate_
  1094. << " bps @ " << current_framerate_ << " FPS";
  1095. }
  1096. void VEAClient::InputNoLongerNeededCallback(int32 input_id) {
  1097. std::set<int32>::iterator it = inputs_at_client_.find(input_id);
  1098. ASSERT_NE(it, inputs_at_client_.end());
  1099. inputs_at_client_.erase(it);
  1100. if (!g_env->run_at_fps())
  1101. FeedEncoderWithOneInput();
  1102. }
  1103. scoped_refptr<media::VideoFrame> VEAClient::CreateFrame(off_t position) {
  1104. uint8* frame_data_y = const_cast<uint8*>(
  1105. test_stream_->mapped_aligned_in_file.data() + position);
  1106. uint8* frame_data_u = frame_data_y + test_stream_->aligned_plane_size[0];
  1107. uint8* frame_data_v = frame_data_u + test_stream_->aligned_plane_size[1];
  1108. CHECK_GT(current_framerate_, 0U);
  1109. return media::VideoFrame::WrapExternalYuvData(
  1110. kInputFormat, input_coded_size_, gfx::Rect(test_stream_->visible_size),
  1111. test_stream_->visible_size, input_coded_size_.width(),
  1112. input_coded_size_.width() / 2, input_coded_size_.width() / 2,
  1113. frame_data_y, frame_data_u, frame_data_v,
  1114. base::TimeDelta().FromMilliseconds(next_input_id_ *
  1115. base::Time::kMillisecondsPerSecond /
  1116. current_framerate_));
  1117. }
  1118. scoped_refptr<media::VideoFrame> VEAClient::PrepareInputFrame(off_t position,
  1119. int32* input_id) {
  1120. CHECK_LE(position + test_stream_->aligned_buffer_size,
  1121. test_stream_->mapped_aligned_in_file.length());
  1122. scoped_refptr<media::VideoFrame> frame = CreateFrame(position);
  1123. frame->AddDestructionObserver(
  1124. media::BindToCurrentLoop(
  1125. base::Bind(&VEAClient::InputNoLongerNeededCallback,
  1126. base::Unretained(this),
  1127. next_input_id_)));
  1128. LOG_ASSERT(inputs_at_client_.insert(next_input_id_).second);
  1129. *input_id = next_input_id_++;
  1130. return frame;
  1131. }
  1132. void VEAClient::OnInputTimer() {
  1133. if (!has_encoder() || state_ != CS_ENCODING)
  1134. input_timer_.reset();
  1135. else if (inputs_at_client_.size() <
  1136. num_required_input_buffers_ + kNumExtraInputFrames)
  1137. FeedEncoderWithOneInput();
  1138. el