// Source: CSipSimple/jni/webrtc/sources/video_engine/vie_capturer.cc
// Mirror: https://bitbucket.org/bohlooli/csipsimple
// Possible license(s): LGPL-2.1, BSD-3-Clause, LGPL-3.0, GPL-3.0, GPL-2.0
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
#include "video_engine/vie_capturer.h"

#include <cassert>
#include <cstring>

#include "modules/interface/module_common_types.h"
#include "modules/utility/interface/process_thread.h"
#include "modules/video_capture/main/interface/video_capture_factory.h"
#include "modules/video_processing/main/interface/video_processing.h"
#include "modules/video_render/main/interface/video_render_defines.h"
#include "system_wrappers/interface/critical_section_wrapper.h"
#include "system_wrappers/interface/event_wrapper.h"
#include "system_wrappers/interface/thread_wrapper.h"
#include "system_wrappers/interface/trace.h"
#include "video_engine/include/vie_image_process.h"
#include "video_engine/vie_defines.h"
#include "video_engine/vie_encoder.h"
  23. namespace webrtc {
  24. const int kThreadWaitTimeMs = 100;
  25. const int kMaxDeliverWaitTime = 500;
  26. ViECapturer::ViECapturer(int capture_id,
  27. int engine_id,
  28. ProcessThread& module_process_thread)
  29. : ViEFrameProviderBase(capture_id, engine_id),
  30. capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
  31. deliver_cs_(CriticalSectionWrapper::CreateCriticalSection()),
  32. capture_module_(NULL),
  33. external_capture_module_(NULL),
  34. module_process_thread_(module_process_thread),
  35. capture_id_(capture_id),
  36. capture_thread_(*ThreadWrapper::CreateThread(ViECaptureThreadFunction,
  37. this, kHighPriority,
  38. "ViECaptureThread")),
  39. capture_event_(*EventWrapper::Create()),
  40. deliver_event_(*EventWrapper::Create()),
  41. effect_filter_(NULL),
  42. image_proc_module_(NULL),
  43. image_proc_module_ref_counter_(0),
  44. deflicker_frame_stats_(NULL),
  45. brightness_frame_stats_(NULL),
  46. current_brightness_level_(Normal),
  47. reported_brightness_level_(Normal),
  48. denoising_enabled_(false),
  49. observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
  50. observer_(NULL),
  51. encoding_cs_(CriticalSectionWrapper::CreateCriticalSection()),
  52. capture_encoder_(NULL),
  53. encode_complete_callback_(NULL),
  54. vie_encoder_(NULL),
  55. vcm_(NULL),
  56. decoder_initialized_(false) {
  57. WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, capture_id),
  58. "ViECapturer::ViECapturer(capture_id: %d, engine_id: %d)",
  59. capture_id, engine_id);
  60. unsigned int t_id = 0;
  61. if (capture_thread_.Start(t_id)) {
  62. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id, capture_id),
  63. "%s: thread started: %u", __FUNCTION__, t_id);
  64. } else {
  65. assert(false);
  66. }
  67. }
  68. ViECapturer::~ViECapturer() {
  69. WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, capture_id_),
  70. "ViECapturer::~ViECapturer() - capture_id: %d, engine_id: %d",
  71. capture_id_, engine_id_);
  72. // Stop the thread.
  73. deliver_cs_->Enter();
  74. capture_cs_->Enter();
  75. capture_thread_.SetNotAlive();
  76. capture_event_.Set();
  77. capture_cs_->Leave();
  78. deliver_cs_->Leave();
  79. provider_cs_->Enter();
  80. if (vie_encoder_) {
  81. vie_encoder_->DeRegisterExternalEncoder(codec_.plType);
  82. }
  83. provider_cs_->Leave();
  84. // Stop the camera input.
  85. if (capture_module_) {
  86. module_process_thread_.DeRegisterModule(capture_module_);
  87. capture_module_->DeRegisterCaptureDataCallback();
  88. capture_module_->Release();
  89. capture_module_ = NULL;
  90. }
  91. if (capture_thread_.Stop()) {
  92. // Thread stopped.
  93. delete &capture_thread_;
  94. delete &capture_event_;
  95. delete &deliver_event_;
  96. } else {
  97. assert(false);
  98. WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer,
  99. ViEId(engine_id_, capture_id_),
  100. "%s: Not able to stop capture thread for device %d, leaking",
  101. __FUNCTION__, capture_id_);
  102. }
  103. if (image_proc_module_) {
  104. VideoProcessingModule::Destroy(image_proc_module_);
  105. }
  106. if (deflicker_frame_stats_) {
  107. delete deflicker_frame_stats_;
  108. deflicker_frame_stats_ = NULL;
  109. }
  110. delete brightness_frame_stats_;
  111. if (vcm_) {
  112. delete vcm_;
  113. }
  114. }
  115. ViECapturer* ViECapturer::CreateViECapture(
  116. int capture_id,
  117. int engine_id,
  118. VideoCaptureModule* capture_module,
  119. ProcessThread& module_process_thread) {
  120. ViECapturer* capture = new ViECapturer(capture_id, engine_id,
  121. module_process_thread);
  122. if (!capture || capture->Init(capture_module) != 0) {
  123. delete capture;
  124. capture = NULL;
  125. }
  126. return capture;
  127. }
  128. WebRtc_Word32 ViECapturer::Init(VideoCaptureModule* capture_module) {
  129. assert(capture_module_ == NULL);
  130. capture_module_ = capture_module;
  131. capture_module_->RegisterCaptureDataCallback(*this);
  132. capture_module_->AddRef();
  133. if (module_process_thread_.RegisterModule(capture_module_) != 0) {
  134. return -1;
  135. }
  136. return 0;
  137. }
  138. ViECapturer* ViECapturer::CreateViECapture(
  139. int capture_id,
  140. int engine_id,
  141. const char* device_unique_idUTF8,
  142. const WebRtc_UWord32 device_unique_idUTF8Length,
  143. ProcessThread& module_process_thread) {
  144. ViECapturer* capture = new ViECapturer(capture_id, engine_id,
  145. module_process_thread);
  146. if (!capture ||
  147. capture->Init(device_unique_idUTF8, device_unique_idUTF8Length) != 0) {
  148. delete capture;
  149. capture = NULL;
  150. }
  151. return capture;
  152. }
  153. WebRtc_Word32 ViECapturer::Init(
  154. const char* device_unique_idUTF8,
  155. const WebRtc_UWord32 device_unique_idUTF8Length) {
  156. assert(capture_module_ == NULL);
  157. if (device_unique_idUTF8 == NULL) {
  158. capture_module_ = VideoCaptureFactory::Create(
  159. ViEModuleId(engine_id_, capture_id_), external_capture_module_);
  160. } else {
  161. capture_module_ = VideoCaptureFactory::Create(
  162. ViEModuleId(engine_id_, capture_id_), device_unique_idUTF8);
  163. }
  164. if (!capture_module_) {
  165. return -1;
  166. }
  167. capture_module_->AddRef();
  168. capture_module_->RegisterCaptureDataCallback(*this);
  169. if (module_process_thread_.RegisterModule(capture_module_) != 0) {
  170. return -1;
  171. }
  172. return 0;
  173. }
  174. int ViECapturer::FrameCallbackChanged() {
  175. if (Started() && !EncoderActive() && !CaptureCapabilityFixed()) {
  176. // Reconfigure the camera if a new size is required and the capture device
  177. // does not provide encoded frames.
  178. int best_width;
  179. int best_height;
  180. int best_frame_rate;
  181. VideoCaptureCapability capture_settings;
  182. capture_module_->CaptureSettings(capture_settings);
  183. GetBestFormat(&best_width, &best_height, &best_frame_rate);
  184. if (best_width != 0 && best_height != 0 && best_frame_rate != 0) {
  185. if (best_width != capture_settings.width ||
  186. best_height != capture_settings.height ||
  187. best_frame_rate != capture_settings.maxFPS ||
  188. capture_settings.codecType != kVideoCodecUnknown) {
  189. Stop();
  190. Start(requested_capability_);
  191. }
  192. }
  193. }
  194. return 0;
  195. }
  196. WebRtc_Word32 ViECapturer::Start(const CaptureCapability& capture_capability) {
  197. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
  198. __FUNCTION__);
  199. int width;
  200. int height;
  201. int frame_rate;
  202. VideoCaptureCapability capability;
  203. requested_capability_ = capture_capability;
  204. if (EncoderActive()) {
  205. CriticalSectionScoped cs(encoding_cs_.get());
  206. capability.width = codec_.width;
  207. capability.height = codec_.height;
  208. capability.maxFPS = codec_.maxFramerate;
  209. capability.codecType = codec_.codecType;
  210. capability.rawType = kVideoI420;
  211. } else if (!CaptureCapabilityFixed()) {
  212. // Ask the observers for best size.
  213. GetBestFormat(&width, &height, &frame_rate);
  214. if (width == 0) {
  215. width = kViECaptureDefaultWidth;
  216. }
  217. if (height == 0) {
  218. height = kViECaptureDefaultHeight;
  219. }
  220. if (frame_rate == 0) {
  221. frame_rate = kViECaptureDefaultFramerate;
  222. }
  223. capability.height = height;
  224. capability.width = width;
  225. capability.maxFPS = frame_rate;
  226. capability.rawType = kVideoI420;
  227. capability.codecType = kVideoCodecUnknown;
  228. } else {
  229. // Width, height and type specified with call to Start, not set by
  230. // observers.
  231. capability.width = requested_capability_.width;
  232. capability.height = requested_capability_.height;
  233. capability.maxFPS = requested_capability_.maxFPS;
  234. capability.rawType = requested_capability_.rawType;
  235. capability.interlaced = requested_capability_.interlaced;
  236. }
  237. return capture_module_->StartCapture(capability);
  238. }
  239. WebRtc_Word32 ViECapturer::Stop() {
  240. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
  241. __FUNCTION__);
  242. requested_capability_ = CaptureCapability();
  243. return capture_module_->StopCapture();
  244. }
  245. bool ViECapturer::Started() {
  246. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
  247. __FUNCTION__);
  248. return capture_module_->CaptureStarted();
  249. }
  250. const char* ViECapturer::CurrentDeviceName() const {
  251. return capture_module_->CurrentDeviceName();
  252. }
  253. WebRtc_Word32 ViECapturer::SetCaptureDelay(WebRtc_Word32 delay_ms) {
  254. return capture_module_->SetCaptureDelay(delay_ms);
  255. }
  256. WebRtc_Word32 ViECapturer::SetRotateCapturedFrames(
  257. const RotateCapturedFrame rotation) {
  258. VideoCaptureRotation converted_rotation = kCameraRotate0;
  259. switch (rotation) {
  260. case RotateCapturedFrame_0:
  261. converted_rotation = kCameraRotate0;
  262. break;
  263. case RotateCapturedFrame_90:
  264. converted_rotation = kCameraRotate90;
  265. break;
  266. case RotateCapturedFrame_180:
  267. converted_rotation = kCameraRotate180;
  268. break;
  269. case RotateCapturedFrame_270:
  270. converted_rotation = kCameraRotate270;
  271. break;
  272. }
  273. return capture_module_->SetCaptureRotation(converted_rotation);
  274. }
  275. int ViECapturer::IncomingFrame(unsigned char* video_frame,
  276. unsigned int video_frame_length,
  277. uint16_t width,
  278. uint16_t height,
  279. RawVideoType video_type,
  280. unsigned long long capture_time) { // NOLINT
  281. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  282. "ExternalCapture::IncomingFrame width %d, height %d, "
  283. "capture_time %u", width, height, capture_time);
  284. if (!external_capture_module_) {
  285. return -1;
  286. }
  287. VideoCaptureCapability capability;
  288. capability.width = width;
  289. capability.height = height;
  290. capability.rawType = video_type;
  291. return external_capture_module_->IncomingFrame(video_frame,
  292. video_frame_length,
  293. capability, capture_time);
  294. }
  295. int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
  296. unsigned long long capture_time) { // NOLINT
  297. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  298. "ExternalCapture::IncomingFrame width %d, height %d, "
  299. " capture_time %u", video_frame.width, video_frame.height,
  300. capture_time);
  301. if (!external_capture_module_) {
  302. return -1;
  303. }
  304. VideoFrameI420 frame;
  305. frame.width = video_frame.width;
  306. frame.height = video_frame.height;
  307. frame.y_plane = video_frame.y_plane;
  308. frame.u_plane = video_frame.u_plane;
  309. frame.v_plane = video_frame.v_plane;
  310. frame.y_pitch = video_frame.y_pitch;
  311. frame.u_pitch = video_frame.u_pitch;
  312. frame.v_pitch = video_frame.v_pitch;
  313. return external_capture_module_->IncomingFrameI420(frame, capture_time);
  314. }
  315. void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 capture_id,
  316. VideoFrame& video_frame,
  317. VideoCodecType codec_type) {
  318. WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
  319. "%s(capture_id: %d)", __FUNCTION__, capture_id);
  320. CriticalSectionScoped cs(capture_cs_.get());
  321. // Make sure we render this frame earlier since we know the render time set
  322. // is slightly off since it's being set when the frame has been received from
  323. // the camera, and not when the camera actually captured the frame.
  324. video_frame.SetRenderTime(video_frame.RenderTimeMs() - FrameDelay());
  325. if (codec_type != kVideoCodecUnknown) {
  326. if (encoded_frame_.Length() != 0) {
  327. // The last encoded frame has not been sent yet. Need to wait.
  328. deliver_event_.Reset();
  329. WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, capture_id_),
  330. "%s(capture_id: %d) Last encoded frame not yet delivered.",
  331. __FUNCTION__, capture_id);
  332. capture_cs_->Leave();
  333. // Wait for the coded frame to be sent before unblocking this.
  334. deliver_event_.Wait(kMaxDeliverWaitTime);
  335. assert(encoded_frame_.Length() == 0);
  336. capture_cs_->Enter();
  337. }
  338. encoded_frame_.SwapFrame(video_frame);
  339. } else {
  340. captured_frame_.SwapFrame(video_frame);
  341. }
  342. capture_event_.Set();
  343. return;
  344. }
  345. void ViECapturer::OnCaptureDelayChanged(const WebRtc_Word32 id,
  346. const WebRtc_Word32 delay) {
  347. WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
  348. "%s(capture_id: %d) delay %d", __FUNCTION__, capture_id_,
  349. delay);
  350. // Deliver the network delay to all registered callbacks.
  351. ViEFrameProviderBase::SetFrameDelay(delay);
  352. CriticalSectionScoped cs(encoding_cs_.get());
  353. if (vie_encoder_) {
  354. vie_encoder_->DelayChanged(id, delay);
  355. }
  356. }
  357. WebRtc_Word32 ViECapturer::RegisterEffectFilter(
  358. ViEEffectFilter* effect_filter) {
  359. CriticalSectionScoped cs(deliver_cs_.get());
  360. if (!effect_filter) {
  361. if (!effect_filter_) {
  362. WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
  363. "%s: no effect filter added for capture device %d",
  364. __FUNCTION__, capture_id_);
  365. return -1;
  366. }
  367. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  368. "%s: deregister effect filter for device %d", __FUNCTION__,
  369. capture_id_);
  370. } else {
  371. if (effect_filter_) {
  372. WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
  373. "%s: effect filter already added for capture device %d",
  374. __FUNCTION__, capture_id_);
  375. return -1;
  376. }
  377. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  378. "%s: register effect filter for device %d", __FUNCTION__,
  379. capture_id_);
  380. }
  381. effect_filter_ = effect_filter;
  382. return 0;
  383. }
  384. WebRtc_Word32 ViECapturer::IncImageProcRefCount() {
  385. if (!image_proc_module_) {
  386. assert(image_proc_module_ref_counter_ == 0);
  387. image_proc_module_ = VideoProcessingModule::Create(
  388. ViEModuleId(engine_id_, capture_id_));
  389. if (!image_proc_module_) {
  390. WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
  391. "%s: could not create video processing module",
  392. __FUNCTION__);
  393. return -1;
  394. }
  395. }
  396. image_proc_module_ref_counter_++;
  397. return 0;
  398. }
  399. WebRtc_Word32 ViECapturer::DecImageProcRefCount() {
  400. image_proc_module_ref_counter_--;
  401. if (image_proc_module_ref_counter_ == 0) {
  402. // Destroy module.
  403. VideoProcessingModule::Destroy(image_proc_module_);
  404. image_proc_module_ = NULL;
  405. }
  406. return 0;
  407. }
  408. WebRtc_Word32 ViECapturer::EnableDenoising(bool enable) {
  409. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  410. "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
  411. capture_id_, enable);
  412. CriticalSectionScoped cs(deliver_cs_.get());
  413. if (enable) {
  414. if (denoising_enabled_) {
  415. // Already enabled, nothing need to be done.
  416. return 0;
  417. }
  418. denoising_enabled_ = true;
  419. if (IncImageProcRefCount() != 0) {
  420. return -1;
  421. }
  422. } else {
  423. if (denoising_enabled_ == false) {
  424. // Already disabled, nothing need to be done.
  425. return 0;
  426. }
  427. denoising_enabled_ = false;
  428. DecImageProcRefCount();
  429. }
  430. return 0;
  431. }
  432. WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable) {
  433. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  434. "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
  435. capture_id_, enable);
  436. CriticalSectionScoped cs(deliver_cs_.get());
  437. if (enable) {
  438. if (deflicker_frame_stats_) {
  439. WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
  440. "%s: deflickering already enabled", __FUNCTION__);
  441. return -1;
  442. }
  443. if (IncImageProcRefCount() != 0) {
  444. return -1;
  445. }
  446. deflicker_frame_stats_ = new VideoProcessingModule::FrameStats();
  447. } else {
  448. if (deflicker_frame_stats_ == NULL) {
  449. WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
  450. "%s: deflickering not enabled", __FUNCTION__);
  451. return -1;
  452. }
  453. DecImageProcRefCount();
  454. delete deflicker_frame_stats_;
  455. deflicker_frame_stats_ = NULL;
  456. }
  457. return 0;
  458. }
  459. WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable) {
  460. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  461. "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
  462. capture_id_, enable);
  463. CriticalSectionScoped cs(deliver_cs_.get());
  464. if (enable) {
  465. if (brightness_frame_stats_) {
  466. WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
  467. "%s: BrightnessAlarm already enabled", __FUNCTION__);
  468. return -1;
  469. }
  470. if (IncImageProcRefCount() != 0) {
  471. return -1;
  472. }
  473. brightness_frame_stats_ = new VideoProcessingModule::FrameStats();
  474. } else {
  475. DecImageProcRefCount();
  476. if (brightness_frame_stats_ == NULL) {
  477. WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
  478. "%s: deflickering not enabled", __FUNCTION__);
  479. return -1;
  480. }
  481. delete brightness_frame_stats_;
  482. brightness_frame_stats_ = NULL;
  483. }
  484. return 0;
  485. }
  486. bool ViECapturer::ViECaptureThreadFunction(void* obj) {
  487. return static_cast<ViECapturer*>(obj)->ViECaptureProcess();
  488. }
  489. bool ViECapturer::ViECaptureProcess() {
  490. if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) {
  491. deliver_cs_->Enter();
  492. if (captured_frame_.Length() > 0) {
  493. // New I420 frame.
  494. capture_cs_->Enter();
  495. deliver_frame_.SwapFrame(captured_frame_);
  496. captured_frame_.SetLength(0);
  497. capture_cs_->Leave();
  498. DeliverI420Frame(&deliver_frame_);
  499. }
  500. if (encoded_frame_.Length() > 0) {
  501. capture_cs_->Enter();
  502. deliver_frame_.SwapFrame(encoded_frame_);
  503. encoded_frame_.SetLength(0);
  504. deliver_event_.Set();
  505. capture_cs_->Leave();
  506. DeliverCodedFrame(&deliver_frame_);
  507. }
  508. deliver_cs_->Leave();
  509. if (current_brightness_level_ != reported_brightness_level_) {
  510. CriticalSectionScoped cs(observer_cs_.get());
  511. if (observer_) {
  512. observer_->BrightnessAlarm(id_, current_brightness_level_);
  513. reported_brightness_level_ = current_brightness_level_;
  514. }
  515. }
  516. }
  517. // We're done!
  518. return true;
  519. }
  520. void ViECapturer::DeliverI420Frame(VideoFrame* video_frame) {
  521. // Apply image enhancement and effect filter.
  522. if (deflicker_frame_stats_) {
  523. if (image_proc_module_->GetFrameStats(*deflicker_frame_stats_,
  524. *video_frame) == 0) {
  525. image_proc_module_->Deflickering(*video_frame, *deflicker_frame_stats_);
  526. } else {
  527. WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
  528. "%s: could not get frame stats for captured frame",
  529. __FUNCTION__);
  530. }
  531. }
  532. if (denoising_enabled_) {
  533. image_proc_module_->Denoising(*video_frame);
  534. }
  535. if (brightness_frame_stats_) {
  536. if (image_proc_module_->GetFrameStats(*brightness_frame_stats_,
  537. *video_frame) == 0) {
  538. WebRtc_Word32 brightness = image_proc_module_->BrightnessDetection(
  539. *video_frame, *brightness_frame_stats_);
  540. switch (brightness) {
  541. case VideoProcessingModule::kNoWarning:
  542. current_brightness_level_ = Normal;
  543. break;
  544. case VideoProcessingModule::kDarkWarning:
  545. current_brightness_level_ = Dark;
  546. break;
  547. case VideoProcessingModule::kBrightWarning:
  548. current_brightness_level_ = Bright;
  549. break;
  550. default:
  551. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  552. "%s: Brightness detection failed", __FUNCTION__);
  553. }
  554. }
  555. }
  556. if (effect_filter_) {
  557. effect_filter_->Transform(video_frame->Length(), video_frame->Buffer(),
  558. video_frame->TimeStamp(), video_frame->Width(),
  559. video_frame->Height());
  560. }
  561. // Deliver the captured frame to all observers (channels, renderer or file).
  562. ViEFrameProviderBase::DeliverFrame(video_frame);
  563. }
  564. void ViECapturer::DeliverCodedFrame(VideoFrame* video_frame) {
  565. if (encode_complete_callback_) {
  566. EncodedImage encoded_image(video_frame->Buffer(), video_frame->Length(),
  567. video_frame->Size());
  568. encoded_image._timeStamp =
  569. 90 * static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
  570. encode_complete_callback_->Encoded(encoded_image);
  571. }
  572. if (NumberOfRegisteredFrameCallbacks() > 0 && decoder_initialized_) {
  573. video_frame->Swap(decode_buffer_.payloadData, decode_buffer_.bufferSize,
  574. decode_buffer_.payloadSize);
  575. decode_buffer_.encodedHeight = video_frame->Height();
  576. decode_buffer_.encodedWidth = video_frame->Width();
  577. decode_buffer_.renderTimeMs = video_frame->RenderTimeMs();
  578. const int kMsToRtpTimestamp = 90;
  579. decode_buffer_.timeStamp = kMsToRtpTimestamp *
  580. static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
  581. decode_buffer_.payloadType = codec_.plType;
  582. vcm_->DecodeFromStorage(decode_buffer_);
  583. }
  584. }
  585. int ViECapturer::DeregisterFrameCallback(
  586. const ViEFrameCallback* callbackObject) {
  587. provider_cs_->Enter();
  588. if (callbackObject == vie_encoder_) {
  589. // Don't use this camera as encoder anymore. Need to tell the ViEEncoder.
  590. ViEEncoder* vie_encoder = NULL;
  591. vie_encoder = vie_encoder_;
  592. vie_encoder_ = NULL;
  593. provider_cs_->Leave();
  594. // Need to take this here in order to avoid deadlock with VCM. The reason is
  595. // that VCM will call ::Release and a deadlock can occur.
  596. deliver_cs_->Enter();
  597. vie_encoder->DeRegisterExternalEncoder(codec_.plType);
  598. deliver_cs_->Leave();
  599. return 0;
  600. }
  601. provider_cs_->Leave();
  602. return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject);
  603. }
  604. bool ViECapturer::IsFrameCallbackRegistered(
  605. const ViEFrameCallback* callbackObject) {
  606. CriticalSectionScoped cs(provider_cs_.get());
  607. if (callbackObject == vie_encoder_) {
  608. return true;
  609. }
  610. return ViEFrameProviderBase::IsFrameCallbackRegistered(callbackObject);
  611. }
  612. WebRtc_Word32 ViECapturer::PreEncodeToViEEncoder(const VideoCodec& codec,
  613. ViEEncoder& vie_encoder,
  614. WebRtc_Word32 vie_encoder_id) {
  615. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  616. "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
  617. if (vie_encoder_ && &vie_encoder != vie_encoder_) {
  618. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  619. "%s(capture_device_id: %d Capture device already encoding)",
  620. __FUNCTION__, capture_id_);
  621. return -1;
  622. }
  623. CriticalSectionScoped cs(encoding_cs_.get());
  624. VideoCaptureModule::VideoCaptureEncodeInterface* capture_encoder =
  625. capture_module_->GetEncodeInterface(codec);
  626. if (!capture_encoder) {
  627. // Encoding not supported?
  628. return -1;
  629. }
  630. capture_encoder_ = capture_encoder;
  631. // Create VCM module used for decoding frames if needed.
  632. if (!vcm_) {
  633. vcm_ = VideoCodingModule::Create(capture_id_);
  634. }
  635. if (vie_encoder.RegisterExternalEncoder(this, codec.plType) != 0) {
  636. return -1;
  637. }
  638. if (vie_encoder.SetEncoder(codec) != 0) {
  639. vie_encoder.DeRegisterExternalEncoder(codec.plType);
  640. return -1;
  641. }
  642. // Make sure the encoder is not an I420 observer.
  643. ViEFrameProviderBase::DeregisterFrameCallback(&vie_encoder);
  644. // Store the vie_encoder using this capture device.
  645. vie_encoder_ = &vie_encoder;
  646. vie_encoder_id_ = vie_encoder_id;
  647. memcpy(&codec_, &codec, sizeof(VideoCodec));
  648. return 0;
  649. }
  650. bool ViECapturer::EncoderActive() {
  651. return vie_encoder_ != NULL;
  652. }
  653. bool ViECapturer::CaptureCapabilityFixed() {
  654. return requested_capability_.width != 0 &&
  655. requested_capability_.height != 0 &&
  656. requested_capability_.maxFPS != 0;
  657. }
  658. WebRtc_Word32 ViECapturer::Version(char* version,
  659. WebRtc_Word32 length) const {
  660. return 0;
  661. }
  662. WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codec_settings,
  663. WebRtc_Word32 number_of_cores,
  664. WebRtc_UWord32 max_payload_size) {
  665. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  666. "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
  667. CriticalSectionScoped cs(encoding_cs_.get());
  668. if (!capture_encoder_ || !codec_settings) {
  669. return WEBRTC_VIDEO_CODEC_ERROR;
  670. }
  671. if (vcm_) {
  672. // Initialize VCM to be able to decode frames if needed.
  673. if (vcm_->InitializeReceiver() == 0) {
  674. if (vcm_->RegisterReceiveCallback(this) == 0) {
  675. if (vcm_->RegisterReceiveCodec(codec_settings, number_of_cores,
  676. false) == 0) {
  677. decoder_initialized_ = true;
  678. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  679. "%s(capture_device_id: %d) VCM Decoder initialized",
  680. __FUNCTION__, capture_id_);
  681. }
  682. }
  683. }
  684. }
  685. return capture_encoder_->ConfigureEncoder(*codec_settings, max_payload_size);
  686. }
  687. WebRtc_Word32 ViECapturer::Encode(
  688. const VideoFrame& input_image,
  689. const CodecSpecificInfo* codec_specific_info,
  690. const std::vector<VideoFrameType>* frame_types) {
  691. CriticalSectionScoped cs(encoding_cs_.get());
  692. if (!capture_encoder_) {
  693. return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  694. }
  695. if (frame_types == NULL) {
  696. return capture_encoder_->EncodeFrameType(kVideoFrameDelta);
  697. } else if ((*frame_types)[0] == kKeyFrame) {
  698. return capture_encoder_->EncodeFrameType(kVideoFrameKey);
  699. } else if ((*frame_types)[0] == kSkipFrame) {
  700. return capture_encoder_->EncodeFrameType(kFrameEmpty);
  701. }
  702. return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  703. }
  704. WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback(
  705. EncodedImageCallback* callback) {
  706. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  707. "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
  708. CriticalSectionScoped cs(deliver_cs_.get());
  709. if (!capture_encoder_) {
  710. return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  711. }
  712. encode_complete_callback_ = callback;
  713. return 0;
  714. }
  715. WebRtc_Word32 ViECapturer::Release() {
  716. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  717. "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
  718. {
  719. CriticalSectionScoped cs(deliver_cs_.get());
  720. encode_complete_callback_ = NULL;
  721. }
  722. {
  723. CriticalSectionScoped cs(encoding_cs_.get());
  724. decoder_initialized_ = false;
  725. codec_.codecType = kVideoCodecUnknown;
  726. // Reset the camera to output I420.
  727. capture_encoder_->ConfigureEncoder(codec_, 0);
  728. if (vie_encoder_) {
  729. // Need to add the encoder as an observer of I420.
  730. ViEFrameProviderBase::RegisterFrameCallback(vie_encoder_id_,
  731. vie_encoder_);
  732. }
  733. vie_encoder_ = NULL;
  734. }
  735. return 0;
  736. }
  737. // Should reset the capture device to the state it was in after the InitEncode
  738. // function. Current implementation do nothing.
  739. WebRtc_Word32 ViECapturer::Reset() {
  740. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  741. "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
  742. return 0;
  743. }
  744. WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packet_loss,
  745. int rtt) {
  746. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  747. "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
  748. CriticalSectionScoped cs(encoding_cs_.get());
  749. if (!capture_encoder_) {
  750. return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  751. }
  752. return capture_encoder_->SetChannelParameters(packet_loss, rtt);
  753. }
  754. WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 new_bit_rate,
  755. WebRtc_UWord32 frame_rate) {
  756. WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
  757. "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
  758. CriticalSectionScoped cs(encoding_cs_.get());
  759. if (!capture_encoder_) {
  760. return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  761. }
  762. return capture_encoder_->SetRates(new_bit_rate, frame_rate);
  763. }
  764. WebRtc_Word32 ViECapturer::FrameToRender(VideoFrame& video_frame) { // NOLINT
  765. deliver_cs_->Enter();
  766. DeliverI420Frame(&video_frame);
  767. deliver_cs_->Leave();
  768. return 0;
  769. }
  770. WebRtc_Word32 ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
  771. if (observer_) {
  772. WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
  773. "%s Observer already registered", __FUNCTION__, capture_id_);
  774. return -1;
  775. }
  776. if (capture_module_->RegisterCaptureCallback(*this) != 0) {
  777. return -1;
  778. }
  779. capture_module_->EnableFrameRateCallback(true);
  780. capture_module_->EnableNoPictureAlarm(true);
  781. observer_ = observer;
  782. return 0;
  783. }
  784. WebRtc_Word32 ViECapturer::DeRegisterObserver() {
  785. CriticalSectionScoped cs(observer_cs_.get());
  786. if (!observer_) {
  787. WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
  788. "%s No observer registered", __FUNCTION__, capture_id_);
  789. return -1;
  790. }
  791. capture_module_->EnableFrameRateCallback(false);
  792. capture_module_->EnableNoPictureAlarm(false);
  793. capture_module_->DeRegisterCaptureCallback();
  794. observer_ = NULL;
  795. return 0;
  796. }
  797. bool ViECapturer::IsObserverRegistered() {
  798. CriticalSectionScoped cs(observer_cs_.get());
  799. return observer_ != NULL;
  800. }
  801. void ViECapturer::OnCaptureFrameRate(const WebRtc_Word32 id,
  802. const WebRtc_UWord32 frame_rate) {
  803. WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
  804. "OnCaptureFrameRate %d", frame_rate);
  805. CriticalSectionScoped cs(observer_cs_.get());
  806. observer_->CapturedFrameRate(id_, (WebRtc_UWord8) frame_rate);
  807. }
  808. void ViECapturer::OnNoPictureAlarm(const WebRtc_Word32 id,
  809. const VideoCaptureAlarm alarm) {
  810. WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
  811. "OnNoPictureAlarm %d", alarm);
  812. CriticalSectionScoped cs(observer_cs_.get());
  813. CaptureAlarm vie_alarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;
  814. observer_->NoPictureAlarm(id, vie_alarm);
  815. }
  816. WebRtc_Word32 ViECapturer::SetCaptureDeviceImage(
  817. const VideoFrame& capture_device_image) {
  818. return capture_module_->StartSendImage(capture_device_image, 10);
  819. }
  820. } // namespace webrtc