
/libcamera/ExynosCameraHWInterface.cpp

https://bitbucket.org/sola/jcrom_hardware_samsung_slsi_exynos5
  1. /*
  2. **
  3. ** Copyright 2008, The Android Open Source Project
  4. ** Copyright 2010, Samsung Electronics Co. LTD
  5. **
  6. ** Licensed under the Apache License, Version 2.0 (the "License");
  7. ** you may not use this file except in compliance with the License.
  8. ** You may obtain a copy of the License at
  9. **
  10. ** http://www.apache.org/licenses/LICENSE-2.0
  11. **
  12. ** Unless required by applicable law or agreed to in writing, software
  13. ** distributed under the License is distributed on an "AS IS" BASIS,
  14. ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. ** See the License for the specific language governing permissions and
  16. ** limitations under the License.
  17. */
  18. /*!
  19. * \file ExynosCameraHWInterface.cpp
  20. * \brief source file for Android Camera HAL
  21. * \author thun.hwang(thun.hwang@samsung.com)
  22. * \date 2010/06/03
  23. *
  24. * <b>Revision History: </b>
  25. * - 2011/12/31 : thun.hwang(thun.hwang@samsung.com) \n
  26. * Initial version
  27. *
  28. * - 2012/02/01 : Sangwoo, Park(sw5771.park@samsung.com) \n
  29. * Adjust Android Standard features
  30. *
  31. * - 2012/03/14 : sangwoo.park(sw5771.park@samsung.com) \n
  32. * Change file, class name to ExynosXXX.
  33. *
  34. */
  35. #include <sys/types.h>
  36. #include <sys/stat.h>
  37. //#define LOG_NDEBUG 0
  38. #define LOG_TAG "ExynosCameraHWInterface"
  39. #include <utils/Log.h>
  40. #include "ExynosCameraHWInterface.h"
  41. #include "exynos_format.h"
  42. #define VIDEO_COMMENT_MARKER_H (0xFFBE)
  43. #define VIDEO_COMMENT_MARKER_L (0xFFBF)
  44. #define VIDEO_COMMENT_MARKER_LENGTH (4)
  45. #define JPEG_EOI_MARKER (0xFFD9)
  46. #define HIBYTE(x) (((x) >> 8) & 0xFF)
  47. #define LOBYTE(x) ((x) & 0xFF)
  48. /* TODO: These values will be changed */
  49. #define BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR "0.10,1.20,Infinity"
  50. #define FRONT_CAMERA_FOCUS_DISTANCES_STR "0.20,0.25,Infinity"
  51. #define BACK_CAMERA_MACRO_FOCUS_DISTANCES_STR "0.10,0.20,Infinity"
  52. #define BACK_CAMERA_INFINITY_FOCUS_DISTANCES_STR "0.10,1.20,Infinity"
  53. #define BACK_CAMERA_FOCUS_DISTANCE_INFINITY "Infinity"
  54. #define FRONT_CAMERA_FOCUS_DISTANCE_INFINITY "Infinity"
  55. // This hack does two things:
  56. // -- it sets preview to NV21 (YUV420SP)
  57. // -- it sets gralloc to YV12
  58. //
  59. // The reason being: the samsung encoder understands only yuv420sp, and gralloc
  60. // does yv12 and rgb565. So what we do is break up the interleaved UV into
  61. // separate V and U planes, which makes preview look good and enables the
  62. // encoder as well.
  63. //
  64. // FIXME: Samsung needs to enable support for proper yv12 coming out of the
  65. // camera, and to fix their video encoder to work with yv12.
  66. // FIXME: It also seems like either Samsung's YUV420SP (NV21) or img's YV12 has
  67. // the color planes switched. We need to figure which side is doing it
  68. // wrong and have the respective party fix it.
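// A minimal illustrative sketch (hypothetical helper, not used by this HAL) of the
// NV21 -> YV12 chroma split described above, assuming tightly packed planes of
// identical geometry and ignoring stride/alignment handling:
static inline void sketchSplitNv21Chroma(const unsigned char *srcVU,
                                         unsigned char *dstV,
                                         unsigned char *dstU,
                                         int width, int height)
{
    // NV21 chroma is interleaved as V0 U0 V1 U1 ...; YV12 wants a planar V plane
    // followed by a planar U plane.
    int chromaSamples = (width / 2) * (height / 2);
    for (int i = 0; i < chromaSamples; i++) {
        dstV[i] = srcVU[2 * i];      // V sample
        dstU[i] = srcVU[2 * i + 1];  // U sample
    }
}
// In practice this kind of split would happen in the preview path when copying a
// camera frame into the YV12 gralloc buffer.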
  69. namespace android {
  70. static const int INITIAL_SKIP_FRAME = 8;
  71. static const int EFFECT_SKIP_FRAME = 1;
  72. gralloc_module_t const* ExynosCameraHWInterface::m_grallocHal;
  73. ExynosCameraHWInterface::ExynosCameraHWInterface(int cameraId, camera_device_t *dev)
  74. :
  75. m_captureInProgress(false),
  76. m_skipFrame(0),
  77. m_notifyCb(0),
  78. m_dataCb(0),
  79. m_dataCbTimestamp(0),
  80. m_callbackCookie(0),
  81. m_msgEnabled(0),
  82. m_faceDetected(false),
  83. m_halDevice(dev),
  84. m_numOfAvailableVideoBuf(0)
  85. {
  86. ALOGV("DEBUG(%s):", __func__);
  87. int ret = 0;
  88. m_previewWindow = NULL;
  89. m_secCamera = ExynosCamera::createInstance();
  90. for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) {
  91. m_previewHeap[i] = NULL;
  92. m_previewBufHandle[i] = NULL;
  93. m_previewStride[i] = 0;
  94. m_avaliblePreviewBufHandle[i] = false;
  95. m_flagGrallocLocked[i] = false;
  96. m_matchedGrallocIndex[i] = -1;
  97. m_grallocVirtAddr[i] = NULL;
  98. }
  99. m_minUndequeuedBufs = 0;
  100. #ifndef USE_3DNR_DMAOUT
  101. m_cntVideoBuf = 0;
  102. #endif
  103. m_oldPictureBufQueueHead = NULL;
  104. m_getMemoryCb = NULL;
  105. m_exynosPreviewCSC = NULL;
  106. m_exynosPictureCSC = NULL;
  107. m_exynosVideoCSC = NULL;
  108. m_frameMetadata.number_of_faces = 0;
  109. m_frameMetadata.faces = m_faces;
  110. for (int i = 0; i < NUM_OF_VIDEO_BUF; i++) {
  111. m_videoHeap[i] = NULL;
  112. m_resizedVideoHeap[i] = NULL;
  113. }
  114. m_ion_client = ion_client_create();
  115. for (int i = 0; i < NUM_OF_PICTURE_BUF; i++)
  116. m_pictureHeap[i] = NULL;
  117. m_rawHeap = NULL;
  118. m_exitAutoFocusThread = false;
  119. m_exitPreviewThread = false;
  120. m_exitVideoThread = false;
  121. /* whether the PreviewThread is actively previewing or stopped. we
  122. * create the thread up front, but it starts out in the stopped state.
  123. */
  124. m_previewRunning = false;
  125. m_videoRunning = false;
  126. m_pictureRunning = false;
  127. #ifndef USE_3DNR_DMAOUT
  128. m_videoStart = false;
  129. #endif
  130. m_previewStartDeferred = false;
  131. m_recordingHint = false;
  132. if (!m_grallocHal) {
  133. ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
  134. if (ret)
  135. ALOGE("ERR(%s):Fail on loading gralloc HAL", __func__);
  136. }
  137. if (m_secCamera->create(cameraId) == false) {
  138. ALOGE("ERR(%s):Fail on m_secCamera->create(%d)", __func__, cameraId);
  139. return;
  140. }
  141. m_initDefaultParameters(cameraId);
  142. CSC_METHOD cscMethod = CSC_METHOD_HW;
  143. m_exynosPreviewCSC = csc_init(cscMethod);
  144. if (m_exynosPreviewCSC == NULL)
  145. ALOGE("ERR(%s):csc_init() fail", __func__);
  146. m_exynosPictureCSC = csc_init(cscMethod);
  147. if (m_exynosPictureCSC == NULL)
  148. ALOGE("ERR(%s):csc_init() fail", __func__);
  149. m_exynosVideoCSC = csc_init(cscMethod);
  150. if (m_exynosVideoCSC == NULL)
  151. ALOGE("ERR(%s):csc_init() fail", __func__);
  152. m_previewThread = new PreviewThread(this);
  153. m_videoThread = new VideoThread(this);
  154. m_autoFocusThread = new AutoFocusThread(this);
  155. m_pictureThread = new PictureThread(this);
  156. }
  157. ExynosCameraHWInterface::~ExynosCameraHWInterface()
  158. {
  159. close(m_ion_client);
  160. this->release();
  161. }
  162. status_t ExynosCameraHWInterface::setPreviewWindow(preview_stream_ops *w)
  163. {
  164. m_previewWindow = w;
  165. ALOGV("DEBUG(%s):m_previewWindow %p", __func__, m_previewWindow);
  166. if (m_previewWindow == NULL) {
  167. ALOGV("DEBUG(%s):preview window is NULL!", __func__);
  168. return OK;
  169. }
  170. m_previewLock.lock();
  171. if (m_previewRunning == true && m_previewStartDeferred == false) {
  172. ALOGV("DEBUG(%s):stop preview (window change)", __func__);
  173. m_stopPreviewInternal();
  174. }
  175. if (m_previewWindow->get_min_undequeued_buffer_count(m_previewWindow, &m_minUndequeuedBufs) != 0) {
  176. ALOGE("ERR(%s):could not retrieve min undequeued buffer count", __func__);
  177. return INVALID_OPERATION;
  178. }
  179. if (NUM_OF_PREVIEW_BUF <= m_minUndequeuedBufs) {
  180. ALOGE("ERR(%s):min undequeued buffer count %d is too high (expecting at most %d)", __func__,
  181. m_minUndequeuedBufs, NUM_OF_PREVIEW_BUF - 1);
  182. }
  183. if (m_previewWindow->set_buffer_count(m_previewWindow, NUM_OF_PREVIEW_BUF) != 0) {
  184. ALOGE("ERR(%s):could not set buffer count", __func__);
  185. return INVALID_OPERATION;
  186. }
  187. int previewW, previewH;
  188. int hal_pixel_format = HAL_PIXEL_FORMAT_YV12;
  189. m_params.getPreviewSize(&previewW, &previewH);
  190. const char *str_preview_format = m_params.getPreviewFormat();
  191. ALOGV("DEBUG(%s):str preview format %s width : %d height : %d ", __func__, str_preview_format, previewW, previewH);
  192. if (!strcmp(str_preview_format,
  193. CameraParameters::PIXEL_FORMAT_RGB565)) {
  194. hal_pixel_format = HAL_PIXEL_FORMAT_RGB_565;
  195. } else if (!strcmp(str_preview_format,
  196. CameraParameters::PIXEL_FORMAT_RGBA8888)) {
  197. hal_pixel_format = HAL_PIXEL_FORMAT_RGBA_8888;
  198. } else if (!strcmp(str_preview_format,
  199. CameraParameters::PIXEL_FORMAT_YUV420SP)) {
  200. hal_pixel_format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
  201. } else if (!strcmp(str_preview_format,
  202. CameraParameters::PIXEL_FORMAT_YUV420P))
  203. hal_pixel_format = HAL_PIXEL_FORMAT_YV12;
  204. if (m_previewWindow->set_usage(m_previewWindow,
  205. GRALLOC_USAGE_SW_WRITE_OFTEN |
  206. #ifdef USE_EGL
  207. #else
  208. GRALLOC_USAGE_HWC_HWOVERLAY |
  209. #endif
  210. GRALLOC_USAGE_HW_ION) != 0) {
  211. ALOGE("ERR(%s):could not set usage on gralloc buffer", __func__);
  212. return INVALID_OPERATION;
  213. }
  214. if (m_previewWindow->set_buffers_geometry(m_previewWindow,
  215. previewW, previewH,
  216. hal_pixel_format) != 0) {
  217. ALOGE("ERR(%s):could not set buffers geometry to %s",
  218. __func__, str_preview_format);
  219. return INVALID_OPERATION;
  220. }
  221. if (m_previewRunning == true && m_previewStartDeferred == true) {
  222. ALOGV("DEBUG(%s):start/resume preview", __func__);
  223. if (m_startPreviewInternal() == true) {
  224. m_previewStartDeferred = false;
  225. m_previewCondition.signal();
  226. }
  227. }
  228. m_previewLock.unlock();
  229. return OK;
  230. }
  231. void ExynosCameraHWInterface::setCallbacks(camera_notify_callback notify_cb,
  232. camera_data_callback data_cb,
  233. camera_data_timestamp_callback data_cb_timestamp,
  234. camera_request_memory get_memory,
  235. void *user)
  236. {
  237. m_notifyCb = notify_cb;
  238. m_dataCb = data_cb;
  239. m_dataCbTimestamp = data_cb_timestamp;
  240. m_getMemoryCb = get_memory;
  241. m_callbackCookie = user;
  242. }
  243. void ExynosCameraHWInterface::enableMsgType(int32_t msgType)
  244. {
  245. ALOGV("DEBUG(%s):msgType = 0x%x, m_msgEnabled before = 0x%x",
  246. __func__, msgType, m_msgEnabled);
  247. m_msgEnabled |= msgType;
  248. m_previewLock.lock();
  249. if ( msgType & CAMERA_MSG_PREVIEW_FRAME
  250. && m_previewRunning == true
  251. && m_previewStartDeferred == true) {
  252. ALOGV("DEBUG(%s):starting deferred preview", __func__);
  253. if (m_startPreviewInternal() == true) {
  254. m_previewStartDeferred = false;
  255. m_previewCondition.signal();
  256. }
  257. }
  258. m_previewLock.unlock();
  259. ALOGV("DEBUG(%s):m_msgEnabled = 0x%x", __func__, m_msgEnabled);
  260. }
  261. void ExynosCameraHWInterface::disableMsgType(int32_t msgType)
  262. {
  263. ALOGV("DEBUG(%s):msgType = 0x%x, m_msgEnabled before = 0x%x",
  264. __func__, msgType, m_msgEnabled);
  265. m_msgEnabled &= ~msgType;
  266. ALOGV("DEBUG(%s):m_msgEnabled = 0x%x", __func__, m_msgEnabled);
  267. }
  268. bool ExynosCameraHWInterface::msgTypeEnabled(int32_t msgType)
  269. {
  270. return (m_msgEnabled & msgType);
  271. }
  272. status_t ExynosCameraHWInterface::startPreview()
  273. {
  274. int ret = OK;
  275. ALOGV("DEBUG(%s):", __func__);
  276. Mutex::Autolock lock(m_stateLock);
  277. if (m_captureInProgress == true) {
  278. ALOGE("%s : capture in progress, not allowed", __func__);
  279. return INVALID_OPERATION;
  280. }
  281. m_previewLock.lock();
  282. if (m_previewRunning == true) {
  283. ALOGE("%s : preview thread already running", __func__);
  284. m_previewLock.unlock();
  285. return INVALID_OPERATION;
  286. }
  287. m_previewRunning = true;
  288. m_previewStartDeferred = false;
  289. if (m_previewWindow == NULL) {
  290. if (!(m_msgEnabled & CAMERA_MSG_PREVIEW_FRAME)) {
  291. ALOGV("DEBUG(%s):deferring", __func__);
  292. m_previewStartDeferred = true;
  293. m_previewLock.unlock();
  294. return NO_ERROR;
  295. }
  296. ALOGE("%s(%d): m_previewWindow is NULL", __func__, __LINE__);
  m_previewLock.unlock(); // release the lock taken above before bailing out
  297. return UNKNOWN_ERROR;
  298. }
  299. if (m_startPreviewInternal() == true) {
  300. m_previewCondition.signal();
  301. ret = OK;
  302. } else {
  303. ret = UNKNOWN_ERROR;
  304. }
  305. m_previewLock.unlock();
  306. return ret;
  307. }
  308. void ExynosCameraHWInterface::stopPreview()
  309. {
  310. ALOGV("DEBUG(%s):", __func__);
  311. /* request that the preview thread stop. */
  312. m_previewLock.lock();
  313. m_stopPreviewInternal();
  314. m_previewLock.unlock();
  315. }
  316. bool ExynosCameraHWInterface::previewEnabled()
  317. {
  318. Mutex::Autolock lock(m_previewLock);
  319. ALOGV("DEBUG(%s):%d", __func__, m_previewRunning);
  320. return m_previewRunning;
  321. }
  322. status_t ExynosCameraHWInterface::storeMetaDataInBuffers(bool enable)
  323. {
  324. if (!enable) {
  325. ALOGE("Non-m_frameMetadata buffer mode is not supported!");
  326. return INVALID_OPERATION;
  327. }
  328. return OK;
  329. }
  330. status_t ExynosCameraHWInterface::startRecording()
  331. {
  332. ALOGV("DEBUG(%s):", __func__);
  333. Mutex::Autolock lock(m_videoLock);
  334. int videoW, videoH, videoFormat, videoFramesize;
  335. m_secCamera->getVideoSize(&videoW, &videoH);
  336. videoFormat = m_secCamera->getVideoFormat();
  337. videoFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(videoFormat), videoW, videoH);
  338. int orgVideoFrameSize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(videoFormat), m_orgVideoRect.w, m_orgVideoRect.h);
  339. for (int i = 0; i < NUM_OF_VIDEO_BUF; i++) {
  340. #ifdef USE_3DNR_DMAOUT
  341. ExynosBuffer videoBuf;
  342. if (m_videoHeap[i] != NULL) {
  343. m_videoHeap[i]->release(m_videoHeap[i]);
  344. m_videoHeap[i] = 0;
  345. }
  346. m_videoHeap[i] = m_getMemoryCb(-1, videoFramesize, 1, NULL);
  347. if (!m_videoHeap[i]) {
  348. ALOGE("ERR(%s):m_getMemoryCb(m_videoHeap[%d], size(%d) fail", __func__, i, videoFramesize);
  349. return UNKNOWN_ERROR;
  350. }
  351. m_getAlignedYUVSize(videoFormat, videoW, videoH, &videoBuf);
  352. videoBuf.virt.extP[0] = (char *)m_videoHeap[i]->data;
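// Chain the remaining plane pointers contiguously within the single heap
// allocation; planes whose computed size is zero are left NULL.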
  353. for (int j = 1; j < 3; j++) {
  354. if (videoBuf.size.extS[j] != 0)
  355. videoBuf.virt.extP[j] = videoBuf.virt.extP[j-1] + videoBuf.size.extS[j-1];
  356. else
  357. videoBuf.virt.extP[j] = NULL;
  358. }
  359. videoBuf.reserved.p = i;
  360. m_secCamera->setVideoBuf(&videoBuf);
  361. #endif
  362. // heap sized for the original (requested) video dimensions
  363. if (m_resizedVideoHeap[i] != NULL) {
  364. m_resizedVideoHeap[i]->release(m_resizedVideoHeap[i]);
  365. m_resizedVideoHeap[i] = 0;
  366. }
  367. m_resizedVideoHeap[i] = m_getMemoryCb(-1, orgVideoFrameSize, 1, NULL);
  368. if (!m_resizedVideoHeap[i]) {
  369. ALOGE("ERR(%s):m_getMemoryCb(m_resizedVideoHeap[%d], size(%d) fail", __func__, i, orgVideoFrameSize);
  370. return UNKNOWN_ERROR;
  371. }
  372. }
  373. if (m_videoRunning == false) {
  374. if (m_secCamera->startVideo() == false) {
  375. ALOGE("ERR(%s):Fail on m_secCamera->startVideo()", __func__);
  376. return UNKNOWN_ERROR;
  377. }
  378. m_numOfAvailableVideoBuf = NUM_OF_VIDEO_BUF;
  379. #ifdef USE_3DNR_DMAOUT
  380. m_videoRunning = true;
  381. m_videoCondition.signal();
  382. #else
  383. m_videoStart = true;
  384. #endif
  385. }
  386. return NO_ERROR;
  387. }
  388. void ExynosCameraHWInterface::stopRecording()
  389. {
  390. ALOGV("DEBUG(%s):", __func__);
  391. #ifndef USE_3DNR_DMAOUT
  392. m_videoStart = false;
  393. #endif
  394. if (m_videoRunning == true) {
  395. m_videoRunning = false;
  396. Mutex::Autolock lock(m_videoLock);
  397. m_videoCondition.signal();
  398. /* wait until video thread is stopped */
  399. m_videoStoppedCondition.wait(m_videoLock);
  400. } else
  401. ALOGV("DEBUG(%s):video not running, doing nothing", __func__);
  402. }
  403. bool ExynosCameraHWInterface::recordingEnabled()
  404. {
  405. return m_videoStart;
  406. }
  407. void ExynosCameraHWInterface::releaseRecordingFrame(const void *opaque)
  408. {
  409. // Taking this lock here makes video recording lock up
  410. // Mutex::Autolock lock(m_videoLock);
  411. int i;
  412. bool find = false;
  413. // HACK : this causes recording to slow down
  414. /*
  415. for (i = 0; i < NUM_OF_VIDEO_BUF; i++) {
  416. if ((char *)m_videoHeap[i]->data == (char *)opaque) {
  417. find = true;
  418. break;
  419. }
  420. }
  421. if (find == true) {
  422. ExynosBuffer videoBuf;
  423. videoBuf.reserved.p = i;
  424. m_secCamera->putVideoBuf(&videoBuf);
  425. m_numOfAvailableVideoBuf++;
  426. if (NUM_OF_VIDEO_BUF <= m_numOfAvailableVideoBuf)
  427. m_numOfAvailableVideoBuf = NUM_OF_VIDEO_BUF;
  428. } else {
  429. ALOGV("DEBUG(%s):no matched index(%p)", __func__, (char *)opaque);
  430. }
  431. */
  432. }
  433. status_t ExynosCameraHWInterface::autoFocus()
  434. {
  435. ALOGV("DEBUG(%s):", __func__);
  436. /* signal m_autoFocusThread to run once */
  437. m_focusCondition.signal();
  438. return NO_ERROR;
  439. }
  440. status_t ExynosCameraHWInterface::cancelAutoFocus()
  441. {
  442. if (m_secCamera->cancelAutoFocus() == false) {
  443. ALOGE("ERR(%s):Fail on m_secCamera->cancelAutoFocus()", __func__);
  444. return UNKNOWN_ERROR;
  445. }
  446. return NO_ERROR;
  447. }
  448. status_t ExynosCameraHWInterface::takePicture()
  449. {
  450. Mutex::Autolock lock(m_stateLock);
  451. if (m_captureInProgress == true) {
  452. ALOGE("%s : capture already in progress", __func__);
  453. return INVALID_OPERATION;
  454. }
  455. if (m_pictureRunning == false) {
  456. ALOGI("%s(%d): m_pictureRunning is false", __func__, __LINE__);
  457. if (m_startPictureInternal() == false) {
  458. ALOGE("%s(%d): m_startPictureInternal() fail!!!", __func__, __LINE__);
  459. return INVALID_OPERATION;
  460. }
  461. }
  462. m_pictureLock.lock();
  463. m_captureInProgress = true;
  464. m_pictureLock.unlock();
  465. if (m_pictureThread->run("CameraPictureThread", PRIORITY_DEFAULT) != NO_ERROR) {
  466. ALOGE("%s : couldn't run picture thread", __func__);
  467. return INVALID_OPERATION;
  468. }
  469. return NO_ERROR;
  470. }
  471. status_t ExynosCameraHWInterface::cancelPicture()
  472. {
  473. ALOGV("DEBUG(%s):", __func__);
  474. if (m_pictureThread.get()) {
  475. ALOGV("DEBUG(%s):waiting for picture thread to exit", __func__);
  476. m_pictureThread->requestExitAndWait();
  477. ALOGV("DEBUG(%s):picture thread has exited", __func__);
  478. }
  479. return NO_ERROR;
  480. }
  481. status_t ExynosCameraHWInterface::setParameters(const CameraParameters& params)
  482. {
  483. ALOGV("DEBUG(%s):", __func__);
  484. status_t ret = NO_ERROR;
  485. /* if someone calls us while the picture thread is running, it could screw
  486. * up the sensor quite a bit, so wait (with a timeout) for the capture to
  487. * finish; waiting indefinitely could deadlock with the callbacks
  488. */
  489. m_stateLock.lock();
  490. if (m_captureInProgress == true) {
  491. m_stateLock.unlock();
  492. m_pictureLock.lock();
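// wait up to two seconds (waitRelative() takes nanoseconds) for the in-flight
// capture to signal completion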
  493. m_pictureCondition.waitRelative(m_pictureLock, (2000 * 1000000));
  494. m_pictureLock.unlock();
  495. }
  496. m_stateLock.unlock();
  497. ///////////////////////////////////////////////////
  498. // Google Official API : Camera.Parameters
  499. // http://developer.android.com/reference/android/hardware/Camera.Parameters.html
  500. ///////////////////////////////////////////////////
  501. // recording hint
  502. const char *newRecordingHint = params.get(CameraParameters::KEY_RECORDING_HINT);
  503. if (newRecordingHint != NULL) {
  504. if (strcmp(newRecordingHint, "true") == 0)
  505. m_recordingHint = true;
  506. else
  507. m_recordingHint = false;
  508. m_secCamera->setRecordingHint(m_recordingHint);
  509. }
  510. // preview size
  511. int newPreviewW = 0;
  512. int newPreviewH = 0;
  513. int newCalPreviewW = 0;
  514. int newCalPreviewH = 0;
  515. int previewMaxW = 0;
  516. int previewMaxH = 0;
  517. params.getPreviewSize(&newPreviewW, &newPreviewH);
  518. // By default, use the maximum supported preview size
  519. m_secCamera->getSupportedPreviewSizes(&previewMaxW, &previewMaxH);
  520. newCalPreviewW = previewMaxW;
  521. newCalPreviewH = previewMaxH;
  522. // When recording, use the maximum supported video size (capped at the preview maximum below)
  523. if (m_recordingHint == true) {
  524. m_secCamera->getSupportedVideoSizes(&newCalPreviewW, &newCalPreviewH);
  525. if ( previewMaxW < newCalPreviewW
  526. || previewMaxH < newCalPreviewH) {
  527. newCalPreviewW = previewMaxW;
  528. newCalPreviewH = previewMaxH;
  529. }
  530. }
  531. m_orgPreviewRect.w = newPreviewW;
  532. m_orgPreviewRect.h = newPreviewH;
  533. // TODO : calibrate original preview ratio
  534. //m_getRatioSize(newCalPreviewW, newCalPreviewH, newPreviewW, newPreviewH, &newPreviewW, &newPreviewH);
  535. newPreviewW = newCalPreviewW;
  536. newPreviewH = newCalPreviewH;
  537. const char *strNewPreviewFormat = params.getPreviewFormat();
  538. ALOGV("DEBUG(%s):newPreviewW x newPreviewH = %dx%d, format = %s",
  539. __func__, newPreviewW, newPreviewH, strNewPreviewFormat);
  540. if (0 < newPreviewW &&
  541. 0 < newPreviewH &&
  542. strNewPreviewFormat != NULL &&
  543. m_isSupportedPreviewSize(newPreviewW, newPreviewH) == true) {
  544. int newPreviewFormat = 0;
  545. if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_RGB565))
  546. newPreviewFormat = V4L2_PIX_FMT_RGB565;
  547. else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_RGBA8888))
  548. newPreviewFormat = V4L2_PIX_FMT_RGB32;
  549. else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_YUV420SP))
  550. newPreviewFormat = V4L2_PIX_FMT_NV21;
  551. else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_YUV420P))
  552. newPreviewFormat = V4L2_PIX_FMT_YVU420M;
  553. else if (!strcmp(strNewPreviewFormat, "yuv420sp_custom"))
  554. newPreviewFormat = V4L2_PIX_FMT_NV12T;
  555. else if (!strcmp(strNewPreviewFormat, "yuv422i"))
  556. newPreviewFormat = V4L2_PIX_FMT_YUYV;
  557. else if (!strcmp(strNewPreviewFormat, "yuv422p"))
  558. newPreviewFormat = V4L2_PIX_FMT_YUV422P;
  559. else
  560. newPreviewFormat = V4L2_PIX_FMT_NV21; //for 3rd party
  561. m_orgPreviewRect.colorFormat = newPreviewFormat;
  562. int curPreviewW, curPreviewH;
  563. m_secCamera->getPreviewSize(&curPreviewW, &curPreviewH);
  564. int curPreviewFormat = m_secCamera->getPreviewFormat();
  565. if (curPreviewW != newPreviewW ||
  566. curPreviewH != newPreviewH ||
  567. curPreviewFormat != newPreviewFormat) {
  568. if ( m_secCamera->setPreviewSize(newPreviewW, newPreviewH) == false
  569. || m_secCamera->setPreviewFormat(newPreviewFormat) == false) {
  570. ALOGE("ERR(%s):Fail on m_secCamera->setPreviewSize(width(%d), height(%d), format(%d))",
  571. __func__, newPreviewW, newPreviewH, newPreviewFormat);
  572. ret = UNKNOWN_ERROR;
  573. } else {
  574. if (m_previewWindow) {
  575. if (m_previewRunning == true && m_previewStartDeferred == false) {
  576. ALOGE("ERR(%s):preview is running, cannot change size and format!", __func__);
  577. ret = INVALID_OPERATION;
  578. }
  579. ALOGV("DEBUG(%s):m_previewWindow (%p) set_buffers_geometry", __func__, m_previewWindow);
  580. ALOGV("DEBUG(%s):m_previewWindow->set_buffers_geometry (%p)", __func__,
  581. m_previewWindow->set_buffers_geometry);
  582. m_previewWindow->set_buffers_geometry(m_previewWindow,
  583. newPreviewW, newPreviewH,
  584. newPreviewFormat);
  585. ALOGV("DEBUG(%s):DONE m_previewWindow (%p) set_buffers_geometry", __func__, m_previewWindow);
  586. }
  587. m_params.setPreviewSize(newPreviewW, newPreviewH);
  588. m_params.setPreviewFormat(strNewPreviewFormat);
  589. }
  590. }
  591. else {
  592. ALOGV("DEBUG(%s):preview size and format has not changed", __func__);
  593. }
  594. } else {
  595. ALOGE("ERR(%s):Invalid preview size(%dx%d)", __func__, newPreviewW, newPreviewH);
  596. ret = INVALID_OPERATION;
  597. }
  598. int newPictureW = 0;
  599. int newPictureH = 0;
  600. params.getPictureSize(&newPictureW, &newPictureH);
  601. ALOGV("DEBUG(%s):newPictureW x newPictureH = %dx%d", __func__, newPictureW, newPictureH);
  602. if (0 < newPictureW && 0 < newPictureH) {
  603. int orgPictureW, orgPictureH = 0;
  604. m_secCamera->getPictureSize(&orgPictureW, &orgPictureH);
  605. if (m_secCamera->setPictureSize(newPictureW, newPictureH) == false) {
  606. ALOGE("ERR(%s):Fail on m_secCamera->setPictureSize(width(%d), height(%d))",
  607. __func__, newPictureW, newPictureH);
  608. ret = UNKNOWN_ERROR;
  609. } else {
  610. int tempW, tempH = 0;
  611. m_secCamera->getPictureSize(&tempW, &tempH);
  612. if (tempW != orgPictureW || tempH != orgPictureH) {
  613. if (m_pictureRunning == true) {
  614. if (m_stopPictureInternal() == false)
  615. ALOGE("ERR(%s):m_stopPictureInternal() fail", __func__);
  616. if (m_startPictureInternal() == false)
  617. ALOGE("ERR(%s):m_startPictureInternal() fail", __func__);
  618. }
  619. }
  620. m_orgPictureRect.w = newPictureW;
  621. m_orgPictureRect.h = newPictureH;
  622. m_params.setPictureSize(newPictureW, newPictureH);
  623. }
  624. }
  625. // picture format
  626. const char *newPictureFormat = params.getPictureFormat();
  627. ALOGV("DEBUG(%s):newPictureFormat %s", __func__, newPictureFormat);
  628. if (newPictureFormat != NULL) {
  629. int value = 0;
  630. if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_RGB565))
  631. value = V4L2_PIX_FMT_RGB565;
  632. else if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_RGBA8888))
  633. value = V4L2_PIX_FMT_RGB32;
  634. else if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_YUV420SP))
  635. value = V4L2_PIX_FMT_NV21;
  636. else if (!strcmp(newPictureFormat, "yuv420sp_custom"))
  637. value = V4L2_PIX_FMT_NV12T;
  638. else if (!strcmp(newPictureFormat, "yuv420p"))
  639. value = V4L2_PIX_FMT_YUV420;
  640. else if (!strcmp(newPictureFormat, "yuv422i"))
  641. value = V4L2_PIX_FMT_YUYV;
  642. else if (!strcmp(newPictureFormat, "uyv422i_custom")) //Zero copy UYVY format
  643. value = V4L2_PIX_FMT_UYVY;
  644. else if (!strcmp(newPictureFormat, "uyv422i")) //Non-zero copy UYVY format
  645. value = V4L2_PIX_FMT_UYVY;
  646. else if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_JPEG))
  647. value = V4L2_PIX_FMT_YUYV;
  648. else if (!strcmp(newPictureFormat, "yuv422p"))
  649. value = V4L2_PIX_FMT_YUV422P;
  650. else
  651. value = V4L2_PIX_FMT_NV21; //for 3rd party
  652. if (value != m_secCamera->getPictureFormat()) {
  653. if (m_secCamera->setPictureFormat(value) == false) {
  654. ALOGE("ERR(%s):Fail on m_secCamera->setPictureFormat(format(%d))", __func__, value);
  655. ret = UNKNOWN_ERROR;
  656. } else {
  657. m_orgPictureRect.colorFormat = value;
  658. m_params.setPictureFormat(newPictureFormat);
  659. }
  660. }
  661. }
  662. // JPEG image quality
  663. int newJpegQuality = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
  664. ALOGV("DEBUG(%s):newJpegQuality %d", __func__, newJpegQuality);
  665. // we ignore bad values
  666. if (newJpegQuality >=1 && newJpegQuality <= 100) {
  667. if (m_secCamera->setJpegQuality(newJpegQuality) == false) {
  668. ALOGE("ERR(%s):Fail on m_secCamera->setJpegQuality(quality(%d))", __func__, newJpegQuality);
  669. ret = UNKNOWN_ERROR;
  670. } else {
  671. m_params.set(CameraParameters::KEY_JPEG_QUALITY, newJpegQuality);
  672. }
  673. }
  674. // JPEG thumbnail size
  675. int newJpegThumbnailW = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
  676. int newJpegThumbnailH = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
  677. if (0 <= newJpegThumbnailW && 0 <= newJpegThumbnailH) {
  678. if (m_secCamera->setJpegThumbnailSize(newJpegThumbnailW, newJpegThumbnailH) == false) {
  679. ALOGE("ERR(%s):Fail on m_secCamera->setJpegThumbnailSize(width(%d), height(%d))", __func__, newJpegThumbnailW, newJpegThumbnailH);
  680. ret = UNKNOWN_ERROR;
  681. } else {
  682. m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, newJpegThumbnailW);
  683. m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, newJpegThumbnailH);
  684. }
  685. }
  686. // JPEG thumbnail quality
  687. int newJpegThumbnailQuality = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
  688. ALOGV("DEBUG(%s):newJpegThumbnailQuality %d", __func__, newJpegThumbnailQuality);
  689. // we ignore bad values
  690. if (newJpegThumbnailQuality >=1 && newJpegThumbnailQuality <= 100) {
  691. if (m_secCamera->setJpegThumbnailQuality(newJpegThumbnailQuality) == false) {
  692. ALOGE("ERR(%s):Fail on m_secCamera->setJpegThumbnailQuality(quality(%d))",
  693. __func__, newJpegThumbnailQuality);
  694. ret = UNKNOWN_ERROR;
  695. } else {
  696. m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, newJpegThumbnailQuality);
  697. }
  698. }
  699. // Video size
  700. int newVideoW = 0;
  701. int newVideoH = 0;
  702. params.getVideoSize(&newVideoW, &newVideoH);
  703. ALOGV("DEBUG(%s):newVideoW (%d) newVideoH (%d)", __func__, newVideoW, newVideoH);
  704. if (0 < newVideoW && 0 < newVideoH && m_videoStart == false) {
  705. m_orgVideoRect.w = newVideoW;
  706. m_orgVideoRect.h = newVideoH;
  707. if (m_secCamera->setVideoSize(newVideoW, newVideoH) == false) {
  708. ALOGE("ERR(%s):Fail on m_secCamera->setVideoSize(width(%d), height(%d))",
  709. __func__, newVideoW, newVideoH);
  710. ret = UNKNOWN_ERROR;
  711. }
  712. m_params.setVideoSize(newVideoW, newVideoH);
  713. }
  714. // video stabilization
  715. const char *newVideoStabilization = params.get(CameraParameters::KEY_VIDEO_STABILIZATION);
  716. bool currVideoStabilization = m_secCamera->getVideoStabilization();
  717. ALOGV("DEBUG(%s):newVideoStabilization %s", __func__, newVideoStabilization);
  718. if (newVideoStabilization != NULL) {
  719. bool toggle = false;
  720. if (!strcmp(newVideoStabilization, "true"))
  721. toggle = true;
  722. if ( currVideoStabilization != toggle) {
  723. if (m_secCamera->setVideoStabilization(toggle) == false) {
  724. ALOGE("ERR(%s):setVideoStabilization() fail", __func__);
  725. ret = UNKNOWN_ERROR;
  726. } else {
  727. m_params.set(CameraParameters::KEY_VIDEO_STABILIZATION, newVideoStabilization);
  728. }
  729. }
  730. }
  731. // 3dnr
  732. const char *new3dnr = params.get("3dnr");
  733. ALOGV("DEBUG(%s):new3dnr %s", __func__, new3dnr);
  734. if (new3dnr != NULL) {
  735. bool toggle = false;
  736. if (!strcmp(new3dnr, "true"))
  737. toggle = true;
  738. if (m_secCamera->set3DNR(toggle) == false) {
  739. ALOGE("ERR(%s):set3DNR() fail", __func__);
  740. ret = UNKNOWN_ERROR;
  741. } else {
  742. m_params.set("3dnr", new3dnr);
  743. }
  744. }
  745. // odc
  746. const char *newOdc = params.get("odc");
  747. ALOGV("DEBUG(%s):newOdc %s", __func__, newOdc);
  748. if (newOdc != NULL) {
  749. bool toggle = false;
  750. if (!strcmp(newOdc, "true"))
  751. toggle = true;
  752. if (m_secCamera->setODC(toggle) == false) {
  753. ALOGE("ERR(%s):setODC() fail", __func__);
  754. ret = UNKNOWN_ERROR;
  755. } else {
  756. m_params.set("odc", newOdc);
  757. }
  758. }
  759. // frame rate
  760. int newFrameRate = params.getPreviewFrameRate();
  761. ALOGV("DEBUG(%s):newFrameRate %d", __func__, newFrameRate);
  762. // ignore any fps request; we determine fps automatically based
  763. // on scene mode. don't return an error because that causes CTS failures.
  764. if (newFrameRate != m_params.getPreviewFrameRate()) {
  765. if (m_secCamera->setPreviewFrameRate(newFrameRate) == false) {
  766. ALOGE("ERR(%s):Fail on m_secCamera->setPreviewFrameRate(%d)", __func__, newFrameRate);
  767. ret = UNKNOWN_ERROR;
  768. } else {
  769. m_params.setPreviewFrameRate(newFrameRate);
  770. }
  771. }
  772. // zoom
  773. int newZoom = params.getInt(CameraParameters::KEY_ZOOM);
  774. ALOGV("DEBUG(%s):newZoom %d", __func__, newZoom);
  775. if (0 <= newZoom) {
  776. if (m_secCamera->setZoom(newZoom) == false) {
  777. ALOGE("ERR(%s):Fail on m_secCamera->setZoom(newZoom(%d))", __func__, newZoom);
  778. ret = UNKNOWN_ERROR;
  779. }
  780. else {
  781. m_params.set(CameraParameters::KEY_ZOOM, newZoom);
  782. }
  783. }
  784. // rotation
  785. int newRotation = params.getInt(CameraParameters::KEY_ROTATION);
  786. ALOGV("DEBUG(%s):newRotation %d", __func__, newRotation);
  787. if (0 <= newRotation) {
  788. ALOGV("DEBUG(%s):set orientation:%d", __func__, newRotation);
  789. if (m_secCamera->setRotation(newRotation) == false) {
  790. ALOGE("ERR(%s):Fail on m_secCamera->setRotation(%d)", __func__, newRotation);
  791. ret = UNKNOWN_ERROR;
  792. } else {
  793. m_params.set(CameraParameters::KEY_ROTATION, newRotation);
  794. }
  795. }
  796. // auto exposure lock
  797. const char *newAutoExposureLock = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
  798. if (newAutoExposureLock != NULL) {
  799. bool toggle = false;
  800. if (!strcmp(newAutoExposureLock, "true"))
  801. toggle = true;
  802. if (m_secCamera->setAutoExposureLock(toggle) == false) {
  803. ALOGE("ERR(%s):Fail on m_secCamera->setAutoExposureLock()", __func__);
  804. ret = UNKNOWN_ERROR;
  805. } else {
  806. m_params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, newAutoExposureLock);
  807. }
  808. }
  809. // exposure
  810. int minExposureCompensation = params.getInt(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION);
  811. int maxExposureCompensation = params.getInt(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION);
  812. int newExposureCompensation = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
  813. ALOGV("DEBUG(%s):newExposureCompensation %d", __func__, newExposureCompensation);
  814. if ((minExposureCompensation <= newExposureCompensation) &&
  815. (newExposureCompensation <= maxExposureCompensation)) {
  816. if (m_secCamera->setExposureCompensation(newExposureCompensation) == false) {
  817. ALOGE("ERR(%s):Fail on m_secCamera->setExposureCompensation(exposure(%d))", __func__, newExposureCompensation);
  818. ret = UNKNOWN_ERROR;
  819. } else {
  820. m_params.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, newExposureCompensation);
  821. }
  822. }
  823. // auto white balance lock
  824. const char *newAutoWhitebalanceLock = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
  825. if (newAutoWhitebalanceLock != NULL) {
  826. bool toggle = false;
  827. if (!strcmp(newAutoWhitebalanceLock, "true"))
  828. toggle = true;
  829. if (m_secCamera->setAutoWhiteBalanceLock(toggle) == false) {
  830. ALOGE("ERR(%s):Fail on m_secCamera->setAutoWhiteBalanceLock()", __func__);
  831. ret = UNKNOWN_ERROR;
  832. } else {
  833. m_params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, newAutoWhitebalanceLock);
  834. }
  835. }
  836. // white balance
  837. const char *newWhiteBalance = params.get(CameraParameters::KEY_WHITE_BALANCE);
  838. ALOGV("DEBUG(%s):newWhiteBalance %s", __func__, newWhiteBalance);
  839. if (newWhiteBalance != NULL) {
  840. int value = -1;
  841. if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_AUTO))
  842. value = ExynosCamera::WHITE_BALANCE_AUTO;
  843. else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_INCANDESCENT))
  844. value = ExynosCamera::WHITE_BALANCE_INCANDESCENT;
  845. else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_FLUORESCENT))
  846. value = ExynosCamera::WHITE_BALANCE_FLUORESCENT;
  847. else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT))
  848. value = ExynosCamera::WHITE_BALANCE_WARM_FLUORESCENT;
  849. else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_DAYLIGHT))
  850. value = ExynosCamera::WHITE_BALANCE_DAYLIGHT;
  851. else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT))
  852. value = ExynosCamera::WHITE_BALANCE_CLOUDY_DAYLIGHT;
  853. else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_TWILIGHT))
  854. value = ExynosCamera::WHITE_BALANCE_TWILIGHT;
  855. else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_SHADE))
  856. value = ExynosCamera::WHITE_BALANCE_SHADE;
  857. else {
  858. ALOGE("ERR(%s):Invalid white balance(%s)", __func__, newWhiteBalance); //twilight, shade, warm_fluorescent
  859. ret = UNKNOWN_ERROR;
  860. }
  861. if (0 <= value) {
  862. if (m_secCamera->setWhiteBalance(value) == false) {
  863. ALOGE("ERR(%s):Fail on m_secCamera->setWhiteBalance(white(%d))", __func__, value);
  864. ret = UNKNOWN_ERROR;
  865. } else {
  866. m_params.set(CameraParameters::KEY_WHITE_BALANCE, newWhiteBalance);
  867. }
  868. }
  869. }
  870. // Metering
  871. // This is an additional API (not a Google API).
  872. // It must be set before the KEY_METERING_AREAS handling below.
  873. const char *strNewMetering = params.get("metering");
  874. ALOGV("DEBUG(%s):strNewMetering %s", __func__, strNewMetering);
  875. if (strNewMetering != NULL) {
  876. int newMetering = -1;
  877. if (!strcmp(strNewMetering, "average"))
  878. newMetering = ExynosCamera::METERING_MODE_AVERAGE;
  879. else if (!strcmp(strNewMetering, "center"))
  880. newMetering = ExynosCamera::METERING_MODE_CENTER;
  881. else if (!strcmp(strNewMetering, "matrix"))
  882. newMetering = ExynosCamera::METERING_MODE_MATRIX;
  883. else if (!strcmp(strNewMetering, "spot"))
  884. newMetering = ExynosCamera::METERING_MODE_SPOT;
  885. else {
  886. ALOGE("ERR(%s):Invalid metering newMetering(%s)", __func__, strNewMetering);
  887. ret = UNKNOWN_ERROR;
  888. }
  889. if (0 <= newMetering) {
  890. if (m_secCamera->setMeteringMode(newMetering) == false) {
  891. ALOGE("ERR(%s):Fail on m_secCamera->setMeteringMode(%d)", __func__, newMetering);
  892. ret = UNKNOWN_ERROR;
  893. } else {
  894. m_params.set("metering", strNewMetering);
  895. }
  896. }
  897. }
  898. // metering areas
  899. const char *newMeteringAreas = params.get(CameraParameters::KEY_METERING_AREAS);
  900. int maxNumMeteringAreas = m_secCamera->getMaxNumMeteringAreas();
  901. if (newMeteringAreas != NULL && maxNumMeteringAreas != 0) {
  902. // ex : (-10,-10,0,0,300),(0,0,10,10,700)
  903. ExynosRect2 *rect2s = new ExynosRect2[maxNumMeteringAreas];
  904. int *weights = new int[maxNumMeteringAreas];
  905. int validMeteringAreas = m_bracketsStr2Ints((char *)newMeteringAreas, maxNumMeteringAreas, rect2s, weights);
  906. if (0 < validMeteringAreas) {
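// Each metering rect arrives in the Android API's (-1000,-1000)..(1000,1000)
// coordinate space; shifting by +1000 gives 0..2000, which m_calibratePosition()
// presumably rescales to preview pixel coordinates.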
  907. for (int i = 0; i < validMeteringAreas; i++) {
  908. rect2s[i].x1 = m_calibratePosition(2000, newPreviewW, rect2s[i].x1 + 1000);
  909. rect2s[i].y1 = m_calibratePosition(2000, newPreviewH, rect2s[i].y1 + 1000);
  910. rect2s[i].x2 = m_calibratePosition(2000, newPreviewW, rect2s[i].x2 + 1000);
  911. rect2s[i].y2 = m_calibratePosition(2000, newPreviewH, rect2s[i].y2 + 1000);
  912. }
  913. if (m_secCamera->setMeteringAreas(validMeteringAreas, rect2s, weights) == false) {
  914. ALOGE("ERR(%s):setMeteringAreas(%s) fail", __func__, newMeteringAreas);
  915. ret = UNKNOWN_ERROR;
  916. }
  917. else {
  918. m_params.set(CameraParameters::KEY_METERING_AREAS, newMeteringAreas);
  919. }
  920. }
  921. delete [] rect2s;
  922. delete [] weights;
  923. }
  924. // anti banding
  925. const char *newAntibanding = params.get(CameraParameters::KEY_ANTIBANDING);
  926. ALOGV("DEBUG(%s):newAntibanding %s", __func__, newAntibanding);
  927. if (newAntibanding != NULL) {
  928. int value = -1;
  929. if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_AUTO))
  930. value = ExynosCamera::ANTIBANDING_AUTO;
  931. else if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_50HZ))
  932. value = ExynosCamera::ANTIBANDING_50HZ;
  933. else if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_60HZ))
  934. value = ExynosCamera::ANTIBANDING_60HZ;
  935. else if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_OFF))
  936. value = ExynosCamera::ANTIBANDING_OFF;
  937. else {
  938. ALOGE("ERR(%s):Invalid antibanding value(%s)", __func__, newAntibanding);
  939. ret = UNKNOWN_ERROR;
  940. }
  941. if (0 <= value) {
  942. if (m_secCamera->setAntibanding(value) == false) {
  943. ALOGE("ERR(%s):Fail on m_secCamera->setAntibanding(%d)", __func__, value);
  944. ret = UNKNOWN_ERROR;
  945. } else {
  946. m_params.set(CameraParameters::KEY_ANTIBANDING, newAntibanding);
  947. }
  948. }
  949. }
  950. // scene mode
  951. const char *strNewSceneMode = params.get(CameraParameters::KEY_SCENE_MODE);
  952. const char *strCurSceneMode = m_params.get(CameraParameters::KEY_SCENE_MODE);
  953. // fps range
  954. int newMinFps = 0;
  955. int newMaxFps = 0;
  956. int curMinFps = 0;
  957. int curMaxFps = 0;
  958. params.getPreviewFpsRange(&newMinFps, &newMaxFps);
  959. m_params.getPreviewFpsRange(&curMinFps, &curMaxFps);
  960. /* our fps range is determined by the sensor, so reject any request
  961. * that isn't exactly what we're already at.
  962. * this check is only performed when the scene mode stays the same and only the fps range changes
  963. */
  964. if (strNewSceneMode && strCurSceneMode) {
  965. if (!strcmp(strNewSceneMode, strCurSceneMode)) {
  966. if ((newMinFps != curMinFps) || (newMaxFps != curMaxFps)) {
  967. ALOGW("%s : requested newMinFps = %d, newMaxFps = %d not allowed",
  968. __func__, newMinFps, newMaxFps);
  969. ALOGE("%s : curMinFps = %d, curMaxFps = %d",
  970. __func__, curMinFps, curMaxFps);
  971. ret = UNKNOWN_ERROR;
  972. }
  973. }
  974. } else {
  975. /* Check basic validation if scene mode is different */
  976. if ((newMaxFps < newMinFps) ||
  977. (newMinFps < 0) || (newMaxFps < 0))
  978. ret = UNKNOWN_ERROR;
  979. }
  980. if (strNewSceneMode != NULL) {
  981. int newSceneMode = -1;
  982. const char *strNewFlashMode = params.get(CameraParameters::KEY_FLASH_MODE);
  983. const char *strNewFocusMode = params.get(CameraParameters::KEY_FOCUS_MODE);
  984. // fps range is (15000,30000) by default.
  985. m_params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, "(15000,30000)");
  986. m_params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "15000,30000");
  987. if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_AUTO)) {
  988. newSceneMode = ExynosCamera::SCENE_MODE_AUTO;
  989. } else {
  990. // defaults for non-auto scene modes
  991. if (m_secCamera->getSupportedFocusModes() != 0)
  992. strNewFocusMode = CameraParameters::FOCUS_MODE_AUTO;
  993. strNewFlashMode = CameraParameters::FLASH_MODE_OFF;
  994. if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_ACTION)) {
  995. newSceneMode = ExynosCamera::SCENE_MODE_ACTION;
  996. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_PORTRAIT)) {
  997. newSceneMode = ExynosCamera::SCENE_MODE_PORTRAIT;
  998. strNewFlashMode = CameraParameters::FLASH_MODE_AUTO;
  999. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_LANDSCAPE)) {
  1000. newSceneMode = ExynosCamera::SCENE_MODE_LANDSCAPE;
  1001. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_NIGHT)) {
  1002. newSceneMode = ExynosCamera::SCENE_MODE_NIGHT;
  1003. m_params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, "(4000,30000)");
  1004. m_params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "4000,30000");
  1005. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_NIGHT_PORTRAIT)) {
  1006. newSceneMode = ExynosCamera::SCENE_MODE_NIGHT_PORTRAIT;
  1007. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_THEATRE)) {
  1008. newSceneMode = ExynosCamera::SCENE_MODE_THEATRE;
  1009. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_BEACH)) {
  1010. newSceneMode = ExynosCamera::SCENE_MODE_BEACH;
  1011. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SNOW)) {
  1012. newSceneMode = ExynosCamera::SCENE_MODE_SNOW;
  1013. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SUNSET)) {
  1014. newSceneMode = ExynosCamera::SCENE_MODE_SUNSET;
  1015. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_STEADYPHOTO)) {
  1016. newSceneMode = ExynosCamera::SCENE_MODE_STEADYPHOTO;
  1017. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_FIREWORKS)) {
  1018. newSceneMode = ExynosCamera::SCENE_MODE_FIREWORKS;
  1019. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SPORTS)) {
  1020. newSceneMode = ExynosCamera::SCENE_MODE_SPORTS;
  1021. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_PARTY)) {
  1022. newSceneMode = ExynosCamera::SCENE_MODE_PARTY;
  1023. strNewFlashMode = CameraParameters::FLASH_MODE_AUTO;
  1024. } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_CANDLELIGHT)) {
  1025. newSceneMode = ExynosCamera::SCENE_MODE_CANDLELIGHT;
  1026. } else {
  1027. ALOGE("ERR(%s):unmatched scene_mode(%s)",
  1028. __func__, strNewSceneMode); //action, night-portrait, theatre, steadyphoto
  1029. ret = UNKNOWN_ERROR;
  1030. }
  1031. }
  1032. // focus mode
  1033. if (strNewFocusMode != NULL) {
  1034. int newFocusMode = -1;
  1035. if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_AUTO)) {
  1036. newFocusMode = ExynosCamera::FOCUS_MODE_AUTO;
  1037. m_params.set(CameraParameters::KEY_FOCUS_DISTANCES,
  1038. BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR);
  1039. } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_INFINITY)) {
  1040. newFocusMode = ExynosCamera::FOCUS_MODE_INFINITY;
  1041. m_params.set(CameraParameters::KEY_FOCUS_DISTANCES,
  1042. BACK_CAMERA_INFINITY_FOCUS_DISTANCES_STR);
  1043. } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_MACRO)) {
  1044. newFocusMode = ExynosCamera::FOCUS_MODE_MACRO;
  1045. m_params.set(CameraParameters::KEY_FOCUS_DISTANCES,
  1046. BACK_CAMERA_MACRO_FOCUS_DISTANCES_STR);
  1047. } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_FIXED)) {
  1048. newFocusMode = ExynosCamera::FOCUS_MODE_FIXED;
  1049. } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_EDOF)) {
  1050. newFocusMode = ExynosCamera::FOCUS_MODE_EDOF;
  1051. } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
  1052. newFocusMode = ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO;
  1053. } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
  1054. newFocusMode = ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE;
  1055. } else {
  1056. ALOGE("ERR(%s):unmatched focus_mode(%s)", __func__, strNewFocusMode);
  1057. ret = UNKNOWN_ERROR;
  1058. }
  1059. if (0 <= newFocusMode) {
  1060. if (m_secCamera->setFocusMode(newFocusMode) == false) {
  1061. ALOGE("ERR(%s):m_secCamera->setFocusMode(%d) fail", __func__, newFocusMode);
  1062. ret = UNKNOWN_ERROR;
  1063. } else {
  1064. m_params.set(CameraParameters::KEY_FOCUS_MODE, strNewFocusMode);
  1065. }
  1066. }
  1067. }
  1068. // flash mode
  1069. if (strNewFlashMode != NULL) {
  1070. int newFlashMode = -1;
  1071. if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_OFF))
  1072. newFlashMode = ExynosCamera::FLASH_MODE_OFF;
  1073. else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_AUTO))
  1074. newFlashMode = ExynosCamera::FLASH_MODE_AUTO;
  1075. else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_ON))
  1076. newFlashMode = ExynosCamera::FLASH_MODE_ON;
  1077. else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_RED_EYE))
  1078. newFlashMode = ExynosCamera::FLASH_MODE_RED_EYE;
  1079. else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_TORCH))
  1080. newFlashMode = ExynosCamera::FLASH_MODE_TORCH;
  1081. else {
  1082. ALOGE("ERR(%s):unmatched flash_mode(%s)", __func__, strNewFlashMode); //red-eye
  1083. ret = UNKNOWN_ERROR;
  1084. }
  1085. if (0 <= newFlashMode) {
  1086. if (m_secCamera->setFlashMode(newFlashMode) == false) {
  1087. ALOGE("ERR(%s):m_secCamera->setFlashMode(%d) fail", __func__, newFlashMode);
  1088. ret = UNKNOWN_ERROR;
  1089. } else {
  1090. m_params.set(CameraParameters::KEY_FLASH_MODE, strNewFlashMode);
  1091. }
  1092. }
  1093. }
  1094. // scene mode
  1095. if (0 <= newSceneMode) {
  1096. if (m_secCamera->setSceneMode(newSceneMode) == false) {
  1097. ALOGE("ERR(%s):m_secCamera->setSceneMode(%d) fail", __func__, newSceneMode);
  1098. ret = UNKNOWN_ERROR;
  1099. } else {
  1100. m_params.set(CameraParameters::KEY_SCENE_MODE, strNewSceneMode);
  1101. }
  1102. }
  1103. }
  1104. // focus areas
  1105. const char *newFocusAreas = params.get(CameraParameters::KEY_FOCUS_AREAS);
  1106. int maxNumFocusAreas = m_secCamera->getMaxNumFocusAreas();
  1107. if (newFocusAreas != NULL && maxNumFocusAreas != 0) {
  1108. int curFocusMode = m_secCamera->getFocusMode();
  1109. // In CameraParameters.h
  1110. // Focus area only has effect if the cur focus mode is FOCUS_MODE_AUTO,
  1111. // FOCUS_MODE_MACRO, FOCUS_MODE_CONTINUOUS_VIDEO, or
  1112. // FOCUS_MODE_CONTINUOUS_PICTURE.
  1113. if ( curFocusMode & ExynosCamera::FOCUS_MODE_AUTO
  1114. || curFocusMode & ExynosCamera::FOCUS_MODE_MACRO
  1115. || curFocusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO
  1116. || curFocusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE) {
  1117. // ex : (-10,-10,0,0,300),(0,0,10,10,700)
  1118. ExynosRect2 *rect2s = new ExynosRect2[maxNumFocusAreas];
  1119. int *weights = new int[maxNumFocusAreas];
  1120. int validFocusedAreas = m_bracketsStr2Ints((char *)newFocusAreas, maxNumFocusAreas, rect2s, weights);
  1121. if (0 < validFocusedAreas) {
  1122. // CameraParameters.h
  1123. // A special case of single focus area (0,0,0,0,0) means driver to decide
  1124. // the focus area. For example, the driver may use more signals to decide
  1125. // focus areas and change them dynamically. Apps can set (0,0,0,0,0) if they
  1126. // want the driver to decide focus areas.
  1127. if ( validFocusedAreas == 1
  1128. && rect2s[0].x1 == 0 && rect2s[0].y1 == 0 && rect2s[0].x2 == 0 && rect2s[0].y2 == 0) {
  1129. rect2s[0].x1 = 0;
  1130. rect2s[0].y1 = 0;
  1131. rect2s[0].x2 = newPreviewW;
  1132. rect2s[0].y2 = newPreviewH;
  1133. } else {
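// Remap each focus rect from the API's -1000..1000 range to the 0..1023 range
// the driver appears to expect (e.g. -1000 -> 0, 0 -> 511, 1000 -> 1023).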
  1134. for (int i = 0; i < validFocusedAreas; i++) {
  1135. rect2s[i].x1 = (rect2s[i].x1 + 1000) * 1023 / 2000;
  1136. rect2s[i].y1 = (rect2s[i].y1 + 1000) * 1023 / 2000;
  1137. rect2s[i].x2 = (rect2s[i].x2 + 1000) * 1023 / 2000;
  1138. rect2s[i].y2 = (rect2s[i].y2 + 1000) * 1023 / 2000;
  1139. }
  1140. if (m_secCamera->setFocusAreas(validFocusedAreas, rect2s, weights) == false) {
  1141. ALOGE("ERR(%s):setFocusAreas(%s) fail", __func__, newFocusAreas);
  1142. ret = UNKNOWN_ERROR;
  1143. } else {
  1144. m_params.set(CameraParameters::KEY_FOCUS_AREAS, newFocusAreas);
  1145. }
  1146. }
  1147. }
  1148. delete [] rect2s;
  1149. delete [] weights;
  1150. }
  1151. }
  1152. // image effect
  1153. const char *strNewEffect = params.get(CameraParameters::KEY_EFFECT);
  1154. if (strNewEffect != NULL) {
  1155. int newEffect = -1;
  1156. if (!strcmp(strNewEffect, CameraParameters::EFFECT_NONE)) {
  1157. newEffect = ExynosCamera::EFFECT_NONE;
  1158. } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_MONO)) {
  1159. newEffect = ExynosCamera::EFFECT_MONO;
  1160. } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_NEGATIVE)) {
  1161. newEffect = ExynosCamera::EFFECT_NEGATIVE;
  1162. } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_SOLARIZE)) {
  1163. newEffect = ExynosCamera::EFFECT_SOLARIZE;
  1164. } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_SEPIA)) {
  1165. newEffect = ExynosCamera::EFFECT_SEPIA;
  1166. } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_POSTERIZE)) {
  1167. newEffect = ExynosCamera::EFFECT_POSTERIZE;
  1168. } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_WHITEBOARD)) {
  1169. newEffect = ExynosCamera::EFFECT_WHITEBOARD;
  1170. } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_BLACKBOARD)) {
  1171. newEffect = ExynosCamera::EFFECT_BLACKBOARD;
  1172. } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_AQUA)) {
  1173. newEffect = ExynosCamera::EFFECT_AQUA;
  1174. } else {
  1175. ALOGE("ERR(%s):Invalid effect(%s)", __func__, strNewEffect);
  1176. ret = UNKNOWN_ERROR;
  1177. }
  1178. if (0 <= newEffect) {
  1179. if (m_secCamera->setColorEffect(newEffect) == false) {
  1180. ALOGE("ERR(%s):Fail on m_secCamera->setColorEffect(effect(%d))", __func__, newEffect);
  1181. ret = UNKNOWN_ERROR;
  1182. } else {
  1183. const char *oldStrEffect = m_params.get(CameraParameters::KEY_EFFECT);
  1184. if (oldStrEffect) {
  1185. if (strcmp(oldStrEffect, strNewEffect)) {
  1186. m_setSkipFrame(EFFECT_SKIP_FRAME);
  1187. }
  1188. }
  1189. m_params.set(CameraParameters::KEY_EFFECT, strNewEffect);
  1190. }
  1191. }
  1192. }
  1193. // gps altitude
  1194. const char *strNewGpsAltitude = params.get(CameraParameters::KEY_GPS_ALTITUDE);
  1195. if (m_secCamera->setGpsAltitude(strNewGpsAltitude) == false) {
  1196. ALOGE("ERR(%s):m_secCamera->setGpsAltitude(%s) fail", __func__, strNewGpsAltitude);
  1197. ret = UNKNOWN_ERROR;
  1198. } else {
  1199. if (strNewGpsAltitude)
  1200. m_params.set(CameraParameters::KEY_GPS_ALTITUDE, strNewGpsAltitude);
  1201. else
  1202. m_params.remove(CameraParameters::KEY_GPS_ALTITUDE);
  1203. }
  1204. // gps latitude
  1205. const char *strNewGpsLatitude = params.get(CameraParameters::KEY_GPS_LATITUDE);
  1206. if (m_secCamera->setGpsLatitude(strNewGpsLatitude) == false) {
  1207. ALOGE("ERR(%s):m_secCamera->setGpsLatitude(%s) fail", __func__, strNewGpsLatitude);
  1208. ret = UNKNOWN_ERROR;
  1209. } else {
  1210. if (strNewGpsLatitude)
  1211. m_params.set(CameraParameters::KEY_GPS_LATITUDE, strNewGpsLatitude);
  1212. else
  1213. m_params.remove(CameraParameters::KEY_GPS_LATITUDE);
  1214. }
  1215. // gps longitude
  1216. const char *strNewGpsLongtitude = params.get(CameraParameters::KEY_GPS_LONGITUDE);
  1217. if (m_secCamera->setGpsLongitude(strNewGpsLongtitude) == false) {
  1218. ALOGE("ERR(%s):m_secCamera->setGpsLongitude(%s) fail", __func__, strNewGpsLongtitude);
  1219. ret = UNKNOWN_ERROR;
  1220. } else {
  1221. if (strNewGpsLongtitude)
  1222. m_params.set(CameraParameters::KEY_GPS_LONGITUDE, strNewGpsLongtitude);
  1223. else
  1224. m_params.remove(CameraParameters::KEY_GPS_LONGITUDE);
  1225. }
  1226. // gps processing method
  1227. const char *strNewGpsProcessingMethod = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD);
  1228. if (m_secCamera->setGpsProcessingMethod(strNewGpsProcessingMethod) == false) {
  1229. ALOGE("ERR(%s):m_secCamera->setGpsProcessingMethod(%s) fail", __func__, strNewGpsProcessingMethod);
  1230. ret = UNKNOWN_ERROR;
  1231. } else {
  1232. if (strNewGpsProcessingMethod)
  1233. m_params.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, strNewGpsProcessingMethod);
  1234. else
  1235. m_params.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD);
  1236. }
  1237. // gps timestamp
  1238. const char *strNewGpsTimestamp = params.get(CameraParameters::KEY_GPS_TIMESTAMP);
  1239. if (m_secCamera->setGpsTimeStamp(strNewGpsTimestamp) == false) {
  1240. ALOGE("ERR(%s):m_secCamera->setGpsTimeStamp(%s) fail", __func__, strNewGpsTimestamp);
  1241. ret = UNKNOWN_ERROR;
  1242. } else {
  1243. if (strNewGpsTimestamp)
  1244. m_params.set(CameraParameters::KEY_GPS_TIMESTAMP, strNewGpsTimestamp);
  1245. else
  1246. m_params.remove(CameraParameters::KEY_GPS_TIMESTAMP);
  1247. }
  1248. ///////////////////////////////////////////////////
  1249. // Additional API.
  1250. ///////////////////////////////////////////////////
  1251. // brightness
  1252. int newBrightness = params.getInt("brightness");
  1253. int maxBrightness = params.getInt("brightness-max");
  1254. int minBrightness = params.getInt("brightness-min");
  1255. ALOGV("DEBUG(%s):newBrightness %d", __func__, newBrightness);
  1256. if ((minBrightness <= newBrightness) && (newBrightness <= maxBrightness)) {
  1257. if (m_secCamera->setBrightness(newBrightness) == false) {
  1258. ALOGE("ERR(%s):Fail on m_secCamera->setBrightness(%d)", __func__, newBrightness);
  1259. ret = UNKNOWN_ERROR;
  1260. } else {
  1261. m_params.set("brightness", newBrightness);
  1262. }
  1263. }
  1264. // saturation
  1265. int newSaturation = params.getInt("saturation");
  1266. int maxSaturation = params.getInt("saturation-max");
  1267. int minSaturation = params.getInt("saturation-min");
  1268. ALOGV("DEBUG(%s):newSaturation %d", __func__, newSaturation);
  1269. if ((minSaturation <= newSaturation) && (newSaturation <= maxSaturation)) {
  1270. if (m_secCamera->setSaturation(newSaturation) == false) {
  1271. ALOGE("ERR(%s):Fail on m_secCamera->setSaturation(%d)", __func__, newSaturation);
  1272. ret = UNKNOWN_ERROR;
  1273. } else {
  1274. m_params.set("saturation", newSaturation);
  1275. }
  1276. }
  1277. // sharpness
  1278. int newSharpness = params.getInt("sharpness");
  1279. int maxSharpness = params.getInt("sharpness-max");
  1280. int minSharpness = params.getInt("sharpness-min");
  1281. ALOGV("DEBUG(%s):newSharpness %d", __func__, newSharpness);
  1282. if ((minSharpness <= newSharpness) && (newSharpness <= maxSharpness)) {
  1283. if (m_secCamera->setSharpness(newSharpness) == false) {
  1284. ALOGE("ERR(%s):Fail on m_secCamera->setSharpness(%d)", __func__, newSharpness);
  1285. ret = UNKNOWN_ERROR;
  1286. } else {
  1287. m_params.set("sharpness", newSharpness);
  1288. }
  1289. }
  1290. // hue
  1291. int newHue = params.getInt("hue");
  1292. int maxHue = params.getInt("hue-max");
  1293. int minHue = params.getInt("hue-min");
  1294. ALOGV("DEBUG(%s):newHue %d", __func__, newHue);
  1295. if ((minHue <= newHue) && (maxHue >= newHue)) {
  1296. if (m_secCamera->setHue(newHue) == false) {
  1297. ALOGE("ERR(%s):Fail on m_secCamera->setHue(hue(%d))", __func__, newHue);
  1298. ret = UNKNOWN_ERROR;
  1299. } else {
  1300. m_params.set("hue", newHue);
  1301. }
  1302. }
  1303. // ISO
  1304. const char *strNewISO = params.get("iso");
  1305. ALOGV("DEBUG(%s):strNewISO %s", __func__, strNewISO);
  1306. if (strNewISO != NULL) {
  1307. int newISO = -1;
  1308. if (!strcmp(strNewISO, "auto"))
  1309. newISO = 0;
  1310. else {
  1311. newISO = (int)atoi(strNewISO);
  1312. if (newISO == 0) {
  1313. ALOGE("ERR(%s):Invalid iso value(%s)", __func__, strNewISO);
  1314. ret = UNKNOWN_ERROR;
  1315. }
  1316. }
  1317. if (0 <= newISO) {
  1318. if (m_secCamera->setISO(newISO) == false) {
  1319. ALOGE("ERR(%s):Fail on m_secCamera->setISO(iso(%d))", __func__, newISO);
  1320. ret = UNKNOWN_ERROR;
  1321. } else {
  1322. m_params.set("iso", strNewISO);
  1323. }
  1324. }
  1325. }
  1326. //contrast
  1327. const char *strNewContrast = params.get("contrast");
  1328. ALOGV("DEBUG(%s):strNewContrast %s", __func__, strNewContrast);
  1329. if (strNewContrast != NULL) {
  1330. int newContrast = -1;
  1331. if (!strcmp(strNewContrast, "auto"))
  1332. newContrast = ExynosCamera::CONTRAST_AUTO;
  1333. else if (!strcmp(strNewContrast, "-2"))
  1334. newContrast = ExynosCamera::CONTRAST_MINUS_2;
  1335. else if (!strcmp(strNewContrast, "-1"))
  1336. newContrast = ExynosCamera::CONTRAST_MINUS_1;
  1337. else if (!strcmp(strNewContrast, "0"))
  1338. newContrast = ExynosCamera::CONTRAST_DEFAULT;
  1339. else if (!strcmp(strNewContrast, "1"))
  1340. newContrast = ExynosCamera::CONTRAST_PLUS_1;
  1341. else if (!strcmp(strNewContrast, "2"))
  1342. newContrast = ExynosCamera::CONTRAST_PLUS_2;
  1343. else {
  1344. ALOGE("ERR(%s):Invalid contrast value(%s)", __func__, strNewContrast);
  1345. ret = UNKNOWN_ERROR;
  1346. }
  1347. if (0 <= newContrast) {
  1348. if (m_secCamera->setContrast(newContrast) == false) {
  1349. ALOGE("ERR(%s):Fail on m_secCamera->setContrast(contrast(%d))", __func__, newContrast);
  1350. ret = UNKNOWN_ERROR;
  1351. } else {
  1352. m_params.set("contrast", strNewContrast);
  1353. }
  1354. }
  1355. }
  1356. //WDR
  1357. int newWdr = params.getInt("wdr");
  1358. ALOGV("DEBUG(%s):newWdr %d", __func__, newWdr);
  1359. if (0 <= newWdr) {
  1360. if (m_secCamera->setWDR(newWdr) == false) {
  1361. ALOGE("ERR(%s):Fail on m_secCamera->setWDR(%d)", __func__, newWdr);
  1362. ret = UNKNOWN_ERROR;
  1363. }
  1364. }
  1365. //anti shake
  1366. int newAntiShake = m_internalParams.getInt("anti-shake");
  1367. ALOGV("DEBUG(%s):newAntiShake %d", __func__, newAntiShake);
  1368. if (0 <= newAntiShake) {
  1369. bool toggle = false;
  1370. if (newAntiShake == 1)
  1371. toggle = true;
  1372. if (m_secCamera->setAntiShake(toggle) == false) {
  1373. ALOGE("ERR(%s):Fail on m_secCamera->setAntiShake(%d)", __func__, newAntiShake);
  1374. ret = UNKNOWN_ERROR;
  1375. }
  1376. }
  1377. //gamma
  1378. const char *strNewGamma = m_internalParams.get("video_recording_gamma");
  1379. ALOGV("DEBUG(%s):strNewGamma %s", __func__, strNewGamma);
  1380. if (strNewGamma != NULL) {
  1381. int newGamma = -1;
  1382. if (!strcmp(strNewGamma, "off"))
  1383. newGamma = 0;
  1384. else if (!strcmp(strNewGamma, "on"))
  1385. newGamma = 1;
  1386. else {
  1387. ALOGE("ERR(%s):unmatched gamma(%s)", __func__, strNewGamma);
  1388. ret = UNKNOWN_ERROR;
  1389. }
  1390. if (0 <= newGamma) {
  1391. bool toggle = false;
  1392. if (newGamma == 1)
  1393. toggle = true;
  1394. if (m_secCamera->setGamma(toggle) == false) {
  1395. ALOGE("ERR(%s):m_secCamera->setGamma(%s) fail", __func__, strNewGamma);
  1396. ret = UNKNOWN_ERROR;
  1397. }
  1398. }
  1399. }
  1400. //slow ae
  1401. const char *strNewSlowAe = m_internalParams.get("slow_ae");
  1402. ALOGV("DEBUG(%s):strNewSlowAe %s", __func__, strNewSlowAe);
  1403. if (strNewSlowAe != NULL) {
  1404. int newSlowAe = -1;
  1405. if (!strcmp(strNewSlowAe, "off"))
  1406. newSlowAe = 0;
  1407. else if (!strcmp(strNewSlowAe, "on"))
  1408. newSlowAe = 1;
  1409. else {
  1410. ALOGE("ERR(%s):unmatched slow_ae(%s)", __func__, strNewSlowAe);
  1411. ret = UNKNOWN_ERROR;
  1412. }
  1413. if (0 <= newSlowAe) {
  1414. bool toggle = false;
  1415. if (newSlowAe == 1)
  1416. toggle = true;
1417. if (m_secCamera->setSlowAE(toggle) == false) {
  1418. ALOGE("ERR(%s):m_secCamera->setSlowAE(%d) fail", __func__, newSlowAe);
  1419. ret = UNKNOWN_ERROR;
  1420. }
  1421. }
  1422. }
  1423. // Shot mode
  1424. int newShotMode = m_internalParams.getInt("shot_mode");
  1425. ALOGV("DEBUG(%s):newShotMode %d", __func__, newShotMode);
  1426. if (0 <= newShotMode) {
  1427. if (m_secCamera->setShotMode(newShotMode) == false) {
  1428. ALOGE("ERR(%s):Fail on m_secCamera->setShotMode(%d)", __func__, newShotMode);
  1429. ret = UNKNOWN_ERROR;
  1430. }
  1431. } else {
1432. newShotMode = 0;
  1433. }
  1434. ALOGV("DEBUG(%s):return ret = %d", __func__, ret);
  1435. return ret;
  1436. }
  1437. CameraParameters ExynosCameraHWInterface::getParameters() const
  1438. {
  1439. ALOGV("DEBUG(%s):", __func__);
  1440. return m_params;
  1441. }
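/* sendCommand() only handles CAMERA_CMD_START/STOP_FACE_DETECTION: SW face
 * detection is rejected (HW only), and the start/stop call is skipped when
 * the detector is already in the requested state. Every other command
 * returns BAD_VALUE.
 */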
  1442. status_t ExynosCameraHWInterface::sendCommand(int32_t command, int32_t arg1, int32_t arg2)
  1443. {
  1444. switch (command) {
  1445. case CAMERA_CMD_START_FACE_DETECTION:
  1446. case CAMERA_CMD_STOP_FACE_DETECTION:
  1447. if (m_secCamera->getMaxNumDetectedFaces() == 0) {
  1448. ALOGE("ERR(%s):getMaxNumDetectedFaces == 0", __func__);
  1449. return BAD_VALUE;
  1450. }
  1451. if (arg1 == CAMERA_FACE_DETECTION_SW) {
  1452. ALOGE("ERR(%s):only support HW face dectection", __func__);
  1453. return BAD_VALUE;
  1454. }
  1455. if (command == CAMERA_CMD_START_FACE_DETECTION) {
  1456. if ( m_secCamera->flagStartFaceDetection() == false
  1457. && m_secCamera->startFaceDetection() == false) {
  1458. ALOGE("ERR(%s):startFaceDetection() fail", __func__);
  1459. return BAD_VALUE;
  1460. }
  1461. } else { // if (command == CAMERA_CMD_STOP_FACE_DETECTION)
  1462. if ( m_secCamera->flagStartFaceDetection() == true
  1463. && m_secCamera->stopFaceDetection() == false) {
  1464. ALOGE("ERR(%s):stopFaceDetection() fail", __func__);
  1465. return BAD_VALUE;
  1466. }
  1467. }
  1468. break;
  1469. default:
  1470. ALOGE("ERR(%s):unexpectect command(%d) fail", __func__, command);
  1471. return BAD_VALUE;
  1472. break;
  1473. }
  1474. return NO_ERROR;
  1475. }
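/* release(): stop the video/preview/auto-focus/picture threads first (from a
 * caller thread, to avoid self-join deadlocks), then free the video/preview
 * heaps and CSC handles, and destroy m_secCamera last since the heaps may
 * have dup'd its file descriptor.
 */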
  1476. void ExynosCameraHWInterface::release()
  1477. {
  1478. ALOGV("DEBUG(%s):", __func__);
  1479. /* shut down any threads we have that might be running. do it here
  1480. * instead of the destructor. we're guaranteed to be on another thread
  1481. * than the ones below. if we used the destructor, since the threads
  1482. * have a reference to this object, we could wind up trying to wait
  1483. * for ourself to exit, which is a deadlock.
  1484. */
  1485. if (m_videoThread != NULL) {
  1486. m_videoThread->requestExit();
  1487. m_exitVideoThread = true;
  1488. m_videoRunning = true; // let it run so it can exit
  1489. m_videoCondition.signal();
  1490. m_videoThread->requestExitAndWait();
  1491. m_videoThread.clear();
  1492. }
  1493. if (m_previewThread != NULL) {
1494. /* this thread is normally already in its threadLoop but blocked
  1495. * on the condition variable or running. signal it so it wakes
  1496. * up and can exit.
  1497. */
  1498. m_previewThread->requestExit();
  1499. m_exitPreviewThread = true;
  1500. m_previewRunning = true; // let it run so it can exit
  1501. m_previewCondition.signal();
  1502. m_previewThread->requestExitAndWait();
  1503. m_previewThread.clear();
  1504. }
  1505. if (m_autoFocusThread != NULL) {
1506. /* this thread is normally already in its threadLoop but blocked
  1507. * on the condition variable. signal it so it wakes up and can exit.
  1508. */
  1509. m_focusLock.lock();
  1510. m_autoFocusThread->requestExit();
  1511. m_exitAutoFocusThread = true;
  1512. m_focusCondition.signal();
  1513. m_focusLock.unlock();
  1514. m_autoFocusThread->requestExitAndWait();
  1515. m_autoFocusThread.clear();
  1516. }
  1517. if (m_pictureThread != NULL) {
  1518. m_pictureThread->requestExitAndWait();
  1519. m_pictureThread.clear();
  1520. }
  1521. for (int i = 0; i < NUM_OF_VIDEO_BUF; i++) {
  1522. if (m_videoHeap[i]) {
  1523. m_videoHeap[i]->release(m_videoHeap[i]);
  1524. m_videoHeap[i] = 0;
  1525. }
  1526. if (m_resizedVideoHeap[i]) {
  1527. m_resizedVideoHeap[i]->release(m_resizedVideoHeap[i]);
  1528. m_resizedVideoHeap[i] = 0;
  1529. }
  1530. }
  1531. for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) {
  1532. if (m_previewHeap[i]) {
  1533. m_previewHeap[i]->release(m_previewHeap[i]);
  1534. m_previewHeap[i] = 0;
  1535. }
  1536. }
  1537. if (m_pictureRunning == true) {
  1538. if (m_stopPictureInternal() == false)
  1539. ALOGE("ERR(%s):m_stopPictureInternal() fail", __func__);
  1540. }
  1541. if (m_exynosVideoCSC)
  1542. csc_deinit(m_exynosVideoCSC);
  1543. m_exynosVideoCSC = NULL;
  1544. if (m_exynosPictureCSC)
  1545. csc_deinit(m_exynosPictureCSC);
  1546. m_exynosPictureCSC = NULL;
  1547. if (m_exynosPreviewCSC)
  1548. csc_deinit(m_exynosPreviewCSC);
  1549. m_exynosPreviewCSC = NULL;
  1550. /* close after all the heaps are cleared since those
  1551. * could have dup'd our file descriptor.
  1552. */
  1553. if (m_secCamera->flagCreate() == true)
  1554. m_secCamera->destroy();
  1555. }
  1556. status_t ExynosCameraHWInterface::dump(int fd) const
  1557. {
  1558. const size_t SIZE = 256;
  1559. char buffer[SIZE];
  1560. String8 result;
  1561. const Vector<String16> args;
  1562. if (m_secCamera != 0) {
  1563. m_params.dump(fd, args);
  1564. m_internalParams.dump(fd, args);
  1565. snprintf(buffer, 255, " preview running(%s)\n", m_previewRunning?"true": "false");
  1566. result.append(buffer);
  1567. } else {
  1568. result.append("No camera client yet.\n");
  1569. }
  1570. write(fd, result.string(), result.size());
  1571. return NO_ERROR;
  1572. }
  1573. int ExynosCameraHWInterface::getCameraId() const
  1574. {
  1575. return m_secCamera->getCameraId();
  1576. }
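/* m_initDefaultParameters(): query the sensor capabilities through m_secCamera
 * and publish them as default CameraParameters (preview/video/picture sizes
 * and formats, focus/flash/scene/effect/white-balance/antibanding lists, fps
 * range, zoom, plus the vendor keys such as "brightness", "contrast", "iso").
 * It finishes by calling setParameters(p) so the driver state matches the
 * advertised defaults.
 */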
  1577. void ExynosCameraHWInterface::m_initDefaultParameters(int cameraId)
  1578. {
  1579. if (m_secCamera == NULL) {
  1580. ALOGE("ERR(%s):m_secCamera object is NULL", __func__);
  1581. return;
  1582. }
  1583. CameraParameters p;
  1584. CameraParameters ip;
  1585. String8 parameterString;
  1586. char * cameraName;
  1587. cameraName = m_secCamera->getCameraName();
  1588. if (cameraName == NULL)
  1589. ALOGE("ERR(%s):getCameraName() fail", __func__);
  1590. /*
  1591. if (cameraId == ExynosCamera::CAMERA_ID_BACK) {
  1592. p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
  1593. "3264x2448,2576x1948,1920x1080,1280x720,800x480,720x480,640x480,320x240,528x432,176x144");
  1594. p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
  1595. "3264x2448,1920x1080,1280x720,800x480,720x480,640x480");
  1596. p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES,
  1597. "1920x1080,1280x720,640x480,176x144");
  1598. } else {
  1599. p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
  1600. "1392x1392,1280x720,640x480,352x288,320x240,176x144");
  1601. p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
  1602. "1392x1392,1280x960,640x480");
  1603. p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES,
  1604. "1280x720,640x480,176x144");
  1605. }
  1606. */
  1607. char strBuf[256];
  1608. String8 listString;
  1609. // preview
  1610. int previewMaxW = 0;
  1611. int previewMaxH = 0;
  1612. m_secCamera->getSupportedPreviewSizes(&previewMaxW, &previewMaxH);
  1613. listString.setTo("");
  1614. if (m_getResolutionList(listString, strBuf, previewMaxW, previewMaxH) == false) {
  1615. ALOGE("ERR(%s):m_getResolutionList() fail", __func__);
  1616. previewMaxW = 640;
  1617. previewMaxH = 480;
  1618. listString = String8::format("%dx%d", previewMaxW, previewMaxH);
  1619. }
  1620. p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, listString.string());
  1621. p.setPreviewSize(previewMaxW, previewMaxH);
  1622. p.getSupportedPreviewSizes(m_supportedPreviewSizes);
  1623. listString.setTo("");
  1624. listString = String8::format("%s,%s", CameraParameters::PIXEL_FORMAT_YUV420SP, CameraParameters::PIXEL_FORMAT_YUV420P);
  1625. p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, listString);
  1626. p.setPreviewFormat(CameraParameters::PIXEL_FORMAT_YUV420P);
  1627. // video
  1628. int videoMaxW = 0;
  1629. int videoMaxH = 0;
  1630. m_secCamera->getSupportedVideoSizes(&videoMaxW, &videoMaxH);
  1631. listString.setTo("");
  1632. if (m_getResolutionList(listString, strBuf, videoMaxW, videoMaxH) == false) {
  1633. ALOGE("ERR(%s):m_getResolutionList() fail", __func__);
  1634. videoMaxW = 640;
  1635. videoMaxH = 480;
  1636. listString = String8::format("%dx%d", videoMaxW, videoMaxH);
  1637. }
  1638. p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, listString.string());
  1639. p.setVideoSize(videoMaxW, videoMaxH);
  1640. int preferredPreviewW = 0;
  1641. int preferredPreviewH = 0;
  1642. m_secCamera->getPreferredPreivewSizeForVideo(&preferredPreviewW, &preferredPreviewH);
  1643. listString.setTo("");
  1644. listString = String8::format("%dx%d", preferredPreviewW, preferredPreviewH);
  1645. p.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, listString.string());
  1646. p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, CameraParameters::PIXEL_FORMAT_YUV420SP);
  1647. if (m_secCamera->isVideoSnapshotSupported() == true)
  1648. p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, "true");
  1649. else
  1650. p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, "false");
  1651. if (m_secCamera->isVideoStabilizationSupported() == true)
  1652. p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, "true");
  1653. else
  1654. p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, "false");
  1655. // picture
  1656. int pictureMaxW = 0;
  1657. int pictureMaxH = 0;
  1658. m_secCamera->getSupportedPictureSizes(&pictureMaxW, &pictureMaxH);
  1659. listString.setTo("");
  1660. if (m_getResolutionList(listString, strBuf, pictureMaxW, pictureMaxH) == false) {
  1661. ALOGE("ERR(%s):m_getResolutionList() fail", __func__);
  1662. pictureMaxW = 640;
1663. pictureMaxH = 480;
  1664. listString = String8::format("%dx%d", pictureMaxW, pictureMaxH);
  1665. }
  1666. p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, listString.string());
  1667. p.setPictureSize(pictureMaxW, pictureMaxH);
  1668. p.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
  1669. CameraParameters::PIXEL_FORMAT_JPEG);
  1670. p.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG);
  1671. p.set(CameraParameters::KEY_JPEG_QUALITY, "100"); // maximum quality
  1672. // thumbnail
  1673. int thumbnailMaxW = 0;
  1674. int thumbnailMaxH = 0;
  1675. m_secCamera->getSupportedJpegThumbnailSizes(&thumbnailMaxW, &thumbnailMaxH);
  1676. listString = String8::format("%dx%d", thumbnailMaxW, thumbnailMaxH);
  1677. listString.append(",0x0");
  1678. p.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, listString.string());
  1679. p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, thumbnailMaxW);
  1680. p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, thumbnailMaxH);
  1681. p.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, "100");
  1682. // exposure
  1683. p.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, m_secCamera->getMinExposureCompensation());
  1684. p.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, m_secCamera->getMaxExposureCompensation());
  1685. p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, m_secCamera->getExposureCompensation());
  1686. p.setFloat(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, m_secCamera->getExposureCompensationStep());
  1687. if (m_secCamera->isAutoExposureLockSupported() == true)
  1688. p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "true");
  1689. else
  1690. p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "false");
  1691. // face detection
  1692. p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, m_secCamera->getMaxNumDetectedFaces());
  1693. p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, 0);
  1694. // focus mode
  1695. int focusMode = m_secCamera->getSupportedFocusModes();
  1696. parameterString.setTo("");
  1697. if (focusMode & ExynosCamera::FOCUS_MODE_AUTO) {
  1698. parameterString.append(CameraParameters::FOCUS_MODE_AUTO);
  1699. parameterString.append(",");
  1700. }
  1701. if (focusMode & ExynosCamera::FOCUS_MODE_INFINITY) {
  1702. parameterString.append(CameraParameters::FOCUS_MODE_INFINITY);
  1703. parameterString.append(",");
  1704. }
  1705. if (focusMode & ExynosCamera::FOCUS_MODE_MACRO) {
  1706. parameterString.append(CameraParameters::FOCUS_MODE_MACRO);
  1707. parameterString.append(",");
  1708. }
  1709. if (focusMode & ExynosCamera::FOCUS_MODE_FIXED) {
  1710. parameterString.append(CameraParameters::FOCUS_MODE_FIXED);
  1711. parameterString.append(",");
  1712. }
  1713. if (focusMode & ExynosCamera::FOCUS_MODE_EDOF) {
  1714. parameterString.append(CameraParameters::FOCUS_MODE_EDOF);
  1715. parameterString.append(",");
  1716. }
  1717. if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO) {
  1718. parameterString.append(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO);
  1719. parameterString.append(",");
  1720. }
  1721. if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE)
  1722. parameterString.append(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE);
  1723. p.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES,
  1724. parameterString.string());
  1725. if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE)
  1726. p.set(CameraParameters::KEY_FOCUS_MODE,
  1727. CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE);
  1728. else if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO)
  1729. p.set(CameraParameters::KEY_FOCUS_MODE,
  1730. CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO);
  1731. else if (focusMode & ExynosCamera::FOCUS_MODE_AUTO)
  1732. p.set(CameraParameters::KEY_FOCUS_MODE,
  1733. CameraParameters::FOCUS_MODE_AUTO);
  1734. else
  1735. p.set(CameraParameters::KEY_FOCUS_MODE,
  1736. CameraParameters::FOCUS_MODE_FIXED);
  1737. // HACK
  1738. if (cameraId == ExynosCamera::CAMERA_ID_BACK) {
  1739. p.set(CameraParameters::KEY_FOCUS_DISTANCES,
  1740. BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR);
  1741. p.set(CameraParameters::FOCUS_DISTANCE_INFINITY,
  1742. BACK_CAMERA_FOCUS_DISTANCE_INFINITY);
  1743. } else {
  1744. p.set(CameraParameters::KEY_FOCUS_DISTANCES,
  1745. FRONT_CAMERA_FOCUS_DISTANCES_STR);
  1746. p.set(CameraParameters::FOCUS_DISTANCE_INFINITY,
  1747. FRONT_CAMERA_FOCUS_DISTANCE_INFINITY);
  1748. }
  1749. if (focusMode & ExynosCamera::FOCUS_MODE_TOUCH)
  1750. p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, m_secCamera->getMaxNumFocusAreas());
  1751. // flash
  1752. int flashMode = m_secCamera->getSupportedFlashModes();
  1753. parameterString.setTo("");
  1754. if (flashMode & ExynosCamera::FLASH_MODE_OFF) {
  1755. parameterString.append(CameraParameters::FLASH_MODE_OFF);
  1756. parameterString.append(",");
  1757. }
  1758. if (flashMode & ExynosCamera::FLASH_MODE_AUTO) {
  1759. parameterString.append(CameraParameters::FLASH_MODE_AUTO);
  1760. parameterString.append(",");
  1761. }
  1762. if (flashMode & ExynosCamera::FLASH_MODE_ON) {
  1763. parameterString.append(CameraParameters::FLASH_MODE_ON);
  1764. parameterString.append(",");
  1765. }
  1766. if (flashMode & ExynosCamera::FLASH_MODE_RED_EYE) {
  1767. parameterString.append(CameraParameters::FLASH_MODE_RED_EYE);
  1768. parameterString.append(",");
  1769. }
  1770. if (flashMode & ExynosCamera::FLASH_MODE_TORCH)
  1771. parameterString.append(CameraParameters::FLASH_MODE_TORCH);
  1772. p.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, parameterString.string());
  1773. p.set(CameraParameters::KEY_FLASH_MODE, CameraParameters::FLASH_MODE_OFF);
  1774. // scene mode
  1775. int sceneMode = m_secCamera->getSupportedSceneModes();
  1776. parameterString.setTo("");
  1777. if (sceneMode & ExynosCamera::SCENE_MODE_AUTO) {
  1778. parameterString.append(CameraParameters::SCENE_MODE_AUTO);
  1779. parameterString.append(",");
  1780. }
  1781. if (sceneMode & ExynosCamera::SCENE_MODE_ACTION) {
  1782. parameterString.append(CameraParameters::SCENE_MODE_ACTION);
  1783. parameterString.append(",");
  1784. }
  1785. if (sceneMode & ExynosCamera::SCENE_MODE_PORTRAIT) {
  1786. parameterString.append(CameraParameters::SCENE_MODE_PORTRAIT);
  1787. parameterString.append(",");
  1788. }
  1789. if (sceneMode & ExynosCamera::SCENE_MODE_LANDSCAPE) {
  1790. parameterString.append(CameraParameters::SCENE_MODE_LANDSCAPE);
  1791. parameterString.append(",");
  1792. }
  1793. if (sceneMode & ExynosCamera::SCENE_MODE_NIGHT) {
  1794. parameterString.append(CameraParameters::SCENE_MODE_NIGHT);
  1795. parameterString.append(",");
  1796. }
  1797. if (sceneMode & ExynosCamera::SCENE_MODE_NIGHT_PORTRAIT) {
  1798. parameterString.append(CameraParameters::SCENE_MODE_NIGHT_PORTRAIT);
  1799. parameterString.append(",");
  1800. }
  1801. if (sceneMode & ExynosCamera::SCENE_MODE_THEATRE) {
  1802. parameterString.append(CameraParameters::SCENE_MODE_THEATRE);
  1803. parameterString.append(",");
  1804. }
  1805. if (sceneMode & ExynosCamera::SCENE_MODE_BEACH) {
  1806. parameterString.append(CameraParameters::SCENE_MODE_BEACH);
  1807. parameterString.append(",");
  1808. }
  1809. if (sceneMode & ExynosCamera::SCENE_MODE_SNOW) {
  1810. parameterString.append(CameraParameters::SCENE_MODE_SNOW);
  1811. parameterString.append(",");
  1812. }
  1813. if (sceneMode & ExynosCamera::SCENE_MODE_SUNSET) {
  1814. parameterString.append(CameraParameters::SCENE_MODE_SUNSET);
  1815. parameterString.append(",");
  1816. }
  1817. if (sceneMode & ExynosCamera::SCENE_MODE_STEADYPHOTO) {
  1818. parameterString.append(CameraParameters::SCENE_MODE_STEADYPHOTO);
  1819. parameterString.append(",");
  1820. }
  1821. if (sceneMode & ExynosCamera::SCENE_MODE_FIREWORKS) {
  1822. parameterString.append(CameraParameters::SCENE_MODE_FIREWORKS);
  1823. parameterString.append(",");
  1824. }
  1825. if (sceneMode & ExynosCamera::SCENE_MODE_SPORTS) {
  1826. parameterString.append(CameraParameters::SCENE_MODE_SPORTS);
  1827. parameterString.append(",");
  1828. }
  1829. if (sceneMode & ExynosCamera::SCENE_MODE_PARTY) {
  1830. parameterString.append(CameraParameters::SCENE_MODE_PARTY);
  1831. parameterString.append(",");
  1832. }
  1833. if (sceneMode & ExynosCamera::SCENE_MODE_CANDLELIGHT)
  1834. parameterString.append(CameraParameters::SCENE_MODE_CANDLELIGHT);
  1835. p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES,
  1836. parameterString.string());
  1837. p.set(CameraParameters::KEY_SCENE_MODE,
  1838. CameraParameters::SCENE_MODE_AUTO);
  1839. // effect
  1840. int effect = m_secCamera->getSupportedColorEffects();
  1841. parameterString.setTo("");
  1842. if (effect & ExynosCamera::EFFECT_NONE) {
  1843. parameterString.append(CameraParameters::EFFECT_NONE);
  1844. parameterString.append(",");
  1845. }
  1846. if (effect & ExynosCamera::EFFECT_MONO) {
  1847. parameterString.append(CameraParameters::EFFECT_MONO);
  1848. parameterString.append(",");
  1849. }
  1850. if (effect & ExynosCamera::EFFECT_NEGATIVE) {
  1851. parameterString.append(CameraParameters::EFFECT_NEGATIVE);
  1852. parameterString.append(",");
  1853. }
  1854. if (effect & ExynosCamera::EFFECT_SOLARIZE) {
  1855. parameterString.append(CameraParameters::EFFECT_SOLARIZE);
  1856. parameterString.append(",");
  1857. }
  1858. if (effect & ExynosCamera::EFFECT_SEPIA) {
  1859. parameterString.append(CameraParameters::EFFECT_SEPIA);
  1860. parameterString.append(",");
  1861. }
  1862. if (effect & ExynosCamera::EFFECT_POSTERIZE) {
  1863. parameterString.append(CameraParameters::EFFECT_POSTERIZE);
  1864. parameterString.append(",");
  1865. }
  1866. if (effect & ExynosCamera::EFFECT_WHITEBOARD) {
  1867. parameterString.append(CameraParameters::EFFECT_WHITEBOARD);
  1868. parameterString.append(",");
  1869. }
  1870. if (effect & ExynosCamera::EFFECT_BLACKBOARD) {
  1871. parameterString.append(CameraParameters::EFFECT_BLACKBOARD);
  1872. parameterString.append(",");
  1873. }
  1874. if (effect & ExynosCamera::EFFECT_AQUA)
  1875. parameterString.append(CameraParameters::EFFECT_AQUA);
  1876. p.set(CameraParameters::KEY_SUPPORTED_EFFECTS, parameterString.string());
  1877. p.set(CameraParameters::KEY_EFFECT, CameraParameters::EFFECT_NONE);
  1878. // white balance
  1879. int whiteBalance = m_secCamera->getSupportedWhiteBalance();
  1880. parameterString.setTo("");
  1881. if (whiteBalance & ExynosCamera::WHITE_BALANCE_AUTO) {
  1882. parameterString.append(CameraParameters::WHITE_BALANCE_AUTO);
  1883. parameterString.append(",");
  1884. }
  1885. if (whiteBalance & ExynosCamera::WHITE_BALANCE_INCANDESCENT) {
  1886. parameterString.append(CameraParameters::WHITE_BALANCE_INCANDESCENT);
  1887. parameterString.append(",");
  1888. }
  1889. if (whiteBalance & ExynosCamera::WHITE_BALANCE_FLUORESCENT) {
  1890. parameterString.append(CameraParameters::WHITE_BALANCE_FLUORESCENT);
  1891. parameterString.append(",");
  1892. }
  1893. if (whiteBalance & ExynosCamera::WHITE_BALANCE_WARM_FLUORESCENT) {
  1894. parameterString.append(CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT);
  1895. parameterString.append(",");
  1896. }
  1897. if (whiteBalance & ExynosCamera::WHITE_BALANCE_DAYLIGHT) {
  1898. parameterString.append(CameraParameters::WHITE_BALANCE_DAYLIGHT);
  1899. parameterString.append(",");
  1900. }
  1901. if (whiteBalance & ExynosCamera::WHITE_BALANCE_CLOUDY_DAYLIGHT) {
  1902. parameterString.append(CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT);
  1903. parameterString.append(",");
  1904. }
  1905. if (whiteBalance & ExynosCamera::WHITE_BALANCE_TWILIGHT) {
  1906. parameterString.append(CameraParameters::WHITE_BALANCE_TWILIGHT);
  1907. parameterString.append(",");
  1908. }
  1909. if (whiteBalance & ExynosCamera::WHITE_BALANCE_SHADE)
  1910. parameterString.append(CameraParameters::WHITE_BALANCE_SHADE);
  1911. p.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE,
  1912. parameterString.string());
  1913. p.set(CameraParameters::KEY_WHITE_BALANCE, CameraParameters::WHITE_BALANCE_AUTO);
  1914. if (m_secCamera->isAutoWhiteBalanceLockSupported() == true)
  1915. p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "true");
  1916. else
  1917. p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "false");
  1918. // anti banding
  1919. int antiBanding = m_secCamera->getSupportedAntibanding();
  1920. parameterString.setTo("");
  1921. if (antiBanding & ExynosCamera::ANTIBANDING_AUTO) {
  1922. parameterString.append(CameraParameters::ANTIBANDING_AUTO);
  1923. parameterString.append(",");
  1924. }
  1925. if (antiBanding & ExynosCamera::ANTIBANDING_50HZ) {
  1926. parameterString.append(CameraParameters::ANTIBANDING_50HZ);
  1927. parameterString.append(",");
  1928. }
  1929. if (antiBanding & ExynosCamera::ANTIBANDING_60HZ) {
  1930. parameterString.append(CameraParameters::ANTIBANDING_60HZ);
  1931. parameterString.append(",");
  1932. }
  1933. if (antiBanding & ExynosCamera::ANTIBANDING_OFF)
  1934. parameterString.append(CameraParameters::ANTIBANDING_OFF);
  1935. p.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING,
  1936. parameterString.string());
  1937. p.set(CameraParameters::KEY_ANTIBANDING, CameraParameters::ANTIBANDING_OFF);
  1938. // rotation
  1939. p.set(CameraParameters::KEY_ROTATION, 0);
  1940. // view angle
  1941. p.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, m_secCamera->getHorizontalViewAngle());
  1942. p.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, m_secCamera->getVerticalViewAngle());
  1943. // metering
  1944. if (0 < m_secCamera->getMaxNumMeteringAreas())
  1945. p.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, m_secCamera->getMaxNumMeteringAreas());
  1946. // zoom
  1947. if (m_secCamera->isZoomSupported() == true) {
  1948. int maxZoom = m_secCamera->getMaxZoom();
  1949. if (0 < maxZoom) {
  1950. p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "true");
  1951. if (m_secCamera->isSmoothZoomSupported() == true)
  1952. p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "true");
  1953. else
  1954. p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false");
  1955. p.set(CameraParameters::KEY_MAX_ZOOM, maxZoom);
  1956. p.set(CameraParameters::KEY_ZOOM, m_secCamera->getZoom());
  1957. int max_zoom_ratio = m_secCamera->getMaxZoomRatio();
  1958. listString.setTo("");
  1959. if (m_getZoomRatioList(listString, strBuf, maxZoom, 100, max_zoom_ratio) == true)
  1960. p.set(CameraParameters::KEY_ZOOM_RATIOS, listString.string());
  1961. else
  1962. p.set(CameraParameters::KEY_ZOOM_RATIOS, "100");
  1963. } else {
  1964. p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false");
  1965. p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false");
  1966. }
  1967. } else {
  1968. p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false");
  1969. p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false");
  1970. }
  1971. // fps
  1972. int minPreviewFps, maxPreviewFps;
  1973. m_secCamera->getPreviewFpsRange(&minPreviewFps, &maxPreviewFps);
  1974. int baseFps = ((minPreviewFps + 5) / 5) * 5;
  1975. listString.setTo("");
  1976. snprintf(strBuf, 256, "%d", minPreviewFps);
  1977. listString.append(strBuf);
  1978. for (int i = baseFps; i <= maxPreviewFps; i += 5) {
  1979. int step = (i / 5) * 5;
  1980. snprintf(strBuf, 256, ",%d", step);
  1981. listString.append(strBuf);
  1982. }
  1983. p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, listString.string());
  1984. p.setPreviewFrameRate(maxPreviewFps);
  1985. int minFpsRange = minPreviewFps * 1000; // 15 -> 15000
  1986. int maxFpsRange = maxPreviewFps * 1000; // 30 -> 30000
  1987. snprintf(strBuf, 256, "(%d,%d)", minFpsRange, maxFpsRange);
  1988. p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, strBuf);
  1989. snprintf(strBuf, 256, "%d,%d", minFpsRange, maxFpsRange);
  1990. p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, strBuf);
  1991. //p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, "(15000,30000)");
  1992. //p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "15000,30000")
  1993. // focal length
  1994. int num = 0;
  1995. int den = 0;
  1996. int precision = 0;
  1997. m_secCamera->getFocalLength(&num, &den);
  1998. switch (den) {
  1999. default:
  2000. case 1000:
  2001. precision = 3;
  2002. break;
  2003. case 100:
  2004. precision = 2;
  2005. break;
  2006. case 10:
  2007. precision = 1;
  2008. break;
  2009. case 1:
  2010. precision = 0;
  2011. break;
  2012. }
  2013. snprintf(strBuf, 256, "%.*f", precision, ((float)num / (float)den));
  2014. p.set(CameraParameters::KEY_FOCAL_LENGTH, strBuf);
  2015. //p.set(CameraParameters::KEY_FOCAL_LENGTH, "3.43");
  2016. //p.set(CameraParameters::KEY_FOCAL_LENGTH, "0.9");
  2017. // Additional params.
  2018. p.set("contrast", "auto");
  2019. p.set("iso", "auto");
  2020. p.set("wdr", 0);
  2021. p.set("metering", "center");
  2022. p.set("brightness", 0);
  2023. p.set("brightness-max", 2);
  2024. p.set("brightness-min", -2);
  2025. p.set("saturation", 0);
  2026. p.set("saturation-max", 2);
  2027. p.set("saturation-min", -2);
  2028. p.set("sharpness", 0);
  2029. p.set("sharpness-max", 2);
  2030. p.set("sharpness-min", -2);
  2031. p.set("hue", 0);
  2032. p.set("hue-max", 2);
  2033. p.set("hue-min", -2);
  2034. m_params = p;
  2035. m_internalParams = ip;
  2036. /* make sure m_secCamera has all the settings we do. applications
  2037. * aren't required to call setParameters themselves (only if they
2038. * want to change something).
  2039. */
  2040. setParameters(p);
  2041. m_secCamera->setPreviewFrameRate(maxPreviewFps);
  2042. }
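/* m_startPreviewInternal(): dequeue and lock every gralloc buffer from the
 * preview window, hand their plane addresses/fds to m_secCamera as preview
 * buffers, allocate the matching callback heaps, start streaming, and then
 * cancel the window's minimum-undequeued buffers back to it. The first
 * INITIAL_SKIP_FRAME frames are marked to be skipped (presumably so the
 * pipeline can settle).
 */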
  2043. bool ExynosCameraHWInterface::m_startPreviewInternal(void)
  2044. {
  2045. ALOGV("DEBUG(%s):", __func__);
  2046. int i;
  2047. int previewW, previewH, previewFormat, previewFramesize;
  2048. m_secCamera->getPreviewSize(&previewW, &previewH);
  2049. previewFormat = m_secCamera->getPreviewFormat();
  2050. // we will use previewFramesize for m_previewHeap[i]
  2051. previewFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(m_orgPreviewRect.colorFormat), m_orgPreviewRect.w, m_orgPreviewRect.h);
  2052. ExynosBuffer previewBuf;
  2053. void *virtAddr[3];
  2054. int fd[3];
  2055. for (i = 0; i < 3; i++) {
  2056. virtAddr[i] = NULL;
  2057. fd[i] = -1;
  2058. }
  2059. for (i = 0; i < NUM_OF_PREVIEW_BUF; i++) {
  2060. m_avaliblePreviewBufHandle[i] = false;
  2061. if (m_previewWindow->dequeue_buffer(m_previewWindow, &m_previewBufHandle[i], &m_previewStride[i]) != 0) {
  2062. ALOGE("ERR(%s):Could not dequeue gralloc buffer[%d]!!", __func__, i);
  2063. continue;
  2064. } else {
  2065. if (m_previewWindow->lock_buffer(m_previewWindow, m_previewBufHandle[i]) != 0)
  2066. ALOGE("ERR(%s):Could not lock gralloc buffer[%d]!!", __func__, i);
  2067. }
  2068. if (m_flagGrallocLocked[i] == false) {
  2069. if (m_grallocHal->lock(m_grallocHal,
  2070. *m_previewBufHandle[i],
  2071. GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_YUV_ADDR,
  2072. 0, 0, previewW, previewH, virtAddr) != 0) {
  2073. ALOGE("ERR(%s):could not obtain gralloc buffer", __func__);
  2074. if (m_previewWindow->cancel_buffer(m_previewWindow, m_previewBufHandle[i]) != 0)
  2075. ALOGE("ERR(%s):Could not cancel_buffer gralloc buffer[%d]!!", __func__, i);
  2076. continue;
  2077. }
  2078. const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*m_previewBufHandle[i]);
  2079. fd[0] = priv_handle->fd;
  2080. fd[1] = priv_handle->u_fd;
  2081. fd[2] = priv_handle->v_fd;
  2082. m_grallocVirtAddr[i] = virtAddr[0];
  2083. m_matchedGrallocIndex[i] = i;
  2084. m_flagGrallocLocked[i] = true;
  2085. }
  2086. m_getAlignedYUVSize(previewFormat, previewW, previewH, &previewBuf);
  2087. previewBuf.reserved.p = i;
  2088. previewBuf.virt.extP[0] = (char *)virtAddr[0];
  2089. previewBuf.virt.extP[1] = (char *)virtAddr[1];
  2090. previewBuf.virt.extP[2] = (char *)virtAddr[2];
  2091. previewBuf.fd.extFd[0] = fd[0];
  2092. previewBuf.fd.extFd[1] = fd[1];
  2093. previewBuf.fd.extFd[2] = fd[2];
  2094. m_secCamera->setPreviewBuf(&previewBuf);
  2095. if (m_previewHeap[i]) {
  2096. m_previewHeap[i]->release(m_previewHeap[i]);
  2097. m_previewHeap[i] = 0;
  2098. }
  2099. m_previewHeap[i] = m_getMemoryCb(-1, previewFramesize, 1, 0);
  2100. if (!m_previewHeap[i]) {
  2101. ALOGE("ERR(%s):m_getMemoryCb(m_previewHeap[%d], size(%d) fail", __func__, i, previewFramesize);
  2102. continue;
  2103. }
  2104. m_avaliblePreviewBufHandle[i] = true;
  2105. }
  2106. if (m_secCamera->startPreview() == false) {
  2107. ALOGE("ERR(%s):Fail on m_secCamera->startPreview()", __func__);
  2108. return false;
  2109. }
  2110. for (i = NUM_OF_PREVIEW_BUF - m_minUndequeuedBufs; i < NUM_OF_PREVIEW_BUF; i++) {
  2111. if (m_secCamera->getPreviewBuf(&previewBuf) == false) {
  2112. ALOGE("ERR(%s):getPreviewBuf() fail", __func__);
  2113. return false;
  2114. }
  2115. if (m_grallocHal && m_flagGrallocLocked[previewBuf.reserved.p] == true) {
  2116. m_grallocHal->unlock(m_grallocHal, *m_previewBufHandle[previewBuf.reserved.p]);
  2117. m_flagGrallocLocked[previewBuf.reserved.p] = false;
  2118. }
  2119. if (m_previewWindow->cancel_buffer(m_previewWindow, m_previewBufHandle[previewBuf.reserved.p]) != 0)
  2120. ALOGE("ERR(%s):Could not cancel_buffer gralloc buffer[%d]!!", __func__, previewBuf.reserved.p);
  2121. m_avaliblePreviewBufHandle[previewBuf.reserved.p] = false;
  2122. }
  2123. m_setSkipFrame(INITIAL_SKIP_FRAME);
  2124. if (m_pictureRunning == false
  2125. && m_startPictureInternal() == false)
  2126. ALOGE("ERR(%s):m_startPictureInternal() fail", __func__);
  2127. return true;
  2128. }
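/* m_stopPreviewInternal(): clear m_previewRunning, wake the preview thread and
 * wait on m_previewStoppedCondition, then unlock and cancel every gralloc
 * buffer the HAL still owns so the preview window gets them back.
 */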
  2129. void ExynosCameraHWInterface::m_stopPreviewInternal(void)
  2130. {
  2131. ALOGV("DEBUG(%s):", __func__);
  2132. /* request that the preview thread stop. */
  2133. if (m_previewRunning == true) {
  2134. m_previewRunning = false;
  2135. if (m_previewStartDeferred == false) {
  2136. m_previewCondition.signal();
  2137. /* wait until preview thread is stopped */
  2138. m_previewStoppedCondition.wait(m_previewLock);
  2139. for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) {
  2140. if (m_previewBufHandle[i] != NULL) {
  2141. if (m_grallocHal && m_flagGrallocLocked[i] == true) {
  2142. m_grallocHal->unlock(m_grallocHal, *m_previewBufHandle[i]);
  2143. m_flagGrallocLocked[i] = false;
  2144. }
  2145. if (m_avaliblePreviewBufHandle[i] == true) {
  2146. if (m_previewWindow->cancel_buffer(m_previewWindow, m_previewBufHandle[i]) != 0) {
  2147. ALOGE("ERR(%s):Fail to cancel buffer(%d)", __func__, i);
  2148. } else {
  2149. m_previewBufHandle[i] = NULL;
2150. m_previewStride[i] = 0;
  2151. }
  2152. m_avaliblePreviewBufHandle[i] = false;
  2153. }
  2154. }
  2155. }
  2156. } else {
  2157. ALOGV("DEBUG(%s):preview running but deferred, doing nothing", __func__);
  2158. }
  2159. } else {
  2160. ALOGV("DEBUG(%s):preview not running, doing nothing", __func__);
  2161. }
  2162. }
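/* Preview thread wrapper: while m_previewRunning is false it stops the driver
 * stream and sleeps on m_previewCondition; once woken it either exits
 * (m_exitPreviewThread) or runs one m_previewThreadFunc() iteration.
 */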
  2163. bool ExynosCameraHWInterface::m_previewThreadFuncWrapper(void)
  2164. {
  2165. ALOGV("DEBUG(%s):starting", __func__);
  2166. while (1) {
  2167. m_previewLock.lock();
  2168. while (m_previewRunning == false) {
  2169. if ( m_secCamera->flagStartPreview() == true
  2170. && m_secCamera->stopPreview() == false)
  2171. ALOGE("ERR(%s):Fail on m_secCamera->stopPreview()", __func__);
  2172. ALOGV("DEBUG(%s):calling m_secCamera->stopPreview() and waiting", __func__);
  2173. m_previewStoppedCondition.signal();
  2174. m_previewCondition.wait(m_previewLock);
  2175. ALOGV("DEBUG(%s):return from wait", __func__);
  2176. }
  2177. m_previewLock.unlock();
  2178. if (m_exitPreviewThread == true) {
  2179. if ( m_secCamera->flagStartPreview() == true
  2180. && m_secCamera->stopPreview() == false)
  2181. ALOGE("ERR(%s):Fail on m_secCamera->stopPreview()", __func__);
  2182. return true;
  2183. }
  2184. m_previewThreadFunc();
  2185. }
  2186. }
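/* One preview iteration: fetch a filled buffer from the driver, wake the
 * recording path when needed (no USE_3DNR_DMAOUT), honor the skip-frame
 * counter, enqueue the frame to the preview window and dequeue/lock a
 * replacement gralloc buffer, resize into m_previewHeap for
 * CAMERA_MSG_PREVIEW_FRAME, report face-detection metadata, and keep the
 * zero-shutter-lag picture buffer queue topped up.
 */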
  2187. bool ExynosCameraHWInterface::m_previewThreadFunc(void)
  2188. {
  2189. ExynosBuffer previewBuf, callbackBuf;
  2190. int stride;
  2191. int previewW, previewH;
  2192. bool doPutPreviewBuf = true;
  2193. if (m_secCamera->getPreviewBuf(&previewBuf) == false) {
  2194. ALOGE("ERR(%s):getPreviewBuf() fail", __func__);
  2195. return false;
  2196. }
  2197. #ifndef USE_3DNR_DMAOUT
  2198. if (m_videoStart == true) {
  2199. copy_previewBuf = previewBuf;
  2200. m_videoRunning = true;
  2201. m_videoCondition.signal();
  2202. }
  2203. #endif
  2204. m_skipFrameLock.lock();
  2205. if (0 < m_skipFrame) {
  2206. m_skipFrame--;
  2207. m_skipFrameLock.unlock();
  2208. ALOGV("DEBUG(%s):skipping %d frame", __func__, previewBuf.reserved.p);
  2209. if ( doPutPreviewBuf == true
  2210. && m_secCamera->putPreviewBuf(&previewBuf) == false) {
  2211. ALOGE("ERR(%s):putPreviewBuf(%d) fail", __func__, previewBuf.reserved.p);
  2212. return false;
  2213. }
  2214. return true;
  2215. }
  2216. m_skipFrameLock.unlock();
  2217. callbackBuf = previewBuf;
  2218. m_secCamera->getPreviewSize(&previewW, &previewH);
  2219. if (m_previewWindow && m_grallocHal && m_previewRunning == true) {
  2220. bool findGrallocBuf = false;
  2221. buffer_handle_t *bufHandle = NULL;
  2222. void *virtAddr[3];
  2223. int fd[3];
  2224. /* Unlock grallocHal buffer if locked */
  2225. if (m_flagGrallocLocked[previewBuf.reserved.p] == true) {
  2226. m_grallocHal->unlock(m_grallocHal, *m_previewBufHandle[previewBuf.reserved.p]);
  2227. m_flagGrallocLocked[previewBuf.reserved.p] = false;
  2228. } else {
  2229. if (m_previewWindow->lock_buffer(m_previewWindow, bufHandle) != 0)
  2230. ALOGE("ERR(%s):Could not lock gralloc buffer!!", __func__);
  2231. }
2232. /* Enqueue latest buffer */
  2233. if (m_avaliblePreviewBufHandle[previewBuf.reserved.p] == true) {
  2234. if (m_previewWindow->enqueue_buffer(m_previewWindow,
  2235. m_previewBufHandle[previewBuf.reserved.p]) != 0) {
  2236. ALOGE("ERR(%s):Could not enqueue gralloc buffer[%d]!!", __func__, previewBuf.reserved.p);
  2237. goto callbacks;
  2238. }
  2239. m_avaliblePreviewBufHandle[previewBuf.reserved.p] = false;
  2240. }
  2241. /* Dequeue buffer from Gralloc */
  2242. if (m_previewWindow->dequeue_buffer(m_previewWindow,
  2243. &bufHandle,
  2244. &stride) != 0) {
  2245. ALOGE("ERR(%s):Could not dequeue gralloc buffer!!", __func__);
  2246. goto callbacks;
  2247. }
  2248. /* Get virtual address from dequeued buf */
  2249. if (m_grallocHal->lock(m_grallocHal,
  2250. *bufHandle,
  2251. GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_YUV_ADDR,
  2252. 0, 0, previewW, previewH, virtAddr) != 0) {
  2253. ALOGE("ERR(%s):could not obtain gralloc buffer", __func__);
  2254. goto callbacks;
  2255. }
  2256. const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*bufHandle);
  2257. fd[0] = priv_handle->fd;
  2258. fd[1] = priv_handle->u_fd;
  2259. fd[2] = priv_handle->v_fd;
  2260. for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) {
  2261. if ((unsigned int)m_grallocVirtAddr[i] == (unsigned int)virtAddr[0]) {
  2262. findGrallocBuf = true;
  2263. m_previewBufHandle[i] = bufHandle;
  2264. m_previewStride[i] = stride;
  2265. previewBuf.reserved.p = i;
  2266. previewBuf.virt.extP[0] = (char *)virtAddr[0];
  2267. previewBuf.virt.extP[1] = (char *)virtAddr[1];
  2268. previewBuf.virt.extP[2] = (char *)virtAddr[2];
  2269. previewBuf.fd.extFd[0] = fd[0];
  2270. previewBuf.fd.extFd[1] = fd[1];
  2271. previewBuf.fd.extFd[2] = fd[2];
  2272. m_secCamera->setPreviewBuf(&previewBuf);
  2273. m_matchedGrallocIndex[previewBuf.reserved.p] = i;
  2274. m_avaliblePreviewBufHandle[i] = true;
  2275. break;
  2276. }
  2277. }
  2278. if (findGrallocBuf == false) {
  2279. ALOGE("%s:addr(%x) is not matched any gralloc buffer's addr", __func__, virtAddr[0]);
  2280. goto callbacks;
  2281. }
  2282. if ( doPutPreviewBuf == true
  2283. && m_secCamera->putPreviewBuf(&previewBuf) == false)
  2284. ALOGE("ERR(%s):putPreviewBuf(%d) fail", __func__, previewBuf.reserved.p);
  2285. else
  2286. doPutPreviewBuf = false;
  2287. }
  2288. callbacks:
  2289. if ( m_previewRunning == true
  2290. && m_msgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
  2291. // resize from previewBuf(max size) to m_previewHeap(user's set size)
  2292. if (m_exynosPreviewCSC) {
  2293. int previewFormat = m_secCamera->getPreviewFormat();
  2294. csc_set_src_format(m_exynosPreviewCSC,
  2295. previewW, previewH - 8,
  2296. 0, 0, previewW, previewH - 8,
  2297. V4L2_PIX_2_HAL_PIXEL_FORMAT(previewFormat),
  2298. 0);
  2299. csc_set_dst_format(m_exynosPreviewCSC,
  2300. m_orgPreviewRect.w, m_orgPreviewRect.h,
  2301. 0, 0, m_orgPreviewRect.w, m_orgPreviewRect.h,
  2302. V4L2_PIX_2_HAL_PIXEL_FORMAT(m_orgPreviewRect.colorFormat),
  2303. 1);
  2304. csc_set_src_buffer(m_exynosPreviewCSC,
  2305. (unsigned char *)callbackBuf.virt.extP[0],
  2306. (unsigned char *)callbackBuf.virt.extP[1],
  2307. (unsigned char *)callbackBuf.virt.extP[2],
  2308. 0);
  2309. ExynosBuffer dstBuf;
  2310. m_getAlignedYUVSize(m_orgPreviewRect.colorFormat, m_orgPreviewRect.w, m_orgPreviewRect.h, &dstBuf);
  2311. dstBuf.virt.extP[0] = (char *)m_previewHeap[callbackBuf.reserved.p]->data;
  2312. for (int i = 1; i < 3; i++) {
  2313. if (dstBuf.size.extS[i] != 0)
  2314. dstBuf.virt.extP[i] = dstBuf.virt.extP[i-1] + dstBuf.size.extS[i-1];
  2315. }
  2316. csc_set_dst_buffer(m_exynosPreviewCSC,
  2317. (unsigned char *)dstBuf.virt.extP[0],
  2318. (unsigned char *)dstBuf.virt.extP[1],
  2319. (unsigned char *)dstBuf.virt.extP[2],
  2320. 0);
  2321. if (csc_convert(m_exynosPreviewCSC) != 0)
  2322. ALOGE("ERR(%s):csc_convert() fail", __func__);
  2323. } else {
  2324. ALOGE("ERR(%s):m_exynosPreviewCSC == NULL", __func__);
  2325. }
  2326. }
  2327. /* TODO: We need better error handling scheme than this scheme */
  2328. if ( doPutPreviewBuf == true
  2329. && m_secCamera->putPreviewBuf(&previewBuf) == false)
  2330. ALOGE("ERR(%s):putPreviewBuf(%d) fail", __func__, previewBuf.reserved.p);
  2331. else
  2332. doPutPreviewBuf = false;
  2333. if ( m_previewRunning == true
  2334. && m_msgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
  2335. m_dataCb(CAMERA_MSG_PREVIEW_FRAME, m_previewHeap[callbackBuf.reserved.p], 0, NULL, m_callbackCookie);
  2336. }
  2337. /* Face detection */
  2338. if ( m_previewRunning == true
  2339. && m_msgEnabled & CAMERA_MSG_PREVIEW_METADATA
  2340. && m_secCamera->flagStartFaceDetection() == true) {
  2341. camera_frame_metadata_t *ptrMetadata = NULL;
  2342. int id[NUM_OF_DETECTED_FACES];
  2343. int score[NUM_OF_DETECTED_FACES];
  2344. ExynosRect2 detectedFace[NUM_OF_DETECTED_FACES];
  2345. ExynosRect2 detectedLeftEye[NUM_OF_DETECTED_FACES];
  2346. ExynosRect2 detectedRightEye[NUM_OF_DETECTED_FACES];
  2347. ExynosRect2 detectedMouth[NUM_OF_DETECTED_FACES];
  2348. int numOfDetectedFaces = m_secCamera->getDetectedFacesAreas(NUM_OF_DETECTED_FACES,
  2349. id,
  2350. score,
  2351. detectedFace,
  2352. detectedLeftEye,
  2353. detectedRightEye,
  2354. detectedMouth);
  2355. if (0 < numOfDetectedFaces) {
  2356. // camera.h
  2357. // width : -1000~1000
  2358. // height : -1000~1000
  2359. // if eye, mouth is not detectable : -2000, -2000.
  2360. int realNumOfDetectedFaces = 0;
  2361. m_faceDetected = true;
  2362. for (int i = 0; i < numOfDetectedFaces; i++) {
2363. // optionally, only keep faces whose score is over 50:
  2364. //if (score[i] < 50)
  2365. // continue;
  2366. m_faces[realNumOfDetectedFaces].rect[0] = m_calibratePosition(previewW, 2000, detectedFace[i].x1) - 1000;
  2367. m_faces[realNumOfDetectedFaces].rect[1] = m_calibratePosition(previewH, 2000, detectedFace[i].y1) - 1000;
  2368. m_faces[realNumOfDetectedFaces].rect[2] = m_calibratePosition(previewW, 2000, detectedFace[i].x2) - 1000;
  2369. m_faces[realNumOfDetectedFaces].rect[3] = m_calibratePosition(previewH, 2000, detectedFace[i].y2) - 1000;
  2370. m_faces[realNumOfDetectedFaces].id = id[i];
  2371. m_faces[realNumOfDetectedFaces].score = score[i];
  2372. m_faces[realNumOfDetectedFaces].left_eye[0] = (detectedLeftEye[i].x1 < 0) ? -2000 : m_calibratePosition(previewW, 2000, detectedLeftEye[i].x1) - 1000;
  2373. m_faces[realNumOfDetectedFaces].left_eye[1] = (detectedLeftEye[i].y1 < 0) ? -2000 : m_calibratePosition(previewH, 2000, detectedLeftEye[i].y1) - 1000;
  2374. m_faces[realNumOfDetectedFaces].right_eye[0] = (detectedRightEye[i].x1 < 0) ? -2000 : m_calibratePosition(previewW, 2000, detectedRightEye[i].x1) - 1000;
  2375. m_faces[realNumOfDetectedFaces].right_eye[1] = (detectedRightEye[i].y1 < 0) ? -2000 : m_calibratePosition(previewH, 2000, detectedRightEye[i].y1) - 1000;
  2376. m_faces[realNumOfDetectedFaces].mouth[0] = (detectedMouth[i].x1 < 0) ? -2000 : m_calibratePosition(previewW, 2000, detectedMouth[i].x1) - 1000;
  2377. m_faces[realNumOfDetectedFaces].mouth[1] = (detectedMouth[i].y1 < 0) ? -2000 : m_calibratePosition(previewH, 2000, detectedMouth[i].y1) - 1000;
  2378. realNumOfDetectedFaces++;
  2379. }
  2380. m_frameMetadata.number_of_faces = realNumOfDetectedFaces;
  2381. m_frameMetadata.faces = m_faces;
  2382. ptrMetadata = &m_frameMetadata;
  2383. m_dataCb(CAMERA_MSG_PREVIEW_METADATA, m_previewHeap[callbackBuf.reserved.p], 0, ptrMetadata, m_callbackCookie);
  2384. } else if (numOfDetectedFaces == 0 && m_faceDetected == true) {
  2385. m_frameMetadata.number_of_faces = 0;
  2386. m_frameMetadata.faces = m_faces;
  2387. ptrMetadata = &m_frameMetadata;
  2388. m_dataCb(CAMERA_MSG_PREVIEW_METADATA, m_previewHeap[callbackBuf.reserved.p], 0, ptrMetadata, m_callbackCookie);
  2389. m_faceDetected = false;
  2390. }
  2391. }
  2392. // zero shutter lag
  2393. if (m_pictureRunning == false
  2394. && m_startPictureInternal() == false)
  2395. ALOGE("ERR(%s):m_startPictureInternal() fail", __func__);
  2396. m_stateLock.lock();
  2397. if (m_captureInProgress == true) {
  2398. m_stateLock.unlock();
  2399. } else {
  2400. m_stateLock.unlock();
  2401. if (m_numOfAvaliblePictureBuf < NUM_OF_PICTURE_BUF) {
  2402. ExynosBufferQueue *cur = m_oldPictureBufQueueHead;
  2403. do {
  2404. if(cur->next == NULL) {
  2405. cur->buf = m_pictureBuf;
  2406. break;
  2407. }
  2408. cur = cur->next;
  2409. } while (cur->next);
  2410. if (m_secCamera->getPictureBuf(&m_pictureBuf) == false)
  2411. ALOGE("ERR(%s):getPictureBuf() fail", __func__);
  2412. else
  2413. m_numOfAvaliblePictureBuf++;
  2414. }
  2415. if (NUM_OF_WAITING_PUT_PICTURE_BUF < m_numOfAvaliblePictureBuf) {
  2416. ExynosBuffer nullBuf;
  2417. ExynosBuffer oldBuf;
  2418. oldBuf = m_oldPictureBufQueueHead->buf;
  2419. m_oldPictureBufQueueHead->buf = nullBuf;
  2420. if (m_oldPictureBufQueueHead->next) {
  2421. ExynosBufferQueue *newQueueHead = m_oldPictureBufQueueHead->next;
  2422. m_oldPictureBufQueueHead->next = NULL;
  2423. m_oldPictureBufQueueHead = newQueueHead;
  2424. } else {
  2425. m_oldPictureBufQueueHead = &m_oldPictureBufQueue[0];
  2426. }
  2427. if (oldBuf != nullBuf) {
  2428. if (m_secCamera->putPictureBuf(&oldBuf) == false)
  2429. ALOGE("ERR(%s):putPictureBuf(%d) fail", __func__, oldBuf.reserved.p);
  2430. else {
  2431. m_numOfAvaliblePictureBuf--;
  2432. if (m_numOfAvaliblePictureBuf < 0)
  2433. m_numOfAvaliblePictureBuf = 0;
  2434. }
  2435. }
  2436. }
  2437. }
  2438. return true;
  2439. }
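/* Recording thread wrapper; mirrors the preview wrapper. Without
 * USE_3DNR_DMAOUT the thread is re-armed per frame by the preview loop, so
 * m_videoRunning is cleared again after every m_videoThreadFunc() pass.
 */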
  2440. bool ExynosCameraHWInterface::m_videoThreadFuncWrapper(void)
  2441. {
  2442. while (1) {
  2443. while (m_videoRunning == false) {
  2444. m_videoLock.lock();
  2445. #ifdef USE_3DNR_DMAOUT
  2446. if ( m_secCamera->flagStartVideo() == true
  2447. && m_secCamera->stopVideo() == false)
  2448. ALOGE("ERR(%s):Fail on m_secCamera->stopVideo()", __func__);
  2449. #endif
  2450. ALOGV("DEBUG(%s):calling mExynosCamera->stopVideo() and waiting", __func__);
  2451. m_videoStoppedCondition.signal();
  2452. m_videoCondition.wait(m_videoLock);
  2453. ALOGV("DEBUG(%s):return from wait", __func__);
  2454. m_videoLock.unlock();
  2455. }
  2456. if (m_exitVideoThread == true) {
  2457. m_videoLock.lock();
  2458. #ifdef USE_3DNR_DMAOUT
  2459. if ( m_secCamera->flagStartVideo() == true
  2460. && m_secCamera->stopVideo() == false)
  2461. ALOGE("ERR(%s):Fail on m_secCamera->stopVideo()", __func__);
  2462. #endif
  2463. m_videoLock.unlock();
  2464. return true;
  2465. }
  2466. m_videoThreadFunc();
  2467. #ifndef USE_3DNR_DMAOUT
  2468. m_videoRunning = false;
  2469. #endif
  2470. }
  2471. return true;
  2472. }
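/* One recording iteration: wait for an available video buffer, timestamp it,
 * color-convert/crop into m_resizedVideoHeap (from the dedicated video buffer
 * with USE_3DNR_DMAOUT, otherwise from the last preview buffer), and deliver
 * it via m_dataCbTimestamp as CAMERA_MSG_VIDEO_FRAME.
 */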
  2473. bool ExynosCameraHWInterface::m_videoThreadFunc(void)
  2474. {
  2475. nsecs_t timestamp;
  2476. #ifdef USE_3DNR_DMAOUT
  2477. ExynosBuffer videoBuf;
  2478. #endif
  2479. if (m_numOfAvailableVideoBuf == 0)
  2480. usleep(1000); // sleep 1msec for other threads.
  2481. {
  2482. if ( m_msgEnabled & CAMERA_MSG_VIDEO_FRAME
  2483. && m_videoRunning == true) {
  2484. Mutex::Autolock lock(m_videoLock);
  2485. if (m_numOfAvailableVideoBuf == 0) {
  2486. ALOGV("DEBUG(%s):waiting releaseRecordingFrame()", __func__);
  2487. return true;
  2488. }
  2489. #ifdef USE_3DNR_DMAOUT
  2490. if (m_secCamera->getVideoBuf(&videoBuf) == false) {
  2491. ALOGE("ERR(%s):Fail on ExynosCamera->getVideoBuf()", __func__);
  2492. return false;
  2493. }
  2494. #endif
  2495. m_numOfAvailableVideoBuf--;
  2496. if (m_numOfAvailableVideoBuf < 0)
  2497. m_numOfAvailableVideoBuf = 0;
  2498. timestamp = systemTime(SYSTEM_TIME_MONOTONIC);
  2499. // Notify the client of a new frame.
  2500. if ( m_msgEnabled & CAMERA_MSG_VIDEO_FRAME
  2501. && m_videoRunning == true) {
  2502. // resize from videoBuf(max size) to m_videoHeap(user's set size)
  2503. if (m_exynosVideoCSC) {
  2504. int videoW, videoH, videoFormat = 0;
  2505. int cropX, cropY, cropW, cropH = 0;
  2506. #ifndef USE_3DNR_DMAOUT
  2507. int previewW, previewH, previewFormat = 0;
  2508. previewFormat = m_secCamera->getPreviewFormat();
  2509. m_secCamera->getPreviewSize(&previewW, &previewH);
  2510. #endif
  2511. videoFormat = m_secCamera->getVideoFormat();
  2512. m_secCamera->getVideoSize(&videoW, &videoH);
  2513. m_getRatioSize(videoW, videoH,
  2514. m_orgVideoRect.w, m_orgVideoRect.h,
  2515. &cropX, &cropY,
  2516. &cropW, &cropH,
  2517. m_secCamera->getZoom());
  2518. ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
  2519. __func__, cropX, cropY, cropW, cropH);
  2520. #ifdef USE_3DNR_DMAOUT
  2521. csc_set_src_format(m_exynosVideoCSC,
  2522. videoW, videoH,
  2523. cropX, cropY, cropW, cropH,
  2524. V4L2_PIX_2_HAL_PIXEL_FORMAT(videoFormat),
  2525. 0);
  2526. #else
  2527. csc_set_src_format(m_exynosVideoCSC,
  2528. previewW, previewH - 8,
  2529. 0, 0, previewW, previewH - 8,
  2530. V4L2_PIX_2_HAL_PIXEL_FORMAT(previewFormat),
  2531. 0);
  2532. #endif
  2533. csc_set_dst_format(m_exynosVideoCSC,
  2534. m_orgVideoRect.w, m_orgVideoRect.h,
  2535. 0, 0, m_orgVideoRect.w, m_orgVideoRect.h,
  2536. V4L2_PIX_2_HAL_PIXEL_FORMAT(videoFormat),
  2537. 1);
  2538. #ifdef USE_3DNR_DMAOUT
  2539. csc_set_src_buffer(m_exynosVideoCSC,
  2540. (unsigned char *)videoBuf.virt.extP[0],
  2541. (unsigned char *)videoBuf.virt.extP[1],
  2542. (unsigned char *)videoBuf.virt.extP[2],
  2543. 0);
  2544. #else
  2545. csc_set_src_buffer(m_exynosVideoCSC,
  2546. (unsigned char *)copy_previewBuf.virt.extP[0],
  2547. (unsigned char *)copy_previewBuf.virt.extP[2],
  2548. (unsigned char *)copy_previewBuf.virt.extP[1],
  2549. 0);
  2550. #endif
  2551. ExynosBuffer dstBuf;
  2552. m_getAlignedYUVSize(videoFormat, m_orgVideoRect.w, m_orgVideoRect.h, &dstBuf);
  2553. #ifdef USE_3DNR_DMAOUT
  2554. dstBuf.virt.extP[0] = (char *)m_resizedVideoHeap[videoBuf.reserved.p]->data;
  2555. #else
  2556. dstBuf.virt.extP[0] = (char *)m_resizedVideoHeap[m_cntVideoBuf]->data;
  2557. #endif
  2558. for (int i = 1; i < 3; i++) {
  2559. if (dstBuf.size.extS[i] != 0)
  2560. dstBuf.virt.extP[i] = dstBuf.virt.extP[i-1] + dstBuf.size.extS[i-1];
  2561. }
  2562. csc_set_dst_buffer(m_exynosVideoCSC,
  2563. (unsigned char *)dstBuf.virt.extP[0],
  2564. (unsigned char *)dstBuf.virt.extP[1],
  2565. (unsigned char *)dstBuf.virt.extP[2],
  2566. 0);
  2567. if (csc_convert(m_exynosVideoCSC) != 0)
  2568. ALOGE("ERR(%s):csc_convert() fail", __func__);
  2569. } else {
  2570. ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __func__);
  2571. }
  2572. #ifdef USE_3DNR_DMAOUT
  2573. m_dataCbTimestamp(timestamp, CAMERA_MSG_VIDEO_FRAME,
  2574. m_resizedVideoHeap[videoBuf.reserved.p], 0, m_callbackCookie);
  2575. #else
  2576. m_dataCbTimestamp(timestamp, CAMERA_MSG_VIDEO_FRAME,
  2577. m_resizedVideoHeap[m_cntVideoBuf], 0, m_callbackCookie);
  2578. m_cntVideoBuf++;
  2579. if (m_cntVideoBuf == NUM_OF_VIDEO_BUF)
  2580. m_cntVideoBuf = 0;
  2581. #endif
  2582. }
2583. // HACK: this should be handled in releaseRecordingFrame()
  2584. #ifdef USE_3DNR_DMAOUT
  2585. m_secCamera->putVideoBuf(&videoBuf);
  2586. #endif
  2587. m_numOfAvailableVideoBuf++;
  2588. if (NUM_OF_VIDEO_BUF <= m_numOfAvailableVideoBuf)
  2589. m_numOfAvailableVideoBuf = NUM_OF_VIDEO_BUF;
  2590. // until here
  2591. } else
  2592. usleep(1000); // sleep 1msec for stopRecording
  2593. }
  2594. return true;
  2595. }
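/* Auto-focus thread: block on m_focusCondition until an AF request arrives,
 * run the driver auto-focus, and report the result through CAMERA_MSG_FOCUS
 * (a cancelled AF is still reported as success, per camera.h semantics).
 */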
  2596. bool ExynosCameraHWInterface::m_autoFocusThreadFunc(void)
  2597. {
  2598. int count =0;
  2599. bool afResult = false;
  2600. ALOGV("DEBUG(%s):starting", __func__);
  2601. /* block until we're told to start. we don't want to use
  2602. * a restartable thread and requestExitAndWait() in cancelAutoFocus()
  2603. * because it would cause deadlock between our callbacks and the
  2604. * caller of cancelAutoFocus() which both want to grab the same lock
  2605. * in CameraServices layer.
  2606. */
  2607. m_focusLock.lock();
  2608. /* check early exit request */
  2609. if (m_exitAutoFocusThread == true) {
  2610. m_focusLock.unlock();
  2611. ALOGV("DEBUG(%s):exiting on request0", __func__);
  2612. return true;
  2613. }
  2614. m_focusCondition.wait(m_focusLock);
  2615. /* check early exit request */
  2616. if (m_exitAutoFocusThread == true) {
  2617. m_focusLock.unlock();
  2618. ALOGV("DEBUG(%s):exiting on request1", __func__);
  2619. return true;
  2620. }
  2621. m_focusLock.unlock();
  2622. if (m_secCamera->autoFocus() == false) {
  2623. ALOGE("ERR(%s):Fail on m_secCamera->autoFocus()", __func__);
  2624. return false;
  2625. }
  2626. switch (m_secCamera->getFucusModeResult()) {
  2627. case 0:
  2628. ALOGV("DEBUG(%s):AF Cancelled !!", __func__);
  2629. afResult = true;
  2630. break;
  2631. case 1:
  2632. ALOGV("DEBUG(%s):AF Success!!", __func__);
  2633. afResult = true;
  2634. break;
  2635. default:
  2636. ALOGV("DEBUG(%s):AF Fail !!", __func__);
  2637. afResult = false;
  2638. break;
  2639. }
  2640. // CAMERA_MSG_FOCUS only takes a bool. true for
  2641. // finished and false for failure. cancel is still
  2642. // considered a true result.
  2643. if (m_msgEnabled & CAMERA_MSG_FOCUS)
  2644. m_notifyCb(CAMERA_MSG_FOCUS, afResult, 0, m_callbackCookie);
  2645. ALOGV("DEBUG(%s):exiting with no error", __func__);
  2646. return true;
  2647. }
  2648. bool ExynosCameraHWInterface::m_startPictureInternal(void)
  2649. {
  2650. if (m_pictureRunning == true) {
  2651. ALOGE("ERR(%s):Aready m_pictureRunning is running", __func__);
  2652. return false;
  2653. }
  2654. int pictureW, pictureH, pictureFormat;
  2655. unsigned int pictureFrameSize, pictureChromaSize;
  2656. ExynosBuffer nullBuf;
  2657. int numPlanes;
  2658. m_secCamera->getPictureSize(&pictureW, &pictureH);
  2659. pictureFormat = m_secCamera->getPictureFormat();
  2660. PLANAR_FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16), pictureW, pictureH, &pictureFrameSize,
  2661. &pictureChromaSize);
  2662. numPlanes = NUM_PLANES(V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16));
  2663. #if 0
  2664. if (m_rawHeap) {
  2665. m_rawHeap->release(m_rawHeap);
  2666. m_rawHeap = 0;
  2667. }
2668. m_rawHeap = m_getMemoryCb(-1, pictureFrameSize, 1, NULL);
2669. if (!m_rawHeap) {
2670. ALOGE("ERR(%s):m_getMemoryCb(m_rawHeap, size(%d) fail", __func__, pictureFrameSize);
2671. return false;
2672. }
2673. pictureFrameSize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
  2674. #endif
  2675. for (int i = 0; i < NUM_OF_PICTURE_BUF; i++) {
  2676. for (int j = 0; j < 3; j++)
  2677. if (m_pictureFds[i][j] >= 0) {
  2678. close(m_pictureFds[i][j]);
  2679. m_pictureFds[i][j] = -1;
  2680. }
  2681. m_pictureFds[i][0] = ion_alloc(m_ion_client, pictureFrameSize, 0, ION_HEAP_SYSTEM_MASK, 0);
  2682. if (m_pictureFds[i][0] < 0) {
  2683. ALOGE("ERR(%s):ion_alloc(m_pictureFds[%d], size(%d) fail", __func__, i, pictureFrameSize);
  2684. return false;
  2685. }
  2686. for (int j = 1; j < numPlanes; j++) {
  2687. m_pictureFds[i][j] = ion_alloc(m_ion_client, pictureChromaSize, 0, ION_HEAP_SYSTEM_MASK, 0);
2688. if (m_pictureFds[i][j] < 0) {
2689. ALOGE("ERR(%s):ion_alloc(m_pictureFds[%d][%d], size(%d) fail", __func__, i, j, pictureChromaSize);
  2690. return false;
  2691. }
  2692. }
  2693. m_getAlignedYUVSize(pictureFormat, pictureW, pictureH, &m_pictureBuf);
  2694. m_pictureBuf.fd.extFd[0] = m_pictureFds[i][0];
  2695. for (int j = 1; j < 3; j++) {
  2696. if (m_pictureBuf.size.extS[j] != 0)
  2697. m_pictureBuf.fd.extFd[j] = m_pictureFds[i][j];
  2698. else
  2699. m_pictureBuf.fd.extFd[j] = -1;
  2700. }
  2701. m_pictureBuf.reserved.p = i;
  2702. m_secCamera->setPictureBuf(&m_pictureBuf);
  2703. }
  2704. // zero shutter lag
  2705. if (m_secCamera->startPicture() == false) {
  2706. ALOGE("ERR(%s):Fail on m_secCamera->startPicture()", __func__);
  2707. return false;
  2708. }
  2709. m_numOfAvaliblePictureBuf = 0;
  2710. m_pictureBuf = nullBuf;
  2711. for (int i = 0; i < NUM_OF_PICTURE_BUF; i++) {
  2712. m_oldPictureBufQueue[i].buf = nullBuf;
  2713. m_oldPictureBufQueue[i].next = NULL;
  2714. }
  2715. m_oldPictureBufQueueHead = &m_oldPictureBufQueue[0];
  2716. m_pictureRunning = true;
  2717. return true;
  2718. }
  2719. bool ExynosCameraHWInterface::m_stopPictureInternal(void)
  2720. {
  2721. if (m_pictureRunning == false) {
  2722. ALOGE("ERR(%s):Aready m_pictureRunning is stop", __func__);
  2723. return false;
  2724. }
  2725. if (m_secCamera->flagStartPicture() == true
  2726. && m_secCamera->stopPicture() == false)
  2727. ALOGE("ERR(%s):Fail on m_secCamera->stopPicture()", __func__);
  2728. for (int i = 0; i < NUM_OF_PICTURE_BUF; i++) {
  2729. if (m_pictureHeap[i]) {
  2730. m_pictureHeap[i]->release(m_pictureHeap[i]);
  2731. m_pictureHeap[i] = 0;
  2732. }
  2733. }
  2734. if (m_rawHeap) {
  2735. m_rawHeap->release(m_rawHeap);
  2736. m_rawHeap = 0;
  2737. }
  2738. m_pictureRunning = false;
  2739. return true;
  2740. }
  2741. bool ExynosCameraHWInterface::m_pictureThreadFunc(void)
  2742. {
  2743. bool ret = false;
  2744. int pictureW, pictureH, pictureFramesize = 0;
  2745. int pictureFormat;
2746. int cropX = 0, cropY = 0, cropW = 0, cropH = 0;
  2747. ExynosBuffer pictureBuf;
  2748. ExynosBuffer jpegBuf;
  2749. camera_memory_t *JpegHeap = NULL;
  2750. camera_memory_t *JpegHeapOut = NULL;
  2751. m_secCamera->getPictureSize(&pictureW, &pictureH);
  2752. pictureFormat = m_secCamera->getPictureFormat();
  2753. pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
  2754. JpegHeap = m_getMemoryCb(-1, pictureFramesize, 1, 0);
  2755. if (!JpegHeap) {
  2756. ALOGE("ERR(%s):m_getMemoryCb(JpegHeap, size(%d) fail", __func__, pictureFramesize);
  2757. return false;
  2758. }
  2759. // resize from pictureBuf(max size) to rawHeap(user's set size)
  2760. if (m_exynosPictureCSC) {
  2761. m_getRatioSize(pictureW, pictureH,
  2762. m_orgPictureRect.w, m_orgPictureRect.h,
  2763. &cropX, &cropY,
  2764. &cropW, &cropH,
  2765. m_secCamera->getZoom());
  2766. ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
  2767. __func__, cropX, cropY, cropW, cropH);
  2768. csc_set_src_format(m_exynosPictureCSC,
  2769. pictureW, pictureH,
  2770. cropX, cropY, cropW, cropH,
  2771. V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
  2772. 1);
  2773. //0);
  2774. csc_set_dst_format(m_exynosPictureCSC,
  2775. m_orgPictureRect.w, m_orgPictureRect.h,
  2776. 0, 0, m_orgPictureRect.w, m_orgPictureRect.h,
  2777. V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
  2778. 1);
  2779. //0);
  2780. csc_set_src_buffer(m_exynosPictureCSC,
  2781. (unsigned char *)m_pictureBuf.virt.extP[0],
  2782. (unsigned char *)m_pictureBuf.virt.extP[1],
  2783. (unsigned char *)m_pictureBuf.virt.extP[2],
  2784. 0);
  2785. pictureBuf.size.extS[0] = ALIGN(m_orgPictureRect.w, 16) * ALIGN(m_orgPictureRect.h, 16) * 2;
  2786. pictureBuf.size.extS[1] = 0;
  2787. pictureBuf.size.extS[2] = 0;
  2788. pictureBuf.virt.extP[0] = (char *)m_rawHeap->data;
  2789. csc_set_dst_buffer(m_exynosPictureCSC,
  2790. (unsigned char *)pictureBuf.virt.extP[0],
  2791. (unsigned char *)pictureBuf.virt.extP[1],
  2792. (unsigned char *)pictureBuf.virt.extP[2],
  2793. 0);
  2794. if (csc_convert(m_exynosPictureCSC) != 0)
  2795. ALOGE("ERR(%s):csc_convert() fail", __func__);
  2796. } else {
  2797. ALOGE("ERR(%s):m_exynosPictureCSC == NULL", __func__);
  2798. }
  2799. if (m_msgEnabled & CAMERA_MSG_SHUTTER)
  2800. m_notifyCb(CAMERA_MSG_SHUTTER, 0, 0, m_callbackCookie);
  2801. m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_orgPictureRect.w, m_orgPictureRect.h, &pictureBuf);
  2802. for (int i = 1; i < 3; i++) {
  2803. if (pictureBuf.size.extS[i] != 0)
  2804. pictureBuf.virt.extP[i] = pictureBuf.virt.extP[i-1] + pictureBuf.size.extS[i-1];
  2805. ALOGV("(%s): pictureBuf.size.extS[%d] = %d", __func__, i, pictureBuf.size.extS[i]);
  2806. }
  2807. if (m_msgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) {
  2808. jpegBuf.virt.p = (char *)JpegHeap->data;
  2809. jpegBuf.size.s = pictureFramesize;
  2810. ExynosRect jpegRect;
  2811. jpegRect.w = m_orgPictureRect.w;
  2812. jpegRect.h = m_orgPictureRect.h;
  2813. jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
  2814. if (m_secCamera->yuv2Jpeg(&pictureBuf, &jpegBuf, &jpegRect) == false) {
  2815. ALOGE("ERR(%s):yuv2Jpeg() fail", __func__);
  2816. m_stateLock.lock();
  2817. m_captureInProgress = false;
  2818. m_pictureLock.lock();
  2819. m_pictureCondition.signal();
  2820. m_pictureLock.unlock();
  2821. m_stateLock.unlock();
  2822. goto out;
  2823. }
  2824. }
  2825. m_stateLock.lock();
  2826. m_captureInProgress = false;
  2827. m_pictureLock.lock();
  2828. m_pictureCondition.signal();
  2829. m_pictureLock.unlock();
  2830. m_stateLock.unlock();
  2831. if (m_msgEnabled & CAMERA_MSG_RAW_IMAGE)
  2832. m_dataCb(CAMERA_MSG_RAW_IMAGE, m_rawHeap, 0, NULL, m_callbackCookie);
2833. /* TODO: Currently the framework does not support the CAMERA_MSG_RAW_IMAGE_NOTIFY callback */
  2834. /*
  2835. if (m_msgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)
  2836. m_dataCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, m_rawHeap, 0, NULL, m_callbackCookie);
  2837. */
  2838. if (m_msgEnabled & CAMERA_MSG_POSTVIEW_FRAME)
  2839. m_dataCb(CAMERA_MSG_POSTVIEW_FRAME, m_rawHeap, 0, NULL, m_callbackCookie);
  2840. if (m_msgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) {
  2841. JpegHeapOut = m_getMemoryCb(-1, jpegBuf.size.s, 1, 0);
  2842. if (!JpegHeapOut) {
  2843. ALOGE("ERR(%s):m_getMemoryCb(JpegHeapOut, size(%d) fail", __func__, jpegBuf.size.s);
  2844. return false;
  2845. }
2846. // TODO : could we pass the JpegHeap memory directly instead of copying?
  2847. memcpy(JpegHeapOut->data, JpegHeap->data, jpegBuf.size.s);
  2848. m_dataCb(CAMERA_MSG_COMPRESSED_IMAGE, JpegHeapOut, 0, NULL, m_callbackCookie);
  2849. }
  2850. if (m_videoStart == false)
  2851. stopPreview();
  2852. ALOGV("DEBUG(%s):m_pictureThread end", __func__);
  2853. ret = true;
  2854. out:
  2855. if (JpegHeapOut) {
  2856. JpegHeapOut->release(JpegHeapOut);
  2857. JpegHeapOut = 0;
  2858. }
  2859. if (JpegHeap) {
  2860. JpegHeap->release(JpegHeap);
  2861. JpegHeap = 0;
  2862. }
  2863. return ret;
  2864. }
  2865. #ifdef LOG_NDEBUG
  2866. bool ExynosCameraHWInterface::m_fileDump(char *filename, void *srcBuf, uint32_t size)
  2867. {
  2868. FILE *yuv_fd = NULL;
  2869. char *buffer = NULL;
  2870. static int count = 0;
  2871. yuv_fd = fopen(filename, "w+");
  2872. if (yuv_fd == NULL) {
  2873. ALOGE("ERR file open fail: %s", filename);
  2874. return 0;
  2875. }
  2876. buffer = (char *)malloc(size);
  2877. if (buffer == NULL) {
  2878. ALOGE("ERR malloc file");
  2879. fclose(yuv_fd);
  2880. return 0;
  2881. }
  2882. memcpy(buffer, srcBuf, size);
  2883. fflush(stdout);
  2884. fwrite(buffer, 1, size, yuv_fd);
  2885. fflush(yuv_fd);
  2886. if (yuv_fd)
  2887. fclose(yuv_fd);
  2888. if (buffer)
  2889. free(buffer);
  2890. ALOGV("filedump(%s) is successed!!", filename);
  2891. return true;
  2892. }
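// Illustrative usage only (hypothetical, not called anywhere by default):
// dumping the first plane of a filled ExynosBuffer while debugging.
#if 0
    ExynosBuffer dumpBuf;   // assume this has been filled by the preview path
    m_fileDump((char *)"/data/preview_dump.yuv",
               (void *)dumpBuf.virt.extP[0], dumpBuf.size.extS[0]);
#endif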
  2893. #endif
  2894. void ExynosCameraHWInterface::m_setSkipFrame(int frame)
  2895. {
  2896. Mutex::Autolock lock(m_skipFrameLock);
  2897. if (frame < m_skipFrame)
  2898. return;
  2899. m_skipFrame = frame;
  2900. }
  2901. int ExynosCameraHWInterface::m_saveJpeg( unsigned char *real_jpeg, int jpeg_size)
  2902. {
  2903. FILE *yuv_fp = NULL;
  2904. char filename[100], *buffer = NULL;
  2905. /* file create/open, note to "wb" */
  2906. yuv_fp = fopen("/data/camera_dump.jpeg", "wb");
  2907. if (yuv_fp == NULL) {
  2908. ALOGE("Save jpeg file open error");
  2909. return -1;
  2910. }
  2911. ALOGV("DEBUG(%s):[BestIQ] real_jpeg size ========> %d", __func__, jpeg_size);
  2912. buffer = (char *) malloc(jpeg_size);
  2913. if (buffer == NULL) {
  2914. ALOGE("Save YUV] buffer alloc failed");
  2915. if (yuv_fp)
  2916. fclose(yuv_fp);
  2917. return -1;
  2918. }
  2919. memcpy(buffer, real_jpeg, jpeg_size);
  2920. fflush(stdout);
  2921. fwrite(buffer, 1, jpeg_size, yuv_fp);
  2922. fflush(yuv_fp);
  2923. if (yuv_fp)
  2924. fclose(yuv_fp);
  2925. if (buffer)
  2926. free(buffer);
  2927. return 0;
  2928. }
  2929. void ExynosCameraHWInterface::m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
  2930. {
  2931. int nw;
  2932. int cnt = 0;
  2933. uint32_t written = 0;
  2934. ALOGD("opening file [%s]", fname);
  2935. int fd = open(fname, O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
  2936. if (fd < 0) {
  2937. ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
  2938. return;
  2939. }
  2940. ALOGD("writing %d bytes to file [%s]", size, fname);
  2941. while (written < size) {
  2942. nw = ::write(fd, buf + written, size - written);
  2943. if (nw < 0) {
  2944. ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
  2945. break;
  2946. }
  2947. written += nw;
  2948. cnt++;
  2949. }
  2950. ALOGD("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
  2951. ::close(fd);
  2952. }
  2953. bool ExynosCameraHWInterface::m_scaleDownYuv422(char *srcBuf, uint32_t srcWidth, uint32_t srcHeight,
  2954. char *dstBuf, uint32_t dstWidth, uint32_t dstHeight)
  2955. {
2956. int32_t step_x, step_y;
2957. int32_t src_y_start_pos, dst_pos, src_pos;
  2959. if (dstWidth % 2 != 0 || dstHeight % 2 != 0) {
  2960. ALOGE("scale_down_yuv422: invalid width, height for scaling");
  2961. return false;
  2962. }
  2963. step_x = srcWidth / dstWidth;
  2964. step_y = srcHeight / dstHeight;
  2965. dst_pos = 0;
  2966. for (uint32_t y = 0; y < dstHeight; y++) {
  2967. src_y_start_pos = (y * step_y * (srcWidth * 2));
  2968. for (uint32_t x = 0; x < dstWidth; x += 2) {
  2969. src_pos = src_y_start_pos + (x * (step_x * 2));
  2970. dstBuf[dst_pos++] = srcBuf[src_pos ];
  2971. dstBuf[dst_pos++] = srcBuf[src_pos + 1];
  2972. dstBuf[dst_pos++] = srcBuf[src_pos + 2];
  2973. dstBuf[dst_pos++] = srcBuf[src_pos + 3];
  2974. }
  2975. }
  2976. return true;
  2977. }
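// Worked example (illustrative): scaling 1280x960 YUY2 down to 640x480 gives
// step_x = 2 and step_y = 2, so every other 4-byte Y/U/Y/V group of every other
// source line is copied. The integer steps make this a nearest-neighbour
// decimation that only behaves well when the source dimensions are whole
// multiples of the destination dimensions.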
  2978. bool ExynosCameraHWInterface::m_YUY2toNV21(void *srcBuf, void *dstBuf, uint32_t srcWidth, uint32_t srcHeight)
  2979. {
2980. int32_t src_y_start_pos, dst_cbcr_pos, dst_pos, src_pos;
  2981. unsigned char *srcBufPointer = (unsigned char *)srcBuf;
  2982. unsigned char *dstBufPointer = (unsigned char *)dstBuf;
  2983. dst_pos = 0;
  2984. dst_cbcr_pos = srcWidth*srcHeight;
  2985. for (uint32_t y = 0; y < srcHeight; y++) {
  2986. src_y_start_pos = (y * (srcWidth * 2));
  2987. for (uint32_t x = 0; x < (srcWidth * 2); x += 2) {
  2988. src_pos = src_y_start_pos + x;
  2989. dstBufPointer[dst_pos++] = srcBufPointer[src_pos];
  2990. }
  2991. }
  2992. for (uint32_t y = 0; y < srcHeight; y += 2) {
  2993. src_y_start_pos = (y * (srcWidth * 2));
  2994. for (uint32_t x = 0; x < (srcWidth * 2); x += 4) {
  2995. src_pos = src_y_start_pos + x;
  2996. dstBufPointer[dst_cbcr_pos++] = srcBufPointer[src_pos + 3];
  2997. dstBufPointer[dst_cbcr_pos++] = srcBufPointer[src_pos + 1];
  2998. }
  2999. }
  3000. return true;
  3001. }
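// Layout reminder (illustrative): YUY2 stores each line as Y0 U0 Y1 V0 | Y2 U2 Y3 V2 ...,
// while NV21 is a full-resolution Y plane followed by a half-resolution
// interleaved V/U plane. The loops above therefore copy every Y byte first, then
// take V (offset +3) and U (offset +1) from every second source line and every
// second pixel pair; e.g. a 4x2 YUY2 image (16 bytes) becomes 8 Y bytes followed
// by V0 U0 V2 U2.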
  3002. bool ExynosCameraHWInterface::m_checkVideoStartMarker(unsigned char *pBuf)
  3003. {
  3004. if (!pBuf) {
  3005. ALOGE("m_checkVideoStartMarker() => pBuf is NULL");
  3006. return false;
  3007. }
  3008. if (HIBYTE(VIDEO_COMMENT_MARKER_H) == * pBuf && LOBYTE(VIDEO_COMMENT_MARKER_H) == *(pBuf + 1) &&
  3009. HIBYTE(VIDEO_COMMENT_MARKER_L) == *(pBuf + 2) && LOBYTE(VIDEO_COMMENT_MARKER_L) == *(pBuf + 3))
  3010. return true;
  3011. return false;
  3012. }
  3013. bool ExynosCameraHWInterface::m_checkEOIMarker(unsigned char *pBuf)
  3014. {
  3015. if (!pBuf) {
  3016. ALOGE("m_checkEOIMarker() => pBuf is NULL");
  3017. return false;
  3018. }
  3019. // EOI marker [FF D9]
  3020. if (HIBYTE(JPEG_EOI_MARKER) == *pBuf && LOBYTE(JPEG_EOI_MARKER) == *(pBuf + 1))
  3021. return true;
  3022. return false;
  3023. }
  3024. bool ExynosCameraHWInterface::m_findEOIMarkerInJPEG(unsigned char *pBuf, int dwBufSize, int *pnJPEGsize)
  3025. {
  3026. if (NULL == pBuf || 0 >= dwBufSize) {
  3027. ALOGE("m_findEOIMarkerInJPEG() => There is no contents.");
  3028. return false;
  3029. }
  3030. unsigned char *pBufEnd = pBuf + dwBufSize;
  3031. while (pBuf < pBufEnd) {
  3032. if (m_checkEOIMarker(pBuf++))
  3033. return true;
  3034. (*pnJPEGsize)++;
  3035. }
  3036. return false;
  3037. }
  3038. bool ExynosCameraHWInterface::m_splitFrame(unsigned char *pFrame, int dwSize,
  3039. int dwJPEGLineLength, int dwVideoLineLength, int dwVideoHeight,
  3040. void *pJPEG, int *pdwJPEGSize,
  3041. void *pVideo, int *pdwVideoSize)
  3042. {
  3043. ALOGV("DEBUG(%s):===========m_splitFrame Start==============", __func__);
  3044. if (NULL == pFrame || 0 >= dwSize) {
  3045. ALOGE("There is no contents (pFrame=%p, dwSize=%d", pFrame, dwSize);
  3046. return false;
  3047. }
  3048. if (0 == dwJPEGLineLength || 0 == dwVideoLineLength) {
  3049. ALOGE("There in no input information for decoding interleaved jpeg");
  3050. return false;
  3051. }
  3052. unsigned char *pSrc = pFrame;
  3053. unsigned char *pSrcEnd = pFrame + dwSize;
  3054. unsigned char *pJ = (unsigned char *)pJPEG;
  3055. int dwJSize = 0;
  3056. unsigned char *pV = (unsigned char *)pVideo;
  3057. int dwVSize = 0;
  3058. bool bRet = false;
  3059. bool isFinishJpeg = false;
  3060. while (pSrc < pSrcEnd) {
  3061. // Check video start marker
  3062. if (m_checkVideoStartMarker(pSrc)) {
  3063. int copyLength;
  3064. if (pSrc + dwVideoLineLength <= pSrcEnd)
  3065. copyLength = dwVideoLineLength;
  3066. else
  3067. copyLength = pSrcEnd - pSrc - VIDEO_COMMENT_MARKER_LENGTH;
  3068. // Copy video data
  3069. if (pV) {
  3070. memcpy(pV, pSrc + VIDEO_COMMENT_MARKER_LENGTH, copyLength);
  3071. pV += copyLength;
  3072. dwVSize += copyLength;
  3073. }
  3074. pSrc += copyLength + VIDEO_COMMENT_MARKER_LENGTH;
  3075. } else {
  3076. // Copy pure JPEG data
  3077. int size = 0;
  3078. int dwCopyBufLen = dwJPEGLineLength <= pSrcEnd-pSrc ? dwJPEGLineLength : pSrcEnd - pSrc;
  3079. if (m_findEOIMarkerInJPEG((unsigned char *)pSrc, dwCopyBufLen, &size)) {
  3080. isFinishJpeg = true;
3081. size += 2; // to count the EOI marker size
  3082. } else {
  3083. if ((dwCopyBufLen == 1) && (pJPEG < pJ)) {
  3084. unsigned char checkBuf[2] = { *(pJ - 1), *pSrc };
  3085. if (m_checkEOIMarker(checkBuf))
  3086. isFinishJpeg = true;
  3087. }
  3088. size = dwCopyBufLen;
  3089. }
  3090. memcpy(pJ, pSrc, size);
  3091. dwJSize += size;
  3092. pJ += dwCopyBufLen;
  3093. pSrc += dwCopyBufLen;
  3094. }
  3095. if (isFinishJpeg)
  3096. break;
  3097. }
  3098. if (isFinishJpeg) {
  3099. bRet = true;
  3100. if (pdwJPEGSize)
  3101. *pdwJPEGSize = dwJSize;
  3102. if (pdwVideoSize)
  3103. *pdwVideoSize = dwVSize;
  3104. } else {
  3105. ALOGE("DecodeInterleaveJPEG_WithOutDT() => Can not find EOI");
  3106. bRet = false;
  3107. if (pdwJPEGSize)
  3108. *pdwJPEGSize = 0;
  3109. if (pdwVideoSize)
  3110. *pdwVideoSize = 0;
  3111. }
  3112. ALOGV("DEBUG(%s):===========m_splitFrame end==============", __func__);
  3113. return bRet;
  3114. }
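// Frame layout (illustrative summary): the interleaved capture stream mixes JPEG
// data lines of dwJPEGLineLength bytes with preview lines that are prefixed by
// the 4-byte comment marker FF BE FF BF (VIDEO_COMMENT_MARKER_H/_L).
// m_splitFrame() walks the buffer once, appends marker-prefixed lines to pVideo,
// appends everything else to pJPEG, and stops as soon as the JPEG EOI marker
// (FF D9) has been copied.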
  3115. int ExynosCameraHWInterface::m_decodeInterleaveData(unsigned char *pInterleaveData,
  3116. int interleaveDataSize,
  3117. int yuvWidth,
  3118. int yuvHeight,
  3119. int *pJpegSize,
  3120. void *pJpegData,
  3121. void *pYuvData)
  3122. {
  3123. if (pInterleaveData == NULL)
  3124. return false;
  3125. bool ret = true;
  3126. unsigned int *interleave_ptr = (unsigned int *)pInterleaveData;
  3127. unsigned char *jpeg_ptr = (unsigned char *)pJpegData;
  3128. unsigned char *yuv_ptr = (unsigned char *)pYuvData;
  3129. unsigned char *p;
  3130. int jpeg_size = 0;
  3131. int yuv_size = 0;
  3132. int i = 0;
  3133. ALOGV("DEBUG(%s):m_decodeInterleaveData Start~~~", __func__);
  3134. while (i < interleaveDataSize) {
  3135. if ((*interleave_ptr == 0xFFFFFFFF) || (*interleave_ptr == 0x02FFFFFF) ||
  3136. (*interleave_ptr == 0xFF02FFFF)) {
  3137. // Padding Data
  3138. interleave_ptr++;
  3139. i += 4;
  3140. } else if ((*interleave_ptr & 0xFFFF) == 0x05FF) {
  3141. // Start-code of YUV Data
  3142. p = (unsigned char *)interleave_ptr;
  3143. p += 2;
  3144. i += 2;
  3145. // Extract YUV Data
  3146. if (pYuvData != NULL) {
  3147. memcpy(yuv_ptr, p, yuvWidth * 2);
  3148. yuv_ptr += yuvWidth * 2;
  3149. yuv_size += yuvWidth * 2;
  3150. }
  3151. p += yuvWidth * 2;
  3152. i += yuvWidth * 2;
  3153. // Check End-code of YUV Data
  3154. if ((*p == 0xFF) && (*(p + 1) == 0x06)) {
  3155. interleave_ptr = (unsigned int *)(p + 2);
  3156. i += 2;
  3157. } else {
  3158. ret = false;
  3159. break;
  3160. }
  3161. } else {
  3162. // Extract JPEG Data
  3163. if (pJpegData != NULL) {
  3164. memcpy(jpeg_ptr, interleave_ptr, 4);
  3165. jpeg_ptr += 4;
  3166. jpeg_size += 4;
  3167. }
  3168. interleave_ptr++;
  3169. i += 4;
  3170. }
  3171. }
  3172. if (ret) {
  3173. if (pJpegData != NULL) {
  3174. // Remove Padding after EOI
  3175. for (i = 0; i < 3; i++) {
  3176. if (*(--jpeg_ptr) != 0xFF) {
  3177. break;
  3178. }
  3179. jpeg_size--;
  3180. }
  3181. *pJpegSize = jpeg_size;
  3182. }
  3183. // Check YUV Data Size
  3184. if (pYuvData != NULL) {
  3185. if (yuv_size != (yuvWidth * yuvHeight * 2)) {
  3186. ret = false;
  3187. }
  3188. }
  3189. }
  3190. ALOGV("DEBUG(%s):m_decodeInterleaveData End~~~", __func__);
  3191. return ret;
  3192. }
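// Stream format (illustrative summary): the interleaved data is parsed in 32-bit
// words. Words of 0xFFFFFFFF / 0x02FFFFFF / 0xFF02FFFF are padding and skipped;
// a word whose low 16 bits are 0x05FF starts one YUV line of yuvWidth * 2 bytes,
// which must be terminated by the byte pair FF 06; every other word is copied
// verbatim into the JPEG buffer. Up to three trailing 0xFF padding bytes after
// the JPEG EOI are trimmed before *pJpegSize is reported.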
  3193. bool ExynosCameraHWInterface::m_isSupportedPreviewSize(const int width,
  3194. const int height) const
  3195. {
  3196. unsigned int i;
  3197. for (i = 0; i < m_supportedPreviewSizes.size(); i++) {
  3198. if (m_supportedPreviewSizes[i].width == width &&
  3199. m_supportedPreviewSizes[i].height == height)
  3200. return true;
  3201. }
  3202. return false;
  3203. }
  3204. void ExynosCameraHWInterface::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
  3205. {
  3206. switch (colorFormat) {
  3207. // 1p
  3208. case V4L2_PIX_FMT_RGB565 :
  3209. case V4L2_PIX_FMT_YUYV :
  3210. case V4L2_PIX_FMT_UYVY :
  3211. case V4L2_PIX_FMT_VYUY :
  3212. case V4L2_PIX_FMT_YVYU :
  3213. buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
  3214. buf->size.extS[1] = 0;
  3215. buf->size.extS[2] = 0;
  3216. break;
  3217. // 2p
  3218. case V4L2_PIX_FMT_NV12 :
  3219. case V4L2_PIX_FMT_NV12T :
  3220. case V4L2_PIX_FMT_NV21 :
  3221. buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
  3222. buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
  3223. buf->size.extS[2] = 0;
  3224. break;
  3225. case V4L2_PIX_FMT_NV12M :
  3226. case V4L2_PIX_FMT_NV12MT_16X16 :
  3227. buf->size.extS[0] = ALIGN(ALIGN(w, 16) * ALIGN(h, 16), 2048);
  3228. buf->size.extS[1] = ALIGN(ALIGN(w, 16) * ALIGN(h >> 1, 8), 2048);
  3229. buf->size.extS[2] = 0;
  3230. break;
  3231. case V4L2_PIX_FMT_NV16 :
  3232. case V4L2_PIX_FMT_NV61 :
  3233. buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
  3234. buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h, 16);
  3235. buf->size.extS[2] = 0;
  3236. break;
  3237. // 3p
  3238. case V4L2_PIX_FMT_YUV420 :
  3239. case V4L2_PIX_FMT_YVU420 :
  3240. buf->size.extS[0] = (w * h);
  3241. buf->size.extS[1] = (w * h) >> 2;
  3242. buf->size.extS[2] = (w * h) >> 2;
  3243. break;
  3244. case V4L2_PIX_FMT_YUV420M:
  3245. case V4L2_PIX_FMT_YVU420M :
  3246. case V4L2_PIX_FMT_YUV422P :
  3247. buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
  3248. buf->size.extS[1] = ALIGN(w/2, 8) * ALIGN(h/2, 8);
  3249. buf->size.extS[2] = ALIGN(w/2, 8) * ALIGN(h/2, 8);
  3250. break;
  3251. default:
  3252. ALOGE("ERR(%s):unmatched colorFormat(%d)", __func__, colorFormat);
  3253. return;
  3254. break;
  3255. }
  3256. }
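// Worked example (illustrative): for V4L2_PIX_FMT_NV12M at 1920x1080 the cases
// above give
//   extS[0] = ALIGN(ALIGN(1920,16) * ALIGN(1080,16), 2048) = 1920 * 1088 = 2088960
//   extS[1] = ALIGN(ALIGN(1920,16) * ALIGN( 540, 8), 2048) = 1920 *  544 = 1044480
//   extS[2] = 0
// i.e. the luma and chroma planes are sized from 16/8-aligned dimensions and then
// rounded up to a 2048-byte boundary (already aligned in this case).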
  3257. bool ExynosCameraHWInterface::m_getResolutionList(String8 & string8Buf, char * strBuf, int w, int h)
  3258. {
  3259. bool ret = false;
  3260. bool flagFirst = true;
3261. // this list follows /packages/apps/Camera/res/values/arrays.xml
  3262. int RESOLUTION_LIST[][2] =
  3263. {
  3264. { 3264, 2448},
  3265. { 2592, 1936},
  3266. { 2576, 1948},
  3267. { 2560, 1920},
  3268. { 2048, 1536},
  3269. { 1920, 1080},
  3270. { 1600, 1200},
  3271. { 1280, 720},
  3272. { 1024, 768},
  3273. { 800, 600},
  3274. { 800, 480},
  3275. { 720, 480},
  3276. { 640, 480},
  3277. { 528, 432},
  3278. { 480, 320},
  3279. { 352, 288},
  3280. { 320, 240},
  3281. { 176, 144}
  3282. };
  3283. int sizeOfResSize = sizeof(RESOLUTION_LIST) / (sizeof(int) * 2);
  3284. for (int i = 0; i < sizeOfResSize; i++) {
  3285. if ( RESOLUTION_LIST[i][0] <= w
  3286. && RESOLUTION_LIST[i][1] <= h) {
  3287. if (flagFirst == true)
  3288. flagFirst = false;
  3289. else
  3290. string8Buf.append(",");
  3291. sprintf(strBuf, "%dx%d", RESOLUTION_LIST[i][0], RESOLUTION_LIST[i][1]);
  3292. string8Buf.append(strBuf);
  3293. ret = true;
  3294. }
  3295. }
  3296. if (ret == false)
  3297. ALOGE("ERR(%s):cannot find resolutions", __func__);
  3298. return ret;
  3299. }
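// Worked example (illustrative): with w = 1280 and h = 720 the filter above keeps
// every entry whose width and height both fit, producing
//   "1280x720,800x600,800x480,720x480,640x480,528x432,480x320,352x288,320x240,176x144"
// (1024x768 is dropped because 768 > 720).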
  3300. bool ExynosCameraHWInterface::m_getZoomRatioList(String8 & string8Buf, char * strBuf, int maxZoom, int start, int end)
  3301. {
  3302. bool flagFirst = true;
  3303. int cur = start;
  3304. int step = (end - start) / maxZoom;
  3305. for (int i = 0; i < maxZoom; i++) {
  3306. sprintf(strBuf, "%d", cur);
  3307. string8Buf.append(strBuf);
  3308. string8Buf.append(",");
  3309. cur += step;
  3310. }
  3311. sprintf(strBuf, "%d", end);
  3312. string8Buf.append(strBuf);
  3313. // ex : "100,130,160,190,220,250,280,310,340,360,400"
  3314. return true;
  3315. }
  3316. int ExynosCameraHWInterface::m_bracketsStr2Ints(char *str, int num, ExynosRect2 *rect2s, int *weights)
  3317. {
  3318. char *curStr = str;
  3319. char buf[128];
  3320. char *bracketsOpen;
  3321. char *bracketsClose;
  3322. int tempArray[5];
  3323. int validFocusedAreas = 0;
  3324. for (int i = 0; i < num; i++) {
  3325. if (curStr == NULL)
  3326. break;
  3327. bracketsOpen = strchr(curStr, '(');
  3328. if (bracketsOpen == NULL)
  3329. break;
  3330. bracketsClose = strchr(bracketsOpen, ')');
  3331. if (bracketsClose == NULL)
  3332. break;
  3333. strncpy(buf, bracketsOpen, bracketsClose - bracketsOpen + 1);
  3334. buf[bracketsClose - bracketsOpen + 1] = 0;
  3335. if (m_subBracketsStr2Ints(5, buf, tempArray) == false) {
  3336. ALOGE("ERR(%s):m_subBracketsStr2Ints(%s) fail", __func__, buf);
  3337. break;
  3338. }
  3339. rect2s[i].x1 = tempArray[0];
  3340. rect2s[i].y1 = tempArray[1];
  3341. rect2s[i].x2 = tempArray[2];
  3342. rect2s[i].y2 = tempArray[3];
  3343. weights[i] = tempArray[4];
  3344. validFocusedAreas++;
  3345. curStr = bracketsClose;
  3346. }
  3347. return validFocusedAreas;
  3348. }
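// Worked example (illustrative): for the focus-area string
//   "(100,100,400,400,1000),(500,500,800,800,1)"
// and num = 2, the loop above fills rect2s[0] = {100,100,400,400} with
// weights[0] = 1000 and rect2s[1] = {500,500,800,800} with weights[1] = 1,
// and returns 2 as the number of valid areas.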
  3349. bool ExynosCameraHWInterface::m_subBracketsStr2Ints(int num, char *str, int *arr)
  3350. {
  3351. if (str == NULL || arr == NULL) {
  3352. ALOGE("ERR(%s):str or arr is NULL", __func__);
  3353. return false;
  3354. }
  3355. // ex : (-10,-10,0,0,300)
  3356. char buf[128];
  3357. char *bracketsOpen;
  3358. char *bracketsClose;
  3359. char *tok;
  3360. bracketsOpen = strchr(str, '(');
  3361. if (bracketsOpen == NULL) {
  3362. ALOGE("ERR(%s):no '('", __func__);
  3363. return false;
  3364. }
  3365. bracketsClose = strchr(bracketsOpen, ')');
  3366. if (bracketsClose == NULL) {
  3367. ALOGE("ERR(%s):no ')'", __func__);
  3368. return false;
  3369. }
3370. strncpy(buf, bracketsOpen + 1, bracketsClose - bracketsOpen - 1);
3371. buf[bracketsClose - bracketsOpen - 1] = 0;
  3372. tok = strtok(buf, ",");
  3373. if (tok == NULL) {
  3374. ALOGE("ERR(%s):strtok(%s) fail", __func__, buf);
  3375. return false;
  3376. }
  3377. arr[0] = atoi(tok);
  3378. for (int i = 1; i < num; i++) {
  3379. tok = strtok(NULL, ",");
  3380. if (tok == NULL) {
  3381. if (i < num - 1) {
  3382. ALOGE("ERR(%s):strtok() (index : %d, num : %d) fail", __func__, i, num);
  3383. return false;
  3384. }
  3385. break;
  3386. }
  3387. arr[i] = atoi(tok);
  3388. }
  3389. return true;
  3390. }
  3391. bool ExynosCameraHWInterface::m_getRatioSize(int src_w, int src_h,
  3392. int dst_w, int dst_h,
  3393. int *crop_x, int *crop_y,
  3394. int *crop_w, int *crop_h,
  3395. int zoom)
  3396. {
  3397. *crop_w = src_w;
  3398. *crop_h = src_h;
  3399. if ( src_w != dst_w
  3400. || src_h != dst_h) {
  3401. float src_ratio = 1.0f;
  3402. float dst_ratio = 1.0f;
  3403. // ex : 1024 / 768
  3404. src_ratio = (float)src_w / (float)src_h;
  3405. // ex : 352 / 288
  3406. dst_ratio = (float)dst_w / (float)dst_h;
  3407. if (src_ratio != dst_ratio) {
  3408. if (dst_w * dst_h < src_w * src_h) {
  3409. if (src_ratio <= dst_ratio) {
  3410. // shrink h
  3411. *crop_w = src_w;
  3412. *crop_h = src_w / dst_ratio;
  3413. } else {
  3414. // shrink w
  3415. *crop_w = dst_h * dst_ratio;
  3416. *crop_h = dst_h;
  3417. }
  3418. } else {
  3419. if (src_ratio <= dst_ratio) {
  3420. // shrink h
  3421. *crop_w = src_w;
  3422. *crop_h = src_w / dst_ratio;
  3423. } else {
  3424. // shrink w
  3425. *crop_w = src_h * dst_ratio;
  3426. *crop_h = src_h;
  3427. }
  3428. }
  3429. if (zoom != 0) {
3430. float zoomLevel = ((float)zoom + 10.0f) / 10.0f;
  3431. *crop_w = (int)((float)*crop_w / zoomLevel);
  3432. *crop_h = (int)((float)*crop_h / zoomLevel);
  3433. }
  3434. }
  3435. }
  3436. #define CAMERA_CROP_WIDTH_RESTRAIN_NUM (0x2)
  3437. unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
  3438. if (w_align != 0) {
  3439. if ( (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
  3440. && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
  3441. *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
  3442. }
  3443. else
  3444. *crop_w -= w_align;
  3445. }
  3446. #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM (0x2)
  3447. unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
  3448. if (h_align != 0) {
  3449. if ( (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
  3450. && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
  3451. *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
  3452. }
  3453. else
  3454. *crop_h -= h_align;
  3455. }
  3456. *crop_x = (src_w - *crop_w) >> 1;
  3457. *crop_y = (src_h - *crop_h) >> 1;
  3458. if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
  3459. *crop_x -= 1;
  3460. if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
  3461. *crop_y -= 1;
  3462. return true;
  3463. }
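// Worked example (illustrative): cropping a 1600x1200 (4:3) source for a
// 1280x720 (16:9) target with zoom = 0:
//   src_ratio (1.33) <= dst_ratio (1.78)  ->  shrink height
//   *crop_w = 1600, *crop_h = 1600 * 720 / 1280 = 900
//   *crop_x = (1600 - 1600) / 2 = 0, *crop_y = (1200 - 900) / 2 = 150
// so the CSC reads a centered 1600x900 window out of the source image.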
  3464. int ExynosCameraHWInterface::m_calibratePosition(int w, int new_w, int pos)
  3465. {
  3466. return (float)(pos * new_w) / (float)w;
  3467. }
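// Worked example (illustrative): rescaling a coordinate of 640 from a
// 1280-pixel-wide grid onto a 1600-pixel-wide grid:
//   m_calibratePosition(1280, 1600, 640) == 640 * 1600 / 1280 == 800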
  3468. static CameraInfo sCameraInfo[] = {
  3469. {
  3470. CAMERA_FACING_BACK,
  3471. 0, /* orientation */
  3472. },
  3473. {
  3474. CAMERA_FACING_FRONT,
  3475. 0, /* orientation */
  3476. }
  3477. };
  3478. /** Close this device */
  3479. static camera_device_t *g_cam_device;
  3480. static int HAL_camera_device_close(struct hw_device_t* device)
  3481. {
  3482. ALOGV("DEBUG(%s):", __func__);
  3483. if (device) {
  3484. camera_device_t *cam_device = (camera_device_t *)device;
  3485. delete static_cast<ExynosCameraHWInterface *>(cam_device->priv);
  3486. free(cam_device);
  3487. g_cam_device = 0;
  3488. }
  3489. return 0;
  3490. }
  3491. static inline ExynosCameraHWInterface *obj(struct camera_device *dev)
  3492. {
  3493. return reinterpret_cast<ExynosCameraHWInterface *>(dev->priv);
  3494. }
  3495. /** Set the preview_stream_ops to which preview frames are sent */
  3496. static int HAL_camera_device_set_preview_window(struct camera_device *dev,
  3497. struct preview_stream_ops *buf)
  3498. {
  3499. ALOGV("DEBUG(%s):", __func__);
  3500. return obj(dev)->setPreviewWindow(buf);
  3501. }
  3502. /** Set the notification and data callbacks */
  3503. static void HAL_camera_device_set_callbacks(struct camera_device *dev,
  3504. camera_notify_callback notify_cb,
  3505. camera_data_callback data_cb,
  3506. camera_data_timestamp_callback data_cb_timestamp,
  3507. camera_request_memory get_memory,
  3508. void* user)
  3509. {
  3510. ALOGV("DEBUG(%s):", __func__);
  3511. obj(dev)->setCallbacks(notify_cb, data_cb, data_cb_timestamp,
  3512. get_memory,
  3513. user);
  3514. }
  3515. /**
  3516. * The following three functions all take a msg_type, which is a bitmask of
  3517. * the messages defined in include/ui/Camera.h
  3518. */
  3519. /**
  3520. * Enable a message, or set of messages.
  3521. */
  3522. static void HAL_camera_device_enable_msg_type(struct camera_device *dev, int32_t msg_type)
  3523. {
  3524. ALOGV("DEBUG(%s):", __func__);
  3525. obj(dev)->enableMsgType(msg_type);
  3526. }
  3527. /**
  3528. * Disable a message, or a set of messages.
  3529. *
3530. * Once it has received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), the camera
3531. * HAL should not rely on its client to call releaseRecordingFrame() to
3532. * release video recording frames sent out by the camera HAL before and
  3533. * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera HAL
  3534. * clients must not modify/access any video recording frame after calling
  3535. * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
  3536. */
  3537. static void HAL_camera_device_disable_msg_type(struct camera_device *dev, int32_t msg_type)
  3538. {
  3539. ALOGV("DEBUG(%s):", __func__);
  3540. obj(dev)->disableMsgType(msg_type);
  3541. }
  3542. /**
  3543. * Query whether a message, or a set of messages, is enabled. Note that
3544. * this operates as an AND: if any of the messages queried are off, this
  3545. * will return false.
  3546. */
  3547. static int HAL_camera_device_msg_type_enabled(struct camera_device *dev, int32_t msg_type)
  3548. {
  3549. ALOGV("DEBUG(%s):", __func__);
  3550. return obj(dev)->msgTypeEnabled(msg_type);
  3551. }
  3552. /**
  3553. * Start preview mode.
  3554. */
  3555. static int HAL_camera_device_start_preview(struct camera_device *dev)
  3556. {
  3557. ALOGV("DEBUG(%s):", __func__);
  3558. return obj(dev)->startPreview();
  3559. }
  3560. /**
  3561. * Stop a previously started preview.
  3562. */
  3563. static void HAL_camera_device_stop_preview(struct camera_device *dev)
  3564. {
  3565. ALOGV("DEBUG(%s):", __func__);
  3566. obj(dev)->stopPreview();
  3567. }
  3568. /**
  3569. * Returns true if preview is enabled.
  3570. */
  3571. static int HAL_camera_device_preview_enabled(struct camera_device *dev)
  3572. {
  3573. ALOGV("DEBUG(%s):", __func__);
  3574. return obj(dev)->previewEnabled();
  3575. }
  3576. /**
  3577. * Request the camera HAL to store meta data or real YUV data in the video
  3578. * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If
  3579. * it is not called, the default camera HAL behavior is to store real YUV
  3580. * data in the video buffers.
  3581. *
  3582. * This method should be called before startRecording() in order to be
  3583. * effective.
  3584. *
  3585. * If meta data is stored in the video buffers, it is up to the receiver of
  3586. * the video buffers to interpret the contents and to find the actual frame
  3587. * data with the help of the meta data in the buffer. How this is done is
  3588. * outside of the scope of this method.
  3589. *
  3590. * Some camera HALs may not support storing meta data in the video buffers,
  3591. * but all camera HALs should support storing real YUV data in the video
  3592. * buffers. If the camera HAL does not support storing the meta data in the
3593. * video buffers when it is requested to do so, INVALID_OPERATION must be
  3594. * returned. It is very useful for the camera HAL to pass meta data rather
  3595. * than the actual frame data directly to the video encoder, since the
  3596. * amount of the uncompressed frame data can be very large if video size is
  3597. * large.
  3598. *
  3599. * @param enable if true to instruct the camera HAL to store
  3600. * meta data in the video buffers; false to instruct
  3601. * the camera HAL to store real YUV data in the video
  3602. * buffers.
  3603. *
  3604. * @return OK on success.
  3605. */
  3606. static int HAL_camera_device_store_meta_data_in_buffers(struct camera_device *dev, int enable)
  3607. {
  3608. ALOGV("DEBUG(%s):", __func__);
  3609. return obj(dev)->storeMetaDataInBuffers(enable);
  3610. }
  3611. /**
  3612. * Start record mode. When a record image is available, a
  3613. * CAMERA_MSG_VIDEO_FRAME message is sent with the corresponding
  3614. * frame. Every record frame must be released by a camera HAL client via
  3615. * releaseRecordingFrame() before the client calls
  3616. * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
  3617. * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
  3618. * responsibility to manage the life-cycle of the video recording frames,
  3619. * and the client must not modify/access any video recording frames.
  3620. */
  3621. static int HAL_camera_device_start_recording(struct camera_device *dev)
  3622. {
  3623. ALOGV("DEBUG(%s):", __func__);
  3624. return obj(dev)->startRecording();
  3625. }
  3626. /**
  3627. * Stop a previously started recording.
  3628. */
  3629. static void HAL_camera_device_stop_recording(struct camera_device *dev)
  3630. {
  3631. ALOGV("DEBUG(%s):", __func__);
  3632. obj(dev)->stopRecording();
  3633. }
  3634. /**
  3635. * Returns true if recording is enabled.
  3636. */
  3637. static int HAL_camera_device_recording_enabled(struct camera_device *dev)
  3638. {
  3639. ALOGV("DEBUG(%s):", __func__);
  3640. return obj(dev)->recordingEnabled();
  3641. }
  3642. /**
  3643. * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
  3644. *
  3645. * It is camera HAL client's responsibility to release video recording
  3646. * frames sent out by the camera HAL before the camera HAL receives a call
  3647. * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to
  3648. * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
  3649. * responsibility to manage the life-cycle of the video recording frames.
  3650. */
  3651. static void HAL_camera_device_release_recording_frame(struct camera_device *dev,
  3652. const void *opaque)
  3653. {
  3654. ALOGV("DEBUG(%s):", __func__);
  3655. obj(dev)->releaseRecordingFrame(opaque);
  3656. }
  3657. /**
  3658. * Start auto focus, the notification callback routine is called with
  3659. * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() will be
  3660. * called again if another auto focus is needed.
  3661. */
  3662. static int HAL_camera_device_auto_focus(struct camera_device *dev)
  3663. {
  3664. ALOGV("DEBUG(%s):", __func__);
  3665. return obj(dev)->autoFocus();
  3666. }
  3667. /**
  3668. * Cancels auto-focus function. If the auto-focus is still in progress,
  3669. * this function will cancel it. Whether the auto-focus is in progress or
  3670. * not, this function will return the focus position to the default. If
  3671. * the camera does not support auto-focus, this is a no-op.
  3672. */
  3673. static int HAL_camera_device_cancel_auto_focus(struct camera_device *dev)
  3674. {
  3675. ALOGV("DEBUG(%s):", __func__);
  3676. return obj(dev)->cancelAutoFocus();
  3677. }
  3678. /**
  3679. * Take a picture.
  3680. */
  3681. static int HAL_camera_device_take_picture(struct camera_device *dev)
  3682. {
  3683. ALOGV("DEBUG(%s):", __func__);
  3684. return obj(dev)->takePicture();
  3685. }
  3686. /**
  3687. * Cancel a picture that was started with takePicture. Calling this method
  3688. * when no picture is being taken is a no-op.
  3689. */
  3690. static int HAL_camera_device_cancel_picture(struct camera_device *dev)
  3691. {
  3692. ALOGV("DEBUG(%s):", __func__);
  3693. return obj(dev)->cancelPicture();
  3694. }
  3695. /**
  3696. * Set the camera parameters. This returns BAD_VALUE if any parameter is
  3697. * invalid or not supported.
  3698. */
  3699. static int HAL_camera_device_set_parameters(struct camera_device *dev,
  3700. const char *parms)
  3701. {
  3702. ALOGV("DEBUG(%s):", __func__);
  3703. String8 str(parms);
  3704. CameraParameters p(str);
  3705. return obj(dev)->setParameters(p);
  3706. }
  3707. /** Return the camera parameters. */
  3708. char *HAL_camera_device_get_parameters(struct camera_device *dev)
  3709. {
  3710. ALOGV("DEBUG(%s):", __func__);
  3711. String8 str;
  3712. CameraParameters parms = obj(dev)->getParameters();
  3713. str = parms.flatten();
  3714. return strdup(str.string());
  3715. }
  3716. static void HAL_camera_device_put_parameters(struct camera_device *dev, char *parms)
  3717. {
  3718. ALOGV("DEBUG(%s):", __func__);
  3719. free(parms);
  3720. }
  3721. /**
  3722. * Send command to camera driver.
  3723. */
  3724. static int HAL_camera_device_send_command(struct camera_device *dev,
  3725. int32_t cmd, int32_t arg1, int32_t arg2)
  3726. {
  3727. ALOGV("DEBUG(%s):", __func__);
  3728. return obj(dev)->sendCommand(cmd, arg1, arg2);
  3729. }
  3730. /**
  3731. * Release the hardware resources owned by this object. Note that this is
  3732. * *not* done in the destructor.
  3733. */
  3734. static void HAL_camera_device_release(struct camera_device *dev)
  3735. {
  3736. ALOGV("DEBUG(%s):", __func__);
  3737. obj(dev)->release();
  3738. }
  3739. /**
  3740. * Dump state of the camera hardware
  3741. */
  3742. static int HAL_camera_device_dump(struct camera_device *dev, int fd)
  3743. {
  3744. ALOGV("DEBUG(%s):", __func__);
  3745. return obj(dev)->dump(fd);
  3746. }
  3747. static int HAL_getNumberOfCameras()
  3748. {
  3749. ALOGV("DEBUG(%s):", __func__);
  3750. return sizeof(sCameraInfo) / sizeof(sCameraInfo[0]);
  3751. }
  3752. static int HAL_getCameraInfo(int cameraId, struct camera_info *cameraInfo)
  3753. {
  3754. ALOGV("DEBUG(%s):", __func__);
  3755. memcpy(cameraInfo, &sCameraInfo[cameraId], sizeof(CameraInfo));
  3756. return 0;
  3757. }
  3758. #define SET_METHOD(m) m : HAL_camera_device_##m
  3759. static camera_device_ops_t camera_device_ops = {
  3760. SET_METHOD(set_preview_window),
  3761. SET_METHOD(set_callbacks),
  3762. SET_METHOD(enable_msg_type),
  3763. SET_METHOD(disable_msg_type),
  3764. SET_METHOD(msg_type_enabled),
  3765. SET_METHOD(start_preview),
  3766. SET_METHOD(stop_preview),
  3767. SET_METHOD(preview_enabled),
  3768. SET_METHOD(store_meta_data_in_buffers),
  3769. SET_METHOD(start_recording),
  3770. SET_METHOD(stop_recording),
  3771. SET_METHOD(recording_enabled),
  3772. SET_METHOD(release_recording_frame),
  3773. SET_METHOD(auto_focus),
  3774. SET_METHOD(cancel_auto_focus),
  3775. SET_METHOD(take_picture),
  3776. SET_METHOD(cancel_picture),
  3777. SET_METHOD(set_parameters),
  3778. SET_METHOD(get_parameters),
  3779. SET_METHOD(put_parameters),
  3780. SET_METHOD(send_command),
  3781. SET_METHOD(release),
  3782. SET_METHOD(dump),
  3783. };
  3784. #undef SET_METHOD
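// For reference (illustrative): SET_METHOD(take_picture) expands to the
// GCC-style designated initializer
//   take_picture : HAL_camera_device_take_picture
// so every entry in camera_device_ops above is wired to its matching
// HAL_camera_device_* wrapper by name.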
  3785. static int HAL_camera_device_open(const struct hw_module_t* module,
  3786. const char *id,
  3787. struct hw_device_t** device)
  3788. {
  3789. ALOGV("DEBUG(%s):", __func__);
  3790. int cameraId = atoi(id);
  3791. if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) {
  3792. ALOGE("ERR(%s):Invalid camera ID %s", __func__, id);
  3793. return -EINVAL;
  3794. }
  3795. if (g_cam_device) {
  3796. if (obj(g_cam_device)->getCameraId() == cameraId) {
  3797. ALOGV("DEBUG(%s):returning existing camera ID %s", __func__, id);
  3798. goto done;
  3799. } else {
  3800. ALOGE("ERR(%s):Cannot open camera %d. camera %d is already running!",
  3801. __func__, cameraId, obj(g_cam_device)->getCameraId());
  3802. return -ENOSYS;
  3803. }
  3804. }
  3805. g_cam_device = (camera_device_t *)malloc(sizeof(camera_device_t));
  3806. if (!g_cam_device)
  3807. return -ENOMEM;
  3808. g_cam_device->common.tag = HARDWARE_DEVICE_TAG;
  3809. g_cam_device->common.version = 1;
  3810. g_cam_device->common.module = const_cast<hw_module_t *>(module);
  3811. g_cam_device->common.close = HAL_camera_device_close;
  3812. g_cam_device->ops = &camera_device_ops;
  3813. ALOGV("DEBUG(%s):open camera %s", __func__, id);
  3814. g_cam_device->priv = new ExynosCameraHWInterface(cameraId, g_cam_device);
  3815. done:
  3816. *device = (hw_device_t *)g_cam_device;
  3817. ALOGV("DEBUG(%s):opened camera %s (%p)", __func__, id, *device);
  3818. return 0;
  3819. }
  3820. static hw_module_methods_t camera_module_methods = {
  3821. open : HAL_camera_device_open
  3822. };
  3823. extern "C" {
  3824. struct camera_module HAL_MODULE_INFO_SYM = {
  3825. common : {
  3826. tag : HARDWARE_MODULE_TAG,
  3827. version_major : 1,
  3828. version_minor : 0,
  3829. id : CAMERA_HARDWARE_MODULE_ID,
  3830. name : "orion camera HAL",
  3831. author : "Samsung Corporation",
  3832. methods : &camera_module_methods,
  3833. },
  3834. get_number_of_cameras : HAL_getNumberOfCameras,
  3835. get_camera_info : HAL_getCameraInfo
  3836. };
  3837. }
  3838. }; // namespace android