PageRenderTime 51ms CodeModel.GetById 19ms RepoModel.GetById 0ms app.codeStats 2ms

/libcamera2/ExynosCameraHWInterface2.cpp

https://bitbucket.org/sola/jcrom_hardware_samsung_slsi_exynos5
C++ | 6441 lines | 5504 code | 752 blank | 185 comment | 1257 complexity | 48cd632493d56125ab729d77884b90bb MD5 | raw file
  1. /*
  2. **
  3. ** Copyright 2008, The Android Open Source Project
  4. ** Copyright 2012, Samsung Electronics Co. LTD
  5. **
  6. ** Licensed under the Apache License, Version 2.0 (the "License");
  7. ** you may not use this file except in compliance with the License.
  8. ** You may obtain a copy of the License at
  9. **
  10. ** http://www.apache.org/licenses/LICENSE-2.0
  11. **
  12. ** Unless required by applicable law or agreed to in writing, software
  13. ** distributed under the License is distributed on an "AS IS" BASIS,
  14. ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. ** See the License for the specific language governing permissions and
  16. ** limitations under the License.
  17. */
  18. /*!
  19. * \file ExynosCameraHWInterface2.cpp
  20. * \brief source file for Android Camera API 2.0 HAL
  21. * \author Sungjoong Kang(sj3.kang@samsung.com)
  22. * \date 2012/07/10
  23. *
  24. * <b>Revision History: </b>
  25. * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
  26. * Initial Release
  27. *
  28. * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
  29. * 2nd Release
  30. *
  31. */
  32. //#define LOG_NDEBUG 0
  33. #define LOG_TAG "ExynosCameraHAL2"
  34. #include <utils/Log.h>
  35. #include <math.h>
  36. #include "ExynosCameraHWInterface2.h"
  37. #include "exynos_format.h"
  38. namespace android {
  39. void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
  40. {
  41. int nw;
  42. int cnt = 0;
  43. uint32_t written = 0;
  44. ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
  45. int fd = open(fname, O_RDWR | O_CREAT, 0644);
  46. if (fd < 0) {
  47. ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
  48. return;
  49. }
  50. ALOGV("writing %d bytes to file [%s]", size, fname);
  51. while (written < size) {
  52. nw = ::write(fd, buf + written, size - written);
  53. if (nw < 0) {
  54. ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
  55. break;
  56. }
  57. written += nw;
  58. cnt++;
  59. }
  60. ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
  61. ::close(fd);
  62. }
  63. int get_pixel_depth(uint32_t fmt)
  64. {
  65. int depth = 0;
  66. switch (fmt) {
  67. case V4L2_PIX_FMT_JPEG:
  68. depth = 8;
  69. break;
  70. case V4L2_PIX_FMT_NV12:
  71. case V4L2_PIX_FMT_NV21:
  72. case V4L2_PIX_FMT_YUV420:
  73. case V4L2_PIX_FMT_YVU420M:
  74. case V4L2_PIX_FMT_NV12M:
  75. case V4L2_PIX_FMT_NV12MT:
  76. depth = 12;
  77. break;
  78. case V4L2_PIX_FMT_RGB565:
  79. case V4L2_PIX_FMT_YUYV:
  80. case V4L2_PIX_FMT_YVYU:
  81. case V4L2_PIX_FMT_UYVY:
  82. case V4L2_PIX_FMT_VYUY:
  83. case V4L2_PIX_FMT_NV16:
  84. case V4L2_PIX_FMT_NV61:
  85. case V4L2_PIX_FMT_YUV422P:
  86. case V4L2_PIX_FMT_SBGGR10:
  87. case V4L2_PIX_FMT_SBGGR12:
  88. case V4L2_PIX_FMT_SBGGR16:
  89. depth = 16;
  90. break;
  91. case V4L2_PIX_FMT_RGB32:
  92. depth = 32;
  93. break;
  94. default:
  95. ALOGE("Get depth failed(format : %d)", fmt);
  96. break;
  97. }
  98. return depth;
  99. }
  100. int cam_int_s_fmt(node_info_t *node)
  101. {
  102. struct v4l2_format v4l2_fmt;
  103. unsigned int framesize;
  104. int ret;
  105. memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
  106. v4l2_fmt.type = node->type;
  107. framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
  108. if (node->planes >= 1) {
  109. v4l2_fmt.fmt.pix_mp.width = node->width;
  110. v4l2_fmt.fmt.pix_mp.height = node->height;
  111. v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
  112. v4l2_fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
  113. } else {
  114. ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
  115. }
  116. /* Set up for capture */
  117. ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
  118. if (ret < 0)
  119. ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
  120. return ret;
  121. }
  122. int cam_int_reqbufs(node_info_t *node)
  123. {
  124. struct v4l2_requestbuffers req;
  125. int ret;
  126. req.count = node->buffers;
  127. req.type = node->type;
  128. req.memory = node->memory;
  129. ret = exynos_v4l2_reqbufs(node->fd, &req);
  130. if (ret < 0)
  131. ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
  132. return req.count;
  133. }
  134. int cam_int_qbuf(node_info_t *node, int index)
  135. {
  136. struct v4l2_buffer v4l2_buf;
  137. struct v4l2_plane planes[VIDEO_MAX_PLANES];
  138. int i;
  139. int ret = 0;
  140. v4l2_buf.m.planes = planes;
  141. v4l2_buf.type = node->type;
  142. v4l2_buf.memory = node->memory;
  143. v4l2_buf.index = index;
  144. v4l2_buf.length = node->planes;
  145. for(i = 0; i < node->planes; i++){
  146. v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
  147. v4l2_buf.m.planes[i].length = (unsigned long)(node->buffer[index].size.extS[i]);
  148. }
  149. ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
  150. if (ret < 0)
  151. ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
  152. return ret;
  153. }
  154. int cam_int_streamon(node_info_t *node)
  155. {
  156. enum v4l2_buf_type type = node->type;
  157. int ret;
  158. ret = exynos_v4l2_streamon(node->fd, type);
  159. if (ret < 0)
  160. ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
  161. ALOGV("On streaming I/O... ... fd(%d)", node->fd);
  162. return ret;
  163. }
  164. int cam_int_streamoff(node_info_t *node)
  165. {
  166. enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  167. int ret;
  168. ALOGV("Off streaming I/O... fd(%d)", node->fd);
  169. ret = exynos_v4l2_streamoff(node->fd, type);
  170. if (ret < 0)
  171. ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
  172. return ret;
  173. }
  174. int isp_int_streamoff(node_info_t *node)
  175. {
  176. enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  177. int ret;
  178. ALOGV("Off streaming I/O... fd(%d)", node->fd);
  179. ret = exynos_v4l2_streamoff(node->fd, type);
  180. if (ret < 0)
  181. ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
  182. return ret;
  183. }
  184. int cam_int_dqbuf(node_info_t *node)
  185. {
  186. struct v4l2_buffer v4l2_buf;
  187. struct v4l2_plane planes[VIDEO_MAX_PLANES];
  188. int ret;
  189. v4l2_buf.type = node->type;
  190. v4l2_buf.memory = node->memory;
  191. v4l2_buf.m.planes = planes;
  192. v4l2_buf.length = node->planes;
  193. ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
  194. if (ret < 0)
  195. ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
  196. return v4l2_buf.index;
  197. }
  198. int cam_int_dqbuf(node_info_t *node, int num_plane)
  199. {
  200. struct v4l2_buffer v4l2_buf;
  201. struct v4l2_plane planes[VIDEO_MAX_PLANES];
  202. int ret;
  203. v4l2_buf.type = node->type;
  204. v4l2_buf.memory = node->memory;
  205. v4l2_buf.m.planes = planes;
  206. v4l2_buf.length = num_plane;
  207. ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
  208. if (ret < 0)
  209. ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
  210. return v4l2_buf.index;
  211. }
  212. int cam_int_s_input(node_info_t *node, int index)
  213. {
  214. int ret;
  215. ret = exynos_v4l2_s_input(node->fd, index);
  216. if (ret < 0)
  217. ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
  218. return ret;
  219. }
// Process-wide gralloc module handle shared by all HAL instances;
// lazily loaded in the constructor via hw_get_module().
gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
/*
 * RequestManager tracks camera2 requests through the circular entry
 * table, from registration to frame completion.
 *
 * main_thread : thread signalled (SIGNAL_MAIN_STREAM_OUTPUT_DONE) when a
 *               frame completes in order; retained, not owned.
 */
RequestManager::RequestManager(SignalDrivenThread* main_thread):
    m_lastAeMode(0),
    m_lastAaMode(0),
    m_lastAwbMode(0),
    m_vdisBubbleEn(false),
    m_lastAeComp(0),
    m_lastCompletedFrameCnt(-1)     // -1: no frame completed yet
{
    m_metadataConverter = new MetadataConverter;    // owned; freed in dtor
    m_mainThread = main_thread;
    ResetEntry();                   // mark every slot EMPTY / frameCount -1
    m_sensorPipelineSkipCnt = 0;
    return;
}
  235. RequestManager::~RequestManager()
  236. {
  237. ALOGV("%s", __FUNCTION__);
  238. if (m_metadataConverter != NULL) {
  239. delete m_metadataConverter;
  240. m_metadataConverter = NULL;
  241. }
  242. releaseSensorQ();
  243. return;
  244. }
  245. void RequestManager::ResetEntry()
  246. {
  247. Mutex::Autolock lock(m_requestMutex);
  248. for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
  249. memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
  250. entries[i].internal_shot.shot.ctl.request.frameCount = -1;
  251. }
  252. m_numOfEntries = 0;
  253. m_entryInsertionIndex = -1;
  254. m_entryProcessingIndex = -1;
  255. m_entryFrameOutputIndex = -1;
  256. }
/*
 * Number of requests currently held in the entry table.
 * NOTE(review): read without taking m_requestMutex — confirm callers
 * tolerate a stale value.
 */
int RequestManager::GetNumEntries()
{
    return m_numOfEntries;
}
/*
 * Record the default horizontal crop offset; usage of m_cropX is not
 * visible in this part of the file.
 */
void RequestManager::SetDefaultParameters(int cropX)
{
    m_cropX = cropX;
}
  265. bool RequestManager::IsRequestQueueFull()
  266. {
  267. Mutex::Autolock lock(m_requestMutex);
  268. if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
  269. return true;
  270. else
  271. return false;
  272. }
/*
 * Insert a new framework capture request into the circular entry table.
 *
 * new_request : framework metadata buffer; kept until DeregisterRequest()
 *               hands it back through deregistered_request
 * afMode      : out - AF mode carried by this request
 * afRegion    : out - the four afRegions values of this request
 *
 * If the next slot is not EMPTY the request is silently dropped (logged
 * as "Circular buffer abnormal").
 */
void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    Mutex::Autolock lock(m_requestMutex);
    request_manager_entry * newEntry = NULL;
    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries );

    newEntry = &(entries[newInsertionIndex]);
    if (newEntry->status!=EMPTY) {
        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
        return;     // slot still in flight; request is dropped
    }
    newEntry->status = REGISTERED;
    newEntry->original_request = new_request;
    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
    // Translate framework metadata into the driver's shot structure.
    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
    // Count the output paths (SCP / SCC) this request targets;
    // NotifyStreamOutput() decrements this counter towards completion.
    newEntry->output_stream_count = 0;
    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
        newEntry->output_stream_count++;
    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
        newEntry->output_stream_count++;
    m_numOfEntries++;
    m_entryInsertionIndex = newInsertionIndex;

    // Report the request's AF settings back to the caller.
    *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
    afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0];
    afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1];
    afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2];
    afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3];
    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
}
  304. void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
  305. {
  306. ALOGV("DEBUG(%s):", __FUNCTION__);
  307. int frame_index;
  308. request_manager_entry * currentEntry;
  309. Mutex::Autolock lock(m_requestMutex);
  310. frame_index = GetCompletedIndex();
  311. currentEntry = &(entries[frame_index]);
  312. if (currentEntry->status != COMPLETED) {
  313. CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
  314. m_entryProcessingIndex, frame_index,(int)(currentEntry->status));
  315. return;
  316. }
  317. if (deregistered_request) *deregistered_request = currentEntry->original_request;
  318. m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount;
  319. currentEntry->status = EMPTY;
  320. currentEntry->original_request = NULL;
  321. memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
  322. currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
  323. currentEntry->output_stream_count = 0;
  324. m_numOfEntries--;
  325. ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
  326. m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
  327. CheckCompleted(GetNextIndex(frame_index));
  328. return;
  329. }
  330. bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
  331. camera_metadata_t ** prepared_frame, int afState)
  332. {
  333. ALOGV("DEBUG(%s):", __FUNCTION__);
  334. Mutex::Autolock lock(m_requestMutex);
  335. status_t res = NO_ERROR;
  336. int tempFrameOutputIndex = GetCompletedIndex();
  337. request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]);
  338. ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
  339. m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
  340. if (currentEntry->status != COMPLETED) {
  341. ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
  342. return false;
  343. }
  344. m_entryFrameOutputIndex = tempFrameOutputIndex;
  345. m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated
  346. add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
  347. res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
  348. m_tempFrameMetadata);
  349. if (res!=NO_ERROR) {
  350. ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
  351. return false;
  352. }
  353. *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
  354. *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
  355. *prepared_frame = m_tempFrameMetadata;
  356. ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
  357. currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
  358. // Dump();
  359. return true;
  360. }
/*
 * Advance the processing cursor to the next REGISTERED entry, mark it
 * REQUESTED, and build the per-frame shot structure that accompanies the
 * bayer buffer (written into plane 1 of *buf).
 * Returns the chosen entry index, or -1 when no request is ready.
 */
int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
{
    struct camera2_shot_ext * shot_ext;
    struct camera2_shot_ext * request_shot;
    int targetStreamIndex = 0;
    request_manager_entry * newEntry = NULL;
    static int count = 0;   // only the very first shot programs AA auto mode

    Mutex::Autolock lock(m_requestMutex);
    if (m_numOfEntries == 0) {
        CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
        return -1;
    }

    // Underrun: the cursor already sits on the newest entry and that
    // entry is still in flight - nothing new to hand to the sensor.
    if ((m_entryProcessingIndex == m_entryInsertionIndex)
        && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
        ALOGV("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)",
        m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
        return -1;
    }

    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);

    newEntry = &(entries[newProcessingIndex]);
    request_shot = &(newEntry->internal_shot);
    if (newEntry->status != REGISTERED) {
        // Unexpected state: dump the whole table for diagnosis and bail.
        CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
        for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
            CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
        }
        return -1;
    }

    newEntry->status = REQUESTED;

    // Plane 1 of the bayer buffer carries the shot sent to the ISP.
    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
    shot_ext->request_sensor = 1;
    // All post-processing blocks start bypassed; face detection is
    // re-enabled below when the request asks for it.
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->setfile = 0;

    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
    if (targetStreamIndex & MASK_OUTPUT_SCP)
        shot_ext->request_scp = 1;
    if (targetStreamIndex & MASK_OUTPUT_SCC)
        shot_ext->request_scc = 1;
    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
        shot_ext->fd_bypass = 0;

    // First frame ever: switch 3A to AUTO; afterwards leave it unchanged.
    if (count == 0){
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
    } else
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
    count++;

    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
    shot_ext->shot.magicNumber = 0x23456789;
    // Fixed 33ms frame duration (~30fps); exposure/sensitivity of 0
    // presumably let the driver choose its own values - TODO confirm.
    shot_ext->shot.ctl.sensor.exposureTime = 0;
    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
    shot_ext->shot.ctl.sensor.sensitivity = 0;

    shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
    shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
    shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];

    m_entryProcessingIndex = newProcessingIndex;
    return newProcessingIndex;
}
  424. void RequestManager::NotifyStreamOutput(int frameCnt)
  425. {
  426. int index;
  427. Mutex::Autolock lock(m_requestMutex);
  428. ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
  429. index = FindEntryIndexByFrameCnt(frameCnt);
  430. if (index == -1) {
  431. ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
  432. return;
  433. }
  434. ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt, entries[index].output_stream_count);
  435. entries[index].output_stream_count--; //TODO : match stream id also
  436. CheckCompleted(index);
  437. }
  438. void RequestManager::CheckCompleted(int index)
  439. {
  440. if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
  441. && (entries[index].output_stream_count <= 0)){
  442. ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
  443. index, entries[index].internal_shot.shot.ctl.request.frameCount );
  444. entries[index].status = COMPLETED;
  445. if (m_lastCompletedFrameCnt + 1 == entries[index].internal_shot.shot.ctl.request.frameCount)
  446. m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
  447. }
  448. }
/*
 * Index of the entry expected to complete next (the frame right after
 * the last completed one), or -1 if that frame is not in the table.
 */
int RequestManager::GetCompletedIndex()
{
    return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1);
}
/*
 * Append an entry index to the FIFO consumed by popSensorQ().
 */
void RequestManager::pushSensorQ(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    m_sensorQ.push_back(index);
}
  458. int RequestManager::popSensorQ()
  459. {
  460. List<int>::iterator sensor_token;
  461. int index;
  462. Mutex::Autolock lock(m_requestMutex);
  463. if(m_sensorQ.size() == 0)
  464. return -1;
  465. sensor_token = m_sensorQ.begin()++;
  466. index = *sensor_token;
  467. m_sensorQ.erase(sensor_token);
  468. return (index);
  469. }
  470. void RequestManager::releaseSensorQ()
  471. {
  472. List<int>::iterator r;
  473. Mutex::Autolock lock(m_requestMutex);
  474. ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size());
  475. while(m_sensorQ.size() > 0){
  476. r = m_sensorQ.begin()++;
  477. m_sensorQ.erase(r);
  478. }
  479. return;
  480. }
/*
 * Merge the dynamic (result) metadata produced by the ISP into the
 * matching CAPTURED entry, promote it to METADONE, and complete it if
 * all stream outputs have already been delivered.
 * The sensor timestamp recorded by RegisterTimestamp() is preserved —
 * the incoming dm block would otherwise overwrite it.
 */
void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
{
    int index;
    struct camera2_shot_ext * request_shot;
    nsecs_t timeStamp;
    int i;

    Mutex::Autolock lock(m_requestMutex);
    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);

    // Find the CAPTURED entry with this frame count and mark it METADONE.
    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
            && (entries[i].status == CAPTURED)){
            entries[i].status = METADONE;
            break;
        }
    }

    if (i == NUM_MAX_REQUEST_MGR_ENTRY){
        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        return;
    }

    request_manager_entry * newEntry = &(entries[i]);
    request_shot = &(newEntry->internal_shot);
    // Keep the previously registered sensor timestamp across the copy.
    timeStamp = request_shot->shot.dm.sensor.timeStamp;
    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
    request_shot->shot.dm.sensor.timeStamp = timeStamp;
    m_lastTimeStamp = timeStamp;
    CheckCompleted(i);
}
/*
 * Copy the controls of the request identified by frameCnt into the shot
 * that is about to be queued to the ISP, de-duplicating 3A modes against
 * the previously sent values and updating flash / VDIS tracking state.
 *
 * shot_ext : destination shot (ISP input) — its ctl block is overwritten
 * frameCnt : frame counter identifying the source entry
 * ctl_info : in/out controller state (flash UI mode tracking)
 */
void RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
{
    int index, targetStreamIndex;
    struct camera2_shot_ext * request_shot;

    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
    if (frameCnt < 0)
        return;

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * newEntry = &(entries[index]);
    request_shot = &(newEntry->internal_shot);
    // Start from the request's full control block ...
    memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
    // ... then reset the per-frame routing/bypass flags; the relevant
    // ones are re-enabled further down.
    shot_ext->shot.ctl.request.frameCount = frameCnt;
    shot_ext->request_sensor = 1;
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->drc_bypass = 1;
    shot_ext->setfile = 0;
    shot_ext->request_scc = 0;
    shot_ext->request_scp = 0;
    shot_ext->isReprocessing = request_shot->isReprocessing;
    shot_ext->reprocessInput = request_shot->reprocessInput;
    shot_ext->shot.ctl.request.outputStreams[0] = 0;

    shot_ext->awb_mode_dm = request_shot->awb_mode_dm;

    shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
    shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
    shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];

    // mapping flash UI mode from aeMode: aeMode values >= AA_AEMODE_ON
    // carry the flash UI selection; remember it in ctl_info and fold the
    // mode back to plain ON for the ISP.
    if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
        if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW)
            ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
        else if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD)
            ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
        request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
    }

    // Apply ae/awb lock or unlock
    if (request_shot->ae_lock == AEMODE_LOCK_ON)
        request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
    if (request_shot->awb_lock == AWBMODE_LOCK_ON)
        request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;

    // 3A de-duplication: send 0 when the mode is unchanged since the last
    // applied frame (0 presumably means "no change" to the driver — TODO
    // confirm), otherwise send the new value and remember it.
    if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
        shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
        m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
    }
    if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
        shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
        m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
    }
    if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
        m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
    }
    if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
        shot_ext->shot.ctl.aa.aeExpCompensation = 0;
    }
    else {
        shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
        m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
    }

    // Video stabilization requested: enable DIS/DNR and remember it so
    // IsVdisEnable() reflects the current state.
    if (request_shot->shot.ctl.aa.videoStabilizationMode) {
        m_vdisBubbleEn = true;
        shot_ext->dis_bypass = 0;
        shot_ext->dnr_bypass = 0;
    } else {
        m_vdisBubbleEn = false;
        shot_ext->dis_bypass = 1;
        shot_ext->dnr_bypass = 1;
    }

    shot_ext->shot.ctl.aa.afTrigger = 0;

    // Re-enable the output paths this request actually targets.
    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
    if (targetStreamIndex & MASK_OUTPUT_SCP)
        shot_ext->request_scp = 1;
    if (targetStreamIndex & MASK_OUTPUT_SCC)
        shot_ext->request_scc = 1;
    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
        shot_ext->fd_bypass = 0;

    shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0];
    shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1];

    ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
    (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
    (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
    (int)(shot_ext->shot.ctl.aa.afMode));
}
/*
 * True when the last request applied by UpdateIspParameters() had video
 * stabilization enabled.
 */
bool RequestManager::IsVdisEnable(void)
{
    return m_vdisBubbleEn;
}
  609. int RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
  610. {
  611. for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
  612. if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
  613. return i;
  614. }
  615. return -1;
  616. }
/*
 * Store the sensor timestamp for the entry matching frameCnt.
 * Reprocessing requests keep the timestamp of their source frame.
 * NOTE(review): unlike most accessors here this does not take
 * m_requestMutex — confirm callers serialize access.
 */
void RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    if (currentEntry->internal_shot.isReprocessing == 1) {
        ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    } else {
        currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
        ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    }
}
  634. nsecs_t RequestManager::GetTimestampByFrameCnt(int frameCnt)
  635. {
  636. int index = FindEntryIndexByFrameCnt(frameCnt);
  637. if (index == -1) {
  638. ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
  639. return m_lastTimeStamp;
  640. }
  641. else
  642. return GetTimestamp(index);
  643. }
  644. nsecs_t RequestManager::GetTimestamp(int index)
  645. {
  646. Mutex::Autolock lock(m_requestMutex);
  647. if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
  648. ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
  649. return 0;
  650. }
  651. request_manager_entry * currentEntry = &(entries[index]);
  652. nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
  653. if (frameTime == 0) {
  654. ALOGV("DEBUG(%s): timestamp null, returning saved value", __FUNCTION__);
  655. frameTime = m_lastTimeStamp;
  656. }
  657. ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
  658. return frameTime;
  659. }
  660. uint8_t RequestManager::GetOutputStreamByFrameCnt(int frameCnt)
  661. {
  662. int index = FindEntryIndexByFrameCnt(frameCnt);
  663. if (index == -1) {
  664. ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
  665. return 0;
  666. }
  667. else
  668. return GetOutputStream(index);
  669. }
  670. uint8_t RequestManager::GetOutputStream(int index)
  671. {
  672. Mutex::Autolock lock(m_requestMutex);
  673. if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
  674. ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
  675. return 0;
  676. }
  677. request_manager_entry * currentEntry = &(entries[index]);
  678. return currentEntry->internal_shot.shot.ctl.request.outputStreams[0];
  679. }
  680. camera2_shot_ext * RequestManager::GetInternalShotExtByFrameCnt(int frameCnt)
  681. {
  682. int index = FindEntryIndexByFrameCnt(frameCnt);
  683. if (index == -1) {
  684. ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
  685. return 0;
  686. }
  687. else
  688. return GetInternalShotExt(index);
  689. }
  690. camera2_shot_ext * RequestManager::GetInternalShotExt(int index)
  691. {
  692. Mutex::Autolock lock(m_requestMutex);
  693. if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
  694. ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
  695. return 0;
  696. }
  697. request_manager_entry * currentEntry = &(entries[index]);
  698. return &currentEntry->internal_shot;
  699. }
/*
 * Match a shot coming back from the sensor against a REQUESTED entry and
 * promote it to CAPTURED.
 * Returns the entry's frame count on success, -1 when no matching
 * REQUESTED entry exists (logged).
 */
int RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
{
    Mutex::Autolock lock(m_requestMutex);
    int i;

    if (m_numOfEntries == 0) {
        CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
        return -1;
    }

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
            continue;

        if (entries[i].status == REQUESTED) {
            entries[i].status = CAPTURED;
            return entries[i].internal_shot.shot.ctl.request.frameCount;
        }
        // Frame count matched but the entry is in an unexpected state.
        CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);

    }
    CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);

    return -1;
}
  720. void RequestManager::SetInitialSkip(int count)
  721. {
  722. ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
  723. if (count > m_sensorPipelineSkipCnt)
  724. m_sensorPipelineSkipCnt = count;
  725. }
  726. int RequestManager::GetSkipCnt()
  727. {
  728. ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
  729. if (m_sensorPipelineSkipCnt == 0)
  730. return m_sensorPipelineSkipCnt;
  731. else
  732. return --m_sensorPipelineSkipCnt;
  733. }
/*
 * Debug helper: log every slot of the circular entry table (status,
 * frame count, remaining outputs, first output-stream mask).
 * NOTE(review): reads entries without taking m_requestMutex.
 */
void RequestManager::Dump(void)
{
    int i = 0;
    request_manager_entry * currentEntry;

    ALOGD("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)",
    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        currentEntry = &(entries[i]);
        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
        currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
        currentEntry->output_stream_count,
        currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
    }
}
  748. int RequestManager::GetNextIndex(int index)
  749. {
  750. index++;
  751. if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
  752. index = 0;
  753. return index;
  754. }
  755. int RequestManager::GetPrevIndex(int index)
  756. {
  757. index--;
  758. if (index < 0)
  759. index = NUM_MAX_REQUEST_MGR_ENTRY-1;
  760. return index;
  761. }
/*
 * ExynosCameraHWInterface2 constructor.
 *
 * Loads the gralloc HAL, creates the ion client, the bayer buffer manager,
 * the main thread and the request manager, then brings up the ISP chain via
 * InitializeISPChain(). On success it additionally starts the main/sensor
 * threads, initializes the picture/video CSC handles, EXIF attributes and
 * the flash/AE/AF/scene control state. On failure (*openInvalid < 0) all
 * four video nodes opened by InitializeISPChain() are closed again and no
 * threads are started.
 *
 * @param cameraId    0 or 1; selects the sensor (see InitializeISPChain).
 * @param dev         the camera2_device_t owning this instance.
 * @param camera      static camera info/capability provider.
 * @param openInvalid out: result of InitializeISPChain(); negative on error.
 */
ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
    m_requestQueueOps(NULL),
    m_frameQueueOps(NULL),
    m_callbackCookie(NULL),
    m_numOfRemainingReqInSvc(0),
    m_isRequestQueuePending(false),
    m_isRequestQueueNull(true),
    m_isIspStarted(false),
    m_ionCameraClient(0),
    m_zoomRatio(1),
    m_scp_closing(false),
    m_scp_closed(false),
    m_afState(HAL_AFSTATE_INACTIVE),
    m_afMode(NO_CHANGE),
    m_afMode2(NO_CHANGE),
    m_vdisBubbleCnt(0),
    m_vdisDupFrame(0),
    m_IsAfModeUpdateRequired(false),
    m_IsAfTriggerRequired(false),
    m_IsAfLockRequired(false),
    m_sccLocalBufferValid(false),
    m_wideAspect(false),
    m_scpOutputSignalCnt(0),
    m_scpOutputImageCnt(0),
    m_afTriggerId(0),
    m_afPendingTriggerId(0),
    m_afModeWaitingCnt(0),
    m_jpegEncodingCount(0),
    m_scpForceSuspended(false),
    m_halDevice(dev),
    m_nightCaptureCnt(0),
    m_nightCaptureFrameCnt(0),
    m_lastSceneMode(0),
    m_cameraId(cameraId),
    m_thumbNailW(160),
    m_thumbNailH(120)
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    int ret = 0;
    int res = 0;
    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;
    // m_grallocHal is shared across instances; load it only once.
    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    }
    m_camera2 = camera;
    m_ionCameraClient = createIonClient(m_ionCameraClient);
    if(m_ionCameraClient == 0)
        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
    m_BayerManager = new BayerBufManager();
    m_mainThread = new MainThread(this);
    // RequestManager signals the main thread when request state changes.
    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    *openInvalid = InitializeISPChain();
    if (*openInvalid < 0) {
        ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
        // clean process
        // 1. close video nodes
        // SCP
        res = exynos_v4l2_close(m_camera_info.scp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // SCC
        res = exynos_v4l2_close(m_camera_info.capture.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // Sensor
        res = exynos_v4l2_close(m_camera_info.sensor.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // ISP
        res = exynos_v4l2_close(m_camera_info.isp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
    } else {
        m_sensorThread = new SensorThread(this);
        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);
        // No substreams (record/callback/jpeg) are registered yet.
        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
            m_subStreams[i].type = SUBSTREAM_TYPE_NONE;
        // Hardware color-space converters: one for still capture, one for video.
        CSC_METHOD cscMethod = CSC_METHOD_HW;
        m_exynosPictureCSC = csc_init(cscMethod);
        if (m_exynosPictureCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
        m_exynosVideoCSC = csc_init(cscMethod);
        if (m_exynosVideoCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
        m_setExifFixedAttribute();
        // contol information clear
        // flash
        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
        m_ctlInfo.flash.m_afFlashDoneFlg= false;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashFrameCount = 0;
        m_ctlInfo.flash.m_flashCnt = 0;
        m_ctlInfo.flash.m_flashTimeOut = 0;
        m_ctlInfo.flash.m_flashDecisionResult = false;
        m_ctlInfo.flash.m_flashTorchMode = false;
        m_ctlInfo.flash.m_precaptureState = 0;
        m_ctlInfo.flash.m_precaptureTriggerId = 0;
        // ae
        m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
        // af
        m_ctlInfo.af.m_afTriggerTimeOut = 0;
        // scene
        m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
    }
    ALOGD("(%s): EXIT", __FUNCTION__);
}
/*
 * Destructor: all teardown (threads, CSC handles, buffers, video nodes,
 * ion client) is delegated to release().
 */
ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    this->release();
    ALOGD("(%s): EXIT", __FUNCTION__);
}
/*
 * Tear down the whole HAL instance.
 *
 * Shutdown order matters and is deliberately two-phased:
 *   1) signal every thread to terminate (non-blocking),
 *   2) deinit the CSC handles,
 *   3) busy-wait for each thread to actually report IsTerminated(),
 *   4) free request/bayer managers and all camera buffers,
 *   5) close the four V4L2 video nodes, then destroy the ion client.
 * Threads are signalled first so they can wind down in parallel while the
 * CSC handles are released.
 */
void ExynosCameraHWInterface2::release()
{
    int i, res;
    ALOGD("(HAL2::release): ENTER");
    // Phase 1: request termination from all threads (no waiting yet).
    if (m_streamThreads[1] != NULL) {
        m_streamThreads[1]->release();
        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }
    if (m_streamThreads[0] != NULL) {
        m_streamThreads[0]->release();
        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }
    if (m_sensorThread != NULL) {
        m_sensorThread->release();
    }
    if (m_mainThread != NULL) {
        m_mainThread->release();
    }
    // Release the hardware color-space converter handles.
    if (m_exynosPictureCSC)
        csc_deinit(m_exynosPictureCSC);
    m_exynosPictureCSC = NULL;
    if (m_exynosVideoCSC)
        csc_deinit(m_exynosVideoCSC);
    m_exynosVideoCSC = NULL;
    // Phase 2: poll until each thread confirms termination, then drop the
    // strong reference (threads terminate "indirectly" on their own loop).
    if (m_streamThreads[1] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
        while (!m_streamThreads[1]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 1 termination");
        m_streamThreads[1] = NULL;
    }
    if (m_streamThreads[0] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
        while (!m_streamThreads[0]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 0 termination");
        m_streamThreads[0] = NULL;
    }
    if (m_sensorThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) sensor thread termination");
        m_sensorThread = NULL;
    }
    if (m_mainThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
        while (!m_mainThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) main thread termination");
        m_mainThread = NULL;
    }
    if (m_requestManager != NULL) {
        delete m_requestManager;
        m_requestManager = NULL;
    }
    if (m_BayerManager != NULL) {
        delete m_BayerManager;
        m_BayerManager = NULL;
    }
    // Free the bayer (sensor) buffers.
    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
    // SCC buffers: either the HAL-local copies (m_sccLocalBuffer, allocated
    // in StartSCCThread) or the capture-node buffers — never both.
    if (m_sccLocalBufferValid) {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
#ifdef ENABLE_FRAME_SYNC
            freeCameraMemory(&m_sccLocalBuffer[i], 2);
#else
            freeCameraMemory(&m_sccLocalBuffer[i], 1);
#endif
    }
    else {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
            freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
    }
    // Close all four video nodes (errors are logged but not fatal here).
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.sensor.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.isp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.capture.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.scp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
    deleteIonClient(m_ionCameraClient);
    ALOGD("(HAL2::release): EXIT");
}
  983. int ExynosCameraHWInterface2::InitializeISPChain()
  984. {
  985. char node_name[30];
  986. int fd = 0;
  987. int i;
  988. int ret = 0;
  989. /* Open Sensor */
  990. memset(&node_name, 0x00, sizeof(char[30]));
  991. sprintf(node_name, "%s%d", NODE_PREFIX, 40);
  992. fd = exynos_v4l2_open(node_name, O_RDWR, 0);
  993. if (fd < 0) {
  994. ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
  995. }
  996. else {
  997. ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
  998. }
  999. m_camera_info.sensor.fd = fd;
  1000. /* Open ISP */
  1001. memset(&node_name, 0x00, sizeof(char[30]));
  1002. sprintf(node_name, "%s%d", NODE_PREFIX, 41);
  1003. fd = exynos_v4l2_open(node_name, O_RDWR, 0);
  1004. if (fd < 0) {
  1005. ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
  1006. }
  1007. else {
  1008. ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
  1009. }
  1010. m_camera_info.isp.fd = fd;
  1011. /* Open ScalerC */
  1012. memset(&node_name, 0x00, sizeof(char[30]));
  1013. sprintf(node_name, "%s%d", NODE_PREFIX, 42);
  1014. fd = exynos_v4l2_open(node_name, O_RDWR, 0);
  1015. if (fd < 0) {
  1016. ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
  1017. }
  1018. else {
  1019. ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
  1020. }
  1021. m_camera_info.capture.fd = fd;
  1022. /* Open ScalerP */
  1023. memset(&node_name, 0x00, sizeof(char[30]));
  1024. sprintf(node_name, "%s%d", NODE_PREFIX, 44);
  1025. fd = exynos_v4l2_open(node_name, O_RDWR, 0);
  1026. if (fd < 0) {
  1027. ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
  1028. }
  1029. else {
  1030. ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
  1031. }
  1032. m_camera_info.scp.fd = fd;
  1033. if(m_cameraId == 0)
  1034. m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
  1035. else
  1036. m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
  1037. memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
  1038. m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
  1039. m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
  1040. m_camera_info.dummy_shot.dis_bypass = 1;
  1041. m_camera_info.dummy_shot.dnr_bypass = 1;
  1042. m_camera_info.dummy_shot.fd_bypass = 1;
  1043. /*sensor setting*/
  1044. m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
  1045. m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
  1046. m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
  1047. m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
  1048. m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
  1049. /*request setting*/
  1050. m_camera_info.dummy_shot.request_sensor = 1;
  1051. m_camera_info.dummy_shot.request_scc = 0;
  1052. m_camera_info.dummy_shot.request_scp = 0;
  1053. m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
  1054. m_camera_info.sensor.width = m_camera2->getSensorRawW();
  1055. m_camera_info.sensor.height = m_camera2->getSensorRawH();
  1056. m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
  1057. m_camera_info.sensor.planes = 2;
  1058. m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
  1059. m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  1060. m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
  1061. for(i = 0; i < m_camera_info.sensor.buffers; i++){
  1062. initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
  1063. m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
  1064. m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
  1065. allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
  1066. }
  1067. m_camera_info.isp.width = m_camera_info.sensor.width;
  1068. m_camera_info.isp.height = m_camera_info.sensor.height;
  1069. m_camera_info.isp.format = m_camera_info.sensor.format;
  1070. m_camera_info.isp.planes = m_camera_info.sensor.planes;
  1071. m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
  1072. m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  1073. m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
  1074. for(i = 0; i < m_camera_info.isp.buffers; i++){
  1075. initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
  1076. m_camera_info.isp.buffer[i].size.extS[0] = m_camera_info.sensor.buffer[i].size.extS[0];
  1077. m_camera_info.isp.buffer[i].size.extS[1] = m_camera_info.sensor.buffer[i].size.extS[1];
  1078. m_camera_info.isp.buffer[i].fd.extFd[0] = m_camera_info.sensor.buffer[i].fd.extFd[0];
  1079. m_camera_info.isp.buffer[i].fd.extFd[1] = m_camera_info.sensor.buffer[i].fd.extFd[1];
  1080. m_camera_info.isp.buffer[i].virt.extP[0] = m_camera_info.sensor.buffer[i].virt.extP[0];
  1081. m_camera_info.isp.buffer[i].virt.extP[1] = m_camera_info.sensor.buffer[i].virt.extP[1];
  1082. };
  1083. /* init ISP */
  1084. ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
  1085. if (ret < 0) {
  1086. ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ", __FUNCTION__, m_camera_info.sensor_id);
  1087. return false;
  1088. }
  1089. cam_int_s_fmt(&(m_camera_info.isp));
  1090. ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
  1091. cam_int_reqbufs(&(m_camera_info.isp));
  1092. ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
  1093. ALOGV("DEBUG(%s): isp mem alloc done", __FUNCTION__);
  1094. /* init Sensor */
  1095. cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
  1096. ALOGV("DEBUG(%s): sensor s_input done", __FUNCTION__);
  1097. if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
  1098. ALOGE("ERR(%s): sensor s_fmt fail", __FUNCTION__);
  1099. }
  1100. ALOGV("DEBUG(%s): sensor s_fmt done", __FUNCTION__);
  1101. cam_int_reqbufs(&(m_camera_info.sensor));
  1102. ALOGV("DEBUG(%s): sensor reqbuf done", __FUNCTION__);
  1103. for (i = 0; i < m_camera_info.sensor.buffers; i++) {
  1104. ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
  1105. m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
  1106. m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
  1107. memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
  1108. sizeof(struct camera2_shot_ext));
  1109. }
  1110. for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
  1111. cam_int_qbuf(&(m_camera_info.sensor), i);
  1112. for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
  1113. m_requestManager->pushSensorQ(i);
  1114. ALOGV("== stream_on :: sensor");
  1115. cam_int_streamon(&(m_camera_info.sensor));
  1116. m_camera_info.sensor.status = true;
  1117. /* init Capture */
  1118. m_camera_info.capture.width = m_camera2->getSensorW();
  1119. m_camera_info.capture.height = m_camera2->getSensorH();
  1120. m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
  1121. #ifdef ENABLE_FRAME_SYNC
  1122. m_camera_info.capture.planes = 2;
  1123. #else
  1124. m_camera_info.capture.planes = 1;
  1125. #endif
  1126. m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
  1127. m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  1128. m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
  1129. m_camera_info.capture.status = false;
  1130. return true;
  1131. }
/*
 * Create (threadExists == false) or restart (threadExists == true) stream
 * thread 1, which services the SCC/capture node in INDIRECT mode.
 *
 * On first start the SCC buffers are ion-allocated once and cached in
 * m_sccLocalBuffer; on restart those cached buffers are copied back into
 * the capture node so no reallocation happens. In both cases the capture
 * node is (re)configured, all buffers are queued and streaming is enabled.
 */
void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
{
    ALOGV("(%s)", __FUNCTION__);
    StreamThread *AllocatedStream;
    stream_parameters_t newParameters;
    uint32_t format_actual;
    if (!threadExists) {
        m_streamThreads[1] = new StreamThread(this, 1);
    }
    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
    if (!threadExists) {
        AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
        AllocatedStream->m_numRegisteredStream = 1;
    }
    AllocatedStream->m_index = 1;
    format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
    // Capture at full sensor resolution; no service stream ops (INDIRECT).
    newParameters.width = m_camera2->getSensorW();
    newParameters.height = m_camera2->getSensorH();
    newParameters.format = format_actual;
    newParameters.streamOps = NULL;
    newParameters.numHwBuffers = NUM_SCC_BUFFERS;
#ifdef ENABLE_FRAME_SYNC
    newParameters.planes = 2;
#else
    newParameters.planes = 1;
#endif
    newParameters.numSvcBufsInHal = 0;
    newParameters.node = &m_camera_info.capture;
    AllocatedStream->streamType = STREAM_TYPE_INDIRECT;
    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
    if (!threadExists) {
        // First start: allocate SCC buffers once and cache them locally.
        if (!m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++){
                initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
                m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
#ifdef ENABLE_FRAME_SYNC
                m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
#else
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
#endif
                m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
            }
            m_sccLocalBufferValid = true;
        }
    } else {
        // Restart: reuse the cached buffers instead of reallocating.
        if (m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++)
                m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
        } else {
            ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
        }
    }
    // (Re)configure the capture node and queue every buffer to the driver.
    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    cam_int_s_fmt(newParameters.node);
    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(newParameters.node);
    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
    for (int i = 0; i < newParameters.node->buffers; i++) {
        ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
        cam_int_qbuf(newParameters.node, i);
        newParameters.svcBufStatus[i] = ON_DRIVER;
    }
    ALOGV("== stream_on :: capture");
    if (cam_int_streamon(newParameters.node) < 0) {
        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
    } else {
        m_camera_info.capture.status = true;
    }
    AllocatedStream->setParameter(&newParameters);
    AllocatedStream->m_activated = true;
    AllocatedStream->m_isBufferInit = true;
}
/*
 * Enable ISP streaming, then tell the FIMC-IS firmware (via a V4L2 control
 * on the sensor node) to start its stream.
 */
void ExynosCameraHWInterface2::StartISP()
{
    ALOGV("== stream_on :: isp");
    cam_int_streamon(&(m_camera_info.isp));
    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
}
/*
 * Returns the camera id this HAL instance was opened with
 * (0 selects the S5K4E5 sensor, any other value the S5K6A3 —
 * see InitializeISPChain()).
 */
int ExynosCameraHWInterface2::getCameraId() const
{
    return m_cameraId;
}
  1217. int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
  1218. {
  1219. ALOGV("DEBUG(%s):", __FUNCTION__);
  1220. if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
  1221. && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
  1222. m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
  1223. return 0;
  1224. }
  1225. else {
  1226. ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
  1227. return 1;
  1228. }
  1229. }
  1230. int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
  1231. {
  1232. int i = 0;
  1233. ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
  1234. if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
  1235. ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
  1236. return 0;
  1237. }
  1238. m_isRequestQueueNull = false;
  1239. if (m_requestManager->GetNumEntries() == 0)
  1240. m_requestManager->SetInitialSkip(0);
  1241. if (m_isIspStarted == false) {
  1242. /* isp */
  1243. m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
  1244. m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
  1245. cam_int_s_fmt(&(m_camera_info.isp));
  1246. cam_int_reqbufs(&(m_camera_info.isp));
  1247. /* sensor */
  1248. if (m_camera_info.sensor.status == false) {
  1249. cam_int_s_fmt(&(m_camera_info.sensor));
  1250. cam_int_reqbufs(&(m_camera_info.sensor));
  1251. for (i = 0; i < m_camera_info.sensor.buffers; i++) {
  1252. ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
  1253. m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
  1254. m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
  1255. memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
  1256. sizeof(struct camera2_shot_ext));
  1257. }
  1258. for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
  1259. cam_int_qbuf(&(m_camera_info.sensor), i);
  1260. for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
  1261. m_requestManager->pushSensorQ(i);
  1262. ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
  1263. cam_int_streamon(&(m_camera_info.sensor));
  1264. m_camera_info.sensor.status = true;
  1265. }
  1266. }
  1267. if (!(m_streamThreads[1].get())) {
  1268. ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
  1269. StartSCCThread(false);
  1270. } else {
  1271. if (m_streamThreads[1]->m_activated == false) {
  1272. ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
  1273. StartSCCThread(true);
  1274. } else {
  1275. if (m_camera_info.capture.status == false) {
  1276. m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
  1277. cam_int_s_fmt(&(m_camera_info.capture));
  1278. ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
  1279. cam_int_reqbufs(&(m_camera_info.capture));
  1280. ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
  1281. if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
  1282. StreamThread * targetStream = m_streamThreads[1].get();
  1283. stream_parameters_t *targetStreamParms = &(targetStream->m_parameters);
  1284. node_info_t *currentNode = targetStreamParms->node;
  1285. struct v4l2_buffer v4l2_buf;
  1286. struct v4l2_plane planes[VIDEO_MAX_PLANES];
  1287. for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
  1288. v4l2_buf.m.planes = planes;
  1289. v4l2_buf.type = currentNode->type;
  1290. v4l2_buf.memory = currentNode->memory;
  1291. v4l2_buf.length = currentNode->planes;
  1292. v4l2_buf.index = i;
  1293. ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];
  1294. if (i < currentNode->buffers) {
  1295. #ifdef ENABLE_FRAME_SYNC
  1296. v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
  1297. v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
  1298. v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
  1299. v4l2_buf.length += targetStreamParms->metaPlanes;
  1300. v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
  1301. v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
  1302. ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
  1303. #endif
  1304. if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
  1305. ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
  1306. }
  1307. ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
  1308. targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC;
  1309. }
  1310. else {
  1311. targetStreamParms->svcBufStatus[i] = ON_SERVICE;
  1312. }
  1313. }
  1314. } else {
  1315. for (int i = 0; i < m_camera_info.capture.buffers; i++) {
  1316. ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
  1317. cam_int_qbuf(&(m_camera_info.capture), i);
  1318. }
  1319. }
  1320. ALOGV("== stream_on :: capture");
  1321. if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
  1322. ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
  1323. } else {
  1324. m_camera_info.capture.status = true;
  1325. }
  1326. }
  1327. if (m_scpForceSuspended) {
  1328. m_scpForceSuspended = false;
  1329. }
  1330. }
  1331. }
  1332. if (m_isIspStarted == false) {
  1333. StartISP();
  1334. ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
  1335. m_requestManager->SetInitialSkip(6);
  1336. m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
  1337. m_isIspStarted = true;
  1338. }
  1339. m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
  1340. return 0;
  1341. }
  1342. int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
  1343. {
  1344. ALOGV("DEBUG(%s):", __FUNCTION__);
  1345. if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
  1346. && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
  1347. m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
  1348. return 0;
  1349. }
  1350. else {
  1351. ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
  1352. return 1;
  1353. }
  1354. }
  1355. int ExynosCameraHWInterface2::getInProgressCount()
  1356. {
  1357. int inProgressCount = m_requestManager->GetNumEntries();
  1358. ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__,
  1359. inProgressCount, m_jpegEncodingCount, (inProgressCount + m_jpegEncodingCount));
  1360. return (inProgressCount + m_jpegEncodingCount);
  1361. }
/*
 * Camera2 HAL hook to flush in-flight captures.
 * Not implemented in this HAL; always reports success (0).
 */
int ExynosCameraHWInterface2::flushCapturesInProgress()
{
    return 0;
}
  1366. int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
  1367. {
  1368. ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
  1369. if (request == NULL) return BAD_VALUE;
  1370. if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
  1371. return BAD_VALUE;
  1372. }
  1373. status_t res;
  1374. // Pass 1, calculate size and allocate
  1375. res = m_camera2->constructDefaultRequest(request_template,
  1376. request,
  1377. true);
  1378. if (res != OK) {
  1379. return res;
  1380. }
  1381. // Pass 2, build request
  1382. res = m_camera2->constructDefaultRequest(request_template,
  1383. request,
  1384. false);
  1385. if (res != OK) {
  1386. ALOGE("Unable to populate new request for template %d",
  1387. request_template);
  1388. }
  1389. return res;
  1390. }
  1391. int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
  1392. uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
  1393. {
  1394. ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__, width, height, format);
  1395. bool useDirectOutput = false;
  1396. StreamThread *AllocatedStream;
  1397. stream_parameters_t newParameters;
  1398. substream_parameters_t *subParameters;
  1399. StreamThread *parentStream;
  1400. status_t res;
  1401. int allocCase = 0;
  1402. if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) &&
  1403. m_camera2->isSupportedResolution(width, height)) {
  1404. if (!(m_streamThreads[0].get())) {
  1405. ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
  1406. allocCase = 0;
  1407. }
  1408. else {
  1409. if ((m_streamThreads[0].get())->m_activated == true) {
  1410. ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
  1411. allocCase = 1;
  1412. }
  1413. else {
  1414. ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
  1415. allocCase = 2;
  1416. }
  1417. }
  1418. // TODO : instead of that, use calculate aspect ratio and selection with calculated ratio.
  1419. if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
  1420. || (width == 720 && height == 480) || (width == 1440 && height == 960)
  1421. || (width == 1344 && height == 896)) {
  1422. m_wideAspect = true;
  1423. } else {
  1424. m_wideAspect = false;
  1425. }
  1426. ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
  1427. if (allocCase == 0 || allocCase == 2) {
  1428. *stream_id = STREAM_ID_PREVIEW;
  1429. m_streamThreads[0] = new StreamThread(this, *stream_id);
  1430. AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
  1431. AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
  1432. m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
  1433. *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
  1434. *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
  1435. *max_buffers = 6;
  1436. newParameters.width = width;
  1437. newParameters.height = height;
  1438. newParameters.format = *format_actual;
  1439. newParameters.streamOps = stream_ops;
  1440. newParameters.usage = *usage;
  1441. newParameters.numHwBuffers = NUM_SCP_BUFFERS;
  1442. newParameters.numOwnSvcBuffers = *max_buffers;
  1443. newParameters.planes = NUM_PLANES(*format_actual);
  1444. newParameters.metaPlanes = 1;
  1445. newParameters.numSvcBufsInHal = 0;
  1446. newParameters.minUndequedBuffer = 3;
  1447. newParameters.needsIonMap = true;
  1448. newParameters.node = &m_camera_info.scp;
  1449. newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  1450. newParameters.node->memory = V4L2_MEMORY_DMABUF;
  1451. AllocatedStream->streamType = STREAM_TYPE_DIRECT;
  1452. AllocatedStream->m_index = 0;
  1453. AllocatedStream->setParameter(&newParameters);
  1454. AllocatedStream->m_activated = true;
  1455. AllocatedStream->m_numRegisteredStream = 1;
  1456. ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
  1457. m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
  1458. m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
  1459. if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
  1460. AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
  1461. if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
  1462. AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
  1463. return 0;
  1464. } else if (allocCase == 1) {
  1465. *stream_id = STREAM_ID_RECORD;
  1466. subParameters = &m_subStreams[STREAM_ID_RECORD];
  1467. memset(subParameters, 0, sizeof(substream_parameters_t));
  1468. parentStream = (StreamThread*)(m_streamThreads[0].get());
  1469. if (!parentStream) {
  1470. return 1;
  1471. }
  1472. *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
  1473. *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
  1474. *max_buffers = 6;
  1475. subParameters->type = SUBSTREAM_TYPE_RECORD;
  1476. subParameters->width = width;
  1477. subParameters->height = height;
  1478. subParameters->format = *format_actual;
  1479. subParameters->svcPlanes = NUM_PLANES(*format_actual);
  1480. subParameters->streamOps = stream_ops;
  1481. subParameters->usage = *usage;
  1482. subParameters->numOwnSvcBuffers = *max_buffers;
  1483. subParameters->numSvcBufsInHal = 0;
  1484. subParameters->needBufferInit = false;
  1485. subParameters->minUndequedBuffer = 2;
  1486. res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
  1487. if (res != NO_ERROR) {
  1488. ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
  1489. return 1;
  1490. }
  1491. ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
  1492. ALOGV("(%s): Enabling Record", __FUNCTION__);
  1493. return 0;
  1494. }
  1495. }
  1496. else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL)
  1497. && (width == m_camera2->getSensorW()) && (height == m_camera2->getSensorH())) {
  1498. if (!(m_streamThreads[1].get())) {
  1499. ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
  1500. useDirectOutput = true;
  1501. }
  1502. else {
  1503. ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
  1504. useDirectOutput = false;
  1505. }
  1506. if (useDirectOutput) {
  1507. *stream_id = STREAM_ID_ZSL;
  1508. m_streamThreads[1] = new StreamThread(this, *stream_id);
  1509. AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
  1510. AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
  1511. m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
  1512. *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
  1513. *max_buffers = 6;
  1514. *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
  1515. *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
  1516. *max_buffers = 6;
  1517. newParameters.width = width;
  1518. newParameters.height = height;
  1519. newParameters.format = *format_actual;
  1520. newParameters.streamOps = stream_ops;
  1521. newParameters.usage = *usage;
  1522. newParameters.numHwBuffers = NUM_SCC_BUFFERS;
  1523. newParameters.numOwnSvcBuffers = *max_buffers;
  1524. newParameters.planes = NUM_PLANES(*format_actual);
  1525. newParameters.metaPlanes = 1;
  1526. newParameters.numSvcBufsInHal = 0;
  1527. newParameters.minUndequedBuffer = 2;
  1528. newParameters.needsIonMap = false;
  1529. newParameters.node = &m_camera_info.capture;
  1530. newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  1531. newParameters.node->memory = V4L2_MEMORY_DMABUF;
  1532. AllocatedStream->streamType = STREAM_TYPE_DIRECT;
  1533. AllocatedStream->m_index = 1;
  1534. AllocatedStream->setParameter(&newParameters);
  1535. AllocatedStream->m_activated = true;
  1536. AllocatedStream->m_numRegisteredStream = 1;
  1537. ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
  1538. return 0;
  1539. } else {
  1540. bool bJpegExists = false;
  1541. AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
  1542. subParameters = &m_subStreams[STREAM_ID_JPEG];
  1543. if (subParameters->type == SUBSTREAM_TYPE_JPEG) {
  1544. ALOGD("(%s): jpeg stream exists", __FUNCTION__);
  1545. bJpegExists = true;
  1546. AllocatedStream->detachSubStream(STREAM_ID_JPEG);
  1547. }
  1548. AllocatedStream->m_releasing = true;
  1549. ALOGD("START stream thread 1 release %d", __LINE__);
  1550. do {
  1551. AllocatedStream->release();
  1552. usleep(SIG_WAITING_TICK);
  1553. } while (AllocatedStream->m_releasing);
  1554. ALOGD("END stream thread 1 release %d", __LINE__);
  1555. *stream_id = STREAM_ID_ZSL;
  1556. m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
  1557. *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
  1558. *max_buffers = 6;
  1559. *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
  1560. *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
  1561. *max_buffers = 6;
  1562. newParameters.width = width;
  1563. newParameters.height = height;
  1564. newParameters.format = *format_actual;
  1565. newParameters.streamOps = stream_ops;
  1566. newParameters.usage = *usage;
  1567. newParameters.numHwBuffers = NUM_SCC_BUFFERS;
  1568. newParameters.numOwnSvcBuffers = *max_buffers;
  1569. newParameters.planes = NUM_PLANES(*format_actual);
  1570. newParameters.metaPlanes = 1;
  1571. newParameters.numSvcBufsInHal = 0;
  1572. newParameters.minUndequedBuffer = 2;
  1573. newParameters.needsIonMap = false;
  1574. newParameters.node = &m_camera_info.capture;
  1575. newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  1576. newParameters.node->memory = V4L2_MEMORY_DMABUF;
  1577. AllocatedStream->streamType = STREAM_TYPE_DIRECT;
  1578. AllocatedStream->m_index = 1;
  1579. AllocatedStream->setParameter(&newParameters);
  1580. AllocatedStream->m_activated = true;
  1581. AllocatedStream->m_numRegisteredStream = 1;
  1582. if (bJpegExists) {
  1583. AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10);
  1584. }
  1585. ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
  1586. return 0;
  1587. }
  1588. }
  1589. else if (format == HAL_PIXEL_FORMAT_BLOB
  1590. && m_camera2->isSupportedJpegResolution(width, height)) {
  1591. *stream_id = STREAM_ID_JPEG;
  1592. subParameters = &m_subStreams[*stream_id];
  1593. memset(subParameters, 0, sizeof(substream_parameters_t));
  1594. if (!(m_streamThreads[1].get())) {
  1595. ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
  1596. StartSCCThread(false);
  1597. }
  1598. else if (m_streamThreads[1]->m_activated == false) {
  1599. ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
  1600. StartSCCThread(true);
  1601. }
  1602. parentStream = (StreamThread*)(m_streamThreads[1].get());
  1603. *format_actual = HAL_PIXEL_FORMAT_BLOB;
  1604. *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
  1605. *max_buffers = 4;
  1606. subParameters->type = SUBSTREAM_TYPE_JPEG;
  1607. subParameters->width = width;
  1608. subParameters->height = height;
  1609. subParameters->format = *format_actual;
  1610. subParameters->svcPlanes = 1;
  1611. subParameters->streamOps = stream_ops;
  1612. subParameters->usage = *usage;
  1613. subParameters->numOwnSvcBuffers = *max_buffers;
  1614. subParameters->numSvcBufsInHal = 0;
  1615. subParameters->needBufferInit = false;
  1616. subParameters->minUndequedBuffer = 2;
  1617. res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
  1618. if (res != NO_ERROR) {
  1619. ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
  1620. return 1;
  1621. }
  1622. ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
  1623. ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
  1624. return 0;
  1625. }
  1626. else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
  1627. *stream_id = STREAM_ID_PRVCB;
  1628. subParameters = &m_subStreams[STREAM_ID_PRVCB];
  1629. memset(subParameters, 0, sizeof(substream_parameters_t));
  1630. parentStream = (StreamThread*)(m_streamThreads[0].get());
  1631. if (!parentStream) {
  1632. return 1;
  1633. }
  1634. *format_actual = format;
  1635. *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
  1636. *max_buffers = 6;
  1637. subParameters->type = SUBSTREAM_TYPE_PRVCB;
  1638. subParameters->width = width;
  1639. subParameters->height = height;
  1640. subParameters->format = *format_actual;
  1641. subParameters->svcPlanes = NUM_PLANES(*format_actual);
  1642. subParameters->streamOps = stream_ops;
  1643. subParameters->usage = *usage;
  1644. subParameters->numOwnSvcBuffers = *max_buffers;
  1645. subParameters->numSvcBufsInHal = 0;
  1646. subParameters->needBufferInit = false;
  1647. subParameters->minUndequedBuffer = 2;
  1648. if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
  1649. subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
  1650. subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
  1651. }
  1652. else {
  1653. subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
  1654. subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
  1655. }
  1656. res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
  1657. if (res != NO_ERROR) {
  1658. ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
  1659. return 1;
  1660. }
  1661. ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
  1662. ALOGV("(%s): Enabling previewcb", __FUNCTION__);
  1663. return 0;
  1664. }
  1665. ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__);
  1666. return 1;
  1667. }
/*
 * Registers the service-allocated gralloc buffers for a stream.
 *
 * Two paths:
 *  - Substreams (JPEG / RECORD / PRVCB): buffers are only locked via gralloc
 *    to capture their virtual addresses and ion fds; nothing is queued to a
 *    V4L2 node here.
 *  - Direct streams (PREVIEW / ZSL): the buffers back a V4L2 capture node
 *    (SCP or SCC). The node is configured (s_input/s_fmt/reqbufs), the first
 *    numHwBuffers buffers are queued with VIDIOC_QBUF, and streaming is
 *    started.
 *
 * @param stream_id          stream being registered (STREAM_ID_*)
 * @param num_buffers        number of entries in registeringBuffers
 * @param registeringBuffers gralloc handles provided by the camera service
 * @return 0 on success, 1 on error
 */
int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
        int num_buffers, buffer_handle_t *registeringBuffers)
{
    int i,j;                    // NOTE(review): j is declared but unused here
    void *virtAddr[3];
    int plane_index = 0;
    StreamThread * targetStream;
    stream_parameters_t *targetStreamParms;
    node_info_t *currentNode;
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__,
        stream_id, num_buffers, (uint32_t)registeringBuffers);
    if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
        targetStream = m_streamThreads[0].get();
        targetStreamParms = &(m_streamThreads[0]->m_parameters);
    }
    else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
        // Substream path: record fds/virtual addresses only; buffers are
        // consumed by blit/encode, not by a V4L2 node.
        substream_parameters_t *targetParms;
        targetParms = &m_subStreams[stream_id];
        targetParms->numSvcBuffers = num_buffers;
        for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
            // NOTE(review): format string reads substream(%d) Buffers[%d] but the
            // arguments are passed as (i, stream_id) — values appear swapped in the log.
            ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
                i, stream_id, (uint32_t)(registeringBuffers[i]));
            if (m_grallocHal) {
                // Lock once to learn the per-plane virtual addresses; the buffer
                // stays usable by the service afterwards.
                if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
                       targetParms->usage, 0, 0,
                       targetParms->width, targetParms->height, virtAddr) != 0) {
                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
                }
                else {
                    ExynosBuffer currentBuf;
                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
                    // Collect the ion fd per plane; sizes are only known for the
                    // single-plane (blob) case from the handle itself.
                    if (targetParms->svcPlanes == 1) {
                        currentBuf.fd.extFd[0] = priv_handle->fd;
                        currentBuf.size.extS[0] = priv_handle->size;
                        currentBuf.size.extS[1] = 0;
                        currentBuf.size.extS[2] = 0;
                    } else if (targetParms->svcPlanes == 2) {
                        currentBuf.fd.extFd[0] = priv_handle->fd;
                        currentBuf.fd.extFd[1] = priv_handle->fd1;
                    } else if (targetParms->svcPlanes == 3) {
                        currentBuf.fd.extFd[0] = priv_handle->fd;
                        currentBuf.fd.extFd[1] = priv_handle->fd1;
                        currentBuf.fd.extFd[2] = priv_handle->fd2;
                    }
                    for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
                        currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
                        CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
                             __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
                             (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
                    }
                    targetParms->svcBufStatus[i]  = ON_SERVICE;
                    targetParms->svcBuffers[i]    = currentBuf;
                    targetParms->svcBufHandle[i]  = registeringBuffers[i];
                }
            }
        }
        targetParms->needBufferInit = true;
        return 0;
    }
    else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
        targetStream = m_streamThreads[1].get();
        targetStreamParms = &(m_streamThreads[1]->m_parameters);
    }
    else {
        ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id);
        return 1;
    }

    // Direct-stream path: the V4L2 node needs at least numHwBuffers buffers.
    if (targetStream->streamType == STREAM_TYPE_DIRECT) {
        if (num_buffers < targetStreamParms->numHwBuffers) {
            ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
                __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
            return 1;
        }
    }
    CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
            __FUNCTION__, targetStreamParms->format, targetStreamParms->width,
            targetStreamParms->height, targetStreamParms->planes);
    targetStreamParms->numSvcBuffers = num_buffers;
    currentNode = targetStreamParms->node;
    currentNode->width      = targetStreamParms->width;
    currentNode->height     = targetStreamParms->height;
    currentNode->format     = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
    currentNode->planes     = targetStreamParms->planes;
    currentNode->buffers    = targetStreamParms->numHwBuffers;
    cam_int_s_input(currentNode, m_camera_info.sensor_id);
    cam_int_s_fmt(currentNode);
    cam_int_reqbufs(currentNode);
    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
        ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
            i, (uint32_t)(registeringBuffers[i]));
        v4l2_buf.m.planes   = planes;
        v4l2_buf.type       = currentNode->type;
        v4l2_buf.memory     = currentNode->memory;
        v4l2_buf.index      = i;
        v4l2_buf.length     = currentNode->planes;
        ExynosBuffer currentBuf;
        ExynosBuffer metaBuf;
        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
        // Per-plane sizes are derived from format/geometry, not from the handle.
        m_getAlignedYUVSize(currentNode->format,
            currentNode->width, currentNode->height, &currentBuf);
        ALOGV("DEBUG(%s): ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
        if (currentNode->planes == 1) {
            v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
            currentBuf.fd.extFd[0] = priv_handle->fd;
            currentBuf.size.extS[0] = priv_handle->size;
            currentBuf.size.extS[1] = 0;
            currentBuf.size.extS[2] = 0;
        } else if (currentNode->planes == 2) {
            v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
            v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
            currentBuf.fd.extFd[0] = priv_handle->fd;
            currentBuf.fd.extFd[1] = priv_handle->fd1;
        } else if (currentNode->planes == 3) {
            // NOTE(review): planes 1 and 2 are deliberately cross-wired here
            // (fd1 -> plane 2, fd2 -> plane 1) — presumably a Cb/Cr ordering
            // difference between gralloc and the V4L2 format; confirm against
            // the gralloc plane layout before "fixing".
            v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
            v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
            v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
            currentBuf.fd.extFd[0] = priv_handle->fd;
            currentBuf.fd.extFd[2] = priv_handle->fd1;
            currentBuf.fd.extFd[1] = priv_handle->fd2;
        }
        for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
            // Map each plane into this process when the consumer needs CPU access;
            // unmapped again in releaseStream().
            if (targetStreamParms->needsIonMap)
                currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
            v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
            ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)",
                 __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
                 (unsigned int)currentBuf.virt.extP[plane_index],
                 v4l2_buf.m.planes[plane_index].length);
        }
        if (i < currentNode->buffers) {
#ifdef ENABLE_FRAME_SYNC
            /* add plane for metadata*/
            metaBuf.size.extS[0] = 4*1024;
            allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0);
            v4l2_buf.length += targetStreamParms->metaPlanes;
            v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
            v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
            ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
#endif
            // Prime the driver queue with the first numHwBuffers buffers.
            if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
                ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
                    __FUNCTION__, stream_id, currentNode->fd);
            }
            ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
                    __FUNCTION__, stream_id, currentNode->fd);
            targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
        }
        else {
            // Remaining buffers stay with the service until first dequeue.
            targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
        }
        targetStreamParms->svcBuffers[i]       = currentBuf;
        targetStreamParms->metaBuffers[i] = metaBuf;
        targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
    }
    ALOGV("DEBUG(%s): calling streamon stream id = %d", __FUNCTION__, stream_id);
    cam_int_streamon(targetStreamParms->node);
    ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__);
    currentNode->status = true;
    ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
    return 0;
}
  1831. int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
  1832. {
  1833. StreamThread *targetStream;
  1834. status_t res = NO_ERROR;
  1835. ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
  1836. bool releasingScpMain = false;
  1837. if (stream_id == STREAM_ID_PREVIEW) {
  1838. targetStream = (StreamThread*)(m_streamThreads[0].get());
  1839. if (!targetStream) {
  1840. ALOGW("(%s): Stream Not Exists", __FUNCTION__);
  1841. return NO_ERROR;
  1842. }
  1843. targetStream->m_numRegisteredStream--;
  1844. ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
  1845. releasingScpMain = true;
  1846. if (targetStream->m_parameters.needsIonMap) {
  1847. for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
  1848. for (int j = 0; j < targetStream->m_parameters.planes; j++) {
  1849. ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
  1850. targetStream->m_parameters.svcBuffers[i].size.extS[j]);
  1851. ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
  1852. targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
  1853. }
  1854. }
  1855. }
  1856. } else if (stream_id == STREAM_ID_JPEG) {
  1857. if (m_resizeBuf.size.s != 0) {
  1858. freeCameraMemory(&m_resizeBuf, 1);
  1859. }
  1860. memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
  1861. targetStream = (StreamThread*)(m_streamThreads[1].get());
  1862. if (!targetStream) {
  1863. ALOGW("(%s): Stream Not Exists", __FUNCTION__);
  1864. return NO_ERROR;
  1865. }
  1866. if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
  1867. ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
  1868. return 1;
  1869. }
  1870. ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
  1871. return 0;
  1872. } else if (stream_id == STREAM_ID_RECORD) {
  1873. memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
  1874. targetStream = (StreamThread*)(m_streamThreads[0].get());
  1875. if (!targetStream) {
  1876. ALOGW("(%s): Stream Not Exists", __FUNCTION__);
  1877. return NO_ERROR;
  1878. }
  1879. if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
  1880. ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
  1881. return 1;
  1882. }
  1883. if (targetStream->m_numRegisteredStream != 0)
  1884. return 0;
  1885. } else if (stream_id == STREAM_ID_PRVCB) {
  1886. if (m_previewCbBuf.size.s != 0) {
  1887. freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
  1888. }
  1889. memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
  1890. targetStream = (StreamThread*)(m_streamThreads[0].get());
  1891. if (!targetStream) {
  1892. ALOGW("(%s): Stream Not Exists", __FUNCTION__);
  1893. return NO_ERROR;
  1894. }
  1895. if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
  1896. ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
  1897. return 1;
  1898. }
  1899. if (targetStream->m_numRegisteredStream != 0)
  1900. return 0;
  1901. } else if (stream_id == STREAM_ID_ZSL) {
  1902. targetStream = (StreamThread*)(m_streamThreads[1].get());
  1903. if (!targetStream) {
  1904. ALOGW("(%s): Stream Not Exists", __FUNCTION__);
  1905. return NO_ERROR;
  1906. }
  1907. targetStream->m_numRegisteredStream--;
  1908. ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
  1909. if (targetStream->m_parameters.needsIonMap) {
  1910. for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
  1911. for (int j = 0; j < targetStream->m_parameters.planes; j++) {
  1912. ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
  1913. targetStream->m_parameters.svcBuffers[i].size.extS[j]);
  1914. ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
  1915. targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
  1916. }
  1917. }
  1918. }
  1919. } else {
  1920. ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
  1921. return 1;
  1922. }
  1923. if (m_sensorThread != NULL && releasingScpMain) {
  1924. m_sensorThread->release();
  1925. ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
  1926. while (!m_sensorThread->IsTerminated())
  1927. usleep(SIG_WAITING_TICK);
  1928. ALOGD("(%s): END Waiting for (indirect) sensor thread termination", __FUNCTION__);
  1929. }
  1930. if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
  1931. ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
  1932. targetStream = (StreamThread*)(m_streamThreads[1].get());
  1933. targetStream->m_releasing = true;
  1934. ALOGD("START stream thread release %d", __LINE__);
  1935. do {
  1936. targetStream->release();
  1937. usleep(SIG_WAITING_TICK);
  1938. } while (targetStream->m_releasing);
  1939. m_camera_info.capture.status = false;
  1940. ALOGD("END stream thread release %d", __LINE__);
  1941. }
  1942. if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
  1943. ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
  1944. targetStream = (StreamThread*)(m_streamThreads[0].get());
  1945. targetStream->m_releasing = true;
  1946. ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
  1947. do {
  1948. targetStream->release();
  1949. usleep(SIG_WAITING_TICK);
  1950. } while (targetStream->m_releasing);
  1951. ALOGD("(%s): END Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
  1952. targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
  1953. if (targetStream != NULL) {
  1954. ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
  1955. while (!targetStream->IsTerminated())
  1956. usleep(SIG_WAITING_TICK);
  1957. ALOGD("(%s): END Waiting for (indirect) stream thread termination", __FUNCTION__);
  1958. m_streamThreads[0] = NULL;
  1959. }
  1960. if (m_camera_info.capture.status == true) {
  1961. m_scpForceSuspended = true;
  1962. }
  1963. m_isIspStarted = false;
  1964. }
  1965. ALOGV("(%s): END", __FUNCTION__);
  1966. return 0;
  1967. }
/*
 * Camera2 HAL entry point for allocating a standalone reprocess stream.
 * No-op in this HAL: none of the output parameters (stream_id,
 * consumer_usage, max_buffers) are written. Reprocessing is instead set up
 * from an existing output stream via allocateReprocessStreamFromStream().
 */
int ExynosCameraHWInterface2::allocateReprocessStream(
    uint32_t width, uint32_t height, uint32_t format,
    const camera2_stream_in_ops_t *reprocess_stream_ops,
    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
  1976. int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
  1977. uint32_t output_stream_id,
  1978. const camera2_stream_in_ops_t *reprocess_stream_ops,
  1979. // outputs
  1980. uint32_t *stream_id)
  1981. {
  1982. ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
  1983. *stream_id = STREAM_ID_JPEG_REPROCESS;
  1984. m_reprocessStreamId = *stream_id;
  1985. m_reprocessOps = reprocess_stream_ops;
  1986. m_reprocessOutputStreamId = output_stream_id;
  1987. return 0;
  1988. }
  1989. int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
  1990. {
  1991. ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
  1992. if (stream_id == STREAM_ID_JPEG_REPROCESS) {
  1993. m_reprocessStreamId = 0;
  1994. m_reprocessOps = NULL;
  1995. m_reprocessOutputStreamId = 0;
  1996. return 0;
  1997. }
  1998. return 1;
  1999. }
  2000. int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
  2001. {
  2002. Mutex::Autolock lock(m_afModeTriggerLock);
  2003. ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
  2004. switch (trigger_id) {
  2005. case CAMERA2_TRIGGER_AUTOFOCUS:
  2006. ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
  2007. OnAfTrigger(ext1);
  2008. break;
  2009. case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
  2010. ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
  2011. OnAfCancel(ext1);
  2012. break;
  2013. case CAMERA2_TRIGGER_PRECAPTURE_METERING:
  2014. ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
  2015. OnPrecaptureMeteringTriggerStart(ext1);
  2016. break;
  2017. default:
  2018. break;
  2019. }
  2020. return 0;
  2021. }
  2022. int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
  2023. {
  2024. ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
  2025. m_notifyCb = notify_cb;
  2026. m_callbackCookie = user;
  2027. return 0;
  2028. }
/*
 * Vendor metadata tag query hook. This HAL defines no vendor tags, so the
 * ops pointer is left untouched and success is returned.
 */
int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
/*
 * dumpsys hook. State dumping is not implemented: nothing is written to fd.
 */
int ExynosCameraHWInterface2::dump(int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
  2039. void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
  2040. {
  2041. switch (colorFormat) {
  2042. // 1p
  2043. case V4L2_PIX_FMT_RGB565 :
  2044. case V4L2_PIX_FMT_YUYV :
  2045. case V4L2_PIX_FMT_UYVY :
  2046. case V4L2_PIX_FMT_VYUY :
  2047. case V4L2_PIX_FMT_YVYU :
  2048. buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
  2049. buf->size.extS[1] = 0;
  2050. buf->size.extS[2] = 0;
  2051. break;
  2052. // 2p
  2053. case V4L2_PIX_FMT_NV12 :
  2054. case V4L2_PIX_FMT_NV12T :
  2055. case V4L2_PIX_FMT_NV21 :
  2056. buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
  2057. buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
  2058. buf->size.extS[2] = 0;
  2059. break;
  2060. case V4L2_PIX_FMT_NV12M :
  2061. case V4L2_PIX_FMT_NV12MT_16X16 :
  2062. case V4L2_PIX_FMT_NV21M:
  2063. buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
  2064. buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
  2065. buf->size.extS[2] = 0;
  2066. break;
  2067. case V4L2_PIX_FMT_NV16 :
  2068. case V4L2_PIX_FMT_NV61 :
  2069. buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
  2070. buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h, 16);
  2071. buf->size.extS[2] = 0;
  2072. break;
  2073. // 3p
  2074. case V4L2_PIX_FMT_YUV420 :
  2075. case V4L2_PIX_FMT_YVU420 :
  2076. buf->size.extS[0] = (w * h);
  2077. buf->size.extS[1] = (w * h) >> 2;
  2078. buf->size.extS[2] = (w * h) >> 2;
  2079. break;
  2080. case V4L2_PIX_FMT_YUV420M:
  2081. case V4L2_PIX_FMT_YVU420M :
  2082. buf->size.extS[0] = ALIGN(w, 32) * ALIGN(h, 16);
  2083. buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
  2084. buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
  2085. break;
  2086. case V4L2_PIX_FMT_YUV422P :
  2087. buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
  2088. buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
  2089. buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
  2090. break;
  2091. default:
  2092. ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
  2093. return;
  2094. break;
  2095. }
  2096. }
  2097. bool ExynosCameraHWInterface2::m_getRatioSize(int src_w, int src_h,
  2098. int dst_w, int dst_h,
  2099. int *crop_x, int *crop_y,
  2100. int *crop_w, int *crop_h,
  2101. int zoom)
  2102. {
  2103. *crop_w = src_w;
  2104. *crop_h = src_h;
  2105. if ( src_w != dst_w
  2106. || src_h != dst_h) {
  2107. float src_ratio = 1.0f;
  2108. float dst_ratio = 1.0f;
  2109. // ex : 1024 / 768
  2110. src_ratio = (float)src_w / (float)src_h;
  2111. // ex : 352 / 288
  2112. dst_ratio = (float)dst_w / (float)dst_h;
  2113. if (dst_w * dst_h < src_w * src_h) {
  2114. if (dst_ratio <= src_ratio) {
  2115. // shrink w
  2116. *crop_w = src_h * dst_ratio;
  2117. *crop_h = src_h;
  2118. } else {
  2119. // shrink h
  2120. *crop_w = src_w;
  2121. *crop_h = src_w / dst_ratio;
  2122. }
  2123. } else {
  2124. if (dst_ratio <= src_ratio) {
  2125. // shrink w
  2126. *crop_w = src_h * dst_ratio;
  2127. *crop_h = src_h;
  2128. } else {
  2129. // shrink h
  2130. *crop_w = src_w;
  2131. *crop_h = src_w / dst_ratio;
  2132. }
  2133. }
  2134. }
  2135. if (zoom != 0) {
  2136. float zoomLevel = ((float)zoom + 10.0) / 10.0;
  2137. *crop_w = (int)((float)*crop_w / zoomLevel);
  2138. *crop_h = (int)((float)*crop_h / zoomLevel);
  2139. }
  2140. #define CAMERA_CROP_WIDTH_RESTRAIN_NUM (0x2)
  2141. unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
  2142. if (w_align != 0) {
  2143. if ( (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
  2144. && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
  2145. *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
  2146. }
  2147. else
  2148. *crop_w -= w_align;
  2149. }
  2150. #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM (0x2)
  2151. unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
  2152. if (h_align != 0) {
  2153. if ( (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
  2154. && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
  2155. *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
  2156. }
  2157. else
  2158. *crop_h -= h_align;
  2159. }
  2160. *crop_x = (src_w - *crop_w) >> 1;
  2161. *crop_y = (src_h - *crop_h) >> 1;
  2162. if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
  2163. *crop_x -= 1;
  2164. if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
  2165. *crop_y -= 1;
  2166. return true;
  2167. }
  2168. BayerBufManager::BayerBufManager()
  2169. {
  2170. ALOGV("DEBUG(%s): ", __FUNCTION__);
  2171. for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
  2172. entries[i].status = BAYER_ON_HAL_EMPTY;
  2173. entries[i].reqFrameCnt = 0;
  2174. }
  2175. sensorEnqueueHead = 0;
  2176. sensorDequeueHead = 0;
  2177. ispEnqueueHead = 0;
  2178. ispDequeueHead = 0;
  2179. numOnSensor = 0;
  2180. numOnIsp = 0;
  2181. numOnHalFilled = 0;
  2182. numOnHalEmpty = NUM_BAYER_BUFFERS;
  2183. }
/* Destructor: trace-only; no cleanup is performed here. */
BayerBufManager::~BayerBufManager()
{
    ALOGV("%s", __FUNCTION__);
}
  2188. int BayerBufManager::GetIndexForSensorEnqueue()
  2189. {
  2190. int ret = 0;
  2191. if (numOnHalEmpty == 0)
  2192. ret = -1;
  2193. else
  2194. ret = sensorEnqueueHead;
  2195. ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
  2196. return ret;
  2197. }
  2198. int BayerBufManager::MarkSensorEnqueue(int index)
  2199. {
  2200. ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index);
  2201. // sanity check
  2202. if (index != sensorEnqueueHead) {
  2203. ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
  2204. return -1;
  2205. }
  2206. if (entries[index].status != BAYER_ON_HAL_EMPTY) {
  2207. ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
  2208. index, entries[index].status, BAYER_ON_HAL_EMPTY);
  2209. return -1;
  2210. }
  2211. entries[index].status = BAYER_ON_SENSOR;
  2212. entries[index].reqFrameCnt = 0;
  2213. numOnHalEmpty--;
  2214. numOnSensor++;
  2215. sensorEnqueueHead = GetNextIndex(index);
  2216. ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
  2217. __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
  2218. return 0;
  2219. }
  2220. int BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
  2221. {
  2222. ALOGV("DEBUG(%s) : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
  2223. if (entries[index].status != BAYER_ON_SENSOR) {
  2224. ALOGE("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
  2225. index, entries[index].status, BAYER_ON_SENSOR);
  2226. return -1;
  2227. }
  2228. entries[index].status = BAYER_ON_HAL_FILLED;
  2229. numOnHalFilled++;
  2230. numOnSensor--;
  2231. return 0;
  2232. }
  2233. int BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
  2234. {
  2235. int ret = 0;
  2236. if (numOnHalFilled == 0)
  2237. ret = -1;
  2238. else {
  2239. *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
  2240. ret = ispEnqueueHead;
  2241. }
  2242. ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
  2243. return ret;
  2244. }
  2245. int BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
  2246. {
  2247. int ret = 0;
  2248. if (numOnIsp == 0)
  2249. ret = -1;
  2250. else {
  2251. *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
  2252. ret = ispDequeueHead;
  2253. }
  2254. ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
  2255. return ret;
  2256. }
  2257. int BayerBufManager::MarkIspEnqueue(int index)
  2258. {
  2259. ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index);
  2260. // sanity check
  2261. if (index != ispEnqueueHead) {
  2262. ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
  2263. return -1;
  2264. }
  2265. if (entries[index].status != BAYER_ON_HAL_FILLED) {
  2266. ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
  2267. index, entries[index].status, BAYER_ON_HAL_FILLED);
  2268. return -1;
  2269. }
  2270. entries[index].status = BAYER_ON_ISP;
  2271. numOnHalFilled--;
  2272. numOnIsp++;
  2273. ispEnqueueHead = GetNextIndex(index);
  2274. ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
  2275. __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
  2276. return 0;
  2277. }
  2278. int BayerBufManager::MarkIspDequeue(int index)
  2279. {
  2280. ALOGV("DEBUG(%s) : BayerIndex[%d]", __FUNCTION__, index);
  2281. // sanity check
  2282. if (index != ispDequeueHead) {
  2283. ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
  2284. return -1;
  2285. }
  2286. if (entries[index].status != BAYER_ON_ISP) {
  2287. ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
  2288. index, entries[index].status, BAYER_ON_ISP);
  2289. return -1;
  2290. }
  2291. entries[index].status = BAYER_ON_HAL_EMPTY;
  2292. entries[index].reqFrameCnt = 0;
  2293. numOnHalEmpty++;
  2294. numOnIsp--;
  2295. ispDequeueHead = GetNextIndex(index);
  2296. ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
  2297. __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
  2298. return 0;
  2299. }
// Number of bayer buffers currently queued to the sensor driver.
int BayerBufManager::GetNumOnSensor()
{
    return numOnSensor;
}
// Number of HAL-owned buffers holding sensor data, awaiting the ISP.
int BayerBufManager::GetNumOnHalFilled()
{
    return numOnHalFilled;
}
// Number of bayer buffers currently queued to the ISP.
int BayerBufManager::GetNumOnIsp()
{
    return numOnIsp;
}
  2312. int BayerBufManager::GetNextIndex(int index)
  2313. {
  2314. index++;
  2315. if (index >= NUM_BAYER_BUFFERS)
  2316. index = 0;
  2317. return index;
  2318. }
/*
 * Main-thread loop body, invoked with the signal mask delivered to the
 * SignalDrivenThread. Handles three signals:
 *  - SIGNAL_THREAD_RELEASE: acknowledge and request thread termination.
 *  - SIGNAL_MAIN_REQ_Q_NOT_EMPTY: pull one request from the service queue,
 *    register it, apply its AF mode/region, then kick the sensor thread.
 *  - SIGNAL_MAIN_STREAM_OUTPUT_DONE: build the result metadata for the
 *    completed request and hand the frame back to the service.
 */
void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
{
    camera_metadata_t *currentRequest = NULL;
    camera_metadata_t *currentFrame = NULL;
    size_t numEntries = 0;
    size_t frameSize = 0;
    camera_metadata_t * preparedFrame = NULL;
    camera_metadata_t *deregisteredRequest = NULL;
    uint32_t currentSignal = self->GetProcessingSignal();
    MainThread * selfThread = ((MainThread*)self);
    int res = 0;
    int ret;
    int afMode;
    uint32_t afRegion[4];

    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);

    if (currentSignal & SIGNAL_THREAD_RELEASE) {
        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
        // No further signals are processed once release is requested.
        return;
    }

    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
        if (m_requestManager->IsRequestQueueFull()==false) {
            // Lock guards AF mode/region updates against the sensor thread.
            Mutex::Autolock lock(m_afModeTriggerLock);
            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
            if (NULL == currentRequest) {
                // Service queue drained; remember that so STREAM_OUTPUT_DONE
                // does not re-signal REQ_Q_NOT_EMPTY.
                ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__, currentSignal);
                m_isRequestQueueNull = true;
                if (m_requestManager->IsVdisEnable())
                    m_vdisBubbleCnt = 1;
            }
            else {
                // Register the request and apply the AF settings it carries.
                m_requestManager->RegisterRequest(currentRequest, &afMode, afRegion);
                SetAfMode((enum aa_afmode)afMode);
                SetAfRegion(afRegion);
                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
                if (m_requestManager->IsRequestQueueFull()==false)
                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
            }
        }
        else {
            // Internal queue full: defer until a slot frees up.
            m_isRequestQueuePending = true;
        }
    }

    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
        /*while (1)*/ {
            // Build the output metadata for the finished request, then
            // release the request back to the service.
            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
            if (ret == false)
                CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);
            m_requestManager->DeregisterRequest(&deregisteredRequest);
            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
            if (ret < 0)
                CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);
            // Obtain an empty frame from the service and fill it with the
            // prepared metadata.
            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
            if (ret < 0)
                CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);
            if (currentFrame==NULL) {
                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
            }
            else {
                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
            }
            res = append_camera_metadata(currentFrame, preparedFrame);
            if (res==0) {
                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
            }
            else {
                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
            }
        }
        // Keep the pipeline moving: fetch the next request if any remain,
        // and re-kick the sensor thread while requests are in flight.
        if (!m_isRequestQueueNull) {
            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
        }
        if (getInProgressCount()>0) {
            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
        }
    }
    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
    return;
}
/*
 * Debug helper: dumps the key fields of a camera2_shot_ext to the log --
 * the magic number, the control (ctl) section the HAL sent, the per-shot
 * output-stream request flags, and the dynamic metadata (dm) section the
 * firmware reported back.
 */
void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
{
    ALOGD("#### common Section");
    ALOGD("#### magic(%x) ",
        shot_ext->shot.magicNumber);
    ALOGD("#### ctl Section");
    ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
        shot_ext->shot.ctl.request.metadataMode,
        shot_ext->shot.ctl.lens.aperture,
        shot_ext->shot.ctl.sensor.exposureTime,
        shot_ext->shot.ctl.sensor.frameDuration,
        shot_ext->shot.ctl.sensor.sensitivity,
        shot_ext->shot.ctl.aa.awbMode);
    ALOGD("#### OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
        shot_ext->shot.ctl.request.outputStreams[0]);
    ALOGD("#### DM Section");
    ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
        shot_ext->shot.dm.request.metadataMode,
        shot_ext->shot.dm.lens.aperture,
        shot_ext->shot.dm.sensor.exposureTime,
        shot_ext->shot.dm.sensor.frameDuration,
        shot_ext->shot.dm.sensor.sensitivity,
        shot_ext->shot.dm.sensor.timeStamp,
        shot_ext->shot.dm.aa.awbMode,
        shot_ext->shot.dm.request.frameCount );
}
/*
 * Drives the flash pre-capture state machine by patching the outgoing
 * shot's flash/AE/AWB controls according to the current state in
 * m_ctlInfo.flash.m_flashCnt. States ending in *_WAIT are advanced by the
 * listener callbacks (m_preCaptureListenerSensor/ISP); several states here
 * transition automatically after setting their controls.
 */
void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
{
    // Flash
    switch (m_ctlInfo.flash.m_flashCnt) {
    case IS_FLASH_STATE_ON:
        ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        // check AF locked
        if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
            // A pre-capture trigger is pending: wait for the timeout to
            // expire before turning the flash on.
            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
                if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
                    m_ctlInfo.flash.m_flashTimeOut = 5;
                } else
                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
            } else {
                m_ctlInfo.flash.m_flashTimeOut--;
            }
        } else {
            // No trigger pending: turn the flash on immediately.
            if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
                m_ctlInfo.flash.m_flashTimeOut = 5;
            } else
                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
        }
        break;
    case IS_FLASH_STATE_ON_WAIT:
        // Advanced by m_preCaptureListenerISP once a flash decision arrives.
        break;
    case IS_FLASH_STATE_ON_DONE:
        if (!m_ctlInfo.flash.m_afFlashDoneFlg)
            // auto transition at pre-capture trigger
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
        break;
    case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
        //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
        shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
        break;
    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
    case IS_FLASH_STATE_AUTO_WAIT:
        // Send "no change" (0) for AE/AWB while waiting for the lock.
        shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
        break;
    case IS_FLASH_STATE_AUTO_DONE:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        break;
    case IS_FLASH_STATE_AUTO_OFF:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        break;
    case IS_FLASH_STATE_CAPTURE:
        ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
        // Suppress stream outputs until the flash is stable.
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
        break;
    case IS_FLASH_STATE_CAPTURE_WAIT:
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        break;
    case IS_FLASH_STATE_CAPTURE_JPEG:
        // Flash is stable: re-enable both capture streams for this shot.
        ALOGV("(%s): [Flash] Flash Capture (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
        shot_ext->request_scc = 1;
        shot_ext->request_scp = 1;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END; // auto transition
        break;
    case IS_FLASH_STATE_CAPTURE_END:
        ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        // Reset the whole flash sequence back to idle.
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashCnt = 0;
        m_ctlInfo.flash.m_afFlashDoneFlg= false;
        break;
    case IS_FLASH_STATE_NONE:
        break;
    default:
        ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
    }
}
  2520. void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext)
  2521. {
  2522. // Flash
  2523. switch (m_ctlInfo.flash.m_flashCnt) {
  2524. case IS_FLASH_STATE_AUTO_WAIT:
  2525. if (m_ctlInfo.flash.m_flashDecisionResult) {
  2526. if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
  2527. m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
  2528. ALOGV("(%s): [Flash] Lis : AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode);
  2529. } else {
  2530. ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__);
  2531. }
  2532. } else {
  2533. //If flash isn't activated at flash auto mode, skip flash auto control
  2534. m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
  2535. ALOGV("(%s): [Flash] Skip : AUTO -> OFF", __FUNCTION__);
  2536. }
  2537. break;
  2538. }
  2539. }
/*
 * ISP-side listener for the flash pre-capture state machine. Inspects the
 * dynamic metadata returned by the ISP and advances m_flashCnt:
 *  - ON_WAIT: latch the firmware's flash decision (2 means "not needed"),
 *    with a countdown timeout fallback.
 *  - AE_AWB_LOCK_WAIT: proceed once AWB reports locked.
 *  - CAPTURE_WAIT: proceed to the JPEG state once firing is stable (or on
 *    timeout, or immediately if the decision said no flash).
 */
void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
{
    // Flash
    switch (m_ctlInfo.flash.m_flashCnt) {
    case IS_FLASH_STATE_ON_WAIT:
        if (shot_ext->shot.dm.flash.decision > 0) {
            // store decision result to skip capture sequenece
            ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
            if (shot_ext->shot.dm.flash.decision == 2)
                m_ctlInfo.flash.m_flashDecisionResult = false;
            else
                m_ctlInfo.flash.m_flashDecisionResult = true;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
        } else {
            // No decision yet: count the timeout down; on expiry assume
            // the flash is not needed.
            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
                ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__);
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
                m_ctlInfo.flash.m_flashDecisionResult = false;
            } else {
                m_ctlInfo.flash.m_flashTimeOut--;
            }
        }
        break;
    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
        if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
            ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
        } else {
            ALOGV("(%s): [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
        }
        break;
    case IS_FLASH_STATE_CAPTURE_WAIT:
        if (m_ctlInfo.flash.m_flashDecisionResult) {
            if (shot_ext->shot.dm.flash.firingStable) {
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
            } else {
                // Not stable yet: count down; on expiry capture anyway.
                if (m_ctlInfo.flash.m_flashTimeOut == 0) {
                    ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
                } else {
                    ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
                    m_ctlInfo.flash.m_flashTimeOut--;
                }
            }
        } else {
            // Decision said flash is unnecessary: go straight to capture.
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
        }
        break;
    }
}
  2590. void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext * shot_ext)
  2591. {
  2592. switch (m_ctlInfo.flash.i_flashMode) {
  2593. case AA_AEMODE_ON:
  2594. // At flash off mode, capture can be done as zsl capture
  2595. shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED;
  2596. break;
  2597. case AA_AEMODE_ON_AUTO_FLASH:
  2598. // At flash auto mode, main flash have to be done if pre-flash was done.
  2599. if (m_ctlInfo.flash.m_flashDecisionResult && m_ctlInfo.flash.m_afFlashDoneFlg)
  2600. shot_ext->shot.dm.aa.aeState = AE_STATE_FLASH_REQUIRED;
  2601. break;
  2602. }
  2603. }
  2604. void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext)
  2605. {
  2606. shot_ext->shot.ctl.aa.afRegions[0] = currentAfRegion[0];
  2607. shot_ext->shot.ctl.aa.afRegions[1] = currentAfRegion[1];
  2608. shot_ext->shot.ctl.aa.afRegions[2] = currentAfRegion[2];
  2609. shot_ext->shot.ctl.aa.afRegions[3] = currentAfRegion[3];
  2610. }
  2611. void ExynosCameraHWInterface2::SetAfRegion(uint32_t * afRegion)
  2612. {
  2613. currentAfRegion[0] = afRegion[0];
  2614. currentAfRegion[1] = afRegion[1];
  2615. currentAfRegion[2] = afRegion[2];
  2616. currentAfRegion[3] = afRegion[3];
  2617. }
  2618. void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode)
  2619. {
  2620. if (m_afState == HAL_AFSTATE_SCANNING) {
  2621. ALOGD("(%s): restarting trigger ", __FUNCTION__);
  2622. } else if (!mode) {
  2623. if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
  2624. ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
  2625. else
  2626. m_afState = HAL_AFSTATE_STARTED;
  2627. }
  2628. ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode, m_afState);
  2629. shot_ext->shot.ctl.aa.afTrigger = 1;
  2630. shot_ext->shot.ctl.aa.afMode = m_afMode;
  2631. m_IsAfTriggerRequired = false;
  2632. }
  2633. void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
  2634. {
  2635. uint32_t currentSignal = self->GetProcessingSignal();
  2636. SensorThread * selfThread = ((SensorThread*)self);
  2637. int index;
  2638. int index_isp;
  2639. status_t res;
  2640. nsecs_t frameTime;
  2641. int bayersOnSensor = 0, bayersOnIsp = 0;
  2642. int j = 0;
  2643. bool isCapture = false;
  2644. ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
  2645. if (currentSignal & SIGNAL_THREAD_RELEASE) {
  2646. CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
  2647. ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
  2648. cam_int_streamoff(&(m_camera_info.sensor));
  2649. ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
  2650. m_camera_info.sensor.buffers = 0;
  2651. ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
  2652. cam_int_reqbufs(&(m_camera_info.sensor));
  2653. ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
  2654. m_camera_info.sensor.status = false;
  2655. ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
  2656. isp_int_streamoff(&(m_camera_info.isp));
  2657. ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
  2658. m_camera_info.isp.buffers = 0;
  2659. ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
  2660. cam_int_reqbufs(&(m_camera_info.isp));
  2661. ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
  2662. exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
  2663. m_requestManager->releaseSensorQ();
  2664. m_requestManager->ResetEntry();
  2665. ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
  2666. selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
  2667. return;
  2668. }
  2669. if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
  2670. {
  2671. ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
  2672. int targetStreamIndex = 0, i=0;
  2673. int matchedFrameCnt = -1, processingReqIndex;
  2674. struct camera2_shot_ext *shot_ext;
  2675. struct camera2_shot_ext *shot_ext_capture;
  2676. bool triggered = false;
  2677. /* dqbuf from sensor */
  2678. ALOGV("Sensor DQbuf start");
  2679. index = cam_int_dqbuf(&(m_camera_info.sensor));
  2680. m_requestManager->pushSensorQ(index);
  2681. ALOGV("Sensor DQbuf done(%d)", index);
  2682. shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
  2683. if (m_nightCaptureCnt != 0) {
  2684. matchedFrameCnt = m_nightCaptureFrameCnt;
  2685. } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
  2686. matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount;
  2687. ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt);
  2688. } else {
  2689. matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
  2690. }
  2691. if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) {
  2692. matchedFrameCnt = m_vdisDupFrame;
  2693. }
  2694. if (matchedFrameCnt != -1) {
  2695. if (m_vdisBubbleCnt == 0 || m_vdisDupFrame != matchedFrameCnt) {
  2696. frameTime = systemTime();
  2697. m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
  2698. m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
  2699. } else {
  2700. ALOGV("bubble for vids: m_vdisBubbleCnt %d, matchedFrameCnt %d", m_vdisDupFrame, matchedFrameCnt);
  2701. }
  2702. // face af mode setting in case of face priority scene mode
  2703. if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) {
  2704. ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__, shot_ext->shot.ctl.aa.sceneMode);
  2705. m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode;
  2706. }
  2707. m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
  2708. float zoomLeft, zoomTop, zoomWidth, zoomHeight;
  2709. int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
  2710. m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
  2711. m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
  2712. &crop_x, &crop_y,
  2713. &crop_w, &crop_h,
  2714. 0);
  2715. if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) {
  2716. zoomWidth = m_camera2->getSensorW() / m_zoomRatio;
  2717. zoomHeight = zoomWidth *
  2718. m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width;
  2719. } else {
  2720. zoomHeight = m_camera2->getSensorH() / m_zoomRatio;
  2721. zoomWidth = zoomHeight *
  2722. m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height;
  2723. }
  2724. zoomLeft = (crop_w - zoomWidth) / 2;
  2725. zoomTop = (crop_h - zoomHeight) / 2;
  2726. int32_t new_cropRegion[3] = { zoomLeft, zoomTop, zoomWidth };
  2727. int cropCompensation = (new_cropRegion[0] * 2 + new_cropRegion[2]) - ALIGN(crop_w, 4);
  2728. if (cropCompensation)
  2729. new_cropRegion[2] -= cropCompensation;
  2730. shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
  2731. shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
  2732. shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
  2733. if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) {
  2734. ALOGD("### Applying AF Mode change(Mode %d) ", m_afMode);
  2735. shot_ext->shot.ctl.aa.afMode = m_afMode;
  2736. if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
  2737. ALOGD("### With Automatic triger for continuous modes");
  2738. m_afState = HAL_AFSTATE_STARTED;
  2739. shot_ext->shot.ctl.aa.afTrigger = 1;
  2740. triggered = true;
  2741. if ((m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_UNSUPPORTED) ||
  2742. (m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_FACE_PRIORITY)) {
  2743. switch (m_afMode) {
  2744. case AA_AFMODE_CONTINUOUS_PICTURE:
  2745. shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE_FACE;
  2746. ALOGD("### Face AF Mode change (Mode %d) ", shot_ext->shot.ctl.aa.afMode);
  2747. break;
  2748. }
  2749. }
  2750. // reset flash result
  2751. if (m_ctlInfo.flash.m_afFlashDoneFlg) {
  2752. m_ctlInfo.flash.m_flashEnableFlg = false;
  2753. m_ctlInfo.flash.m_afFlashDoneFlg = false;
  2754. m_ctlInfo.flash.m_flashDecisionResult = false;
  2755. m_ctlInfo.flash.m_flashCnt = 0;
  2756. }
  2757. m_ctlInfo.af.m_afTriggerTimeOut = 1;
  2758. }
  2759. m_IsAfModeUpdateRequired = false;
  2760. // support inifinity focus mode
  2761. if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) {
  2762. shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY;
  2763. shot_ext->shot.ctl.aa.afTrigger = 1;
  2764. triggered = true;
  2765. }
  2766. if (m_afMode2 != NO_CHANGE) {
  2767. enum aa_afmode tempAfMode = m_afMode2;
  2768. m_afMode2 = NO_CHANGE;
  2769. SetAfMode(tempAfMode);
  2770. }
  2771. }
  2772. else {
  2773. shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
  2774. }
  2775. if (m_IsAfTriggerRequired) {
  2776. if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
  2777. // flash case
  2778. if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) {
  2779. if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) {
  2780. // Flash is enabled and start AF
  2781. m_afTrigger(shot_ext, 1);
  2782. } else {
  2783. m_afTrigger(shot_ext, 0);
  2784. }
  2785. }
  2786. } else {
  2787. // non-flash case
  2788. m_afTrigger(shot_ext, 0);
  2789. }
  2790. } else {
  2791. shot_ext->shot.ctl.aa.afTrigger = 0;
  2792. }
  2793. if (m_wideAspect) {
  2794. shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
  2795. } else {
  2796. shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
  2797. }
  2798. if (triggered)
  2799. shot_ext->shot.ctl.aa.afTrigger = 1;
  2800. // TODO : check collision with AFMode Update
  2801. if (m_IsAfLockRequired) {
  2802. shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
  2803. m_IsAfLockRequired = false;
  2804. }
  2805. ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
  2806. index,
  2807. shot_ext->shot.ctl.request.frameCount,
  2808. shot_ext->request_scp,
  2809. shot_ext->request_scc,
  2810. shot_ext->dis_bypass, sizeof(camera2_shot));
  2811. // update AF region
  2812. m_updateAfRegion(shot_ext);
  2813. m_lastSceneMode = shot_ext->shot.ctl.aa.sceneMode;
  2814. if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT
  2815. && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED)
  2816. shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON;
  2817. if (m_nightCaptureCnt == 0) {
  2818. if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE
  2819. && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
  2820. shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
  2821. shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
  2822. shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
  2823. m_nightCaptureCnt = 4;
  2824. m_nightCaptureFrameCnt = matchedFrameCnt;
  2825. shot_ext->request_scc = 0;
  2826. }
  2827. }
  2828. else if (m_nightCaptureCnt == 1) {
  2829. shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
  2830. shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
  2831. shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
  2832. m_nightCaptureCnt--;
  2833. m_nightCaptureFrameCnt = 0;
  2834. shot_ext->request_scc = 1;
  2835. }
  2836. else if (m_nightCaptureCnt == 2) {
  2837. shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
  2838. shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
  2839. shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
  2840. m_nightCaptureCnt--;
  2841. shot_ext->request_scc = 0;
  2842. }
  2843. else if (m_nightCaptureCnt == 3) {
  2844. shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
  2845. shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
  2846. shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
  2847. m_nightCaptureCnt--;
  2848. shot_ext->request_scc = 0;
  2849. }
  2850. else if (m_nightCaptureCnt == 4) {
  2851. shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
  2852. shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
  2853. shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
  2854. m_nightCaptureCnt--;
  2855. shot_ext->request_scc = 0;
  2856. }
  2857. switch (shot_ext->shot.ctl.aa.aeTargetFpsRange[1]) {
  2858. case 15:
  2859. shot_ext->shot.ctl.sensor.frameDuration = (66666 * 1000);
  2860. break;
  2861. case 24:
  2862. shot_ext->shot.ctl.sensor.frameDuration = (41666 * 1000);
  2863. break;
  2864. case 25:
  2865. shot_ext->shot.ctl.sensor.frameDuration = (40000 * 1000);
  2866. break;
  2867. case 30:
  2868. default:
  2869. shot_ext->shot.ctl.sensor.frameDuration = (33333 * 1000);
  2870. break;
  2871. }
  2872. shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
  2873. // Flash mode
  2874. // Keep and Skip request_scc = 1 at flash enable mode to operate flash sequence
  2875. if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
  2876. && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE)
  2877. && (m_cameraId == 0)) {
  2878. if (!m_ctlInfo.flash.m_flashDecisionResult) {
  2879. m_ctlInfo.flash.m_flashEnableFlg = false;
  2880. m_ctlInfo.flash.m_afFlashDoneFlg = false;
  2881. m_ctlInfo.flash.m_flashCnt = 0;
  2882. } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) ||
  2883. (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) {
  2884. ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
  2885. shot_ext->request_scc = 0;
  2886. m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt;
  2887. m_ctlInfo.flash.m_flashEnableFlg = true;
  2888. m_ctlInfo.flash.m_afFlashDoneFlg = false;
  2889. m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE;
  2890. } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) {
  2891. ALOGE("(%s): [Flash] Flash capture Error- wrong state !!!!!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
  2892. shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
  2893. m_ctlInfo.flash.m_flashEnableFlg = false;
  2894. m_ctlInfo.flash.m_afFlashDoneFlg= false;
  2895. m_ctlInfo.flash.m_flashCnt = 0;
  2896. }
  2897. } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) {
  2898. m_ctlInfo.flash.m_flashDecisionResult = false;
  2899. }
  2900. if (shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) {
  2901. if (m_ctlInfo.flash.m_flashTorchMode == false) {
  2902. m_ctlInfo.flash.m_flashTorchMode = true;
  2903. }
  2904. } else {
  2905. if (m_ctlInfo.flash.m_flashTorchMode == true) {
  2906. shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
  2907. shot_ext->shot.ctl.flash.firingPower = 0;
  2908. m_ctlInfo.flash.m_flashTorchMode = false;
  2909. } else {
  2910. shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP;
  2911. }
  2912. }
  2913. if (shot_ext->isReprocessing) {
  2914. ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__);
  2915. m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0];
  2916. shot_ext->request_scp = 0;
  2917. shot_ext->request_scc = 0;
  2918. m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount;
  2919. m_ctlInfo.flash.m_flashDecisionResult = false;
  2920. memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt)),
  2921. sizeof(struct camera2_shot_ext));
  2922. m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START);
  2923. m_ctlInfo.flash.m_flashEnableFlg = false;
  2924. }
  2925. if (m_ctlInfo.flash.m_flashEnableFlg) {
  2926. m_preCaptureListenerSensor(shot_ext);
  2927. m_preCaptureSetter(shot_ext);
  2928. }
  2929. ALOGV("(%s): queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
  2930. (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
  2931. (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
  2932. (int)(shot_ext->shot.ctl.aa.afTrigger));
  2933. if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) {
  2934. shot_ext->dis_bypass = 1;
  2935. shot_ext->dnr_bypass = 1;
  2936. shot_ext->request_scp = 0;
  2937. shot_ext->request_scc = 0;
  2938. m_vdisBubbleCnt--;
  2939. matchedFrameCnt = -1;
  2940. } else {
  2941. m_vdisDupFrame = matchedFrameCnt;
  2942. }
  2943. if (m_scpForceSuspended)
  2944. shot_ext->request_scc = 0;
  2945. uint32_t current_scp = shot_ext->request_scp;
  2946. uint32_t current_scc = shot_ext->request_scc;
  2947. if (shot_ext->shot.dm.request.frameCount == 0) {
  2948. CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount);
  2949. }
  2950. cam_int_qbuf(&(m_camera_info.isp), index);
  2951. ALOGV("### isp DQBUF start");
  2952. index_isp = cam_int_dqbuf(&(m_camera_info.isp));
  2953. shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
  2954. if (m_ctlInfo.flash.m_flashEnableFlg)
  2955. m_preCaptureListenerISP(shot_ext);
  2956. ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)",
  2957. index,
  2958. shot_ext->shot.ctl.request.frameCount,
  2959. shot_ext->request_scp,
  2960. shot_ext->request_scc,
  2961. shot_ext->dis_bypass,
  2962. shot_ext->dnr_bypass, sizeof(camera2_shot));
  2963. ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
  2964. (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
  2965. (int)(shot_ext->shot.dm.aa.awbMode),
  2966. (int)(shot_ext->shot.dm.aa.afMode));
  2967. #ifndef ENABLE_FRAME_SYNC
  2968. m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0];
  2969. #endif
  2970. if (!shot_ext->fd_bypass) {
  2971. /* FD orientation axis transformation */
  2972. for (int i=0; i < CAMERA2_MAX_FACES; i++) {
  2973. if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
  2974. shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW
  2975. * shot_ext->shot.dm.stats.faceRectangles[i][0])
  2976. / m_streamThreads[0].get()->m_parameters.width;
  2977. if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
  2978. shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH
  2979. * shot_ext->shot.dm.stats.faceRectangles[i][1])
  2980. / m_streamThreads[0].get()->m_parameters.height;
  2981. if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
  2982. shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW
  2983. * shot_ext->shot.dm.stats.faceRectangles[i][2])
  2984. / m_streamThreads[0].get()->m_parameters.width;
  2985. if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
  2986. shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH
  2987. * shot_ext->shot.dm.stats.faceRectangles[i][3])
  2988. / m_streamThreads[0].get()->m_parameters.height;
  2989. }
  2990. }
  2991. // aeState control
  2992. if (shot_ext->shot.ctl.aa.sceneMode != AA_SCENE_MODE_NIGHT)
  2993. m_preCaptureAeState(shot_ext);
  2994. // At scene mode face priority
  2995. if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE)
  2996. shot_ext->shot.dm.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
  2997. if (matchedFrameCnt != -1 && m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) {
  2998. m_requestManager->ApplyDynamicMetadata(shot_ext);
  2999. }
  3000. if (current_scc != shot_ext->request_scc) {
  3001. ALOGD("(%s): scc frame drop1 request_scc(%d to %d)",
  3002. __FUNCTION__, current_scc, shot_ext->request_scc);
  3003. m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
  3004. }
  3005. if (shot_ext->request_scc) {
  3006. ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)");
  3007. if (shot_ext->shot.ctl.request.outputStreams[0] & STREAM_MASK_JPEG) {
  3008. if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)
  3009. memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(shot_ext->shot.ctl.request.frameCount)),
  3010. sizeof(struct camera2_shot_ext));
  3011. else
  3012. memcpy(&m_jpegMetadata, (void*)shot_ext, sizeof(struct camera2_shot_ext));
  3013. }
  3014. m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
  3015. }
  3016. if (current_scp != shot_ext->request_scp) {
  3017. ALOGD("(%s): scp frame drop1 request_scp(%d to %d)",
  3018. __FUNCTION__, current_scp, shot_ext->request_scp);
  3019. m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
  3020. }
  3021. if (shot_ext->request_scp) {
  3022. ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)");
  3023. m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
  3024. }
  3025. ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
  3026. shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
  3027. if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
  3028. ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
  3029. m_scp_closed = true;
  3030. }
  3031. else
  3032. m_scp_closed = false;
  3033. OnAfNotification(shot_ext->shot.dm.aa.afState);
  3034. OnPrecaptureMeteringNotificationISP();
  3035. } else {
  3036. memcpy(&shot_ext->shot.ctl, &m_camera_info.dummy_shot.shot.ctl, sizeof(struct camera2_ctl));
  3037. shot_ext->shot.ctl.request.frameCount = 0xfffffffe;
  3038. shot_ext->request_sensor = 1;
  3039. shot_ext->dis_bypass = 1;
  3040. shot_ext->dnr_bypass = 1;
  3041. shot_ext->fd_bypass = 1;
  3042. shot_ext->drc_bypass = 1;
  3043. shot_ext->request_scc = 0;
  3044. shot_ext->request_scp = 0;
  3045. if (m_wideAspect) {
  3046. shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
  3047. } else {
  3048. shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
  3049. }
  3050. shot_ext->shot.ctl.aa.sceneMode = (enum aa_scene_mode)m_lastSceneMode;
  3051. if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT_CAPTURE || shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
  3052. shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
  3053. shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
  3054. }
  3055. shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
  3056. shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
  3057. ALOGV("### isp QBUF start (bubble)");
  3058. ALOGV("bubble: queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)",
  3059. (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
  3060. (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
  3061. (int)(shot_ext->shot.ctl.aa.afTrigger));
  3062. cam_int_qbuf(&(m_camera_info.isp), index);
  3063. ALOGV("### isp DQBUF start (bubble)");
  3064. index_isp = cam_int_dqbuf(&(m_camera_info.isp));
  3065. shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
  3066. ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)",
  3067. (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
  3068. (int)(shot_ext->shot.dm.aa.awbMode),
  3069. (int)(shot_ext->shot.dm.aa.afMode));
  3070. OnAfNotification(shot_ext->shot.dm.aa.afState);
  3071. }
  3072. index = m_requestManager->popSensorQ();
  3073. if(index < 0){
  3074. ALOGE("sensorQ is empty");
  3075. return;
  3076. }
  3077. processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
  3078. shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
  3079. if (m_scp_closing || m_scp_closed) {
  3080. ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
  3081. shot_ext->request_scc = 0;
  3082. shot_ext->request_scp = 0;
  3083. shot_ext->request_sensor = 0;
  3084. }
  3085. cam_int_qbuf(&(m_camera_info.sensor), index);
  3086. ALOGV("Sensor Qbuf done(%d)", index);
  3087. if (!m_scp_closing
  3088. && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
  3089. ALOGV("make bubble shot: matchedFramcnt(%d) processingReqIndex(%d)",
  3090. matchedFrameCnt, processingReqIndex);
  3091. selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
  3092. }
  3093. }
  3094. return;
  3095. }
  3096. void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
  3097. {
  3098. uint32_t currentSignal = self->GetProcessingSignal();
  3099. StreamThread * selfThread = ((StreamThread*)self);
  3100. stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
  3101. node_info_t *currentNode = selfStreamParms->node;
  3102. substream_parameters_t *subParms;
  3103. buffer_handle_t * buf = NULL;
  3104. status_t res;
  3105. void *virtAddr[3];
  3106. int i, j;
  3107. int index;
  3108. nsecs_t timestamp;
  3109. if (!(selfThread->m_isBufferInit))
  3110. {
  3111. for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
  3112. res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
  3113. if (res != NO_ERROR || buf == NULL) {
  3114. ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
  3115. return;
  3116. }
  3117. ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
  3118. ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
  3119. index = selfThread->findBufferIndex(buf);
  3120. if (index == -1) {
  3121. ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
  3122. }
  3123. else {
  3124. ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
  3125. __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
  3126. if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
  3127. selfStreamParms->svcBufStatus[index] = ON_DRIVER;
  3128. else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
  3129. selfStreamParms->svcBufStatus[index] = ON_HAL;
  3130. else {
  3131. ALOGV("DBG(%s): buffer status abnormal (%d) "
  3132. , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
  3133. }
  3134. selfStreamParms->numSvcBufsInHal++;
  3135. }
  3136. selfStreamParms->bufIndex = 0;
  3137. }
  3138. selfThread->m_isBufferInit = true;
  3139. }
  3140. for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
  3141. if (selfThread->m_attachedSubStreams[i].streamId == -1)
  3142. continue;
  3143. subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId];
  3144. if (subParms->type && subParms->needBufferInit) {
  3145. ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
  3146. __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers);
  3147. int checkingIndex = 0;
  3148. bool found = false;
  3149. for ( i = 0 ; i < subParms->numSvcBuffers; i++) {
  3150. res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
  3151. if (res != NO_ERROR || buf == NULL) {
  3152. ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
  3153. return;
  3154. }
  3155. subParms->numSvcBufsInHal++;
  3156. ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
  3157. subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
  3158. if (m_grallocHal->lock(m_grallocHal, *buf,
  3159. subParms->usage, 0, 0,
  3160. subParms->width, subParms->height, virtAddr) != 0) {
  3161. ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
  3162. }
  3163. else {
  3164. ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
  3165. __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
  3166. }
  3167. found = false;
  3168. for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
  3169. if (subParms->svcBufHandle[checkingIndex] == *buf ) {
  3170. found = true;
  3171. break;
  3172. }
  3173. }
  3174. ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
  3175. if (!found) break;
  3176. index = checkingIndex;
  3177. if (index == -1) {
  3178. ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
  3179. }
  3180. else {
  3181. ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
  3182. __FUNCTION__, index, subParms->svcBufStatus[index]);
  3183. if (subParms->svcBufStatus[index]== ON_SERVICE)
  3184. subParms->svcBufStatus[index] = ON_HAL;
  3185. else {
  3186. ALOGV("DBG(%s): buffer status abnormal (%d) "
  3187. , __FUNCTION__, subParms->svcBufStatus[index]);
  3188. }
  3189. if (*buf != subParms->svcBufHandle[index])
  3190. ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
  3191. else
  3192. ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
  3193. }
  3194. subParms->svcBufIndex = 0;
  3195. }
  3196. if (subParms->type == SUBSTREAM_TYPE_JPEG) {
  3197. m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2;
  3198. m_resizeBuf.size.extS[1] = 0;
  3199. m_resizeBuf.size.extS[2] = 0;
  3200. if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) {
  3201. ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
  3202. }
  3203. }
  3204. if (subParms->type == SUBSTREAM_TYPE_PRVCB) {
  3205. m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width,
  3206. subParms->height, &m_previewCbBuf);
  3207. if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) {
  3208. ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__);
  3209. }
  3210. }
  3211. subParms->needBufferInit= false;
  3212. }
  3213. }
  3214. }
  3215. void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
  3216. {
  3217. StreamThread * selfThread = ((StreamThread*)self);
  3218. ALOGV("DEBUG(%s): ", __FUNCTION__ );
  3219. memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
  3220. selfThread->m_isBufferInit = false;
  3221. for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
  3222. selfThread->m_attachedSubStreams[i].streamId = -1;
  3223. selfThread->m_attachedSubStreams[i].priority = 0;
  3224. }
  3225. return;
  3226. }
  3227. int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf,
  3228. int stream_id, nsecs_t frameTimeStamp)
  3229. {
  3230. substream_parameters_t *subParms = &m_subStreams[stream_id];
  3231. switch (stream_id) {
  3232. case STREAM_ID_JPEG:
  3233. return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp);
  3234. case STREAM_ID_RECORD:
  3235. return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp);
  3236. case STREAM_ID_PRVCB:
  3237. return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp);
  3238. default:
  3239. return 0;
  3240. }
  3241. }
  3242. void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self)
  3243. {
  3244. uint32_t currentSignal = self->GetProcessingSignal();
  3245. StreamThread * selfThread = ((StreamThread*)self);
  3246. stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
  3247. node_info_t *currentNode = selfStreamParms->node;
  3248. int i = 0;
  3249. nsecs_t frameTimeStamp;
  3250. if (currentSignal & SIGNAL_THREAD_RELEASE) {
  3251. CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
  3252. if (selfThread->m_isBufferInit) {
  3253. if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) {
  3254. ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__,
  3255. selfThread->m_index, currentNode->fd);
  3256. if (cam_int_streamoff(currentNode) < 0 ) {
  3257. ALOGE("ERR(%s): stream off fail", __FUNCTION__);
  3258. }
  3259. ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__,
  3260. selfThread->m_index, currentNode->fd);
  3261. currentNode->buffers = 0;
  3262. cam_int_reqbufs(currentNode);
  3263. ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__,
  3264. selfThread->m_index, currentNode->fd);
  3265. }
  3266. }
  3267. #ifdef ENABLE_FRAME_SYNC
  3268. // free metabuffers
  3269. for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
  3270. if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) {
  3271. freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
  3272. selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
  3273. selfStreamParms->metaBuffers[i].size.extS[0] = 0;
  3274. }
  3275. #endif
  3276. selfThread->m_isBufferInit = false;
  3277. selfThread->m_releasing = false;
  3278. selfThread->m_activated = false;
  3279. ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
  3280. return;
  3281. }
  3282. if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) {
  3283. status_t res;
  3284. buffer_handle_t * buf = NULL;
  3285. bool found = false;
  3286. ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
  3287. __FUNCTION__, selfThread->m_index);
  3288. res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf);
  3289. if (res != NO_ERROR || buf == NULL) {
  3290. ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res);
  3291. return;
  3292. }
  3293. const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
  3294. int checkingIndex = 0;
  3295. for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
  3296. if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
  3297. found = true;
  3298. break;
  3299. }
  3300. }
  3301. ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
  3302. __FUNCTION__, (unsigned int)buf, found, checkingIndex);
  3303. if (!found) return;
  3304. for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
  3305. if (selfThread->m_attachedSubStreams[i].streamId == -1)
  3306. continue;
  3307. #ifdef ENABLE_FRAME_SYNC
  3308. frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt);
  3309. m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt);
  3310. #else
  3311. frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
  3312. #endif
  3313. if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId))
  3314. m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]),
  3315. selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
  3316. }
  3317. res = m_reprocessOps->release_buffer(m_reprocessOps, buf);
  3318. if (res != NO_ERROR) {
  3319. ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res);
  3320. return;
  3321. }
  3322. ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_REPROCESSING_START",
  3323. __FUNCTION__,selfThread->m_index);
  3324. return;
  3325. }
  3326. if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
  3327. buffer_handle_t * buf = NULL;
  3328. status_t res = 0;
  3329. int i, j;
  3330. int index;
  3331. nsecs_t timestamp;
  3332. #ifdef ENABLE_FRAME_SYNC
  3333. camera2_stream *frame;
  3334. uint8_t currentOutputStreams;
  3335. bool directOutputEnabled = false;
  3336. #endif
  3337. int numOfUndqbuf = 0;
  3338. ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
  3339. m_streamBufferInit(self);
  3340. do {
  3341. ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__,
  3342. selfThread->m_index, selfThread->streamType);
  3343. #ifdef ENABLE_FRAME_SYNC
  3344. selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes);
  3345. frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]);
  3346. frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
  3347. currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
  3348. ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams);
  3349. if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)||
  3350. ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) {
  3351. directOutputEnabled = true;
  3352. }
  3353. if (!directOutputEnabled) {
  3354. if (!m_nightCaptureFrameCnt)
  3355. m_requestManager->NotifyStreamOutput(frame->rcount);
  3356. }
  3357. #else
  3358. selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
  3359. frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex())
  3360. #endif
  3361. ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d) sigcnt(%d)",__FUNCTION__,
  3362. selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt);
  3363. if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] != ON_DRIVER)
  3364. ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
  3365. __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]);
  3366. selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
  3367. for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
  3368. if (selfThread->m_attachedSubStreams[i].streamId == -1)
  3369. continue;
  3370. #ifdef ENABLE_FRAME_SYNC
  3371. if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
  3372. m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
  3373. selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
  3374. }
  3375. #else
  3376. if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
  3377. m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
  3378. selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
  3379. }
  3380. #endif
  3381. }
  3382. if (m_requestManager->GetSkipCnt() <= 0) {
  3383. #ifdef ENABLE_FRAME_SYNC
  3384. if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
  3385. ALOGV("** Display Preview(frameCnt:%d)", frame->rcount);
  3386. res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
  3387. frameTimeStamp,
  3388. &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
  3389. }
  3390. else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
  3391. ALOGV("** SCC output (frameCnt:%d)", frame->rcount);
  3392. res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
  3393. frameTimeStamp,
  3394. &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
  3395. }
  3396. else {
  3397. res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
  3398. &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
  3399. ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
  3400. }
  3401. #else
  3402. if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
  3403. ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
  3404. res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
  3405. frameTimeStamp,
  3406. &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
  3407. }
  3408. else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
  3409. ALOGV("** SCC output (frameCnt:%d), last(%d)", m_requestManager->GetFrameIndex());
  3410. res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
  3411. frameTimeStamp,
  3412. &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
  3413. }
  3414. #endif
  3415. ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
  3416. }
  3417. else {
  3418. res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
  3419. &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
  3420. ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
  3421. }
  3422. #ifdef ENABLE_FRAME_SYNC
  3423. if (directOutputEnabled) {
  3424. if (!m_nightCaptureFrameCnt)
  3425. m_requestManager->NotifyStreamOutput(frame->rcount);
  3426. }
  3427. #endif
  3428. if (res == 0) {
  3429. selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE;
  3430. selfStreamParms->numSvcBufsInHal--;
  3431. }
  3432. else {
  3433. selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
  3434. }
  3435. }
  3436. while(0);
  3437. while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS))
  3438. < selfStreamParms->minUndequedBuffer) {
  3439. res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
  3440. if (res != NO_ERROR || buf == NULL) {
  3441. ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index, res, selfStreamParms->numSvcBufsInHal);
  3442. break;
  3443. }
  3444. selfStreamParms->numSvcBufsInHal++;
  3445. ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
  3446. selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
  3447. ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
  3448. const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
  3449. bool found = false;
  3450. int checkingIndex = 0;
  3451. for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
  3452. if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
  3453. found = true;
  3454. break;
  3455. }
  3456. }
  3457. if (!found) break;
  3458. selfStreamParms->bufIndex = checkingIndex;
  3459. if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) {
  3460. uint32_t plane_index = 0;
  3461. ExynosBuffer* currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]);
  3462. struct v4l2_buffer v4l2_buf;
  3463. struct v4l2_plane planes[VIDEO_MAX_PLANES];
  3464. v4l2_buf.m.planes = planes;
  3465. v4l2_buf.type = currentNode->type;
  3466. v4l2_buf.memory = currentNode->memory;
  3467. v4l2_buf.index = selfStreamParms->bufIndex;
  3468. v4l2_buf.length = currentNode->planes;
  3469. v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
  3470. v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
  3471. v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
  3472. for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
  3473. v4l2_buf.m.planes[plane_index].length = currentBuf->size.extS[plane_index];
  3474. }
  3475. #ifdef ENABLE_FRAME_SYNC
  3476. /* add plane for metadata*/
  3477. v4l2_buf.length += selfStreamParms->metaPlanes;
  3478. v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0];
  3479. v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0];
  3480. #endif
  3481. if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
  3482. ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
  3483. __FUNCTION__, selfThread->m_index);
  3484. return;
  3485. }
  3486. selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER;
  3487. ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
  3488. __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex);
  3489. }
  3490. }
  3491. ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
  3492. }
  3493. return;
  3494. }
/*
 * Signal handler for STREAM_TYPE_INDIRECT streams (SCC capture path):
 * frames are dequeued from the v4l2 node, handed only to substream
 * creators (never enqueued to a service stream directly), then the same
 * buffer is requeued to the driver.
 *
 * Handles:
 *  - SIGNAL_THREAD_RELEASE: streamoff (with special-casing when the node
 *    is the shared capture node) followed by reqbufs(0).
 *  - SIGNAL_STREAM_DATA_COMING: dqbuf -> run substreams -> qbuf.
 */
void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
{
    uint32_t                currentSignal   = self->GetProcessingSignal();
    StreamThread *          selfThread      = ((StreamThread*)self);
    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
    node_info_t             *currentNode    = selfStreamParms->node;

    if (currentSignal & SIGNAL_THREAD_RELEASE) {
        CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);

        if (selfThread->m_isBufferInit) {
            if (currentNode->fd == m_camera_info.capture.fd) {
                /* Shared capture node: only streamoff if it is actually
                 * streaming, and record the new state on success. */
                if (m_camera_info.capture.status == true) {
                    ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
                    selfThread->m_index, currentNode->fd);
                    if (cam_int_streamoff(currentNode) < 0 ){
                        ALOGE("ERR(%s): stream off fail", __FUNCTION__);
                    } else {
                        m_camera_info.capture.status = false;
                    }
                }
            } else {
                /* Private node: unconditional streamoff. */
                ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
                selfThread->m_index, currentNode->fd);
                if (cam_int_streamoff(currentNode) < 0 ){
                    ALOGE("ERR(%s): stream off fail", __FUNCTION__);
                }
            }
            ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
                    selfThread->m_index, currentNode->fd);
            /* Release all driver buffers with reqbufs(0). */
            currentNode->buffers = 0;
            cam_int_reqbufs(currentNode);
            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
                    selfThread->m_index, currentNode->fd);
        }

        selfThread->m_isBufferInit = false;
        selfThread->m_releasing = false;
        selfThread->m_activated = false;
        ALOGV("(%s): [%d] END  SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
        return;
    }

    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
#ifdef ENABLE_FRAME_SYNC
        camera2_stream *frame;
        uint8_t currentOutputStreams;
#endif
        nsecs_t frameTimeStamp;

        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
            __FUNCTION__,selfThread->m_index);

        m_streamBufferInit(self);

        ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
        selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
        ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
            selfThread->m_index, selfStreamParms->bufIndex);

#ifdef ENABLE_FRAME_SYNC
        /* Frame count/timestamp ride in the last (metadata) plane. */
        frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
        frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
        currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
        ALOGV("frame count(SCC) : %d outputStream(%x)",  frame->rcount, currentOutputStreams);
#else
        frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
#endif

        /* Run only the substreams this frame's request asked for; with
         * frame sync, notify the request manager per matched substream. */
        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
            if (selfThread->m_attachedSubStreams[i].streamId == -1)
                continue;
#ifdef ENABLE_FRAME_SYNC
            if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
                m_requestManager->NotifyStreamOutput(frame->rcount);
                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
            }
#else
            if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
            }
#endif
        }
        /* Return the buffer to the driver immediately. */
        cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
        ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);

        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
            __FUNCTION__, selfThread->m_index);
    }

    return;
}
  3579. void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
  3580. {
  3581. uint32_t currentSignal = self->GetProcessingSignal();
  3582. StreamThread * selfThread = ((StreamThread*)self);
  3583. stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
  3584. node_info_t *currentNode = selfStreamParms->node;
  3585. ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
  3586. // Do something in Child thread handler
  3587. // Should change function to class that inherited StreamThread class to support dynamic stream allocation
  3588. if (selfThread->streamType == STREAM_TYPE_DIRECT) {
  3589. m_streamFunc_direct(self);
  3590. } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) {
  3591. m_streamFunc_indirect(self);
  3592. }
  3593. return;
  3594. }
  3595. int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
  3596. {
  3597. Mutex::Autolock lock(m_jpegEncoderLock);
  3598. stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
  3599. substream_parameters_t *subParms = &m_subStreams[STREAM_ID_JPEG];
  3600. status_t res;
  3601. ExynosRect jpegRect;
  3602. bool found = false;
  3603. int srcW, srcH, srcCropX, srcCropY;
  3604. int pictureW, pictureH, pictureFramesize = 0;
  3605. int pictureFormat;
  3606. int cropX, cropY, cropW, cropH = 0;
  3607. ExynosBuffer resizeBufInfo;
  3608. ExynosRect m_jpegPictureRect;
  3609. buffer_handle_t * buf = NULL;
  3610. camera2_jpeg_blob * jpegBlob = NULL;
  3611. int jpegBufSize = 0;
  3612. ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
  3613. for (int i = 0 ; subParms->numSvcBuffers ; i++) {
  3614. if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
  3615. found = true;
  3616. break;
  3617. }
  3618. subParms->svcBufIndex++;
  3619. if (subParms->svcBufIndex >= subParms->numSvcBuffers)
  3620. subParms->svcBufIndex = 0;
  3621. }
  3622. if (!found) {
  3623. ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
  3624. subParms->svcBufIndex++;
  3625. return 1;
  3626. }
  3627. m_jpegEncodingCount++;
  3628. m_getRatioSize(selfStreamParms->width, selfStreamParms->height,
  3629. m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
  3630. &srcCropX, &srcCropY,
  3631. &srcW, &srcH,
  3632. 0);
  3633. m_jpegPictureRect.w = subParms->width;
  3634. m_jpegPictureRect.h = subParms->height;
  3635. ALOGV("DEBUG(%s):w = %d, h = %d, w = %d, h = %d",
  3636. __FUNCTION__, selfStreamParms->width, selfStreamParms->height,
  3637. m_jpegPictureRect.w, m_jpegPictureRect.h);
  3638. m_getRatioSize(srcW, srcH,
  3639. m_jpegPictureRect.w, m_jpegPictureRect.h,
  3640. &cropX, &cropY,
  3641. &pictureW, &pictureH,
  3642. 0);
  3643. pictureFormat = V4L2_PIX_FMT_YUYV;
  3644. pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
  3645. if (m_exynosPictureCSC) {
  3646. float zoom_w = 0, zoom_h = 0;
  3647. if (m_zoomRatio == 0)
  3648. m_zoomRatio = 1;
  3649. if (m_jpegPictureRect.w >= m_jpegPictureRect.h) {
  3650. zoom_w = pictureW / m_zoomRatio;
  3651. zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w;
  3652. } else {
  3653. zoom_h = pictureH / m_zoomRatio;
  3654. zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h;
  3655. }
  3656. cropX = (srcW - zoom_w) / 2;
  3657. cropY = (srcH - zoom_h) / 2;
  3658. cropW = zoom_w;
  3659. cropH = zoom_h;
  3660. ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
  3661. __FUNCTION__, cropX, cropY, cropW, cropH);
  3662. csc_set_src_format(m_exynosPictureCSC,
  3663. ALIGN(srcW, 16), ALIGN(srcH, 16),
  3664. cropX, cropY, cropW, cropH,
  3665. V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
  3666. 0);
  3667. csc_set_dst_format(m_exynosPictureCSC,
  3668. m_jpegPictureRect.w, m_jpegPictureRect.h,
  3669. 0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h,
  3670. V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
  3671. 0);
  3672. for (int i = 0 ; i < 3 ; i++)
  3673. ALOGV("DEBUG(%s): m_pictureBuf.fd.extFd[%d]=%d ",
  3674. __FUNCTION__, i, srcImageBuf->fd.extFd[i]);
  3675. csc_set_src_buffer(m_exynosPictureCSC,
  3676. (void **)&srcImageBuf->fd.fd);
  3677. csc_set_dst_buffer(m_exynosPictureCSC,
  3678. (void **)&m_resizeBuf.fd.fd);
  3679. for (int i = 0 ; i < 3 ; i++)
  3680. ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d",
  3681. __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
  3682. if (csc_convert(m_exynosPictureCSC) != 0)
  3683. ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
  3684. }
  3685. else {
  3686. ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
  3687. }
  3688. resizeBufInfo = m_resizeBuf;
  3689. m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf);
  3690. for (int i = 1; i < 3; i++) {
  3691. if (m_resizeBuf.size.extS[i] != 0)
  3692. m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
  3693. ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
  3694. }
  3695. jpegRect.w = m_jpegPictureRect.w;
  3696. jpegRect.h = m_jpegPictureRect.h;
  3697. jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
  3698. for (int j = 0 ; j < 3 ; j++)
  3699. ALOGV("DEBUG(%s): dest buf node fd.extFd[%d]=%d size=%d virt=%x ",
  3700. __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j],
  3701. (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j],
  3702. (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]);
  3703. jpegBufSize = subParms->svcBuffers[subParms->svcBufIndex].size.extS[0];
  3704. if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) {
  3705. ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
  3706. } else {
  3707. m_resizeBuf = resizeBufInfo;
  3708. int jpegSize = subParms->svcBuffers[subParms->svcBufIndex].size.s;
  3709. ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__,
  3710. m_jpegPictureRect.w, m_jpegPictureRect.h, jpegBufSize, jpegSize);
  3711. char * jpegBuffer = (char*)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
  3712. jpegBlob = (camera2_jpeg_blob*)(&jpegBuffer[jpegBufSize - sizeof(camera2_jpeg_blob)]);
  3713. if (jpegBuffer[jpegSize-1] == 0)
  3714. jpegSize--;
  3715. jpegBlob->jpeg_size = jpegSize;
  3716. jpegBlob->jpeg_blob_id = CAMERA2_JPEG_BLOB_ID;
  3717. }
  3718. subParms->svcBuffers[subParms->svcBufIndex].size.extS[0] = jpegBufSize;
  3719. res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
  3720. ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
  3721. __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
  3722. if (res == 0) {
  3723. subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
  3724. subParms->numSvcBufsInHal--;
  3725. }
  3726. else {
  3727. subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
  3728. }
  3729. while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
  3730. {
  3731. bool found = false;
  3732. int checkingIndex = 0;
  3733. ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
  3734. res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
  3735. if (res != NO_ERROR || buf == NULL) {
  3736. ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res);
  3737. break;
  3738. }
  3739. const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
  3740. subParms->numSvcBufsInHal ++;
  3741. ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
  3742. subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
  3743. for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
  3744. if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
  3745. found = true;
  3746. break;
  3747. }
  3748. }
  3749. ALOGV("DEBUG(%s): jpeg dequeueed_buffer found index(%d)", __FUNCTION__, found);
  3750. if (!found) {
  3751. break;
  3752. }
  3753. subParms->svcBufIndex = checkingIndex;
  3754. if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
  3755. subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
  3756. }
  3757. else {
  3758. ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d] status = %d", __FUNCTION__,
  3759. subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]);
  3760. }
  3761. }
  3762. m_jpegEncodingCount--;
  3763. return 0;
  3764. }
  3765. int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
  3766. {
  3767. stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
  3768. substream_parameters_t *subParms = &m_subStreams[STREAM_ID_RECORD];
  3769. status_t res;
  3770. ExynosRect jpegRect;
  3771. bool found = false;
  3772. int cropX, cropY, cropW, cropH = 0;
  3773. buffer_handle_t * buf = NULL;
  3774. ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
  3775. for (int i = 0 ; subParms->numSvcBuffers ; i++) {
  3776. if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
  3777. found = true;
  3778. break;
  3779. }
  3780. subParms->svcBufIndex++;
  3781. if (subParms->svcBufIndex >= subParms->numSvcBuffers)
  3782. subParms->svcBufIndex = 0;
  3783. }
  3784. if (!found) {
  3785. ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
  3786. subParms->svcBufIndex++;
  3787. return 1;
  3788. }
  3789. if (m_exynosVideoCSC) {
  3790. int videoW = subParms->width, videoH = subParms->height;
  3791. int cropX, cropY, cropW, cropH = 0;
  3792. int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
  3793. m_getRatioSize(previewW, previewH,
  3794. videoW, videoH,
  3795. &cropX, &cropY,
  3796. &cropW, &cropH,
  3797. 0);
  3798. ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
  3799. __FUNCTION__, cropX, cropY, cropW, cropH);
  3800. csc_set_src_format(m_exynosVideoCSC,
  3801. ALIGN(previewW, 32), previewH,
  3802. cropX, cropY, cropW, cropH,
  3803. selfStreamParms->format,
  3804. 0);
  3805. csc_set_dst_format(m_exynosVideoCSC,
  3806. videoW, videoH,
  3807. 0, 0, videoW, videoH,
  3808. subParms->format,
  3809. 1);
  3810. csc_set_src_buffer(m_exynosVideoCSC,
  3811. (void **)&srcImageBuf->fd.fd);
  3812. csc_set_dst_buffer(m_exynosVideoCSC,
  3813. (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd)));
  3814. if (csc_convert(m_exynosVideoCSC) != 0) {
  3815. ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
  3816. }
  3817. else {
  3818. ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
  3819. }
  3820. }
  3821. else {
  3822. ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
  3823. }
  3824. res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
  3825. ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
  3826. __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
  3827. if (res == 0) {
  3828. subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
  3829. subParms->numSvcBufsInHal--;
  3830. }
  3831. else {
  3832. subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
  3833. }
  3834. while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
  3835. {
  3836. bool found = false;
  3837. int checkingIndex = 0;
  3838. ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
  3839. res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
  3840. if (res != NO_ERROR || buf == NULL) {
  3841. ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res);
  3842. break;
  3843. }
  3844. const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
  3845. subParms->numSvcBufsInHal ++;
  3846. ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
  3847. subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
  3848. for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
  3849. if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
  3850. found = true;
  3851. break;
  3852. }
  3853. }
  3854. ALOGV("DEBUG(%s): record dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
  3855. if (!found) {
  3856. break;
  3857. }
  3858. subParms->svcBufIndex = checkingIndex;
  3859. if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
  3860. subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
  3861. }
  3862. else {
  3863. ALOGV("DEBUG(%s): record bufstatus abnormal [%d] status = %d", __FUNCTION__,
  3864. subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]);
  3865. }
  3866. }
  3867. return 0;
  3868. }
  3869. int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
  3870. {
  3871. stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
  3872. substream_parameters_t *subParms = &m_subStreams[STREAM_ID_PRVCB];
  3873. status_t res;
  3874. bool found = false;
  3875. int cropX, cropY, cropW, cropH = 0;
  3876. buffer_handle_t * buf = NULL;
  3877. ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
  3878. for (int i = 0 ; subParms->numSvcBuffers ; i++) {
  3879. if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
  3880. found = true;
  3881. break;
  3882. }
  3883. subParms->svcBufIndex++;
  3884. if (subParms->svcBufIndex >= subParms->numSvcBuffers)
  3885. subParms->svcBufIndex = 0;
  3886. }
  3887. if (!found) {
  3888. ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
  3889. subParms->svcBufIndex++;
  3890. return 1;
  3891. }
  3892. if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
  3893. if (m_exynosVideoCSC) {
  3894. int previewCbW = subParms->width, previewCbH = subParms->height;
  3895. int cropX, cropY, cropW, cropH = 0;
  3896. int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
  3897. m_getRatioSize(previewW, previewH,
  3898. previewCbW, previewCbH,
  3899. &cropX, &cropY,
  3900. &cropW, &cropH,
  3901. 0);
  3902. ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
  3903. __FUNCTION__, cropX, cropY, cropW, cropH);
  3904. csc_set_src_format(m_exynosVideoCSC,
  3905. ALIGN(previewW, 32), previewH,
  3906. cropX, cropY, cropW, cropH,
  3907. selfStreamParms->format,
  3908. 0);
  3909. csc_set_dst_format(m_exynosVideoCSC,
  3910. previewCbW, previewCbH,
  3911. 0, 0, previewCbW, previewCbH,
  3912. subParms->internalFormat,
  3913. 1);
  3914. csc_set_src_buffer(m_exynosVideoCSC,
  3915. (void **)&srcImageBuf->fd.fd);
  3916. csc_set_dst_buffer(m_exynosVideoCSC,
  3917. (void **)(&(m_previewCbBuf.fd.fd)));
  3918. if (csc_convert(m_exynosVideoCSC) != 0) {
  3919. ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
  3920. }
  3921. else {
  3922. ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
  3923. }
  3924. if (previewCbW == ALIGN(previewCbW, 16)) {
  3925. memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
  3926. m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
  3927. memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
  3928. m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
  3929. }
  3930. else {
  3931. // TODO : copy line by line ?
  3932. }
  3933. }
  3934. else {
  3935. ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
  3936. }
  3937. }
  3938. else if (subParms->format == HAL_PIXEL_FORMAT_YV12) {
  3939. int previewCbW = subParms->width, previewCbH = subParms->height;
  3940. int stride = ALIGN(previewCbW, 16);
  3941. int uv_stride = ALIGN(previewCbW/2, 16);
  3942. int c_stride = ALIGN(stride / 2, 16);
  3943. if (previewCbW == ALIGN(previewCbW, 32)) {
  3944. memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
  3945. srcImageBuf->virt.extP[0], stride * previewCbH);
  3946. memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
  3947. srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 );
  3948. memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
  3949. srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 );
  3950. } else {
  3951. char * dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
  3952. char * srcAddr = (char *)(srcImageBuf->virt.extP[0]);
  3953. for (int i = 0 ; i < previewCbH ; i++) {
  3954. memcpy(dstAddr, srcAddr, previewCbW);
  3955. dstAddr += stride;
  3956. srcAddr += ALIGN(stride, 32);
  3957. }
  3958. dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH);
  3959. srcAddr = (char *)(srcImageBuf->virt.extP[1]);
  3960. for (int i = 0 ; i < previewCbH/2 ; i++) {
  3961. memcpy(dstAddr, srcAddr, previewCbW/2);
  3962. dstAddr += c_stride;
  3963. srcAddr += uv_stride;
  3964. }
  3965. srcAddr = (char *)(srcImageBuf->virt.extP[2]);
  3966. for (int i = 0 ; i < previewCbH/2 ; i++) {
  3967. memcpy(dstAddr, srcAddr, previewCbW/2);
  3968. dstAddr += c_stride;
  3969. srcAddr += uv_stride;
  3970. }
  3971. }
  3972. }
  3973. res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
  3974. ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
  3975. __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
  3976. if (res == 0) {
  3977. subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
  3978. subParms->numSvcBufsInHal--;
  3979. }
  3980. else {
  3981. subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
  3982. }
  3983. while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
  3984. {
  3985. bool found = false;
  3986. int checkingIndex = 0;
  3987. ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
  3988. res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
  3989. if (res != NO_ERROR || buf == NULL) {
  3990. ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res);
  3991. break;
  3992. }
  3993. const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
  3994. subParms->numSvcBufsInHal ++;
  3995. ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
  3996. subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
  3997. for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
  3998. if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
  3999. found = true;
  4000. break;
  4001. }
  4002. }
  4003. ALOGV("DEBUG(%s): prvcb dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
  4004. if (!found) {
  4005. break;
  4006. }
  4007. subParms->svcBufIndex = checkingIndex;
  4008. if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
  4009. subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
  4010. }
  4011. else {
  4012. ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d] status = %d", __FUNCTION__,
  4013. subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]);
  4014. }
  4015. }
  4016. return 0;
  4017. }
  4018. bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
  4019. {
  4020. int sizeOfSupportList;
  4021. //REAR Camera
  4022. if(this->getCameraId() == 0) {
  4023. sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2);
  4024. for(int i = 0; i < sizeOfSupportList; i++) {
  4025. if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
  4026. return true;
  4027. }
  4028. }
  4029. else {
  4030. sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2);
  4031. for(int i = 0; i < sizeOfSupportList; i++) {
  4032. if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
  4033. return true;
  4034. }
  4035. }
  4036. return false;
  4037. }
  4038. bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
  4039. ExynosBuffer *jpegBuf,
  4040. ExynosRect *rect)
  4041. {
  4042. unsigned char *addr;
  4043. ExynosJpegEncoderForCamera jpegEnc;
  4044. bool ret = false;
  4045. int res = 0;
  4046. unsigned int *yuvSize = yuvBuf->size.extS;
  4047. if (jpegEnc.create()) {
  4048. ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
  4049. goto jpeg_encode_done;
  4050. }
  4051. if (jpegEnc.setQuality(m_jpegMetadata.shot.ctl.jpeg.quality)) {
  4052. ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
  4053. goto jpeg_encode_done;
  4054. }
  4055. if (jpegEnc.setSize(rect->w, rect->h)) {
  4056. ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
  4057. goto jpeg_encode_done;
  4058. }
  4059. ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
  4060. if (jpegEnc.setColorFormat(rect->colorFormat)) {
  4061. ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
  4062. goto jpeg_encode_done;
  4063. }
  4064. if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
  4065. ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
  4066. goto jpeg_encode_done;
  4067. }
  4068. if((m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1] != 0)) {
  4069. mExifInfo.enableThumb = true;
  4070. if(!m_checkThumbnailSize(m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1])) {
  4071. // in the case of unsupported parameter, disable thumbnail
  4072. mExifInfo.enableThumb = false;
  4073. } else {
  4074. m_thumbNailW = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0];
  4075. m_thumbNailH = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1];
  4076. }
  4077. ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
  4078. } else {
  4079. mExifInfo.enableThumb = false;
  4080. }
  4081. if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
  4082. ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailH, m_thumbNailH);
  4083. goto jpeg_encode_done;
  4084. }
  4085. ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailW);
  4086. if (jpegEnc.setThumbnailQuality(m_jpegMetadata.shot.ctl.jpeg.thumbnailQuality)) {
  4087. ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
  4088. goto jpeg_encode_done;
  4089. }
  4090. m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
  4091. ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
  4092. if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
  4093. ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
  4094. goto jpeg_encode_done;
  4095. }
  4096. if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
  4097. ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
  4098. goto jpeg_encode_done;
  4099. }
  4100. if (jpegEnc.updateConfig()) {
  4101. ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
  4102. goto jpeg_encode_done;
  4103. }
  4104. if (res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) {
  4105. ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
  4106. goto jpeg_encode_done;
  4107. }
  4108. ret = true;
  4109. jpeg_encode_done:
  4110. if (jpegEnc.flagCreate() == true)
  4111. jpegEnc.destroy();
  4112. return ret;
  4113. }
void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
{
    // Handle an AE precapture metering trigger from the framework. 'id' is the
    // trigger id echoed back in AE/AWB notifications. Decides whether a flash
    // sequence must run before capture, then emits the start notification.
    m_ctlInfo.flash.m_precaptureTriggerId = id;
    m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
    if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
        // flash is required (flash-capable AE mode on the rear camera)
        switch (m_ctlInfo.flash.m_flashCnt) {
        case IS_FLASH_STATE_AUTO_DONE:
        case IS_FLASH_STATE_AUTO_OFF:
            // Flash capture sequence, AF flash was executed before
            break;
        default:
            // Full flash sequence
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
            m_ctlInfo.flash.m_flashEnableFlg = true;
            m_ctlInfo.flash.m_flashTimeOut = 0;
        }
    } else {
        // Skip pre-capture in case of non-flash.
        ALOGV("[PreCap] Flash OFF mode ");
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
    }
    ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
    // Immediately report precapture start toward the framework.
    OnPrecaptureMeteringNotificationSensor();
}
void ExynosCameraHWInterface2::OnAfTrigger(int id)
{
    // Framework AF trigger entry point: record the trigger id (echoed back in
    // AF notifications) and dispatch to the handler for the current AF mode.
    m_afTriggerId = id;

    switch (m_afMode) {
    case AA_AFMODE_AUTO:
    case AA_AFMODE_MACRO:
    case AA_AFMODE_MANUAL:
        ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode);
        // If flash is enable, Flash operation is executed before triggering AF
        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
                && (m_ctlInfo.flash.m_flashEnableFlg == false)
                && (m_cameraId == 0)) {
            ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode);
            m_ctlInfo.flash.m_flashEnableFlg = true;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
            m_ctlInfo.flash.m_flashDecisionResult = false;
            // Mark this flash sequence as started for AF (not for capture).
            m_ctlInfo.flash.m_afFlashDoneFlg = true;
        }
        OnAfTriggerAutoMacro(id);
        break;
    case AA_AFMODE_CONTINUOUS_VIDEO:
        ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
        OnAfTriggerCAFVideo(id);
        break;
    case AA_AFMODE_CONTINUOUS_PICTURE:
        ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
        OnAfTriggerCAFPicture(id);
        break;
    case AA_AFMODE_OFF:
    default:
        // AF disabled: ignore the trigger.
        break;
    }
}
  4173. void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
  4174. {
  4175. int nextState = NO_TRANSITION;
  4176. switch (m_afState) {
  4177. case HAL_AFSTATE_INACTIVE:
  4178. case HAL_AFSTATE_PASSIVE_FOCUSED:
  4179. case HAL_AFSTATE_SCANNING:
  4180. nextState = HAL_AFSTATE_NEEDS_COMMAND;
  4181. m_IsAfTriggerRequired = true;
  4182. break;
  4183. case HAL_AFSTATE_NEEDS_COMMAND:
  4184. nextState = NO_TRANSITION;
  4185. break;
  4186. case HAL_AFSTATE_STARTED:
  4187. nextState = NO_TRANSITION;
  4188. break;
  4189. case HAL_AFSTATE_LOCKED:
  4190. nextState = HAL_AFSTATE_NEEDS_COMMAND;
  4191. m_IsAfTriggerRequired = true;
  4192. break;
  4193. case HAL_AFSTATE_FAILED:
  4194. nextState = HAL_AFSTATE_NEEDS_COMMAND;
  4195. m_IsAfTriggerRequired = true;
  4196. break;
  4197. default:
  4198. break;
  4199. }
  4200. ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
  4201. if (nextState != NO_TRANSITION)
  4202. m_afState = nextState;
  4203. }
void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
{
    // AF trigger while in continuous-picture AF mode: lock the continuous scan
    // and report focused / not-focused to the framework based on the current
    // HAL AF state.
    int nextState = NO_TRANSITION;

    switch (m_afState) {
    case HAL_AFSTATE_INACTIVE:
        // No scan ever ran: report an immediate not-focused lock.
        nextState = HAL_AFSTATE_FAILED;
        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
        break;
    case HAL_AFSTATE_NEEDS_COMMAND:
        // not used
        break;
    case HAL_AFSTATE_STARTED:
        // Scan just started: defer the verdict until the HW reports a result.
        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
        m_AfHwStateFailed = false;
        break;
    case HAL_AFSTATE_SCANNING:
        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
        m_AfHwStateFailed = false;
        // If flash is enable, Flash operation is executed before triggering AF
        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
                && (m_ctlInfo.flash.m_flashEnableFlg == false)
                && (m_cameraId == 0)) {
            ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
            m_ctlInfo.flash.m_flashEnableFlg = true;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
            m_ctlInfo.flash.m_flashDecisionResult = false;
            m_ctlInfo.flash.m_afFlashDoneFlg = true;
        }
        break;
    case HAL_AFSTATE_NEEDS_DETERMINATION:
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_PASSIVE_FOCUSED:
        // Continuous AF already converged: lock now, reporting the last HW
        // scan result (m_AfHwStateFailed) as the final verdict.
        m_IsAfLockRequired = true;
        if (m_AfHwStateFailed) {
            ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            nextState = HAL_AFSTATE_FAILED;
        }
        else {
            ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            nextState = HAL_AFSTATE_LOCKED;
        }
        m_AfHwStateFailed = false;
        break;
    case HAL_AFSTATE_LOCKED:
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_FAILED:
        nextState = NO_TRANSITION;
        break;
    default:
        break;
    }
    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
  4263. void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id)
  4264. {
  4265. int nextState = NO_TRANSITION;
  4266. switch (m_afState) {
  4267. case HAL_AFSTATE_INACTIVE:
  4268. nextState = HAL_AFSTATE_FAILED;
  4269. SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
  4270. break;
  4271. case HAL_AFSTATE_NEEDS_COMMAND:
  4272. // not used
  4273. break;
  4274. case HAL_AFSTATE_STARTED:
  4275. m_IsAfLockRequired = true;
  4276. nextState = HAL_AFSTATE_FAILED;
  4277. SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
  4278. break;
  4279. case HAL_AFSTATE_SCANNING:
  4280. m_IsAfLockRequired = true;
  4281. nextState = HAL_AFSTATE_FAILED;
  4282. SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
  4283. break;
  4284. case HAL_AFSTATE_NEEDS_DETERMINATION:
  4285. // not used
  4286. break;
  4287. case HAL_AFSTATE_PASSIVE_FOCUSED:
  4288. m_IsAfLockRequired = true;
  4289. SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
  4290. nextState = HAL_AFSTATE_LOCKED;
  4291. break;
  4292. case HAL_AFSTATE_LOCKED:
  4293. nextState = NO_TRANSITION;
  4294. break;
  4295. case HAL_AFSTATE_FAILED:
  4296. nextState = NO_TRANSITION;
  4297. break;
  4298. default:
  4299. break;
  4300. }
  4301. ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
  4302. if (nextState != NO_TRANSITION)
  4303. m_afState = nextState;
  4304. }
  4305. void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor()
  4306. {
  4307. if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
  4308. // Just noti of pre-capture start
  4309. if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) {
  4310. m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
  4311. ANDROID_CONTROL_AE_STATE_PRECAPTURE,
  4312. m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
  4313. ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
  4314. m_notifyCb(CAMERA2_MSG_AUTOWB,
  4315. ANDROID_CONTROL_AWB_STATE_CONVERGED,
  4316. m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
  4317. m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
  4318. }
  4319. }
  4320. }
void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
{
    // ISP-side follow-up of a precapture metering trigger. Once metering has
    // finished (flash auto sequence done, or the non-flash case), reports AE
    // and AWB convergence to the framework and clears the trigger id so the
    // sequence is considered complete.
    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
        if (m_ctlInfo.flash.m_flashEnableFlg) {
            // flash case
            switch (m_ctlInfo.flash.m_flashCnt) {
            case IS_FLASH_STATE_AUTO_DONE:
            case IS_FLASH_STATE_AUTO_OFF:
                if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
                    // End notification: PRECAPTURE start was already announced,
                    // so report CONVERGED and finish the sequence.
                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                            ANDROID_CONTROL_AE_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                    m_notifyCb(CAMERA2_MSG_AUTOWB,
                            ANDROID_CONTROL_AWB_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    m_ctlInfo.flash.m_precaptureTriggerId = 0;
                } else {
                    // PRECAPTURE start has not been announced yet - announce it
                    // now; CONVERGED will be reported on a later invocation.
                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                            ANDROID_CONTROL_AE_STATE_PRECAPTURE,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                    m_notifyCb(CAMERA2_MSG_AUTOWB,
                            ANDROID_CONTROL_AWB_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
                }
                break;
            case IS_FLASH_STATE_CAPTURE:
            case IS_FLASH_STATE_CAPTURE_WAIT:
            case IS_FLASH_STATE_CAPTURE_JPEG:
            case IS_FLASH_STATE_CAPTURE_END:
                // A capture flash sequence is already running - force the flash
                // state machine back to AUTO_DONE and end precapture at once.
                ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                        ANDROID_CONTROL_AE_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_notifyCb(CAMERA2_MSG_AUTOWB,
                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_ctlInfo.flash.m_precaptureTriggerId = 0;
                break;
            }
            // Other flash states: sequence still in progress, nothing to do.
        } else {
            // non-flash case: once the start notification has gone out, report
            // convergence and clear the trigger.
            if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                        ANDROID_CONTROL_AE_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                m_notifyCb(CAMERA2_MSG_AUTOWB,
                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_ctlInfo.flash.m_precaptureTriggerId = 0;
            }
        }
    }
}
  4380. void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
  4381. {
  4382. switch (m_afMode) {
  4383. case AA_AFMODE_AUTO:
  4384. case AA_AFMODE_MACRO:
  4385. OnAfNotificationAutoMacro(noti);
  4386. break;
  4387. case AA_AFMODE_CONTINUOUS_VIDEO:
  4388. OnAfNotificationCAFVideo(noti);
  4389. break;
  4390. case AA_AFMODE_CONTINUOUS_PICTURE:
  4391. OnAfNotificationCAFPicture(noti);
  4392. break;
  4393. case AA_AFMODE_OFF:
  4394. default:
  4395. break;
  4396. }
  4397. }
void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
{
    // Advances the HAL AF state machine for single-shot (AUTO/MACRO) AF on an
    // ISP AF-state notification, reporting the resulting state to the service
    // via SetAfStateForService(). ISP states that are not expected in the
    // current HAL state are logged and ignored (bWrongTransition).
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;
    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
        // Before the AF command reaches hardware every notification is ignored.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // Hardware has begun the sweep.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering:
            // the lock is only reported once the flash auto sequence finishes.
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    // Pre-flash done: move to AE/AWB lock, keep HAL state.
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    // Flash metering finished: now the lock can be reported.
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    nextState = HAL_AFSTATE_LOCKED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                nextState = HAL_AFSTATE_LOCKED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            }
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering
            // (same sequencing as the success case, but reports failure).
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    nextState = HAL_AFSTATE_FAILED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                nextState = HAL_AFSTATE_FAILED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            }
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        // Once locked, only a repeated "focused" notification is legal.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_FAILED) {
        // After a failure, only a repeated "failed" notification is legal.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
  4526. void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
  4527. {
  4528. int nextState = NO_TRANSITION;
  4529. bool bWrongTransition = false;
  4530. if (m_afState == HAL_AFSTATE_INACTIVE) {
  4531. switch (noti) {
  4532. case AA_AFSTATE_INACTIVE:
  4533. case AA_AFSTATE_ACTIVE_SCAN:
  4534. case AA_AFSTATE_AF_ACQUIRED_FOCUS:
  4535. case AA_AFSTATE_AF_FAILED_FOCUS:
  4536. default:
  4537. nextState = NO_TRANSITION;
  4538. break;
  4539. }
  4540. // Check AF notification after triggering
  4541. if (m_ctlInfo.af.m_afTriggerTimeOut > 0) {
  4542. if (m_ctlInfo.af.m_afTriggerTimeOut > 5) {
  4543. ALOGE("(%s) AF notification error - try to re-trigger mode (%)", __FUNCTION__, m_afMode);
  4544. SetAfMode(AA_AFMODE_OFF);
  4545. SetAfMode(m_afMode);
  4546. m_ctlInfo.af.m_afTriggerTimeOut = 0;
  4547. } else {
  4548. m_ctlInfo.af.m_afTriggerTimeOut++;
  4549. }
  4550. }
  4551. }
  4552. else if (m_afState == HAL_AFSTATE_STARTED) {
  4553. switch (noti) {
  4554. case AA_AFSTATE_INACTIVE:
  4555. nextState = NO_TRANSITION;
  4556. break;
  4557. case AA_AFSTATE_ACTIVE_SCAN:
  4558. nextState = HAL_AFSTATE_SCANNING;
  4559. SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
  4560. m_ctlInfo.af.m_afTriggerTimeOut = 0;
  4561. break;
  4562. case AA_AFSTATE_AF_ACQUIRED_FOCUS:
  4563. nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
  4564. SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
  4565. m_ctlInfo.af.m_afTriggerTimeOut = 0;
  4566. break;
  4567. case AA_AFSTATE_AF_FAILED_FOCUS:
  4568. //nextState = HAL_AFSTATE_FAILED;
  4569. //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
  4570. nextState = NO_TRANSITION;
  4571. break;
  4572. default:
  4573. bWrongTransition = true;
  4574. break;
  4575. }
  4576. }
  4577. else if (m_afState == HAL_AFSTATE_SCANNING) {
  4578. switch (noti) {
  4579. case AA_AFSTATE_INACTIVE:
  4580. nextState = NO_TRANSITION;
  4581. break;
  4582. case AA_AFSTATE_ACTIVE_SCAN:
  4583. nextState = NO_TRANSITION;
  4584. m_AfHwStateFailed = false;
  4585. break;
  4586. case AA_AFSTATE_AF_ACQUIRED_FOCUS:
  4587. nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
  4588. m_AfHwStateFailed = false;
  4589. SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
  4590. break;
  4591. case AA_AFSTATE_AF_FAILED_FOCUS:
  4592. nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
  4593. m_AfHwStateFailed = true;
  4594. SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
  4595. break;
  4596. default:
  4597. bWrongTransition = true;
  4598. break;
  4599. }
  4600. }
  4601. else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
  4602. switch (noti) {
  4603. case AA_AFSTATE_INACTIVE:
  4604. nextState = NO_TRANSITION;
  4605. break;
  4606. case AA_AFSTATE_ACTIVE_SCAN:
  4607. nextState = HAL_AFSTATE_SCANNING;
  4608. m_AfHwStateFailed = false;
  4609. SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
  4610. break;
  4611. case AA_AFSTATE_AF_ACQUIRED_FOCUS:
  4612. nextState = NO_TRANSITION;
  4613. m_AfHwStateFailed = false;
  4614. break;
  4615. case AA_AFSTATE_AF_FAILED_FOCUS:
  4616. nextState = NO_TRANSITION;
  4617. m_AfHwStateFailed = true;
  4618. break;
  4619. default:
  4620. bWrongTransition = true;
  4621. break;
  4622. }
  4623. }
  4624. else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
  4625. //Skip notification in case of flash, wait the end of flash on
  4626. if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
  4627. if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE)
  4628. return;
  4629. }
  4630. switch (noti) {
  4631. case AA_AFSTATE_INACTIVE:
  4632. nextState = NO_TRANSITION;
  4633. break;
  4634. case AA_AFSTATE_ACTIVE_SCAN:
  4635. nextState = NO_TRANSITION;
  4636. break;
  4637. case AA_AFSTATE_AF_ACQUIRED_FOCUS:
  4638. // If Flash mode is enable, after AF execute pre-capture metering
  4639. if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
  4640. switch (m_ctlInfo.flash.m_flashCnt) {
  4641. case IS_FLASH_STATE_ON_DONE:
  4642. ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
  4643. m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
  4644. nextState = NO_TRANSITION;
  4645. break;
  4646. case IS_FLASH_STATE_AUTO_DONE:
  4647. ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
  4648. m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
  4649. m_IsAfLockRequired = true;
  4650. nextState = HAL_AFSTATE_LOCKED;
  4651. SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
  4652. break;
  4653. default:
  4654. nextState = NO_TRANSITION;
  4655. }
  4656. } else {
  4657. m_IsAfLockRequired = true;
  4658. nextState = HAL_AFSTATE_LOCKED;
  4659. SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
  4660. }
  4661. break;
  4662. case AA_AFSTATE_AF_FAILED_FOCUS:
  4663. // If Flash mode is enable, after AF execute pre-capture metering
  4664. if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
  4665. switch (m_ctlInfo.flash.m_flashCnt) {
  4666. case IS_FLASH_STATE_ON_DONE:
  4667. ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
  4668. m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
  4669. nextState = NO_TRANSITION;
  4670. break;
  4671. case IS_FLASH_STATE_AUTO_DONE:
  4672. ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
  4673. m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
  4674. m_IsAfLockRequired = true;
  4675. nextState = HAL_AFSTATE_FAILED;
  4676. SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
  4677. break;
  4678. default:
  4679. nextState = NO_TRANSITION;
  4680. }
  4681. } else {
  4682. m_IsAfLockRequired = true;
  4683. nextState = HAL_AFSTATE_FAILED;
  4684. SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
  4685. }
  4686. break;
  4687. default:
  4688. bWrongTransition = true;
  4689. break;
  4690. }
  4691. }
  4692. else if (m_afState == HAL_AFSTATE_LOCKED) {
  4693. switch (noti) {
  4694. case AA_AFSTATE_INACTIVE:
  4695. nextState = NO_TRANSITION;
  4696. break;
  4697. case AA_AFSTATE_ACTIVE_SCAN:
  4698. bWrongTransition = true;
  4699. break;
  4700. case AA_AFSTATE_AF_ACQUIRED_FOCUS:
  4701. nextState = NO_TRANSITION;
  4702. break;
  4703. case AA_AFSTATE_AF_FAILED_FOCUS:
  4704. default:
  4705. bWrongTransition = true;
  4706. break;
  4707. }
  4708. }
  4709. else if (m_afState == HAL_AFSTATE_FAILED) {
  4710. switch (noti) {
  4711. case AA_AFSTATE_INACTIVE:
  4712. bWrongTransition = true;
  4713. break;
  4714. case AA_AFSTATE_ACTIVE_SCAN:
  4715. nextState = HAL_AFSTATE_SCANNING;
  4716. break;
  4717. case AA_AFSTATE_AF_ACQUIRED_FOCUS:
  4718. bWrongTransition = true;
  4719. break;
  4720. case AA_AFSTATE_AF_FAILED_FOCUS:
  4721. nextState = NO_TRANSITION;
  4722. break;
  4723. default:
  4724. bWrongTransition = true;
  4725. break;
  4726. }
  4727. }
  4728. if (bWrongTransition) {
  4729. ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
  4730. return;
  4731. }
  4732. ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
  4733. if (nextState != NO_TRANSITION)
  4734. m_afState = nextState;
  4735. }
void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
{
    // Advances the HAL AF state machine for continuous-video AF on an ISP
    // AF-state notification, reporting the resulting state to the service via
    // SetAfStateForService(). Unlike the CAF-picture variant there is no flash
    // interaction and no trigger watchdog. Unexpected ISP states for the
    // current HAL state are logged and ignored (bWrongTransition).
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;
    if (m_afState == HAL_AFSTATE_INACTIVE) {
        // Nothing running: every notification is ignored.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // Keep scanning; a failure mid-scan is not final.
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // Scene changed: hardware re-scans.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            // TODO : needs NO_TRANSITION ?
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
        // A trigger is pending: the next definitive notification locks or
        // fails the focus.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            m_IsAfLockRequired = true;
            nextState = HAL_AFSTATE_LOCKED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_FAILED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
  4877. void ExynosCameraHWInterface2::OnAfCancel(int id)
  4878. {
  4879. m_afTriggerId = id;
  4880. switch (m_afMode) {
  4881. case AA_AFMODE_AUTO:
  4882. case AA_AFMODE_MACRO:
  4883. case AA_AFMODE_OFF:
  4884. case AA_AFMODE_MANUAL:
  4885. OnAfCancelAutoMacro(id);
  4886. break;
  4887. case AA_AFMODE_CONTINUOUS_VIDEO:
  4888. OnAfCancelCAFVideo(id);
  4889. break;
  4890. case AA_AFMODE_CONTINUOUS_PICTURE:
  4891. OnAfCancelCAFPicture(id);
  4892. break;
  4893. default:
  4894. break;
  4895. }
  4896. }
  4897. void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id)
  4898. {
  4899. int nextState = NO_TRANSITION;
  4900. if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
  4901. m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
  4902. }
  4903. switch (m_afState) {
  4904. case HAL_AFSTATE_INACTIVE:
  4905. nextState = NO_TRANSITION;
  4906. SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
  4907. break;
  4908. case HAL_AFSTATE_NEEDS_COMMAND:
  4909. case HAL_AFSTATE_STARTED:
  4910. case HAL_AFSTATE_SCANNING:
  4911. case HAL_AFSTATE_LOCKED:
  4912. case HAL_AFSTATE_FAILED:
  4913. SetAfMode(AA_AFMODE_OFF);
  4914. SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
  4915. nextState = HAL_AFSTATE_INACTIVE;
  4916. break;
  4917. default:
  4918. break;
  4919. }
  4920. ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
  4921. if (nextState != NO_TRANSITION)
  4922. m_afState = nextState;
  4923. }
  4924. void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id)
  4925. {
  4926. int nextState = NO_TRANSITION;
  4927. switch (m_afState) {
  4928. case HAL_AFSTATE_INACTIVE:
  4929. nextState = NO_TRANSITION;
  4930. break;
  4931. case HAL_AFSTATE_NEEDS_COMMAND:
  4932. case HAL_AFSTATE_STARTED:
  4933. case HAL_AFSTATE_SCANNING:
  4934. case HAL_AFSTATE_LOCKED:
  4935. case HAL_AFSTATE_FAILED:
  4936. case HAL_AFSTATE_NEEDS_DETERMINATION:
  4937. case HAL_AFSTATE_PASSIVE_FOCUSED:
  4938. SetAfMode(AA_AFMODE_OFF);
  4939. SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
  4940. SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
  4941. nextState = HAL_AFSTATE_INACTIVE;
  4942. break;
  4943. default:
  4944. break;
  4945. }
  4946. ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
  4947. if (nextState != NO_TRANSITION)
  4948. m_afState = nextState;
  4949. }
  4950. void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id)
  4951. {
  4952. int nextState = NO_TRANSITION;
  4953. switch (m_afState) {
  4954. case HAL_AFSTATE_INACTIVE:
  4955. nextState = NO_TRANSITION;
  4956. break;
  4957. case HAL_AFSTATE_NEEDS_COMMAND:
  4958. case HAL_AFSTATE_STARTED:
  4959. case HAL_AFSTATE_SCANNING:
  4960. case HAL_AFSTATE_LOCKED:
  4961. case HAL_AFSTATE_FAILED:
  4962. case HAL_AFSTATE_NEEDS_DETERMINATION:
  4963. case HAL_AFSTATE_PASSIVE_FOCUSED:
  4964. SetAfMode(AA_AFMODE_OFF);
  4965. SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
  4966. SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
  4967. nextState = HAL_AFSTATE_INACTIVE;
  4968. break;
  4969. default:
  4970. break;
  4971. }
  4972. ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
  4973. if (nextState != NO_TRANSITION)
  4974. m_afState = nextState;
  4975. }
  4976. void ExynosCameraHWInterface2::SetAfStateForService(int newState)
  4977. {
  4978. if (m_serviceAfState != newState || newState == 0)
  4979. m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
  4980. m_serviceAfState = newState;
  4981. }
int ExynosCameraHWInterface2::GetAfStateForService()
{
    // Returns the AF state most recently reported to the framework via
    // SetAfStateForService().
    return m_serviceAfState;
}
void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
{
    // Requests a new AF mode. If a previous mode change is still pending
    // (m_IsAfModeUpdateRequired) and the current mode is not OFF, the new mode
    // is parked in m_afMode2 rather than overwriting the pending one.
    // Otherwise the new mode takes effect immediately: the update flag is
    // raised and both the HAL and service-visible AF states are reset to
    // INACTIVE. Requesting the current mode again is a no-op.
    if (m_afMode != afMode) {
        if (m_IsAfModeUpdateRequired && m_afMode != AA_AFMODE_OFF) {
            // Queue the request; presumably picked up once the pending update
            // completes - confirm against the consumer of m_afMode2.
            m_afMode2 = afMode;
            ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
        }
        else {
            ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
            m_IsAfModeUpdateRequired = true;
            m_afMode = afMode;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
            m_afState = HAL_AFSTATE_INACTIVE;
        }
    }
}
void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
{
    // Fills the Exif fields of mExifInfo that never change between shots:
    // maker/model/software (from Android system properties), lens constants
    // (f-number, aperture, focal length) from the camera info, and static
    // defaults. Per-shot fields are handled by m_setExifChangedAttribute().
    char property[PROPERTY_VALUE_MAX];
    //2 0th IFD TIFF Tags
    //3 Maker
    property_get("ro.product.brand", property, EXIF_DEF_MAKER);
    strncpy((char *)mExifInfo.maker, property,
                sizeof(mExifInfo.maker) - 1);
    mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
    //3 Model
    property_get("ro.product.model", property, EXIF_DEF_MODEL);
    strncpy((char *)mExifInfo.model, property,
                sizeof(mExifInfo.model) - 1);
    mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
    //3 Software
    property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
    strncpy((char *)mExifInfo.software, property,
                sizeof(mExifInfo.software) - 1);
    mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
    //3 YCbCr Positioning
    mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
    //2 0th IFD Exif Private Tags
    //3 F Number
    mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
    mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
    //3 Exposure Program
    mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
    //3 Exif Version
    memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
    //3 Aperture
    // APEX aperture value (AV) derived from the f-number.
    double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
    mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
    mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
    //3 Maximum lens aperture
    // Fixed lens: max aperture equals the current aperture.
    mExifInfo.max_aperture.num = mExifInfo.aperture.num;
    mExifInfo.max_aperture.den = mExifInfo.aperture.den;
    //3 Lens Focal Length
    mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);
    mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
    //3 User Comments
    // NOTE(review): plain strcpy - assumes EXIF_DEF_USERCOMMENTS fits in
    // mExifInfo.user_comment; confirm against the buffer's declared size.
    strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
    //3 Color Space information
    mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
    //3 Exposure Mode
    mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
    //2 0th IFD GPS Info Tags
    // GPS tag version 2.2.0.0 per the Exif specification.
    unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
    memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
    //2 1th IFD TIFF Tags
    mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
    mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
    mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
    mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
    mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
    mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
}
  5058. void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
  5059. camera2_shot_ext *currentEntry)
  5060. {
  5061. camera2_dm *dm = &(currentEntry->shot.dm);
  5062. camera2_ctl *ctl = &(currentEntry->shot.ctl);
  5063. ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime,dm->aa.isoValue );
  5064. if (!ctl->request.frameCount)
  5065. return;
  5066. //2 0th IFD TIFF Tags
  5067. //3 Width
  5068. exifInfo->width = rect->w;
  5069. //3 Height
  5070. exifInfo->height = rect->h;
  5071. //3 Orientation
  5072. switch (ctl->jpeg.orientation) {
  5073. case 90:
  5074. exifInfo->orientation = EXIF_ORIENTATION_90;
  5075. break;
  5076. case 180:
  5077. exifInfo->orientation = EXIF_ORIENTATION_180;
  5078. break;
  5079. case 270:
  5080. exifInfo->orientation = EXIF_ORIENTATION_270;
  5081. break;
  5082. case 0:
  5083. default:
  5084. exifInfo->orientation = EXIF_ORIENTATION_UP;
  5085. break;
  5086. }
  5087. //3 Date time
  5088. time_t rawtime;
  5089. struct tm *timeinfo;
  5090. time(&rawtime);
  5091. timeinfo = localtime(&rawtime);
  5092. strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
  5093. //2 0th IFD Exif Private Tags
  5094. //3 Exposure Time
  5095. int shutterSpeed = (dm->sensor.exposureTime/1000);
  5096. // To display exposure time just above 500ms as 1/2sec, not 1 sec.
  5097. if (shutterSpeed > 500000)
  5098. shutterSpeed -= 100000;
  5099. if (shutterSpeed < 0) {
  5100. shutterSpeed = 100;
  5101. }
  5102. exifInfo->exposure_time.num = 1;
  5103. // x us -> 1/x s */
  5104. //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
  5105. exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
  5106. //3 ISO Speed Rating
  5107. exifInfo->iso_speed_rating = dm->aa.isoValue;
  5108. uint32_t av, tv, bv, sv, ev;
  5109. av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
  5110. tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
  5111. sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
  5112. bv = av + tv - sv;
  5113. ev = av + tv;
  5114. //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
  5115. ALOGV("AV=%d, TV=%d, SV=%d", av, tv, sv);
  5116. //3 Shutter Speed
  5117. exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
  5118. exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
  5119. //3 Brightness
  5120. exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
  5121. exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
  5122. //3 Exposure Bias
  5123. if (ctl->aa.sceneMode== AA_SCENE_MODE_BEACH||
  5124. ctl->aa.sceneMode== AA_SCENE_MODE_SNOW) {
  5125. exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
  5126. exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
  5127. } else {
  5128. exifInfo->exposure_bias.num = 0;
  5129. exifInfo->exposure_bias.den = 0;
  5130. }
  5131. //3 Metering Mode
  5132. /*switch (m_curCameraInfo->metering) {
  5133. case METERING_MODE_CENTER:
  5134. exifInfo->metering_mode = EXIF_METERING_CENTER;
  5135. break;
  5136. case METERING_MODE_MATRIX:
  5137. exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
  5138. break;
  5139. case METERING_MODE_SPOT:
  5140. exifInfo->metering_mode = EXIF_METERING_SPOT;
  5141. break;
  5142. case METERING_MODE_AVERAGE:
  5143. default:
  5144. exifInfo->metering_mode = EXIF_METERING_AVERAGE;
  5145. break;
  5146. }*/
  5147. exifInfo->metering_mode = EXIF_METERING_CENTER;
  5148. //3 Flash
  5149. if (m_ctlInfo.flash.m_flashDecisionResult)
  5150. exifInfo->flash = 1;
  5151. else
  5152. exifInfo->flash = EXIF_DEF_FLASH;
  5153. //3 White Balance
  5154. if (currentEntry->awb_mode_dm == AA_AWBMODE_WB_AUTO)
  5155. exifInfo->white_balance = EXIF_WB_AUTO;
  5156. else
  5157. exifInfo->white_balance = EXIF_WB_MANUAL;
  5158. //3 Scene Capture Type
  5159. switch (ctl->aa.sceneMode) {
  5160. case AA_SCENE_MODE_PORTRAIT:
  5161. exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
  5162. break;
  5163. case AA_SCENE_MODE_LANDSCAPE:
  5164. exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
  5165. break;
  5166. case AA_SCENE_MODE_NIGHT_PORTRAIT:
  5167. exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
  5168. break;
  5169. default:
  5170. exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
  5171. break;
  5172. }
  5173. //2 0th IFD GPS Info Tags
  5174. if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
  5175. if (ctl->jpeg.gpsCoordinates[0] > 0)
  5176. strcpy((char *)exifInfo->gps_latitude_ref, "N");
  5177. else
  5178. strcpy((char *)exifInfo->gps_latitude_ref, "S");
  5179. if (ctl->jpeg.gpsCoordinates[1] > 0)
  5180. strcpy((char *)exifInfo->gps_longitude_ref, "E");
  5181. else
  5182. strcpy((char *)exifInfo->gps_longitude_ref, "W");
  5183. if (ctl->jpeg.gpsCoordinates[2] > 0)
  5184. exifInfo->gps_altitude_ref = 0;
  5185. else
  5186. exifInfo->gps_altitude_ref = 1;
  5187. double latitude = fabs(ctl->jpeg.gpsCoordinates[0]);
  5188. double longitude = fabs(ctl->jpeg.gpsCoordinates[1]);
  5189. double altitude = fabs(ctl->jpeg.gpsCoordinates[2]);
  5190. exifInfo->gps_latitude[0].num = (uint32_t)latitude;
  5191. exifInfo->gps_latitude[0].den = 1;
  5192. exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
  5193. exifInfo->gps_latitude[1].den = 1;
  5194. exifInfo->gps_latitude[2].num = (uint32_t)round((((latitude - exifInfo->gps_latitude[0].num) * 60)
  5195. - exifInfo->gps_latitude[1].num) * 60);
  5196. exifInfo->gps_latitude[2].den = 1;
  5197. exifInfo->gps_longitude[0].num = (uint32_t)longitude;
  5198. exifInfo->gps_longitude[0].den = 1;
  5199. exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
  5200. exifInfo->gps_longitude[1].den = 1;
  5201. exifInfo->gps_longitude[2].num = (uint32_t)round((((longitude - exifInfo->gps_longitude[0].num) * 60)
  5202. - exifInfo->gps_longitude[1].num) * 60);
  5203. exifInfo->gps_longitude[2].den = 1;
  5204. exifInfo->gps_altitude.num = (uint32_t)round(altitude);
  5205. exifInfo->gps_altitude.den = 1;
  5206. struct tm tm_data;
  5207. long timestamp;
  5208. timestamp = (long)ctl->jpeg.gpsTimestamp;
  5209. gmtime_r(&timestamp, &tm_data);
  5210. exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
  5211. exifInfo->gps_timestamp[0].den = 1;
  5212. exifInfo->gps_timestamp[1].num = tm_data.tm_min;
  5213. exifInfo->gps_timestamp[1].den = 1;
  5214. exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
  5215. exifInfo->gps_timestamp[2].den = 1;
  5216. snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
  5217. "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
  5218. memset(exifInfo->gps_processing_method, 0, 100);
  5219. memcpy(exifInfo->gps_processing_method, currentEntry->gpsProcessingMethod, 32);
  5220. exifInfo->enableGps = true;
  5221. } else {
  5222. exifInfo->enableGps = false;
  5223. }
  5224. //2 1th IFD TIFF Tags
  5225. exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
  5226. exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
  5227. }
// Destructor: trace-only. Thread shutdown is requested beforehand via
// release(), which posts SIGNAL_THREAD_RELEASE to the thread loop.
ExynosCameraHWInterface2::MainThread::~MainThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
// Asks the main thread to exit by posting SIGNAL_THREAD_RELEASE; the thread
// terminates asynchronously when its loop observes the signal.
void ExynosCameraHWInterface2::MainThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
// Destructor: trace-only; see release() for the actual shutdown request.
ExynosCameraHWInterface2::SensorThread::~SensorThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
// Asks the sensor thread to exit by posting SIGNAL_THREAD_RELEASE.
void ExynosCameraHWInterface2::SensorThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
// Destructor: trace-only; see release() for the actual shutdown request.
ExynosCameraHWInterface2::StreamThread::~StreamThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
// Copies the caller's stream configuration wholesale into this thread's
// parameter block. Uses raw memcpy, which assumes stream_parameters_t is
// trivially copyable (plain data) -- TODO confirm if the struct ever grows
// non-POD members.
void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
}
// Asks the stream thread to exit by posting SIGNAL_THREAD_RELEASE.
void ExynosCameraHWInterface2::StreamThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
  5260. int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
  5261. {
  5262. int index;
  5263. for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
  5264. if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
  5265. return index;
  5266. }
  5267. return -1;
  5268. }
  5269. int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle)
  5270. {
  5271. int index;
  5272. for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
  5273. if (m_parameters.svcBufHandle[index] == *bufHandle)
  5274. return index;
  5275. }
  5276. return -1;
  5277. }
  5278. status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority)
  5279. {
  5280. ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
  5281. int index, vacantIndex;
  5282. bool vacancy = false;
  5283. for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
  5284. if (!vacancy && m_attachedSubStreams[index].streamId == -1) {
  5285. vacancy = true;
  5286. vacantIndex = index;
  5287. } else if (m_attachedSubStreams[index].streamId == stream_id) {
  5288. return BAD_VALUE;
  5289. }
  5290. }
  5291. if (!vacancy)
  5292. return NO_MEMORY;
  5293. m_attachedSubStreams[vacantIndex].streamId = stream_id;
  5294. m_attachedSubStreams[vacantIndex].priority = priority;
  5295. m_numRegisteredStream++;
  5296. return NO_ERROR;
  5297. }
  5298. status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id)
  5299. {
  5300. ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
  5301. int index;
  5302. bool found = false;
  5303. for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
  5304. if (m_attachedSubStreams[index].streamId == stream_id) {
  5305. found = true;
  5306. break;
  5307. }
  5308. }
  5309. if (!found)
  5310. return BAD_VALUE;
  5311. m_attachedSubStreams[index].streamId = -1;
  5312. m_attachedSubStreams[index].priority = 0;
  5313. m_numRegisteredStream--;
  5314. return NO_ERROR;
  5315. }
  5316. int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
  5317. {
  5318. if (ionClient == 0) {
  5319. ionClient = ion_client_create();
  5320. if (ionClient < 0) {
  5321. ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
  5322. return 0;
  5323. }
  5324. }
  5325. return ionClient;
  5326. }
  5327. int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
  5328. {
  5329. if (ionClient != 0) {
  5330. if (ionClient > 0) {
  5331. ion_client_destroy(ionClient);
  5332. }
  5333. ionClient = 0;
  5334. }
  5335. return ionClient;
  5336. }
// Convenience overload: allocate with no cached planes (cacheFlag == 0).
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
{
    return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
}
/*
 * Allocate and map up to iMemoryNum planes of buf from the exynos ION heap.
 *
 * ionClient  : valid ION client handle (zero is rejected)
 * buf        : plane sizes are read from buf->size.extS[]; fds and virtual
 *              addresses are written back into buf
 * iMemoryNum : maximum number of planes; a plane with size 0 ends the loop
 * cacheFlag  : bit i set requests a cached ION mapping for plane i
 *
 * On any allocation or mapping failure every plane touched so far is rolled
 * back via freeCameraMemory() and -1 is returned; returns 0 on success.
 */
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
{
    int ret = 0;
    int i = 0;
    int flag = 0;

    if (ionClient == 0) {
        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
        return -1;
    }

    for (i = 0 ; i < iMemoryNum ; i++) {
        if (buf->size.extS[i] == 0) {
            // Zero-sized plane marks the end of the used planes.
            break;
        }
        // (1 << i) & cacheFlag : per-plane cacheability selection.
        if (1 << i & cacheFlag)
            flag = ION_FLAG_CACHED;
        else
            flag = 0;
        buf->fd.extFd[i] = ion_alloc(ionClient, \
                buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK, flag);
        if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
            buf->fd.extFd[i] = -1;
            // Roll back planes allocated in earlier iterations.
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }
        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
                buf->size.extS[i], 0);
        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
            buf->virt.extP[i] = (char *)MAP_FAILED;
            // Roll back, including this plane's fd (freed since fd != -1).
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }
        ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
    }

    return ret;
}
/*
 * Unmap and free the first iMemoryNum planes of buf, then reset each plane
 * to the unallocated sentinel state (fd -1, virt MAP_FAILED, size 0).
 * Safe on partially allocated buffers: planes with fd == -1 are skipped.
 */
void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
{
    int i = 0 ;
    int ret = 0;

    for (i=0;i<iMemoryNum;i++) {
        if (buf->fd.extFd[i] != -1) {
            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
                if (ret < 0)
                    ALOGE("ERR(%s)", __FUNCTION__);
            }
            ion_free(buf->fd.extFd[i]);
            ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
        }
        // Reset sentinels unconditionally so the plane reads as unallocated.
        buf->fd.extFd[i] = -1;
        buf->virt.extP[i] = (char *)MAP_FAILED;
        buf->size.extS[i] = 0;
    }
}
  5397. void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
  5398. {
  5399. int i =0 ;
  5400. for (i=0;i<iMemoryNum;i++) {
  5401. buf->virt.extP[i] = (char *)MAP_FAILED;
  5402. buf->fd.extFd[i] = -1;
  5403. buf->size.extS[i] = 0;
  5404. }
  5405. }
// Singleton HAL state: only one camera2 device may be open at a time.
static camera2_device_t *g_cam2_device = NULL;
// (sic: "vaild") true while an opened device is usable; read by HAL entry
// points such as release_stream/trigger_action to drop late calls.
static bool g_camera_vaild = false;
// Serializes device open/close against each other.
static Mutex g_camera_mutex;
// Per-camera static metadata providers, created lazily in HAL2_getCameraInfo().
ExynosCamera2 * g_camera2[2] = { NULL, NULL };
  5410. static int HAL2_camera_device_close(struct hw_device_t* device)
  5411. {
  5412. Mutex::Autolock lock(g_camera_mutex);
  5413. ALOGD("(%s): ENTER", __FUNCTION__);
  5414. if (device) {
  5415. camera2_device_t *cam_device = (camera2_device_t *)device;
  5416. ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
  5417. ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
  5418. delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
  5419. free(cam_device);
  5420. g_camera_vaild = false;
  5421. g_cam2_device = NULL;
  5422. }
  5423. ALOGD("(%s): EXIT", __FUNCTION__);
  5424. return 0;
  5425. }
// Recover the C++ HAL object stashed in the device's priv field.
static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
{
    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
}
// C-ABI shim: forward to ExynosCameraHWInterface2::setRequestQueueSrcOps().
static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
            const camera2_request_queue_src_ops_t *request_src_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
}
// C-ABI shim: forward to ExynosCameraHWInterface2::notifyRequestQueueNotEmpty().
static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->notifyRequestQueueNotEmpty();
}
// C-ABI shim: forward to ExynosCameraHWInterface2::setFrameQueueDstOps().
static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
}
// C-ABI shim: forward to ExynosCameraHWInterface2::getInProgressCount().
static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getInProgressCount();
}
// C-ABI shim: forward to ExynosCameraHWInterface2::flushCapturesInProgress().
static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->flushCapturesInProgress();
}
// C-ABI shim: forward to ExynosCameraHWInterface2::constructDefaultRequest().
static int HAL2_device_construct_default_request(const struct camera2_device *dev,
            int request_template, camera_metadata_t **request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->constructDefaultRequest(request_template, request);
}
// C-ABI shim: forward to ExynosCameraHWInterface2::allocateStream().
static int HAL2_device_allocate_stream(
        const struct camera2_device *dev,
        // inputs
        uint32_t width,
        uint32_t height,
        int format,
        const camera2_stream_ops_t *stream_ops,
        // outputs
        uint32_t *stream_id,
        uint32_t *format_actual,
        uint32_t *usage,
        uint32_t *max_buffers)
{
    ALOGV("(%s): ", __FUNCTION__);
    return obj(dev)->allocateStream(width, height, format, stream_ops,
                                    stream_id, format_actual, usage, max_buffers);
}
// C-ABI shim: forward to ExynosCameraHWInterface2::registerStreamBuffers().
static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
            uint32_t stream_id,
            int num_buffers,
            buffer_handle_t *buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
}
// C-ABI shim for release_stream. Silently succeeds once the device has been
// invalidated (g_camera_vaild false) to tolerate late framework calls.
static int HAL2_device_release_stream(
        const struct camera2_device *dev,
        uint32_t stream_id)
{
    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->releaseStream(stream_id);
}
// C-ABI shim: forward to ExynosCameraHWInterface2::allocateReprocessStream().
static int HAL2_device_allocate_reprocess_stream(
        const struct camera2_device *dev,
        uint32_t width,
        uint32_t height,
        uint32_t format,
        const camera2_stream_in_ops_t *reprocess_stream_ops,
        // outputs
        uint32_t *stream_id,
        uint32_t *consumer_usage,
        uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
                                             stream_id, consumer_usage, max_buffers);
}
// C-ABI shim: forward to
// ExynosCameraHWInterface2::allocateReprocessStreamFromStream().
static int HAL2_device_allocate_reprocess_stream_from_stream(
        const struct camera2_device *dev,
        uint32_t output_stream_id,
        const camera2_stream_in_ops_t *reprocess_stream_ops,
        // outputs
        uint32_t *stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
                                                       reprocess_stream_ops, stream_id);
}
// C-ABI shim: forward to ExynosCameraHWInterface2::releaseReprocessStream().
static int HAL2_device_release_reprocess_stream(
        const struct camera2_device *dev,
        uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->releaseReprocessStream(stream_id);
}
// C-ABI shim for trigger_action (AF/precapture triggers etc.). Silently
// succeeds once the device has been invalidated, like release_stream.
static int HAL2_device_trigger_action(const struct camera2_device *dev,
            uint32_t trigger_id,
            int ext1,
            int ext2)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
}
// C-ABI shim: forward to ExynosCameraHWInterface2::setNotifyCallback().
static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
            camera2_notify_callback notify_cb,
            void *user)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setNotifyCallback(notify_cb, user);
}
// C-ABI shim: forward to ExynosCameraHWInterface2::getMetadataVendorTagOps().
static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
            vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getMetadataVendorTagOps(ops);
}
// C-ABI shim: forward to ExynosCameraHWInterface2::dump().
static int HAL2_device_dump(const struct camera2_device *dev, int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->dump(fd);
}
// Reports a fixed two-camera configuration (id 0 = back, id 1 = front,
// per HAL2_getCameraInfo below).
static int HAL2_getNumberOfCameras()
{
    ALOGV("(%s): returning 2", __FUNCTION__);
    return 2;
}
  5563. static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
  5564. {
  5565. ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
  5566. static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
  5567. status_t res;
  5568. if (cameraId == 0) {
  5569. info->facing = CAMERA_FACING_BACK;
  5570. if (!g_camera2[0])
  5571. g_camera2[0] = new ExynosCamera2(0);
  5572. }
  5573. else if (cameraId == 1) {
  5574. info->facing = CAMERA_FACING_FRONT;
  5575. if (!g_camera2[1])
  5576. g_camera2[1] = new ExynosCamera2(1);
  5577. }
  5578. else
  5579. return BAD_VALUE;
  5580. info->orientation = 0;
  5581. info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
  5582. if (mCameraInfo[cameraId] == NULL) {
  5583. res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
  5584. if (res != OK) {
  5585. ALOGE("%s: Unable to allocate static info: %s (%d)",
  5586. __FUNCTION__, strerror(-res), res);
  5587. return res;
  5588. }
  5589. res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
  5590. if (res != OK) {
  5591. ALOGE("%s: Unable to fill in static info: %s (%d)",
  5592. __FUNCTION__, strerror(-res), res);
  5593. return res;
  5594. }
  5595. }
  5596. info->static_camera_characteristics = mCameraInfo[cameraId];
  5597. return NO_ERROR;
  5598. }
// Dispatch table wiring the camera2 HAL entry points to the HAL2_device_*
// shims above; SET_METHOD relies on the shims' uniform naming.
#define SET_METHOD(m) m : HAL2_device_##m
static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(allocate_reprocess_stream_from_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        SET_METHOD(dump),
};
#undef SET_METHOD
  5619. static int HAL2_camera_device_open(const struct hw_module_t* module,
  5620. const char *id,
  5621. struct hw_device_t** device)
  5622. {
  5623. int cameraId = atoi(id);
  5624. int openInvalid = 0;
  5625. Mutex::Autolock lock(g_camera_mutex);
  5626. if (g_camera_vaild) {
  5627. ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__);
  5628. return -EBUSY;
  5629. }
  5630. g_camera_vaild = false;
  5631. ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
  5632. if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
  5633. ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
  5634. return -EINVAL;
  5635. }
  5636. ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
  5637. if (g_cam2_device) {
  5638. if (obj(g_cam2_device)->getCameraId() == cameraId) {
  5639. ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
  5640. goto done;
  5641. } else {
  5642. ALOGD("(%s): START waiting for cam device free", __FUNCTION__);
  5643. while (g_cam2_device)
  5644. usleep(SIG_WAITING_TICK);
  5645. ALOGD("(%s): END waiting for cam device free", __FUNCTION__);
  5646. }
  5647. }
  5648. g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
  5649. ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
  5650. if (!g_cam2_device)
  5651. return -ENOMEM;
  5652. g_cam2_device->common.tag = HARDWARE_DEVICE_TAG;
  5653. g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
  5654. g_cam2_device->common.module = const_cast<hw_module_t *>(module);
  5655. g_cam2_device->common.close = HAL2_camera_device_close;
  5656. g_cam2_device->ops = &camera2_device_ops;
  5657. ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);
  5658. g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
  5659. if (!openInvalid) {
  5660. ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
  5661. return -ENODEV;
  5662. }
  5663. done:
  5664. *device = (hw_device_t *)g_cam2_device;
  5665. ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
  5666. g_camera_vaild = true;
  5667. return 0;
  5668. }
// hw_module_methods_t: the framework opens camera devices through this table.
static hw_module_methods_t camera_module_methods = {
            open : HAL2_camera_device_open
};
extern "C" {
    // Module descriptor exported under the well-known HAL_MODULE_INFO_SYM
    // name (C linkage so hw_get_module() can dlsym it unmangled).
    struct camera_module HAL_MODULE_INFO_SYM = {
      common : {
          tag                : HARDWARE_MODULE_TAG,
          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
          hal_api_version    : HARDWARE_HAL_API_VERSION,
          id                 : CAMERA_HARDWARE_MODULE_ID,
          name               : "Exynos Camera HAL2",
          author             : "Samsung Corporation",
          methods            : &camera_module_methods,
          dso:                NULL,
          reserved:           {0},
      },
      get_number_of_cameras : HAL2_getNumberOfCameras,
      get_camera_info       : HAL2_getCameraInfo
    };
}
  5689. }; // namespace android