PageRenderTime 254ms CodeModel.GetById 33ms RepoModel.GetById 6ms app.codeStats 2ms

/libcamera2/ExynosCameraHWInterface2.cpp

https://bitbucket.org/sola/jcrom_hardware_samsung_slsi_exynos5
C++ | 6441 lines | 5504 code | 752 blank | 185 comment | 1257 complexity | 48cd632493d56125ab729d77884b90bb MD5 | raw file

Large files are truncated, but you can click here to view the full file

  1. /*
  2. **
  3. ** Copyright 2008, The Android Open Source Project
  4. ** Copyright 2012, Samsung Electronics Co. LTD
  5. **
  6. ** Licensed under the Apache License, Version 2.0 (the "License");
  7. ** you may not use this file except in compliance with the License.
  8. ** You may obtain a copy of the License at
  9. **
  10. ** http://www.apache.org/licenses/LICENSE-2.0
  11. **
  12. ** Unless required by applicable law or agreed to in writing, software
  13. ** distributed under the License is distributed on an "AS IS" BASIS,
  14. ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. ** See the License for the specific language governing permissions and
  16. ** limitations under the License.
  17. */
  18. /*!
  19. * \file ExynosCameraHWInterface2.cpp
  20. * \brief source file for Android Camera API 2.0 HAL
  21. * \author Sungjoong Kang(sj3.kang@samsung.com)
  22. * \date 2012/07/10
  23. *
  24. * <b>Revision History: </b>
  25. * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
  26. * Initial Release
  27. *
  28. * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
  29. * 2nd Release
  30. *
  31. */
  32. //#define LOG_NDEBUG 0
  33. #define LOG_TAG "ExynosCameraHAL2"
  34. #include <utils/Log.h>
  35. #include <math.h>
  36. #include "ExynosCameraHWInterface2.h"
  37. #include "exynos_format.h"
  38. namespace android {
  39. void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
  40. {
  41. int nw;
  42. int cnt = 0;
  43. uint32_t written = 0;
  44. ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
  45. int fd = open(fname, O_RDWR | O_CREAT, 0644);
  46. if (fd < 0) {
  47. ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
  48. return;
  49. }
  50. ALOGV("writing %d bytes to file [%s]", size, fname);
  51. while (written < size) {
  52. nw = ::write(fd, buf + written, size - written);
  53. if (nw < 0) {
  54. ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
  55. break;
  56. }
  57. written += nw;
  58. cnt++;
  59. }
  60. ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
  61. ::close(fd);
  62. }
  63. int get_pixel_depth(uint32_t fmt)
  64. {
  65. int depth = 0;
  66. switch (fmt) {
  67. case V4L2_PIX_FMT_JPEG:
  68. depth = 8;
  69. break;
  70. case V4L2_PIX_FMT_NV12:
  71. case V4L2_PIX_FMT_NV21:
  72. case V4L2_PIX_FMT_YUV420:
  73. case V4L2_PIX_FMT_YVU420M:
  74. case V4L2_PIX_FMT_NV12M:
  75. case V4L2_PIX_FMT_NV12MT:
  76. depth = 12;
  77. break;
  78. case V4L2_PIX_FMT_RGB565:
  79. case V4L2_PIX_FMT_YUYV:
  80. case V4L2_PIX_FMT_YVYU:
  81. case V4L2_PIX_FMT_UYVY:
  82. case V4L2_PIX_FMT_VYUY:
  83. case V4L2_PIX_FMT_NV16:
  84. case V4L2_PIX_FMT_NV61:
  85. case V4L2_PIX_FMT_YUV422P:
  86. case V4L2_PIX_FMT_SBGGR10:
  87. case V4L2_PIX_FMT_SBGGR12:
  88. case V4L2_PIX_FMT_SBGGR16:
  89. depth = 16;
  90. break;
  91. case V4L2_PIX_FMT_RGB32:
  92. depth = 32;
  93. break;
  94. default:
  95. ALOGE("Get depth failed(format : %d)", fmt);
  96. break;
  97. }
  98. return depth;
  99. }
  100. int cam_int_s_fmt(node_info_t *node)
  101. {
  102. struct v4l2_format v4l2_fmt;
  103. unsigned int framesize;
  104. int ret;
  105. memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
  106. v4l2_fmt.type = node->type;
  107. framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
  108. if (node->planes >= 1) {
  109. v4l2_fmt.fmt.pix_mp.width = node->width;
  110. v4l2_fmt.fmt.pix_mp.height = node->height;
  111. v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
  112. v4l2_fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
  113. } else {
  114. ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
  115. }
  116. /* Set up for capture */
  117. ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
  118. if (ret < 0)
  119. ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
  120. return ret;
  121. }
  122. int cam_int_reqbufs(node_info_t *node)
  123. {
  124. struct v4l2_requestbuffers req;
  125. int ret;
  126. req.count = node->buffers;
  127. req.type = node->type;
  128. req.memory = node->memory;
  129. ret = exynos_v4l2_reqbufs(node->fd, &req);
  130. if (ret < 0)
  131. ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
  132. return req.count;
  133. }
  134. int cam_int_qbuf(node_info_t *node, int index)
  135. {
  136. struct v4l2_buffer v4l2_buf;
  137. struct v4l2_plane planes[VIDEO_MAX_PLANES];
  138. int i;
  139. int ret = 0;
  140. v4l2_buf.m.planes = planes;
  141. v4l2_buf.type = node->type;
  142. v4l2_buf.memory = node->memory;
  143. v4l2_buf.index = index;
  144. v4l2_buf.length = node->planes;
  145. for(i = 0; i < node->planes; i++){
  146. v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
  147. v4l2_buf.m.planes[i].length = (unsigned long)(node->buffer[index].size.extS[i]);
  148. }
  149. ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
  150. if (ret < 0)
  151. ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
  152. return ret;
  153. }
  154. int cam_int_streamon(node_info_t *node)
  155. {
  156. enum v4l2_buf_type type = node->type;
  157. int ret;
  158. ret = exynos_v4l2_streamon(node->fd, type);
  159. if (ret < 0)
  160. ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
  161. ALOGV("On streaming I/O... ... fd(%d)", node->fd);
  162. return ret;
  163. }
  164. int cam_int_streamoff(node_info_t *node)
  165. {
  166. enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  167. int ret;
  168. ALOGV("Off streaming I/O... fd(%d)", node->fd);
  169. ret = exynos_v4l2_streamoff(node->fd, type);
  170. if (ret < 0)
  171. ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
  172. return ret;
  173. }
  174. int isp_int_streamoff(node_info_t *node)
  175. {
  176. enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  177. int ret;
  178. ALOGV("Off streaming I/O... fd(%d)", node->fd);
  179. ret = exynos_v4l2_streamoff(node->fd, type);
  180. if (ret < 0)
  181. ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
  182. return ret;
  183. }
  184. int cam_int_dqbuf(node_info_t *node)
  185. {
  186. struct v4l2_buffer v4l2_buf;
  187. struct v4l2_plane planes[VIDEO_MAX_PLANES];
  188. int ret;
  189. v4l2_buf.type = node->type;
  190. v4l2_buf.memory = node->memory;
  191. v4l2_buf.m.planes = planes;
  192. v4l2_buf.length = node->planes;
  193. ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
  194. if (ret < 0)
  195. ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
  196. return v4l2_buf.index;
  197. }
  198. int cam_int_dqbuf(node_info_t *node, int num_plane)
  199. {
  200. struct v4l2_buffer v4l2_buf;
  201. struct v4l2_plane planes[VIDEO_MAX_PLANES];
  202. int ret;
  203. v4l2_buf.type = node->type;
  204. v4l2_buf.memory = node->memory;
  205. v4l2_buf.m.planes = planes;
  206. v4l2_buf.length = num_plane;
  207. ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
  208. if (ret < 0)
  209. ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
  210. return v4l2_buf.index;
  211. }
  212. int cam_int_s_input(node_info_t *node, int index)
  213. {
  214. int ret;
  215. ret = exynos_v4l2_s_input(node->fd, index);
  216. if (ret < 0)
  217. ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
  218. return ret;
  219. }
/* Process-wide gralloc HAL module handle; lazily loaded in the
 * ExynosCameraHWInterface2 constructor and shared by all instances. */
gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
/*
 * RequestManager constructor.
 *
 * main_thread : the HAL main SignalDrivenThread; it is signaled with
 *               SIGNAL_MAIN_STREAM_OUTPUT_DONE when a request completes
 *               (see CheckCompleted).
 *
 * Allocates the metadata converter and clears the circular request table
 * via ResetEntry(); the "last 3A mode" caches start at 0 so the first
 * request always forwards its modes to the ISP.
 */
RequestManager::RequestManager(SignalDrivenThread* main_thread):
    m_lastAeMode(0),
    m_lastAaMode(0),
    m_lastAwbMode(0),
    m_vdisBubbleEn(false),
    m_lastAeComp(0),
    m_lastCompletedFrameCnt(-1)
{
    m_metadataConverter = new MetadataConverter;
    m_mainThread = main_thread;
    ResetEntry();
    m_sensorPipelineSkipCnt = 0;
    return;
}
  235. RequestManager::~RequestManager()
  236. {
  237. ALOGV("%s", __FUNCTION__);
  238. if (m_metadataConverter != NULL) {
  239. delete m_metadataConverter;
  240. m_metadataConverter = NULL;
  241. }
  242. releaseSensorQ();
  243. return;
  244. }
  245. void RequestManager::ResetEntry()
  246. {
  247. Mutex::Autolock lock(m_requestMutex);
  248. for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
  249. memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
  250. entries[i].internal_shot.shot.ctl.request.frameCount = -1;
  251. }
  252. m_numOfEntries = 0;
  253. m_entryInsertionIndex = -1;
  254. m_entryProcessingIndex = -1;
  255. m_entryFrameOutputIndex = -1;
  256. }
/* Number of requests currently held in the table.
 * NOTE(review): read without m_requestMutex — a racy snapshot; callers
 * appear to tolerate staleness. */
int RequestManager::GetNumEntries()
{
    return m_numOfEntries;
}
/* Remember the sensor crop X offset used as a default for later requests. */
void RequestManager::SetDefaultParameters(int cropX)
{
    m_cropX = cropX;
}
  265. bool RequestManager::IsRequestQueueFull()
  266. {
  267. Mutex::Autolock lock(m_requestMutex);
  268. if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
  269. return true;
  270. else
  271. return false;
  272. }
/*
 * Insert a new framework capture request into the next circular-buffer slot.
 *
 * new_request : framework metadata; kept until DeregisterRequest returns it
 * afMode      : out — AF mode parsed from the request
 * afRegion    : out — 4-element AF region parsed from the request
 *
 * Converts the metadata into the internal camera2_shot_ext and counts how
 * many output paths (SCP/SCC) this request expects; that count later drives
 * completion tracking in CheckCompleted().
 */
void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);

    Mutex::Autolock lock(m_requestMutex);

    request_manager_entry * newEntry = NULL;
    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries );

    newEntry = &(entries[newInsertionIndex]);
    if (newEntry->status!=EMPTY) {
        /* Table wrapped onto a slot still in flight: drop the request. */
        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
        return;
    }
    newEntry->status = REGISTERED;
    newEntry->original_request = new_request;
    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));

    /* One completion "ticket" per requested output path. */
    newEntry->output_stream_count = 0;
    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
        newEntry->output_stream_count++;
    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
        newEntry->output_stream_count++;

    m_numOfEntries++;
    m_entryInsertionIndex = newInsertionIndex;

    /* Report the request's AF parameters back to the caller. */
    *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
    afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0];
    afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1];
    afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2];
    afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3];
    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
        m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
}
  304. void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
  305. {
  306. ALOGV("DEBUG(%s):", __FUNCTION__);
  307. int frame_index;
  308. request_manager_entry * currentEntry;
  309. Mutex::Autolock lock(m_requestMutex);
  310. frame_index = GetCompletedIndex();
  311. currentEntry = &(entries[frame_index]);
  312. if (currentEntry->status != COMPLETED) {
  313. CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
  314. m_entryProcessingIndex, frame_index,(int)(currentEntry->status));
  315. return;
  316. }
  317. if (deregistered_request) *deregistered_request = currentEntry->original_request;
  318. m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount;
  319. currentEntry->status = EMPTY;
  320. currentEntry->original_request = NULL;
  321. memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
  322. currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
  323. currentEntry->output_stream_count = 0;
  324. m_numOfEntries--;
  325. ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
  326. m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
  327. CheckCompleted(GetNextIndex(frame_index));
  328. return;
  329. }
  330. bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
  331. camera_metadata_t ** prepared_frame, int afState)
  332. {
  333. ALOGV("DEBUG(%s):", __FUNCTION__);
  334. Mutex::Autolock lock(m_requestMutex);
  335. status_t res = NO_ERROR;
  336. int tempFrameOutputIndex = GetCompletedIndex();
  337. request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]);
  338. ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
  339. m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
  340. if (currentEntry->status != COMPLETED) {
  341. ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
  342. return false;
  343. }
  344. m_entryFrameOutputIndex = tempFrameOutputIndex;
  345. m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated
  346. add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
  347. res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
  348. m_tempFrameMetadata);
  349. if (res!=NO_ERROR) {
  350. ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
  351. return false;
  352. }
  353. *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
  354. *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
  355. *prepared_frame = m_tempFrameMetadata;
  356. ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
  357. currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
  358. // Dump();
  359. return true;
  360. }
/*
 * Advance the processing cursor to the next REGISTERED entry, mark it
 * REQUESTED, and fill the sensor buffer's shot area (buf->virt.extP[1])
 * with the camera2_shot_ext that the sensor/ISP pipeline will consume.
 *
 * Returns the entry index now being processed, or -1 when there is
 * nothing to process (empty table, request underrun, or an entry in an
 * unexpected state).
 */
int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
{
    struct camera2_shot_ext * shot_ext;
    struct camera2_shot_ext * request_shot;
    int targetStreamIndex = 0;
    request_manager_entry * newEntry = NULL;
    static int count = 0;   // only the very first shot ever sends AA_CONTROL_AUTO

    Mutex::Autolock lock(m_requestMutex);
    if (m_numOfEntries == 0) {
        CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
        return -1;
    }

    if ((m_entryProcessingIndex == m_entryInsertionIndex)
        && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
        /* Consumer caught up with producer: no new registered request yet. */
        ALOGV("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)",
            m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
        return -1;
    }

    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);

    newEntry = &(entries[newProcessingIndex]);
    request_shot = &(newEntry->internal_shot);
    if (newEntry->status != REGISTERED) {
        CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
        for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
            CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
        }
        return -1;
    }

    newEntry->status = REQUESTED;

    /* The shot metadata lives in the second plane of the sensor buffer. */
    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];

    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
    shot_ext->request_sensor = 1;
    /* Bypass post-processing blocks by default; FD may be re-enabled below. */
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->setfile = 0;

    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
    if (targetStreamIndex & MASK_OUTPUT_SCP)
        shot_ext->request_scp = 1;

    if (targetStreamIndex & MASK_OUTPUT_SCC)
        shot_ext->request_scc = 1;

    /* NOTE(review): this reads faceDetectMode from the just-cleared
     * shot_ext (always 0), not from request_shot — looks like it should
     * test the request; confirm before changing. */
    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
        shot_ext->fd_bypass = 0;

    if (count == 0){
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
    } else
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
    count++;
    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
    shot_ext->shot.magicNumber = 0x23456789;   // sentinel value; presumably checked downstream — TODO confirm
    shot_ext->shot.ctl.sensor.exposureTime = 0;
    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;   // 33 ms => ~30 fps default
    shot_ext->shot.ctl.sensor.sensitivity = 0;

    /* NOTE(review): only cropRegion[0..2] are copied; [3] stays zeroed. */
    shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
    shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
    shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];

    m_entryProcessingIndex = newProcessingIndex;
    return newProcessingIndex;
}
/*
 * Called when one output path (SCP or SCC) has delivered a buffer for
 * frameCnt. Decrements the entry's outstanding-output counter and lets
 * CheckCompleted() promote the entry once metadata and all outputs are in.
 */
void RequestManager::NotifyStreamOutput(int frameCnt)
{
    int index;

    Mutex::Autolock lock(m_requestMutex);
    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }
    ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt, entries[index].output_stream_count);

    entries[index].output_stream_count--;  //TODO : match stream id also
    CheckCompleted(index);
}
/*
 * Promote entry `index` to COMPLETED when its ISP metadata has arrived
 * (METADONE) and every expected stream output has been delivered.
 * The main thread is signaled only for the in-order next frame
 * (m_lastCompletedFrameCnt + 1) so results are emitted in sequence.
 *
 * NOTE(review): no locking here — all visible callers already hold
 * m_requestMutex; must not be called unlocked.
 */
void RequestManager::CheckCompleted(int index)
{
    if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
        && (entries[index].output_stream_count <= 0)){
        ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
            index, entries[index].internal_shot.shot.ctl.request.frameCount );
        entries[index].status = COMPLETED;
        if (m_lastCompletedFrameCnt + 1 == entries[index].internal_shot.shot.ctl.request.frameCount)
            m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
    }
}
/* Table slot of the next frame expected to complete in order,
 * or -1 when that frame is not (or no longer) in the table. */
int RequestManager::GetCompletedIndex()
{
    return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1);
}
/* Queue a request-table index for the sensor thread to consume later. */
void RequestManager::pushSensorQ(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    m_sensorQ.push_back(index);
}
  458. int RequestManager::popSensorQ()
  459. {
  460. List<int>::iterator sensor_token;
  461. int index;
  462. Mutex::Autolock lock(m_requestMutex);
  463. if(m_sensorQ.size() == 0)
  464. return -1;
  465. sensor_token = m_sensorQ.begin()++;
  466. index = *sensor_token;
  467. m_sensorQ.erase(sensor_token);
  468. return (index);
  469. }
  470. void RequestManager::releaseSensorQ()
  471. {
  472. List<int>::iterator r;
  473. Mutex::Autolock lock(m_requestMutex);
  474. ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size());
  475. while(m_sensorQ.size() > 0){
  476. r = m_sensorQ.begin()++;
  477. m_sensorQ.erase(r);
  478. }
  479. return;
  480. }
/*
 * Merge ISP-produced dynamic metadata (shot_ext->shot.dm) into the matching
 * CAPTURED entry, preserving the sensor timestamp that RegisterTimestamp
 * recorded earlier, then mark the entry METADONE and re-check completion.
 */
void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
{
    int index;
    struct camera2_shot_ext * request_shot;
    nsecs_t timeStamp;
    int i;

    Mutex::Autolock lock(m_requestMutex);
    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);

    /* Match by frame count AND CAPTURED state so stale entries are skipped. */
    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
            && (entries[i].status == CAPTURED)){
            entries[i].status = METADONE;
            break;
        }
    }

    if (i == NUM_MAX_REQUEST_MGR_ENTRY){
        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        return;
    }

    request_manager_entry * newEntry = &(entries[i]);
    request_shot = &(newEntry->internal_shot);
    /* Keep the timestamp captured at sensor time; the ISP's dm copy would
     * otherwise clobber it. */
    timeStamp = request_shot->shot.dm.sensor.timeStamp;
    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
    request_shot->shot.dm.sensor.timeStamp = timeStamp;
    m_lastTimeStamp = timeStamp;
    CheckCompleted(i);
}
/*
 * Populate the ISP-bound shot_ext for frameCnt from the stored request,
 * applying 3A mode-change deduplication and flash/lock/VDIS policy.
 *
 * shot_ext : destination shot handed to the ISP
 * frameCnt : frame to look up (negative values are ignored)
 * ctl_info : persistent control state; receives the flash UI mode
 *
 * aa/ae/awb modes and exposure compensation are forwarded only when they
 * differ from the previously sent value (0 means "no change" to the ISP).
 *
 * NOTE(review): m_requestMutex is not taken here — presumably the caller
 * serializes ISP updates; confirm.
 */
void RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
{
    int index, targetStreamIndex;
    struct camera2_shot_ext * request_shot;

    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
    if (frameCnt < 0)
        return;

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * newEntry = &(entries[index]);
    request_shot = &(newEntry->internal_shot);
    memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));

    shot_ext->shot.ctl.request.frameCount = frameCnt;
    shot_ext->request_sensor = 1;
    /* Default: bypass all post-processing; selectively re-enabled below. */
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->drc_bypass = 1;
    shot_ext->setfile = 0;

    shot_ext->request_scc = 0;
    shot_ext->request_scp = 0;

    shot_ext->isReprocessing = request_shot->isReprocessing;
    shot_ext->reprocessInput = request_shot->reprocessInput;
    shot_ext->shot.ctl.request.outputStreams[0] = 0;

    shot_ext->awb_mode_dm = request_shot->awb_mode_dm;

    /* NOTE(review): cropRegion[3] is not copied — confirm intentional. */
    shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
    shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
    shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];

    // mapping flash UI mode from aeMode
    if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
        if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW)
            ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
        else if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD)
            ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
        request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
    }

    // Apply ae/awb lock or unlock
    if (request_shot->ae_lock == AEMODE_LOCK_ON)
        request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
    if (request_shot->awb_lock == AWBMODE_LOCK_ON)
        request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;

    /* Forward each 3A mode only when it changed since the last shot
     * (0 tells the ISP to keep the previous setting). */
    if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
        shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
        m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
    }
    if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
        shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
        m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
    }
    if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
        m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
    }
    if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
        shot_ext->shot.ctl.aa.aeExpCompensation = 0;
    }
    else {
        shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
        m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
    }

    /* VDIS on => run DIS + DNR; also remembered for bubble handling. */
    if (request_shot->shot.ctl.aa.videoStabilizationMode) {
        m_vdisBubbleEn = true;
        shot_ext->dis_bypass = 0;
        shot_ext->dnr_bypass = 0;
    } else {
        m_vdisBubbleEn = false;
        shot_ext->dis_bypass = 1;
        shot_ext->dnr_bypass = 1;
    }

    shot_ext->shot.ctl.aa.afTrigger = 0;

    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
    if (targetStreamIndex & MASK_OUTPUT_SCP)
        shot_ext->request_scp = 1;

    if (targetStreamIndex & MASK_OUTPUT_SCC)
        shot_ext->request_scc = 1;

    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
        shot_ext->fd_bypass = 0;

    shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0];
    shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1];

    ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
    (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
    (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
    (int)(shot_ext->shot.ctl.aa.afMode));
}
/* Whether video stabilization (VDIS) was enabled by the latest ISP update. */
bool RequestManager::IsVdisEnable(void)
{
    return m_vdisBubbleEn;
}
  609. int RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
  610. {
  611. for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
  612. if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
  613. return i;
  614. }
  615. return -1;
  616. }
/*
 * Store the sensor-side capture timestamp for frameCnt. Reprocessing
 * requests keep the timestamp of their original capture.
 *
 * NOTE(review): m_requestMutex is not taken, and nsecs_t is written
 * through a uint64_t* cast — confirm both are intentional.
 */
void RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    if (currentEntry->internal_shot.isReprocessing == 1) {
        ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    } else {
        currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
        ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
            index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    }
}
  634. nsecs_t RequestManager::GetTimestampByFrameCnt(int frameCnt)
  635. {
  636. int index = FindEntryIndexByFrameCnt(frameCnt);
  637. if (index == -1) {
  638. ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
  639. return m_lastTimeStamp;
  640. }
  641. else
  642. return GetTimestamp(index);
  643. }
/*
 * Timestamp recorded for table slot `index`; falls back to the most
 * recent non-zero timestamp (m_lastTimeStamp) when the slot has none.
 * Returns 0 for out-of-range indices.
 */
nsecs_t RequestManager::GetTimestamp(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
        return 0;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
    if (frameTime == 0) {
        ALOGV("DEBUG(%s): timestamp null, returning saved value", __FUNCTION__);
        frameTime = m_lastTimeStamp;
    }
    ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
    return frameTime;
}
  660. uint8_t RequestManager::GetOutputStreamByFrameCnt(int frameCnt)
  661. {
  662. int index = FindEntryIndexByFrameCnt(frameCnt);
  663. if (index == -1) {
  664. ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
  665. return 0;
  666. }
  667. else
  668. return GetOutputStream(index);
  669. }
  670. uint8_t RequestManager::GetOutputStream(int index)
  671. {
  672. Mutex::Autolock lock(m_requestMutex);
  673. if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
  674. ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
  675. return 0;
  676. }
  677. request_manager_entry * currentEntry = &(entries[index]);
  678. return currentEntry->internal_shot.shot.ctl.request.outputStreams[0];
  679. }
  680. camera2_shot_ext * RequestManager::GetInternalShotExtByFrameCnt(int frameCnt)
  681. {
  682. int index = FindEntryIndexByFrameCnt(frameCnt);
  683. if (index == -1) {
  684. ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
  685. return 0;
  686. }
  687. else
  688. return GetInternalShotExt(index);
  689. }
  690. camera2_shot_ext * RequestManager::GetInternalShotExt(int index)
  691. {
  692. Mutex::Autolock lock(m_requestMutex);
  693. if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
  694. ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
  695. return 0;
  696. }
  697. request_manager_entry * currentEntry = &(entries[index]);
  698. return &currentEntry->internal_shot;
  699. }
/*
 * Match an ISP-returned shot to its REQUESTED entry, flip the entry to
 * CAPTURED, and return the frame count. Returns -1 when the table is
 * empty, no entry matches, or the matching entry is in an unexpected
 * state (which is logged).
 */
int RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
{
    Mutex::Autolock lock(m_requestMutex);
    int i;

    if (m_numOfEntries == 0) {
        CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
        return -1;
    }

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
            continue;

        if (entries[i].status == REQUESTED) {
            entries[i].status = CAPTURED;
            return entries[i].internal_shot.shot.ctl.request.frameCount;
        }
        /* Frame count matched but state machine is off — keep scanning. */
        CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);
    }
    CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
    return -1;
}
  720. void RequestManager::SetInitialSkip(int count)
  721. {
  722. ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
  723. if (count > m_sensorPipelineSkipCnt)
  724. m_sensorPipelineSkipCnt = count;
  725. }
  726. int RequestManager::GetSkipCnt()
  727. {
  728. ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
  729. if (m_sensorPipelineSkipCnt == 0)
  730. return m_sensorPipelineSkipCnt;
  731. else
  732. return --m_sensorPipelineSkipCnt;
  733. }
/* Debug dump of the whole request table: per-slot status, frame count,
 * outstanding output count and output stream mask. */
void RequestManager::Dump(void)
{
    int i = 0;
    request_manager_entry * currentEntry;
    ALOGD("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)",
        m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        currentEntry = &(entries[i]);
        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
        currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
        currentEntry->output_stream_count,
        currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
    }
}
  748. int RequestManager::GetNextIndex(int index)
  749. {
  750. index++;
  751. if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
  752. index = 0;
  753. return index;
  754. }
  755. int RequestManager::GetPrevIndex(int index)
  756. {
  757. index--;
  758. if (index < 0)
  759. index = NUM_MAX_REQUEST_MGR_ENTRY-1;
  760. return index;
  761. }
/*
 * Constructor: wires up the HAL instance for one camera.
 * Loads the gralloc module, creates an ion client, builds the bayer-buffer
 * manager plus the main/sensor threads, and runs InitializeISPChain().
 * The result of InitializeISPChain() is reported to the caller through
 * *openInvalid; only a negative value is treated as failure here.
 */
ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
    m_requestQueueOps(NULL),
    m_frameQueueOps(NULL),
    m_callbackCookie(NULL),
    m_numOfRemainingReqInSvc(0),
    m_isRequestQueuePending(false),
    m_isRequestQueueNull(true),
    m_isIspStarted(false),
    m_ionCameraClient(0),
    m_zoomRatio(1),
    m_scp_closing(false),
    m_scp_closed(false),
    m_afState(HAL_AFSTATE_INACTIVE),
    m_afMode(NO_CHANGE),
    m_afMode2(NO_CHANGE),
    m_vdisBubbleCnt(0),
    m_vdisDupFrame(0),
    m_IsAfModeUpdateRequired(false),
    m_IsAfTriggerRequired(false),
    m_IsAfLockRequired(false),
    m_sccLocalBufferValid(false),
    m_wideAspect(false),
    m_scpOutputSignalCnt(0),
    m_scpOutputImageCnt(0),
    m_afTriggerId(0),
    m_afPendingTriggerId(0),
    m_afModeWaitingCnt(0),
    m_jpegEncodingCount(0),
    m_scpForceSuspended(false),
    m_halDevice(dev),
    m_nightCaptureCnt(0),
    m_nightCaptureFrameCnt(0),
    m_lastSceneMode(0),
    m_cameraId(cameraId),
    m_thumbNailW(160),
    m_thumbNailH(120)
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    int ret = 0;
    int res = 0;
    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;
    // Load the gralloc HAL only once (m_grallocHal is shared across instances).
    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    }
    m_camera2 = camera;
    // ion client backs all DMABUF camera buffer allocations in this HAL.
    m_ionCameraClient = createIonClient(m_ionCameraClient);
    if(m_ionCameraClient == 0)
        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
    m_BayerManager = new BayerBufManager();
    m_mainThread = new MainThread(this);
    // RequestManager signals the main thread when requests change state.
    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    *openInvalid = InitializeISPChain();
    if (*openInvalid < 0) {
        ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
        // clean process
        // 1. close video nodes
        // SCP
        res = exynos_v4l2_close(m_camera_info.scp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // SCC
        res = exynos_v4l2_close(m_camera_info.capture.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // Sensor
        res = exynos_v4l2_close(m_camera_info.sensor.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // ISP
        res = exynos_v4l2_close(m_camera_info.isp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
    } else {
        // ISP chain is up: start the worker threads and the CSC (color space
        // conversion) contexts for picture and video paths.
        m_sensorThread = new SensorThread(this);
        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);
        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
            m_subStreams[i].type = SUBSTREAM_TYPE_NONE;
        CSC_METHOD cscMethod = CSC_METHOD_HW;
        m_exynosPictureCSC = csc_init(cscMethod);
        if (m_exynosPictureCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
        m_exynosVideoCSC = csc_init(cscMethod);
        if (m_exynosVideoCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
        m_setExifFixedAttribute();
        // contol information clear
        // flash
        // NOTE(review): i_flashMode is initialized with an AE-mode enum value
        // (AA_AEMODE_ON); this matches the original code but looks like a
        // type mismatch — verify against the metadata definitions.
        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
        m_ctlInfo.flash.m_afFlashDoneFlg= false;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashFrameCount = 0;
        m_ctlInfo.flash.m_flashCnt = 0;
        m_ctlInfo.flash.m_flashTimeOut = 0;
        m_ctlInfo.flash.m_flashDecisionResult = false;
        m_ctlInfo.flash.m_flashTorchMode = false;
        m_ctlInfo.flash.m_precaptureState = 0;
        m_ctlInfo.flash.m_precaptureTriggerId = 0;
        // ae
        m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
        // af
        m_ctlInfo.af.m_afTriggerTimeOut = 0;
        // scene
        m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
    }
    ALOGD("(%s): EXIT", __FUNCTION__);
}
/*
 * Destructor: delegates all teardown (threads, buffers, video nodes, ion
 * client) to release(), which is also callable independently by the framework.
 */
ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    this->release();
    ALOGD("(%s): EXIT", __FUNCTION__);
}
/*
 * Tears down the whole HAL instance in a fixed order:
 *  1) signal both stream threads, the sensor thread and the main thread to
 *     terminate,
 *  2) destroy the CSC contexts,
 *  3) busy-wait (SIG_WAITING_TICK polling) for each thread to actually
 *     terminate before dropping its sp<> reference,
 *  4) free bayer and SCC buffers, close the four v4l2 nodes, and finally
 *     destroy the ion client.
 * The signal-first/wait-later split lets all threads wind down in parallel.
 */
void ExynosCameraHWInterface2::release()
{
    int i, res;
    ALOGD("(HAL2::release): ENTER");
    // Phase 1: request termination of every worker thread (non-blocking).
    if (m_streamThreads[1] != NULL) {
        m_streamThreads[1]->release();
        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }
    if (m_streamThreads[0] != NULL) {
        m_streamThreads[0]->release();
        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }
    if (m_sensorThread != NULL) {
        m_sensorThread->release();
    }
    if (m_mainThread != NULL) {
        m_mainThread->release();
    }
    // Phase 2: tear down the CSC (scaler/color-converter) handles.
    if (m_exynosPictureCSC)
        csc_deinit(m_exynosPictureCSC);
    m_exynosPictureCSC = NULL;
    if (m_exynosVideoCSC)
        csc_deinit(m_exynosVideoCSC);
    m_exynosVideoCSC = NULL;
    // Phase 3: poll until each thread reports terminated, then release the
    // strong pointer so the thread object can be destroyed.
    if (m_streamThreads[1] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
        while (!m_streamThreads[1]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 1 termination");
        m_streamThreads[1] = NULL;
    }
    if (m_streamThreads[0] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
        while (!m_streamThreads[0]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 0 termination");
        m_streamThreads[0] = NULL;
    }
    if (m_sensorThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) sensor thread termination");
        m_sensorThread = NULL;
    }
    if (m_mainThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
        while (!m_mainThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) main thread termination");
        m_mainThread = NULL;
    }
    if (m_requestManager != NULL) {
        delete m_requestManager;
        m_requestManager = NULL;
    }
    if (m_BayerManager != NULL) {
        delete m_BayerManager;
        m_BayerManager = NULL;
    }
    // Phase 4: free DMABUF-backed buffers. SCC buffers may live either in the
    // local cache (m_sccLocalBuffer, plane count fixed at alloc time) or in
    // the capture node's buffer array.
    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
    if (m_sccLocalBufferValid) {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
#ifdef ENABLE_FRAME_SYNC
            freeCameraMemory(&m_sccLocalBuffer[i], 2);
#else
            freeCameraMemory(&m_sccLocalBuffer[i], 1);
#endif
    }
    else {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
            freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
    }
    // Close the four FIMC-IS video nodes opened in InitializeISPChain().
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.sensor.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.isp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.capture.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.scp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
    deleteIonClient(m_ionCameraClient);
    ALOGD("(HAL2::release): EXIT");
}
  983. int ExynosCameraHWInterface2::InitializeISPChain()
  984. {
  985. char node_name[30];
  986. int fd = 0;
  987. int i;
  988. int ret = 0;
  989. /* Open Sensor */
  990. memset(&node_name, 0x00, sizeof(char[30]));
  991. sprintf(node_name, "%s%d", NODE_PREFIX, 40);
  992. fd = exynos_v4l2_open(node_name, O_RDWR, 0);
  993. if (fd < 0) {
  994. ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
  995. }
  996. else {
  997. ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
  998. }
  999. m_camera_info.sensor.fd = fd;
  1000. /* Open ISP */
  1001. memset(&node_name, 0x00, sizeof(char[30]));
  1002. sprintf(node_name, "%s%d", NODE_PREFIX, 41);
  1003. fd = exynos_v4l2_open(node_name, O_RDWR, 0);
  1004. if (fd < 0) {
  1005. ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
  1006. }
  1007. else {
  1008. ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
  1009. }
  1010. m_camera_info.isp.fd = fd;
  1011. /* Open ScalerC */
  1012. memset(&node_name, 0x00, sizeof(char[30]));
  1013. sprintf(node_name, "%s%d", NODE_PREFIX, 42);
  1014. fd = exynos_v4l2_open(node_name, O_RDWR, 0);
  1015. if (fd < 0) {
  1016. ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
  1017. }
  1018. else {
  1019. ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
  1020. }
  1021. m_camera_info.capture.fd = fd;
  1022. /* Open ScalerP */
  1023. memset(&node_name, 0x00, sizeof(char[30]));
  1024. sprintf(node_name, "%s%d", NODE_PREFIX, 44);
  1025. fd = exynos_v4l2_open(node_name, O_RDWR, 0);
  1026. if (fd < 0) {
  1027. ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
  1028. }
  1029. else {
  1030. ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
  1031. }
  1032. m_camera_info.scp.fd = fd;
  1033. if(m_cameraId == 0)
  1034. m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
  1035. else
  1036. m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
  1037. memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
  1038. m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
  1039. m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
  1040. m_camera_info.dummy_shot.dis_bypass = 1;
  1041. m_camera_info.dummy_shot.dnr_bypass = 1;
  1042. m_camera_info.dummy_shot.fd_bypass = 1;
  1043. /*sensor setting*/
  1044. m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
  1045. m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
  1046. m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
  1047. m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
  1048. m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
  1049. /*request setting*/
  1050. m_camera_info.dummy_shot.request_sensor = 1;
  1051. m_camera_info.dummy_shot.request_scc = 0;
  1052. m_camera_info.dummy_shot.request_scp = 0;
  1053. m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
  1054. m_camera_info.sensor.width = m_camera2->getSensorRawW();
  1055. m_camera_info.sensor.height = m_camera2->getSensorRawH();
  1056. m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
  1057. m_camera_info.sensor.planes = 2;
  1058. m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
  1059. m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  1060. m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
  1061. for(i = 0; i < m_camera_info.sensor.buffers; i++){
  1062. initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
  1063. m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
  1064. m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
  1065. allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
  1066. }
  1067. m_camera_info.isp.width = m_camera_info.sensor.width;
  1068. m_camera_info.isp.height = m_camera_info.sensor.height;
  1069. m_camera_info.isp.format = m_camera_info.sensor.format;
  1070. m_camera_info.isp.planes = m_camera_info.sensor.planes;
  1071. m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
  1072. m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  1073. m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
  1074. for(i = 0; i < m_camera_info.isp.buffers; i++){
  1075. initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
  1076. m_camera_info.isp.buffer[i].size.extS[0] = m_camera_info.sensor.buffer[i].size.extS[0];
  1077. m_camera_info.isp.buffer[i].size.extS[1] = m_camera_info.sensor.buffer[i].size.extS[1];
  1078. m_camera_info.isp.buffer[i].fd.extFd[0] = m_camera_info.sensor.buffer[i].fd.extFd[0];
  1079. m_camera_info.isp.buffer[i].fd.extFd[1] = m_camera_info.sensor.buffer[i].fd.extFd[1];
  1080. m_camera_info.isp.buffer[i].virt.extP[0] = m_camera_info.sensor.buffer[i].virt.extP[0];
  1081. m_camera_info.isp.buffer[i].virt.extP[1] = m_camera_info.sensor.buffer[i].virt.extP[1];
  1082. };
  1083. /* init ISP */
  1084. ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
  1085. if (ret < 0) {
  1086. ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ", __FUNCTION__, m_camera_info.sensor_id);
  1087. return false;
  1088. }
  1089. cam_int_s_fmt(&(m_camera_info.isp));
  1090. ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
  1091. cam_int_reqbufs(&(m_camera_info.isp));
  1092. ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
  1093. ALOGV("DEBUG(%s): isp mem alloc done", __FUNCTION__);
  1094. /* init Sensor */
  1095. cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
  1096. ALOGV("DEBUG(%s): sensor s_input done", __FUNCTION__);
  1097. if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
  1098. ALOGE("ERR(%s): sensor s_fmt fail", __FUNCTION__);
  1099. }
  1100. ALOGV("DEBUG(%s): sensor s_fmt done", __FUNCTION__);
  1101. cam_int_reqbufs(&(m_camera_info.sensor));
  1102. ALOGV("DEBUG(%s): sensor reqbuf done", __FUNCTION__);
  1103. for (i = 0; i < m_camera_info.sensor.buffers; i++) {
  1104. ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
  1105. m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
  1106. m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
  1107. memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
  1108. sizeof(struct camera2_shot_ext));
  1109. }
  1110. for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
  1111. cam_int_qbuf(&(m_camera_info.sensor), i);
  1112. for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
  1113. m_requestManager->pushSensorQ(i);
  1114. ALOGV("== stream_on :: sensor");
  1115. cam_int_streamon(&(m_camera_info.sensor));
  1116. m_camera_info.sensor.status = true;
  1117. /* init Capture */
  1118. m_camera_info.capture.width = m_camera2->getSensorW();
  1119. m_camera_info.capture.height = m_camera2->getSensorH();
  1120. m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
  1121. #ifdef ENABLE_FRAME_SYNC
  1122. m_camera_info.capture.planes = 2;
  1123. #else
  1124. m_camera_info.capture.planes = 1;
  1125. #endif
  1126. m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
  1127. m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  1128. m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
  1129. m_camera_info.capture.status = false;
  1130. return true;
  1131. }
/*
 * Starts (or restarts) the SCC/capture stream thread (m_streamThreads[1]).
 * threadExists == false: creates and starts a fresh StreamThread and, if
 * needed, allocates the local SCC buffer set (cached in m_sccLocalBuffer).
 * threadExists == true: reuses the existing thread object and restores the
 * cached buffers into the capture node.
 * In both cases the capture node is (re)configured, all buffers queued, and
 * streaming switched on.
 */
void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
{
    ALOGV("(%s)", __FUNCTION__);
    StreamThread *AllocatedStream;
    stream_parameters_t newParameters;
    uint32_t format_actual;
    if (!threadExists) {
        m_streamThreads[1] = new StreamThread(this, 1);
    }
    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
    if (!threadExists) {
        AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
        AllocatedStream->m_numRegisteredStream = 1;
    }
    AllocatedStream->m_index = 1;
    format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
    // Capture stream runs at full sensor resolution in YUYV.
    newParameters.width = m_camera2->getSensorW();
    newParameters.height = m_camera2->getSensorH();
    newParameters.format = format_actual;
    newParameters.streamOps = NULL;
    newParameters.numHwBuffers = NUM_SCC_BUFFERS;
#ifdef ENABLE_FRAME_SYNC
    // Second plane carries per-frame sync metadata when frame sync is enabled.
    newParameters.planes = 2;
#else
    newParameters.planes = 1;
#endif
    newParameters.numSvcBufsInHal = 0;
    newParameters.node = &m_camera_info.capture;
    AllocatedStream->streamType = STREAM_TYPE_INDIRECT;
    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
    if (!threadExists) {
        // First start: allocate the SCC buffers once and cache them so a
        // later restart can reuse them without re-allocating.
        if (!m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++){
                initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
                m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
#ifdef ENABLE_FRAME_SYNC
                m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
#else
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
#endif
                m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
            }
            m_sccLocalBufferValid = true;
        }
    } else {
        // Restart: restore the cached buffers into the capture node.
        if (m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++)
                m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
        } else {
            ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
        }
    }
    // Configure the capture node and queue every buffer to the driver.
    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    cam_int_s_fmt(newParameters.node);
    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(newParameters.node);
    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
    for (int i = 0; i < newParameters.node->buffers; i++) {
        ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
        cam_int_qbuf(newParameters.node, i);
        newParameters.svcBufStatus[i] = ON_DRIVER;
    }
    ALOGV("== stream_on :: capture");
    if (cam_int_streamon(newParameters.node) < 0) {
        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
    } else {
        m_camera_info.capture.status = true;
    }
    AllocatedStream->setParameter(&newParameters);
    AllocatedStream->m_activated = true;
    AllocatedStream->m_isBufferInit = true;
}
/*
 * Starts ISP streaming: stream-on on the ISP output node first, then tell
 * the sensor subdev (via V4L2_CID_IS_S_STREAM) to enable its stream. The
 * order matters — the ISP must be ready before the sensor starts pushing
 * frames.
 */
void ExynosCameraHWInterface2::StartISP()
{
    ALOGV("== stream_on :: isp");
    cam_int_streamon(&(m_camera_info.isp));
    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
}
// Returns the camera id this HAL instance was opened for (id 0 selects the
// S5K4E5 sensor, any other id the S5K6A3 — see InitializeISPChain()).
int ExynosCameraHWInterface2::getCameraId() const
{
    return m_cameraId;
}
  1217. int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera

Large files files are truncated, but you can click here to view the full file