
/client/AnyVODClient/src/media/MediaPresenter.cpp

https://bitbucket.org/chadr123/anyvod
C++ source, GPLv3
/*************************************************************************
Copyright (c) 2011-2017, DongRyeol Cha (chadr@dcple.com)

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
*************************************************************************/
#include <QGlobalStatic>

#define GL_NV_geometry_program4

#ifndef Q_OS_WIN
#define GL_GLEXT_PROTOTYPES
#endif

#ifdef Q_OS_WIN
#ifdef _WIN32_WINNT
#undef _WIN32_WINNT
#endif
#define _WIN32_WINNT 0x0501
#endif

#ifndef UNICODE
#define UNICODE
#endif

#ifndef _UNICODE
#define _UNICODE
#endif

#include "MediaPresenter.h"
#include "MediaState.h"
#include "core/Utils.h"
#include "video/Sphere.h"
#include "media/FrameExtractor.h"

#if !defined Q_OS_MOBILE
#include "ui/MainWindow.h"
#endif

extern "C"
{
#include <libavutil/pixdesc.h>
#include <libavutil/time.h>
#include <libavutil/audio_fifo.h>
#include <libavutil/imgutils.h>
#include <libswresample/swresample.h>
#include <libswscale/swscale.h>
#if !defined Q_OS_MOBILE
#include <libavdevice/avdevice.h>
#endif
}

#include <ass/ass.h>
#include <bass/bass_fx.h>
#include <bass/bassmix.h>

#ifndef Q_OS_MAC
#include <omp.h>
#endif

#if !defined Q_OS_MOBILE
#include <MediaInfoDLL/MediaInfoDLL.h>
#endif

#if !defined Q_OS_MOBILE
#include <QApplication>
#include <QDesktopWidget>
#endif

#include <algorithm>

#include <QStringList>
#include <QFileInfo>
#include <qmath.h>
#include <QTextStream>
#include <QMutexLocker>
#include <QRect>
#include <QPainter>
#include <QFile>
#include <QDir>
#include <QOpenGLFramebufferObject>
#include <QTemporaryFile>

#if defined Q_OS_MOBILE
#include <QOpenGLFunctions>
#endif

#include <QDebug>

#ifdef Q_OS_MAC
#include <mach/task_info.h>
#include <mach/task.h>
#include <mach/mach_init.h>
#include <mach/mach_traps.h>
#include <mach/mach_port.h>
#include <mach/vm_map.h>
#include <mach/thread_act.h>
#include <mach/host_info.h>
#include <mach/mach_host.h>
#endif

using namespace std;

#if !defined Q_OS_MOBILE
using namespace MediaInfoDLL;
#endif

#ifdef Q_OS_WIN
extern PFNGLBINDBUFFERARBPROC glBindBufferARB;
extern PFNGLBUFFERDATAARBPROC glBufferDataARB;
extern PFNGLMAPBUFFERARBPROC glMapBufferARB;
extern PFNGLUNMAPBUFFERARBPROC glUnmapBufferARB;
extern PFNGLACTIVETEXTUREPROC glActiveTextureARB;
#endif

const int MediaPresenter::DEFAULT_VIRT_SUBTITLE_RATIO = 10;
const int MediaPresenter::DEFAULT_HORI_SUBTITLE_RATIO = 10;
const int MediaPresenter::OPTION_DESC_TIME = 3000;
const QString MediaPresenter::SUBTITLE_CODEC_FORMAT = "%1 (%2)";
const QPoint MediaPresenter::DEFAULT_3D_SUBTITLE_OFFSET = QPoint(2, 0);
const QPoint MediaPresenter::DEFAULT_VR_SUBTITLE_OFFSET = QPoint(-4, 0);
const qreal MediaPresenter::DEFAULT_VIRTUAL_3D_DEPTH = 0.15;
const AVPixelFormat MediaPresenter::DEFAULT_PIX_FORMAT = AV_PIX_FMT_BGR32;

VirtualFile MediaPresenter::VIRTUAL_FILE;
DTVReader MediaPresenter::DTV_READER;

QDataStream& operator << (QDataStream &out, const MediaPresenter::Range &item)
{
    out << item.start;
    out << item.end;
    out << item.enable;
    return out;
}

QDataStream& operator >> (QDataStream &in, MediaPresenter::Range &item)
{
    in >> item.start;
    in >> item.end;
    in >> item.enable;
    return in;
}
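
// On Android, FFmpeg log output is routed through the callback below; it only
// formats the message into a local buffer and does not forward it further in
// this translation unit.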
#ifdef Q_OS_ANDROID
static void ff_log_callback(void*, int level, const char *fmt, va_list vl)
{
    if (level <= av_log_get_level())
    {
        char buf[1024];
        vsnprintf(buf, sizeof(buf), fmt, vl);
    }
}
#endif
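
// The constructor only initialises member defaults. Note the mobile/desktop
// differences: on Q_OS_MOBILE builds the hardware decoder, frame dropping and
// GPU colour conversion default to enabled; on desktop builds they are off.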
MediaPresenter::MediaPresenter(const int width, const int height) :
    QThread(),
    m_volume(this->getMaxVolume() / 2),
    m_width(width),
    m_height(height),
    m_state(nullptr),
    m_forceExit(false),
    m_format(DEFAULT_PIX_FORMAT),
    m_showDetail(false),
    m_isMute(false),
    m_subtitleFontSize(0),
    m_showSubtitle(true),
    m_showOptionDesc(false),
    m_showingOptionDesc(false),
    m_subtitleSync(0.0),
    m_audioSync(0.0),
    m_isRemoteFile(false),
    m_isRemoteProtocol(false),
    m_fontSize(0),
    m_fontOutlineSize(1),
    m_subtitleOutlineSize(0),
    m_subtitleMaxOutlineSize(0),
    m_lastAudioStream(-1),
    m_lastSubtitleStream(-1),
    m_vertPosition(0),
    m_horiPosition(0),
    m_texInfo(nullptr),
    m_isAudioExt(false),
    m_halign(AnyVODEnums::HAM_NONE),
    m_valign(AnyVODEnums::VAM_NONE),
    m_3dMethod(AnyVODEnums::V3M_NONE),
    m_3dSubtitleMethod(AnyVODEnums::S3M_NONE),
    m_vrInputSource(AnyVODEnums::VRI_NONE),
    m_seekKeyFrame(true),
    m_skipOpening(false),
    m_skipEnding(false),
    m_useSkipRange(false),
    m_subtitleOpaque(1.0f),
    m_subtitleSize(1.0f),
#if defined Q_OS_MOBILE
    m_useHWDecoder(true),
#else
    m_useHWDecoder(false),
#endif
    m_useSPDIF(false),
    m_userSPDIFSampleRate(0),
    m_usePBO(false),
    m_enableSearchSubtitle(false),
    m_enableSearchLyrics(false),
    m_autoSaveSearchLyrics(false),
    m_showAlbumJacket(true),
#if defined Q_OS_MOBILE
    m_useFrameDrop(true),
#else
    m_useFrameDrop(false),
#endif
    m_useBufferingMode(false),
    m_SPIDFEncodingMethod(AnyVODEnums::SEM_NONE),
    m_screenRotationDegree(AnyVODEnums::SRD_NONE),
    m_use3DFull(false),
    m_maxTextureSize(0),
#if defined Q_OS_MOBILE
    m_useGPUConvert(true),
#else
    m_useGPUConvert(false),
#endif
    m_anaglyphFrameBuffer(nullptr),
    m_distortionFrameBuffer(nullptr),
    m_leftDistortionFrameBuffer(nullptr),
    m_rightDistortionFrameBuffer(nullptr),
    m_videoThread(this),
    m_subtitleThread(this),
    m_readThread(this),
    m_refreshThread(this),
#if defined Q_OS_MOBILE
    m_gl(nullptr),
#endif
    m_audioDevice(-1),
    m_rotation(0.0),
    m_sensorRotation(0.0),
    m_optionDescY(0),
    m_scheduleRecomputeSubtitleSize(false),
    m_useSubtitleCacheMode(false),
    m_devicePixelRatio(1.0),
    m_captureMode(false),
    m_useLowQualityMode(true),
    m_controlLocker(QMutex::Recursive),
    m_screenOffset(0, 0),
    m_useDistortion(true),
    m_barrelDistortionCoefficients(0.05f, 0.20f),
    m_pincushionDistortionCoefficients(0.075f, -0.005f),
    m_distortionLensCenter(0.0f, 0.0f),
    m_cameraLock(QMutex::Recursive),
    m_distortionAdjustMode(AnyVODEnums::DAM_NONE),
    m_virtual3DDepth(DEFAULT_VIRTUAL_3D_DEPTH),
    m_subtitleType(ST_NONE),
    m_isLive(false),
    m_bluetoothHeadsetConnected(false),
    m_bluetoothHeadsetSync(0.0)
{
    this->reset3DSubtitleOffset();
}

MediaPresenter::~MediaPresenter()
{
    this->close();
    this->clearFrameBuffers();
}
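
// Global, process-wide initialisation: registers the custom DTV/virtual-file
// protocols and the FFmpeg device, filter, codec, format and network layers.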
void MediaPresenter::init()
{
    av_log_set_level(AV_LOG_QUIET);
#ifdef Q_OS_ANDROID
    av_log_set_callback(ff_log_callback);
#endif
    ffurl_append_protocol(DTV_READER.getProtocol());
    ffurl_append_protocol(VIRTUAL_FILE.getProtocol());
#if !defined Q_OS_MOBILE
    avdevice_register_all();
#endif
    avfilter_register_all();
    avcodec_register_all();
    av_register_all();
    avformat_network_init();
    Sphere::init();
}

void MediaPresenter::deInit()
{
    avformat_network_deinit();
    Sphere::deInit();
}
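
// Prepares a media item for playback without opening any streams yet: stores
// paths and flags (remote/live/audio extension) and, on desktop builds when no
// valid ExtraPlayData is supplied, queries MediaInfo for duration, total frame
// count and rotation.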
bool MediaPresenter::open(const QString &filePath, const QString &title, const ExtraPlayData &data,
                          const QString &fontFamily, const int fontSize, const int subtitleOutlineSize,
                          const QString &audioPath)
{
    this->m_filePath = filePath;
    this->m_audioPath = audioPath;
    this->m_isRemoteFile = Utils::determinRemoteFile(filePath);
    this->m_isRemoteProtocol = Utils::determinRemoteProtocol(filePath);
    this->m_isLive = filePath.startsWith("hls+");
    this->m_realFilePath = Utils::removeFFMpegSeparator(filePath);
    this->m_title = title;

    if (!data.isValid())
    {
#if !defined Q_OS_MOBILE
        MediaInfo mi;
        bool isDevice = false;

        isDevice = Utils::determinDevice(filePath);

        if (!isDevice && mi.Open(filePath.toStdWString()) == 1)
        {
            QString totalTime = QString::fromStdWString(mi.Get(Stream_General, 0, __T("Duration")));
            QString totalFrame = QString::fromStdWString(mi.Get(Stream_Video, 0, __T("FrameCount")));

            if (totalTime.toDouble() < 0.0)
                this->m_playData.duration = 0.0;
            else
                this->m_playData.duration = totalTime.toDouble() / 1000.0;

            this->m_playData.totalFrame = totalFrame.toUInt();

            if (this->m_playData.totalFrame <= 0)
            {
                QString frameRate = QString::fromStdWString(mi.Get(Stream_Video, 0, __T("FrameRate")));

                if (frameRate.isEmpty())
                    frameRate = QString::fromStdWString(mi.Get(Stream_Video, 0, __T("FrameRate_Original")));

                if (frameRate.toDouble() < 0)
                    this->m_playData.totalFrame = 0;
                else
                    this->m_playData.totalFrame = uint32_t(frameRate.toDouble() * this->m_playData.duration);
            }

            QString rotation = QString::fromStdWString(mi.Get(Stream_Video, 0, __T("Rotation")));

            this->m_rotation = rotation.toDouble();
            mi.Close();
        }
#endif
        this->m_playData.userData = data.userData;
    }
    else
    {
        this->m_playData = data;
    }

    this->m_fontFamily = fontFamily;
    this->m_fontSize = fontSize * this->m_devicePixelRatio;
    this->m_fontOutlineSize = 1 * this->m_devicePixelRatio;
    this->m_subtitleMaxOutlineSize = subtitleOutlineSize * this->m_devicePixelRatio;

    QFileInfo fileInfo(this->m_realFilePath);

    this->m_isAudioExt = Utils::isExtension(fileInfo.suffix(), Utils::MT_AUDIO);
    return true;
}

void MediaPresenter::openRemoteSubtitle(const QString &filePath)
{
    VIRTUAL_FILE.loadSubtitle(filePath, this);
}

bool MediaPresenter::saveSubtitleAs(const QString &filePath)
{
    bool success = false;
    double sync = -this->m_subtitleSync;

    if (this->m_samiParser.isExist())
        success = this->m_samiParser.save(filePath, sync);
    else if (this->m_srtParser.isExist())
        success = this->m_srtParser.save(filePath, sync);
    else if (this->m_lrcParser.isExist())
        success = this->m_lrcParser.save(filePath, sync);
    else if (this->m_youtubeParser.isExist())
        success = this->m_youtubeParser.save(filePath, sync);

    return success;
}

bool MediaPresenter::saveSubtitle()
{
    return this->saveSubtitleAs(this->m_subtitleFilePath);
}

QString MediaPresenter::getSubtitlePath() const
{
    return this->m_subtitleFilePath;
}

bool MediaPresenter::openSubtitle(const QString &filePath, bool isExternal)
{
    bool success = false;
    QFileInfo info(filePath);
    QString ext = info.suffix().toLower();

    if (ext == "smi" || ext == "sami")
        success = this->openSAMI(filePath) || this->openSRT(filePath);
    else if (ext == "ass" || ext == "ssa")
        success = this->openASS(filePath);
    else if (ext == "srt")
        success = this->openSRT(filePath) || this->openSAMI(filePath);
    else if (ext == "lrc")
        success = this->openLRC(filePath);

    if (!success)
        success = this->openAVParser(filePath);

    if (!success && isExternal)
        success = this->openYouTube(filePath);

    this->m_subtitleFilePath = filePath;
    return success;
}
bool MediaPresenter::openSAMI(const QString &filePath)
{
    if (this->m_samiParser.open(filePath))
    {
        this->m_detail.subtitleCodec = "SAMI Parser";
        this->m_subtitleType = ST_SAMI;
        return true;
    }
    return false;
}

bool MediaPresenter::openASS(const QString &filePath)
{
    if (this->m_assParser.open(filePath))
    {
        QFileInfo info(filePath);

        if (info.suffix().toLower() == "ass")
            this->m_detail.subtitleCodec = "ASS Parser";
        else if (info.suffix().toLower() == "ssa")
            this->m_detail.subtitleCodec = "SSA Parser";
        else
            this->m_detail.subtitleCodec = "Unknown Parser";

        this->m_assParser.setDefaultFont(this->m_assFontFamily);
        this->m_subtitleType = ST_ASS;
        return true;
    }
    return false;
}

bool MediaPresenter::openSRT(const QString &filePath)
{
    if (this->m_srtParser.open(filePath))
    {
        this->m_detail.subtitleCodec = "SRT Parser";
        this->m_subtitleType = ST_SRT;
        return true;
    }
    return false;
}

bool MediaPresenter::openYouTube(const QString &id)
{
    if (this->m_youtubeParser.open(id))
    {
        this->m_detail.subtitleCodec = "YouTube Parser";
        this->m_subtitleType = ST_YOUTUBE;
        return true;
    }
    return false;
}

bool MediaPresenter::openLRC(const QString &filePath)
{
    if (this->m_lrcParser.open(filePath))
    {
        this->m_detail.subtitleCodec = "LRC Parser";
        this->m_subtitleType = ST_LRC;
        return true;
    }
    return false;
}

bool MediaPresenter::openAVParser(const QString &filePath)
{
    QString fontPath;

    this->m_assParser.getFontPath(&fontPath);

    if (this->m_avParser.open(filePath, fontPath, this->m_fontFamily))
    {
        this->m_avParser.setCurrentIndex(0);
        this->m_detail.subtitleCodec = "AV Parser";
        this->m_subtitleType = ST_AV;
        return true;
    }
    return false;
}

void MediaPresenter::closeAllExternalSubtitles()
{
    this->m_assParser.close();
    this->m_samiParser.close();
    this->m_srtParser.close();
    this->m_youtubeParser.close();
    this->m_lrcParser.close();
    this->m_avParser.close();
    this->m_subtitleFilePath.clear();
    this->m_subtitleType = ST_NONE;
    this->m_detail.subtitleBitmap = false;
    this->m_detail.subtitleValidColor = true;

    MediaState *ms = this->m_state;

    if (ms && ms->subtitle.stream.stream)
    {
        const AVCodec *codec = ms->subtitle.stream.ctx->codec;
        this->m_detail.subtitleCodec = QString(SUBTITLE_CODEC_FORMAT).arg(QString(codec->name).toUpper()).arg(codec->long_name);
    }
}
void MediaPresenter::close()
{
    QMutexLocker locker(&this->m_controlLocker);

    this->m_forceExit = true;

    if (this->isRunning())
        this->wait();

    this->closeInternal();
    this->m_playData = ExtraPlayData();
}

void MediaPresenter::setDevicePixelRatio(double ratio)
{
    this->m_devicePixelRatio = ratio;
}

void MediaPresenter::closeInternal()
{
    if (this->m_state)
        this->m_state->quit = true;

    this->m_forceExit = false;
    this->closeStream();
    this->closeAllExternalSubtitles();
    this->callEmptyCallback(false);
    this->m_isRemoteFile = false;
    this->m_audioStreamInfo.clear();
    this->m_subtitleStreamInfo.clear();
    this->m_detail = Detail();
    this->m_repeatRange = Range();
    this->m_GOMSubtitleURL.clear();
    this->m_rotation = 0.0;
    this->m_optionDescY = 0;
    this->m_format = DEFAULT_PIX_FORMAT;
    this->m_scheduleRecomputeSubtitleSize = false;
    this->setSensorRotation(0.0);
    this->scheduleInitTextures();
}

#if defined Q_OS_MOBILE
void MediaPresenter::setGL(QOpenGLFunctions *gl)
{
    this->m_gl = gl;
    this->m_font.setGL(gl);
    this->m_subtitleFont.setGL(gl);
}
#endif

void MediaPresenter::scheduleInitTextures()
{
    if (this->m_texInfo)
    {
        for (int i = 0; i < TEX_COUNT; i++)
        {
            for (unsigned int j = 0; j < this->m_texInfo[i].textureCount; j++)
                this->m_texInfo[i].init[j] = false;
        }
    }
}

bool MediaPresenter::resetScreen(const int width, const int height, TextureInfo *texInfo, bool inContext)
{
    this->m_width = width;
    this->m_height = height;
    this->m_texInfo = texInfo;
    this->m_ortho.setToIdentity();
    this->m_ortho.ortho(0.0f, (float)width, (float)height, 0.0f, -1.0f, 1.0f);
    this->m_font.setOrtho(this->m_ortho);
    this->m_subtitleFont.setOrtho(this->m_ortho);

    if (inContext)
    {
        this->initFrameBufferObject(&this->m_distortionFrameBuffer, width, height);
        this->initFrameBufferObject(&this->m_leftDistortionFrameBuffer, width, height);
        this->initFrameBufferObject(&this->m_rightDistortionFrameBuffer, width, height);
    }

    this->scheduleInitTextures();

    if (this->m_state && this->m_state->video.stream.stream)
    {
        this->computeFrameSize();
        return true;
    }
    else
    {
        return false;
    }
}
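
// Playback control. play() opens the streams and starts the presenter thread;
// pause()/resume() record the paused interval (pause.startTime / pause.driftTime)
// so that the presentation clock stays consistent across pauses.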
bool MediaPresenter::play()
{
    QMutexLocker locker(&this->m_controlLocker);
    bool success = false;

    success = this->openStream();

    if (success)
    {
        this->start();
    }
    else
    {
        this->m_state->quit = true;
        this->closeStream();
    }

    return success;
}

void MediaPresenter::pause()
{
    QMutexLocker locker(&this->m_controlLocker);
    MediaState *ms = this->m_state;

    if (ms && !ms->pause.pause)
    {
        ms->pause.pause = true;
        ms->pause.startTime = this->getAbsoluteClock();
        ms->pause.lastPausedTime = ms->frameTimer.lastPTS;

        if (this->m_spdif.isOpened())
            this->m_spdif.pause();
        else
            BASS_Pause();
    }
}

void MediaPresenter::resume()
{
    QMutexLocker locker(&this->m_controlLocker);
    MediaState *ms = this->m_state;

    if (ms && ms->pause.pause)
    {
        ms->pause.driftTime += this->getAbsoluteClock() - ms->pause.startTime;
        ms->pause.pause = false;
        ms->seek.inc = 0.0;

        if (this->m_spdif.isOpened())
            this->m_spdif.resume();
        else
            BASS_Start();
    }

    if (ms)
        ms->willBeEnd = false;
}

void MediaPresenter::stop()
{
    this->close();
}

bool MediaPresenter::isPlayUserDataEmpty() const
{
    return this->m_playData.userData.isEmpty();
}

QString MediaPresenter::getTitle() const
{
    return this->m_title;
}
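
// Frame stepping: prevFrame() seeks backwards by (count + 1) frame durations
// while paused; nextFrame() resumes playback and requests a pause after the
// given number of rendered frames.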
void MediaPresenter::prevFrame(int count)
{
    QMutexLocker locker(&this->m_controlLocker);
    MediaState *ms = this->m_state;

    if (ms)
    {
        double clock = ms->pause.pause ? ms->pause.lastPausedTime : ms->frameTimer.lastPTS;
        double frameTime = this->getDuration() / this->m_detail.videoTotalFrame;
        double target = clock - (count + 1) * frameTime;

        if (!ms->pause.pause)
            this->pause();

        this->seekStream(target, 0.0, AVSEEK_FLAG_ANY | AVSEEK_FLAG_BACKWARD);
    }
}

void MediaPresenter::nextFrame(int count)
{
    MediaState *ms = this->m_state;

    if (ms)
    {
        ms->seek.requestPauseOnRender = true;
        ms->seek.pauseOnRenderCount = count;
        this->resume();
    }
}

bool MediaPresenter::render(ShaderCompositer &shader)
{
    QMutexLocker locker(&this->m_controlLocker);
    return this->update(shader);
}

void MediaPresenter::setSubtitleSync(double value)
{
    this->m_subtitleSync = value;
}

double MediaPresenter::getSubtitleSync() const
{
    return this->m_subtitleSync;
}

void MediaPresenter::audioSync(double value)
{
    this->m_audioSync = value;
}

double MediaPresenter::getAudioSync() const
{
    return this->m_audioSync;
}

void MediaPresenter::showDetail(bool show)
{
    this->m_showDetail = show;
}

bool MediaPresenter::isShowDetail() const
{
    return this->m_showDetail;
}

const MediaPresenter::Detail& MediaPresenter::getDetail() const
{
    return this->m_detail;
}

void MediaPresenter::getAudioDevices(QStringList *ret) const
{
    BASS_DEVICEINFO info;

    for (int i = 0; BASS_GetDeviceInfo(i, &info); i++)
        ret->append(QString::fromLocal8Bit(info.name));
}

bool MediaPresenter::setAudioDevice(int device)
{
    this->m_audioDevice = device;

    if (!this->m_spdif.isOpened())
        return this->setAudioDeviceAfter();
    else
        return true;
}

int MediaPresenter::getCurrentAudioDevice() const
{
    return this->m_audioDevice;
}

bool MediaPresenter::setAudioDeviceAfter()
{
    if (this->resetAudioStream())
        return true;

    if (this->m_state)
        this->m_state->audio.stream.index = this->m_lastAudioStream;

    return false;
}

void MediaPresenter::getSPDIFAudioDevices(QStringList *ret)
{
    this->m_spdif.getDeviceList(ret);
}

bool MediaPresenter::setSPDIFAudioDevice(int device)
{
    if (this->m_spdif.setDevice(device))
    {
        if (this->m_spdif.isOpened())
            return this->setAudioDeviceAfter();
        else
            return true;
    }
    return false;
}

int MediaPresenter::getCurrentSPDIFAudioDevice() const
{
    return this->m_spdif.getDevice();
}

void MediaPresenter::showSubtitle(bool show)
{
    this->m_showSubtitle = show;
}

bool MediaPresenter::isShowSubtitle() const
{
    return this->m_showSubtitle;
}

bool MediaPresenter::existFileSubtitle()
{
    return this->m_samiParser.isExist() ||
           this->m_srtParser.isExist() ||
           this->m_youtubeParser.isExist() ||
           this->m_lrcParser.isExist();
}

bool MediaPresenter::existSubtitle()
{
    return (this->m_state && this->m_state->subtitle.stream.stream) ||
           this->m_samiParser.isExist() ||
           this->m_assParser.isExist() ||
           this->m_srtParser.isExist() ||
           this->m_youtubeParser.isExist() ||
           this->m_lrcParser.isExist() ||
           this->m_avParser.isExist();
}

bool MediaPresenter::existExternalSubtitle()
{
    return this->m_samiParser.isExist() ||
           this->m_assParser.isExist() ||
           this->m_srtParser.isExist() ||
           this->m_youtubeParser.isExist() ||
           this->m_lrcParser.isExist() ||
           this->m_avParser.isExist();
}
double MediaPresenter::getRotation() const
{
    return this->m_rotation;
}

void MediaPresenter::setSensorRotation(double rot)
{
    QMutexLocker locker(&this->m_cameraLock);
    this->m_sensorRotation = rot;
}

double MediaPresenter::getSensorRotation()
{
    double rot;

    this->m_cameraLock.lock();
    rot = this->m_sensorRotation;
    this->m_cameraLock.unlock();

    return rot;
}

bool MediaPresenter::isAlignable()
{
    return (this->m_state && this->m_state->subtitle.stream.stream &&
            this->m_state->subtitle.stream.ctx->subtitle_header_size <= 0 &&
            this->m_state->subtitle.isBitmap == false) ||
           this->m_samiParser.isExist() ||
           this->m_youtubeParser.isExist() ||
           this->m_srtParser.isExist();
}

AnyVODEnums::HAlignMethod MediaPresenter::getHAlign() const
{
    return this->m_halign;
}

void MediaPresenter::setHAlign(AnyVODEnums::HAlignMethod align)
{
    this->m_halign = align;
}

AnyVODEnums::VAlignMethod MediaPresenter::getVAlign() const
{
    return this->m_valign;
}

void MediaPresenter::setVAlign(AnyVODEnums::VAlignMethod align)
{
    this->m_valign = align;
}

bool MediaPresenter::existAudioSubtitle()
{
    return this->m_lrcParser.isExist();
}

bool MediaPresenter::existAudioSubtitleGender() const
{
    return this->m_lrcParser.isGenderExist();
}

void MediaPresenter::setSubtitleURL(const QString &url)
{
    this->m_GOMSubtitleURL = url;
}

void MediaPresenter::getSubtitleURL(QString *ret) const
{
    *ret = this->m_GOMSubtitleURL;
}

void MediaPresenter::setASSFontPath(const QString &path)
{
    this->m_assParser.setFontPath(path);
}

void MediaPresenter::setASSFontFamily(const QString &family)
{
    this->m_assFontFamily = family;
}

void MediaPresenter::getSubtitleClasses(QStringList *classNames)
{
    if (this->m_samiParser.isExist())
    {
        this->m_samiParser.getClassNames(classNames);
    }
    else if (this->m_youtubeParser.isExist())
    {
        this->m_youtubeParser.getLanguages(classNames);
    }
    else if (this->m_avParser.isExist())
    {
        QVector<SubtitleStreamInfo> infos;

        this->m_avParser.getStreamInfos(&infos);

        for (int i = 0; i < infos.count(); i++)
            classNames->append(infos[i].name);
    }
    else
    {
        for (int i = 0; i < this->m_subtitleStreamInfo.count(); i++)
            classNames->append(this->m_subtitleStreamInfo[i].name);
    }
}

void MediaPresenter::getCurrentSubtitleClass(QString *className)
{
    if (this->m_samiParser.isExist())
    {
        this->m_samiParser.getDefaultClassName(className);
    }
    else if (this->m_youtubeParser.isExist())
    {
        this->m_youtubeParser.getDefaultLanguage(className);
    }
    else if (this->m_avParser.isExist())
    {
        this->m_avParser.getCurrentName(className);
    }
    else
    {
        if (!this->m_state)
            return;

        QString name;

        for (int i = 0; i < this->m_subtitleStreamInfo.count(); i++)
        {
            if (this->m_subtitleStreamInfo[i].index == this->m_state->subtitle.stream.index)
            {
                name = this->m_subtitleStreamInfo[i].name;
                break;
            }
        }

        *className = name;
    }
}

bool MediaPresenter::setCurrentSubtitleClass(const QString &className)
{
    if (this->m_samiParser.isExist())
    {
        this->m_samiParser.setDefaultClassName(className);
        return true;
    }
    else if (this->m_youtubeParser.isExist())
    {
        this->m_youtubeParser.setDefaultLanguage(className);
        return true;
    }
    else if (this->m_avParser.isExist())
    {
        QVector<SubtitleStreamInfo> infos;
        unsigned int index = 0;

        this->m_avParser.getStreamInfos(&infos);

        for (int i = 0; i < infos.count(); i++)
        {
            if (infos[i].name == className)
            {
                index = infos[i].index;
                break;
            }
        }

        this->m_avParser.setCurrentIndex(index);
        return true;
    }
    else
    {
        if (!this->m_state)
            return false;

        int index = -1;

        for (int i = 0; i < this->m_subtitleStreamInfo.count(); i++)
        {
            if (this->m_subtitleStreamInfo[i].name == className)
            {
                index = this->m_subtitleStreamInfo[i].index;
                break;
            }
        }

        Subtitle &subtitle = this->m_state->subtitle;

        subtitle.stream.queue.flush();
        subtitle.requestReleaseQueue = true;

        return this->changeStream(index, subtitle.stream.index, false, &this->m_lastSubtitleStream, &subtitle.threadQuit);
    }
}
void MediaPresenter::resetSubtitlePosition()
{
    this->m_vertPosition = 0;
    this->m_horiPosition = 0;
}

void MediaPresenter::setVerticalSubtitlePosition(int pos)
{
    this->m_vertPosition += pos;
}

void MediaPresenter::setHorizontalSubtitlePosition(int pos)
{
    this->m_horiPosition += pos;
}

void MediaPresenter::setVerticalSubtitleAbsolutePosition(int pos)
{
    this->m_vertPosition = pos;
}

void MediaPresenter::setHorizontalSubtitleAbsolutePosition(int pos)
{
    this->m_horiPosition = pos;
}

int MediaPresenter::getVerticalSubtitlePosition() const
{
    return this->m_vertPosition;
}

int MediaPresenter::getHorizontalSubtitlePosition() const
{
    return this->m_horiPosition;
}

void MediaPresenter::reset3DSubtitleOffset()
{
    if (this->m_vrInputSource == AnyVODEnums::VRI_NONE)
        this->m_3dSubtitleOffset = DEFAULT_3D_SUBTITLE_OFFSET;
    else
        this->m_3dSubtitleOffset = DEFAULT_VR_SUBTITLE_OFFSET;
}

void MediaPresenter::setVertical3DSubtitleOffset(int pos)
{
    this->m_3dSubtitleOffset.ry() += pos;
}

void MediaPresenter::setHorizontal3DSubtitleOffset(int pos)
{
    this->m_3dSubtitleOffset.rx() += pos;
}

void MediaPresenter::setVertical3DSubtitleAbsoluteOffset(int pos)
{
    this->m_3dSubtitleOffset.ry() = pos;
}

void MediaPresenter::setHorizontal3DSubtitleAbsoluteOffset(int pos)
{
    this->m_3dSubtitleOffset.rx() = pos;
}

int MediaPresenter::getVertical3DSubtitleOffset() const
{
    return this->m_3dSubtitleOffset.y();
}

int MediaPresenter::getHorizontal3DSubtitleOffset() const
{
    return this->m_3dSubtitleOffset.x();
}

void MediaPresenter::resetVirtual3DDepth()
{
    this->m_virtual3DDepth = DEFAULT_VIRTUAL_3D_DEPTH;
}

void MediaPresenter::setVirtual3DDepth(qreal depth)
{
    this->m_virtual3DDepth = depth;
}

qreal MediaPresenter::getVirtual3DDepth() const
{
    return this->m_virtual3DDepth;
}

void MediaPresenter::setRepeatStart(double start)
{
    this->m_repeatRange.start = start;
}

void MediaPresenter::setRepeatEnd(double end)
{
    this->m_repeatRange.end = end;
}

void MediaPresenter::setRepeatEnable(bool enable)
{
    this->m_repeatRange.enable = enable;
}

bool MediaPresenter::getRepeatEnable() const
{
    return this->m_repeatRange.enable;
}

double MediaPresenter::getRepeatStart() const
{
    return this->m_repeatRange.start;
}

double MediaPresenter::getRepeatEnd() const
{
    return this->m_repeatRange.end;
}

void MediaPresenter::setCaptureMode(bool capture)
{
    this->m_captureMode = capture;
}

bool MediaPresenter::getCaptureMode() const
{
    return this->m_captureMode;
}

void MediaPresenter::setSeekKeyFrame(bool keyFrame)
{
    this->m_seekKeyFrame = keyFrame;
}

bool MediaPresenter::isSeekKeyFrame() const
{
    return this->m_seekKeyFrame;
}

void MediaPresenter::set3DMethod(AnyVODEnums::Video3DMethod method)
{
    this->m_3dMethod = method;
    this->computeFrameSize();
}

AnyVODEnums::Video3DMethod MediaPresenter::get3DMethod() const
{
    return this->m_3dMethod;
}

void MediaPresenter::setSubtitle3DMethod(AnyVODEnums::Subtitle3DMethod method)
{
    this->m_3dSubtitleMethod = method;
}

AnyVODEnums::Subtitle3DMethod MediaPresenter::getSubtitle3DMethod() const
{
    return this->m_3dSubtitleMethod;
}

void MediaPresenter::setVRInputSource(AnyVODEnums::VRInputSource source)
{
    this->m_vrInputSource = source;
    this->computeFrameSize();
    this->reset3DSubtitleOffset();
}

AnyVODEnums::VRInputSource MediaPresenter::getVRInputSource() const
{
    return this->m_vrInputSource;
}

void MediaPresenter::setDistortionAdjustMode(AnyVODEnums::DistortionAdjustMode mode)
{
    this->m_distortionAdjustMode = mode;
}

AnyVODEnums::DistortionAdjustMode MediaPresenter::getDistortionAdjustMode() const
{
    return this->m_distortionAdjustMode;
}

void MediaPresenter::setSkipRanges(const QVector<Range> &ranges)
{
    this->m_skipRanges = ranges;
}

void MediaPresenter::getSkipRanges(QVector<Range> *ret) const
{
    *ret = this->m_skipRanges;
}

void MediaPresenter::setSkipOpening(bool skip)
{
    this->m_skipOpening = skip;
}

bool MediaPresenter::getSkipOpening() const
{
    return this->m_skipOpening;
}

void MediaPresenter::setSkipEnding(bool skip)
{
    this->m_skipEnding = skip;
}

bool MediaPresenter::getSkipEnding() const
{
    return this->m_skipEnding;
}

void MediaPresenter::setUseSkipRange(bool use)
{
    this->m_useSkipRange = use;
}

bool MediaPresenter::getUseSkipRange() const
{
    return this->m_useSkipRange;
}

double MediaPresenter::getOpeningSkipTime() const
{
    for (int i = 0; i < this->m_skipRanges.count(); i++)
    {
        if (this->m_skipRanges[i].start < 0.0)
            return this->m_skipRanges[i].end;
    }
    return 0.0;
}

double MediaPresenter::getEndingSkipTime() const
{
    for (int i = 0; i < this->m_skipRanges.count(); i++)
    {
        if (this->m_skipRanges[i].end < 0.0)
            return this->m_skipRanges[i].start;
    }
    return 0.0;
}
void MediaPresenter::useNormalizer(bool use)
{
    if (this->m_spdif.isOpened())
        return;

    this->m_audioEffect.useNormalizer = use;

    if (this->m_state && this->m_state->audio.handle)
    {
        if (use)
            this->initNormalizer();
        else
            this->closeNormalizer();
    }
}

bool MediaPresenter::isUsingNormalizer() const
{
    return this->m_audioEffect.useNormalizer;
}

void MediaPresenter::useEqualizer(bool use)
{
    if (this->m_spdif.isOpened())
        return;

    this->m_audioEffect.useEqualizer = use;

    if (this->m_state && this->m_state->audio.handle)
    {
        if (use)
            this->initEqualizer();
        else
            this->closeEqualizer();
    }
}

bool MediaPresenter::isUsingEqualizer() const
{
    return this->m_audioEffect.useEqualizer;
}

void MediaPresenter::useLowerVoice(bool use)
{
    if (this->m_spdif.isOpened())
        return;

    this->m_audioEffect.useLowerVoice = use;

    if (this->m_state && this->m_state->audio.handle)
    {
        if (use)
            this->initLowerVoice();
        else
            this->closeLowerVoice();
    }
}

bool MediaPresenter::isUsingLowerVoice() const
{
    return this->m_audioEffect.useLowerVoice;
}

void MediaPresenter::useHigherVoice(bool use)
{
    if (this->m_spdif.isOpened())
        return;

    this->m_audioEffect.useHigherVoice = use;

    if (this->m_state && this->m_state->audio.handle)
    {
        if (use)
            this->initHigherVoice();
        else
            this->closeHigherVoice();
    }
}

bool MediaPresenter::isUsingHigherVoice() const
{
    return this->m_audioEffect.useHigherVoice;
}

void MediaPresenter::useLowerMusic(bool use)
{
    if (this->m_spdif.isOpened())
        return;

    this->m_audioEffect.useLowerMusic = use;

    if (this->m_state && this->m_state->audio.handle)
    {
        if (use)
            this->initLowerMusic();
        else
            this->closeLowerMusic();
    }
}

bool MediaPresenter::isUsingLowerMusic() const
{
    return this->m_audioEffect.useLowerMusic;
}

void MediaPresenter::setSubtitleOpaque(float opaque)
{
    this->m_subtitleOpaque = opaque;
}

float MediaPresenter::getSubtitleOpaque() const
{
    return this->m_subtitleOpaque;
}

void MediaPresenter::setSubtitleSize(float size)
{
    this->m_subtitleSize = size;

    if (!this->m_scheduleRecomputeSubtitleSize)
        this->computeSubtitleSize();
}

float MediaPresenter::getSubtitleSize() const
{
    return this->m_subtitleSize;
}

void MediaPresenter::setScheduleRecomputeSubtitleSize()
{
    this->m_scheduleRecomputeSubtitleSize = true;
}

bool MediaPresenter::setPreAmp(float dB)
{
    this->m_audioEffect.preampValue = dB;

    if (this->m_audioEffect.preamp && this->m_state && this->m_state->audio.handle)
    {
        BASS_BFX_VOLUME vol;

        vol.lChannel = 0;
        vol.fVolume = BASS_BFX_dB2Linear(dB);

        if (BASS_FXSetParameters(this->m_audioEffect.preamp, &vol) == TRUE)
            return true;
    }
    else
    {
        return true;
    }

    return false;
}

float MediaPresenter::getPreAmp() const
{
    return this->m_audioEffect.preampValue;
}

bool MediaPresenter::setEqualizerGain(int band, float gain)
{
    QVector<Equalizer> &values = this->m_audioEffect.equalizerValues;

    if (band >= 0 && band < values.count())
    {
        Equalizer &eq = values[band];

        eq.gain = gain;

        if (this->m_audioEffect.eqaulizer && this->m_state && this->m_state->audio.handle)
        {
            BASS_BFX_PEAKEQ eqValue;

            eqValue.fBandwidth = eq.octave;
            eqValue.fCenter = eq.center;
            eqValue.fGain = eq.gain;
            eqValue.lBand = band;
            eqValue.lChannel = BASS_BFX_CHANALL;

            if (BASS_FXSetParameters(this->m_audioEffect.eqaulizer, &eqValue) == TRUE)
                return true;
        }
        else
        {
            return true;
        }
    }

    return false;
}

float MediaPresenter::getEqualizerGain(int band) const
{
    const QVector<Equalizer> &values = this->m_audioEffect.equalizerValues;

    if (band >= 0 && band < values.count())
        return values[band].gain;

    return 0.0f;
}

int MediaPresenter::getBandCount() const
{
    return this->m_audioEffect.equalizerValues.count();
}
bool MediaPresenter::isEnableSearchSubtitle() const
{
    return this->m_enableSearchSubtitle;
}

bool MediaPresenter::isEnableSearchLyrics() const
{
    return this->m_enableSearchLyrics;
}

void MediaPresenter::enableSearchSubtitle(bool enable)
{
    this->m_enableSearchSubtitle = enable;
}

void MediaPresenter::enableSearchLyrics(bool enable)
{
    this->m_enableSearchLyrics = enable;
}

void MediaPresenter::enableAutoSaveSearchLyrics(bool enable)
{
    this->m_autoSaveSearchLyrics = enable;
}

bool MediaPresenter::isAutoSaveSearchLyrics() const
{
    return this->m_autoSaveSearchLyrics;
}

void MediaPresenter::showAlbumJacket(bool show)
{
    this->m_showAlbumJacket = show;
}

bool MediaPresenter::isShowAlbumJacket() const
{
    return this->m_showAlbumJacket;
}

void MediaPresenter::useFrameDrop(bool enable)
{
    this->m_useFrameDrop = enable;
}

bool MediaPresenter::isUseFrameDrop() const
{
    return this->m_useFrameDrop;
}

void MediaPresenter::useBufferingMode(bool enable)
{
    this->m_useBufferingMode = enable;
}

bool MediaPresenter::isUseBufferingMode() const
{
    return this->m_useBufferingMode;
}

void MediaPresenter::useGPUConvert(bool use)
{
    bool prev = this->m_useGPUConvert;

    this->m_useGPUConvert = use;

    if (prev != use)
    {
        if (this->isEnabledVideo())
            this->recover(this->getMasterClock());
    }
}

bool MediaPresenter::isUseGPUConvert() const
{
    return this->m_useGPUConvert;
}

void MediaPresenter::seek(double time, bool any)
{
    QMutexLocker locker(&this->m_controlLocker);

    if (this->m_state)
    {
        double incr = time - this->getCurrentPosition();
        int flag = (this->m_seekKeyFrame && !any) ? 0 : AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY;

        this->seekStream(time, incr, flag);
    }
}

uint8_t MediaPresenter::getMaxVolume() const
{
    return 255;
}

void MediaPresenter::mute(bool mute)
{
    if (mute)
        this->volumeInternal(0);
    else
        this->volume(this->m_volume);

    this->m_isMute = mute;
}

void MediaPresenter::volumeInternal(uint8_t volume)
{
    if (this->m_state && !this->m_spdif.isOpened())
        BASS_ChannelSetAttribute(this->m_state->audio.handle, BASS_ATTRIB_VOL, (float)volume / (float)this->getMaxVolume());
}

void MediaPresenter::volume(uint8_t volume)
{
    if (volume > this->getMaxVolume())
        this->m_volume = this->getMaxVolume();
    else
        this->m_volume = volume;

    this->volumeInternal(this->m_volume);
}

uint8_t MediaPresenter::getVolume() const
{
    return this->m_volume;
}

double MediaPresenter::getDuration() const
{
    if (this->m_state)
        return this->m_playData.duration;
    else
        return 0.0;
}

double MediaPresenter::getCurrentPosition()
{
    if (this->m_state)
    {
        double duration = this->getDuration();
        double clock;

        clock = this->getMasterClock() + this->m_state->seek.inc;

        if (clock > duration)
            return duration;
        else
            return clock;
    }
    else
    {
        return 0.0;
    }
}

bool MediaPresenter::hasDuration() const
{
    return Utils::zeroDouble(this->getDuration()) > 0.0;
}
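
// The aspect ratio is derived from the codec's sample_aspect_ratio when it is
// set, and falls back to the plain width/height (or height/width) ratio otherwise.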
double MediaPresenter::getAspectRatio(bool widthPrio) const
{
    MediaState *ms = this->m_state;
    double aspectRatio = 0.0;

    if (!ms)
        return 0.0;

    if (!ms->video.stream.stream)
        return 0.0;

    AVCodecContext *codec = ms->video.stream.ctx;

    if (!codec)
        return 0.0;

    if (codec->sample_aspect_ratio.num == 0)
    {
        aspectRatio = 0.0;
    }
    else
    {
        aspectRatio = av_q2d(codec->sample_aspect_ratio);

        if (widthPrio)
            aspectRatio *= (double)codec->height / (double)codec->width;
        else
            aspectRatio *= (double)codec->width / (double)codec->height;
    }

    if (aspectRatio <= 0.0)
    {
        if (widthPrio)
            aspectRatio = (double)codec->height / (double)codec->width;
        else
            aspectRatio = (double)codec->width / (double)codec->height;
    }

    return aspectRatio;
}

bool MediaPresenter::isEnabledVideo() const
{
    if (this->m_state && this->m_state->video.stream.stream)
        return true;
    else
        return false;
}

bool MediaPresenter::isAudio() const
{
    return this->m_playData.totalFrame <= 100 && this->m_isAudioExt;
}

bool MediaPresenter::isVideo() const
{
    return this->isEnabledVideo() && !this->isAudio();
}

bool MediaPresenter::isTempoUsable() const
{
    if (this->m_state)
    {
        if (this->getCurrentAudioStreamIndex() >= 0)
            return this->m_state->audio.tempo != 0;
        else
            return true;
    }
    return false;
}

float MediaPresenter::getTempo() const
{
    float tempo = 0.0f;

    if (this->isTempoUsable())
    {
        if (this->getCurrentAudioStreamIndex() >= 0)
            BASS_ChannelGetAttribute(this->m_state->audio.tempo, BASS_ATTRIB_TEMPO, &tempo);
        else
            tempo = this->m_state->video.tempo;
    }

    return tempo;
}

void MediaPresenter::setTempo(float percent)
{
    if (this->isTempoUsable())
    {
        if (this->getCurrentAudioStreamIndex() >= 0)
            BASS_ChannelSetAttribute(this->m_state->audio.tempo, BASS_ATTRIB_TEMPO, percent);
        else
            this->m_state->video.tempo = percent;
    }
}

VirtualFile& MediaPresenter::getVirtualFile()
{
    return VIRTUAL_FILE;
}

DTVReader& MediaPresenter::getDTVReader()
{
    return DTV_READER;
}

bool MediaPresenter::isRemoteFile() const
{
    return this->m_isRemoteFile;
}

bool MediaPresenter::isRemoteProtocol() const
{
    return this->m_isRemoteProtocol;
}
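
// Switches to another audio/subtitle stream: stops the read thread, closes the
// old stream component, opens the new one, seeks back to the previous master
// clock position and restarts reading, accounting for the time spent switching.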
bool MediaPresenter::changeStream(int newIndex, int oldIndex, bool isAudio, int *lastStreamIndex, bool *quitFlag)
{
    QMutexLocker locker(&this->m_controlLocker);
    bool success = false;

    if (this->m_state == nullptr)
        return false;

    double clock = this->getMasterClock();

    this->m_state->streamChangeStartTime = this->getAbsoluteClock();
    this->m_state->readThreadQuit = true;

    if (this->m_readThread.isRunning())
        this->m_readThread.wait();

    if (quitFlag)
        *quitFlag = true;

    this->closeStreamComponent(oldIndex, isAudio);

    if (this->openStreamComponent(newIndex, isAudio))
    {
        if (newIndex == this->getCurrentAudioStreamIndex())
        {
            if (this->m_spdif.isOpened())
                this->m_spdif.play();
            else
                BASS_ChannelPlay(this->m_state->audio.handle, true);
        }

        this->seekStream(clock, 0.0, AVSEEK_FLAG_ANY | AVSEEK_FLAG_BACKWARD);

        if (lastStreamIndex)
            *lastStreamIndex = newIndex;

        success = true;
    }

    this->startReadThread();
    this->m_state->streamChangeDriftTime += this->getAbsoluteClock() - this->m_state->streamChangeStartTime;

    return success;
}

int MediaPresenter::getCurrentAudioStreamIndex() const
{
    if (this->m_state)
        return this->m_state->audio.stream.index;
    else
        return -1;
}

void MediaPresenter::getAudioStreamInfo(QVector<AudioStreamInfo> *ret) const
{
    *ret = this->m_audioStreamInfo;
}

bool MediaPresenter::resetAudioStream()
{
    return this->changeAudioStream(this->getCurrentAudioStreamIndex());
}

bool MediaPresenter::changeAudioStream(int index)
{
    return this->changeStream(index, this->getCurrentAudioStreamIndex(),
                              this->isUseAudioPath(), &this->m_lastAudioStream, nullptr);
}

HSTREAM MediaPresenter::getAudioHandle() const
{
    if (this->m_state)
        return this->m_state->audio.handle;

    return 0;
}

Deinterlacer& MediaPresenter::getDeinterlacer()
{
    return this->m_deinterlacer;
}

void MediaPresenter::setDeinterlacerAlgorithm(AnyVODEnums::DeinterlaceAlgorithm algorithm)
{
    this->m_deinterlacer.setAlgorithm(algorithm);

    if (this->m_state)
    {
        Video &video = this->m_state->video;

        this->m_deinterlacer.setCodec(video.stream.ctx, video.pixFormat, video.stream.stream->time_base);
    }
}

FilterGraph& MediaPresenter::getFilterGraph()
{
    return this->m_filterGraph;
}

bool MediaPresenter::configFilterGraph()
{
    if (this->m_state)
    {
        Video &video = this->m_state->video;

        return this->m_filterGraph.setCodec(video.stream.ctx, video.pixFormat, this->m_format, video.stream.stream->time_base);
    }
    return false;
}

int MediaPresenter::getOptionDescY() const
{
    return this->m_optionDescY;
}

void MediaPresenter::setOptionDescY(int y)
{
    this->m_optionDescY = y;
}

void MediaPresenter::setVerticalScreenOffset(int offset)
{
    QMutexLocker locker(&this->m_cameraLock);
    this->m_screenOffset.setY(offset);
}

void MediaPresenter::setHorizontalScreenOffset(int offset)
{
    QMutexLocker locker(&this->m_cameraLock);
    this->m_screenOffset.setX(offset);
}

void MediaPresenter::useDistortion(bool use)
{
    this->m_useDistortion = use;
}

bool MediaPresenter::isUseDistortion() const
{
    return this->m_useDistortion;
}

void MediaPresenter::setBarrelDistortionCoefficients(const QVector2D &coefficients)
{
    this->m_barrelDistortionCoefficients = coefficients;
}

QVector2D MediaPresenter::getBarrelDistortionCoefficients() const
{
    return this->m_barrelDistortionCoefficients;
}

void MediaPresenter::setPincushionDistortionCoefficients(const QVector2D &coefficients)
{
    this->m_pincushionDistortionCoefficients = coefficients;
}

QVector2D MediaPresenter::getPincushionDistortionCoefficients() const
{
    return this->m_pincushionDistortionCoefficients;
}

void MediaPresenter::setDistortionLensCenter(const QVector2D &lensCenter)
{
    this->m_distortionLensCenter = lensCenter;
}

QVector2D MediaPresenter::getDistortionLensCenter() const
{
    return this->m_distortionLensCenter;
}

QString MediaPresenter::getRealFilePath() const
{
    return this->m_realFilePath;
}

AVPixelFormat MediaPresenter::getFormat() const
{
    return this->m_format;
}

void MediaPresenter::setBluetoothHeadsetConnected(bool connected)
{
    this->m_bluetoothHeadsetConnected = connected;
}

bool MediaPresenter::getBluetoothHeadsetConnected() const
{
    return this->m_bluetoothHeadsetConnected;
}

void MediaPresenter::setBluetoothHeadsetSync(double sync)
{
    this->m_bluetoothHeadsetSync = sync;
}

double MediaPresenter::getBluetoothHeadsetSync() const
{
    return this->m_bluetoothHeadsetSync;
}

#ifdef Q_OS_IOS
void MediaPresenter::setIOSNotifyCallback(IOSNOTIFYPROC *proc)
{
    this->m_iosNotify = proc;
}
#endif

void MediaPresenter::setUserAspectRatio(UserAspectRatio &ratio)
{
    this->m_userRatio = ratio;
    this->computeFrameSize();
}

void MediaPresenter::getUserAspectRatio(UserAspectRatio *ret) const
{
    *ret = this->m_userRatio;
}

void MediaPresenter::setMaxTextureSize(int size)
{
    this->m_maxTextureSize = size;
    this->initFrameBufferObject(&this->m_anaglyphFrameBuffer, size, size);
}

int MediaPresenter::getMaxTextureSize() const
{
    return this->m_maxTextureSize;
}

void MediaPresenter::useSubtitleCacheMode(bool use)
{
    this->m_useSubtitleCacheMode = use;
}

bool MediaPresenter::isUseSubtitleCacheMode() const
{
    return this->m_useSubtitleCacheMode;
}

void MediaPresenter::showOptionDesc(const QString &desc)
{
    if (this->isRunning())
    {
        QMutexLocker locker(&this->m_optionDescMutex);

        this->m_optionDesc = desc;
        this->m_showOptionDesc = true;

        if (this->m_showAudioOptionDescCallback.callback && this->isAudio())
            this->m_showAudioOptionDescCallback.callback(this->m_showAudioOptionDescCallback.userData, desc, true);
    }
    else
    {
        if (this->m_nonePlayingDescCallback.callback)
            this->m_nonePlayingDescCallback.callback(this->m_nonePlayingDescCallback.userData, desc);
    }
}

void MediaPresenter::clearFonts()
{
    this->m_font.clear();
    this->m_subtitleFont.clear();
}

void MediaPresenter::clearFrameBuffers()
{
    this->destroyFrameBufferObject(&this->m_anaglyphFrameBuffer);
    this->destroyFrameBufferObject(&this->m_distortionFrameBuffer);
    this->destroyFrameBufferObject(&this->m_leftDistortionFrameBuffer);
    this->destroyFrameBufferObject(&this->m_rightDistortionFrameBuffer);
}
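
// Per-process CPU usage in percent, gathered per platform: GetProcessTimes/
// GetSystemTimes on Windows, /proc/<pid>/stat and /proc/stat on Linux, and
// Mach thread statistics on macOS.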
float MediaPresenter::getCPUUsage()
{
    qint64 pid = QCoreApplication::applicationPid();
    float usage = 0.0f;
    uint64_t processKernelNow = 0;
    uint64_t processUserNow = 0;
    uint64_t systemKernelNow = 0;
    uint64_t systemUserNow = 0;
    uint64_t processKernelElapsed = 0;
    uint64_t processUserElapsed = 0;
    uint64_t systemKernelElapsed = 0;
    uint64_t systemUserElapsed = 0;
    uint64_t totalProcessElapsed = 0;
    uint64_t totalSystemElapsed = 0;

#ifdef Q_OS_WIN
    ULARGE_INTEGER int64;
    FILETIME dummy;
    FILETIME processFileTimeKernelNow;
    FILETIME processFileTimeUserNow;
    FILETIME systemFileTimeKernelNow;
    FILETIME systemFileTimeUserNow;
    HANDLE pHandle = OpenProcess(PROCESS_ALL_ACCESS, FALSE, pid);

    if (GetProcessTimes(pHandle, &dummy, &dummy, &processFileTimeKernelNow, &processFileTimeUserNow) &&
        GetSystemTimes(&dummy, &systemFileTimeKernelNow, &systemFileTimeUserNow))
    {
        int64.LowPart = processFileTimeKernelNow.dwLowDateTime;
        int64.HighPart = processFileTimeKernelNow.dwHighDateTime;
        processKernelNow = int64.QuadPart;

        int64.LowPart = processFileTimeUserNow.dwLowDateTime;
        int64.HighPart = processFileTimeUserNow.dwHighDateTime;
        processUserNow = int64.QuadPart;

        int64.LowPart = systemFileTimeKernelNow.dwLowDateTime;
        int64.HighPart = systemFileTimeKernelNow.dwHighDateTime;
        systemKernelNow = int64.QuadPart;

        int64.LowPart = systemFileTimeUserNow.dwLowDateTime;
        int64.HighPart = systemFileTimeUserNow.dwHighDateTime;
        systemUserNow = int64.QuadPart;
    }

    if (pHandle)
        CloseHandle(pHandle);
#elif defined Q_OS_LINUX
    QFile processStat(QString("/proc/%1/stat").arg(pid));
    QFile systemStat("/proc/stat");

    if (!processStat.open(QIODevice::ReadOnly))
        return 0.0f;

    if (!systemStat.open(QIODevice::ReadOnly))
        return 0.0f;

    QTextStream data;
    QString line;
    int idum;
    QString sdum;

    data.setDevice(&processStat);
    line = data.readLine();
    data.setString(&line);
    data >> idum >> sdum >> sdum;
    data >> idum >> idum >> idum;
    data >> idum >> idum >> idum;
    data >> idum >> idum >> idum >> idum;
    data >> processUserNow;
    data >> processKernelNow;

    data.setDevice(&systemStat);
    line = data.readLine();
    data.setString(&line);
    data >> sdum;

    uint64_t cpuTimes;

    for (int i = 0; i < 10; i++)
    {
        data >> cpuTimes;
        systemKernelNow += cpuTimes;
    }

    systemUserNow = 0;
#elif defined Q_OS_MAC
    (void)pid;

    thread_array_t threads;
    mach_msg_type_number_t count;

    if (task_threads(mach_task_self(), &threads, &count) != KERN_SUCCESS)
        return 0.0f;

    kern_return_t kr = KERN_SUCCESS;
    mach_msg_type_number_t infoCount = THREAD_BASIC_INFO_COUNT;

    for (mach_msg_type_number_t i = 0; i < count; i++)
    {
        thread_basic_info_data_t info;

        kr = thread_info(threads[i], THREAD_BASIC_INFO, (thread_info_t)&info, &infoCount);

        if (kr != KERN_SUCCESS)
            break;

        usage += (float)info.cpu_usage / TH_USAGE_SCALE;
    }

    for (mach_msg_type_number_t i = 0; i < count; i++)
        mach_port_deallocate(mach_task_self(), threads[i]);

    vm_deallocate(mach_task_self(), (vm_address_t)threads, sizeof(thread_t) * count);

    if (kr != KERN_SUCCESS)
        usage = 0.0f;

    host_basic_info_data_t hostInfo;

    infoCount = HOST_BASIC_INFO_COUNT;

    if (host_info(mach_host_self(), HOST_BASIC_INFO, (host_info_t)&hostInfo, &infoCount) != KERN_SUCCESS)
        return 0.0f;

    if (hostInfo.avail_cpus <= 0)
        hostInfo.avail_cpus = 1;

    return usage * 100.0f / hostInfo.avail_cpus;
#endif
    processKernelElapsed = processKernelNow - this->m_detail.lastProcessKernelTime;
    processUserElapsed = processUserNow - this->m_detail.lastProcessUserTime;
    systemKernelElapsed = systemKernelNow - this->m_detail.lastSystemKernelTime;
    systemUserElapsed = systemUserNow - this->m_detail.lastSystemUserTime;
    totalProcessElapsed = processKernelElapsed + processUserElapsed;
    totalSystemElapsed = systemKernelElapsed + systemUserElapsed;

    if (totalSystemElapsed > 0)
        usage = 100.0f * totalProcessElapsed / totalSystemElapsed;

    this->m_detail.lastProcessKernelTime = processKernelNow;
    this->m_detail.lastProcessUserTime = processUserNow;
    this->m_detail.lastSystemKernelTime = systemKernelNow;
    this->m_detail.lastSystemUserTime = systemUserNow;

    return usage;
}

Camera& MediaPresenter::getCamera()
{
    return this->m_camera;
}

QMutex& MediaPresenter::getCameraLock()
{
    return this->m_cameraLock;
}

void MediaPresenter::setStatusChangedCallback(EventCallback *playing, EventCallback *ended)
{
    if (playing)
        this->m_playing = *playing;

    if (ended)
        this->m_ended = *ended;
}

void MediaPresenter::setEmptyBufferCallback(EmptyBufferCallback &callback)
{
    this->m_emptyBufferCallback = callback;
}

void MediaPresenter::setShowAudioOptionDescCallback(ShowAudioOptionDescCallback &callback)
{
    this->m_showAudioOptionDescCallback = callback;
}

void MediaPresenter::setAudioSubtitleCallback(AudioSubtitleCallback &callback)
{
    this->m_audioSubtitleCallback = callback;
}

void MediaPresenter::setPaintCallback(PaintCallback &callback)
  1842. {
  1843. this->m_paintCallback = callback;
  1844. }
  1845. void MediaPresenter::setAbortCallback(AbortCallback &callback)
  1846. {
  1847. this->m_abortCallback = callback;
  1848. }
  1849. void MediaPresenter::setNonePlayingDescCallback(NonePlayingDescCallback &callback)
  1850. {
  1851. this->m_nonePlayingDescCallback = callback;
  1852. }
  1853. void MediaPresenter::setRecoverCallback(MediaPresenter::EventCallback &callback)
  1854. {
  1855. this->m_recoverCallback = callback;
  1856. }
  1857. void MediaPresenter::setDisableHWDecoderCallback(MediaPresenter::EventCallback &callback)
  1858. {
  1859. this->m_disableHWDecoder = callback;
  1860. }
  1861. void MediaPresenter::callEmptyCallback(bool show)
  1862. {
  1863. if (this->m_emptyBufferCallback.callback)
  1864. this->m_emptyBufferCallback.callback(this->m_emptyBufferCallback.userData, show);
  1865. }
  1866. void MediaPresenter::abort(int reason)
  1867. {
  1868. if (this->m_abortCallback.callback)
  1869. this->m_abortCallback.callback(this->m_abortCallback.userData, reason);
  1870. }
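// Current audio clock adjusted for the user audio sync offset, the Bluetooth headset
// delay (when connected) and the output device latency.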
  1871. double MediaPresenter::getAudioClock()
  1872. {
  1873. Audio &audio = this->m_state->audio;
  1874. QMutexLocker locker(&audio.stream.clockLock);
  1875. double bluetoothSync;
  1876. if (this->m_bluetoothHeadsetConnected)
  1877. bluetoothSync = this->m_bluetoothHeadsetSync;
  1878. else
  1879. bluetoothSync = 0.0;
  1880. return audio.stream.clock - this->m_audioSync - bluetoothSync - audio.spec.latency;
  1881. }
  1882. double MediaPresenter::getVideoClock()
  1883. {
  1884. QMutexLocker locker(&this->m_state->video.stream.clockLock);
  1885. return this->m_state->video.stream.clock;
  1886. }
  1887. double MediaPresenter::getExternalClock() const
  1888. {
  1889. int64_t driftTime = this->m_state->pause.driftTime + this->m_state->video.driftTime
  1890. + this->m_state->seek.videoDiscardDriftTime + this->m_state->seek.readDiscardDriftTime
  1891. + this->m_state->streamChangeDriftTime;
  1892. int64_t base = this->m_state->externalClock.base;
  1893. return (this->getAbsoluteClock() - base - driftTime) / MICRO_SECOND;
  1894. }
  1895. double MediaPresenter::getMasterClock()
  1896. {
  1897. if (this->m_state->syncType == SYNC_VIDEO_MASTER)
  1898. return this->getVideoClock();
  1899. else if (this->m_state->syncType == SYNC_AUDIO_MASTER)
  1900. return this->getAudioClock();
  1901. else
  1902. return this->getExternalClock();
  1903. }
  1904. double MediaPresenter::getAudioClockOffset() const
  1905. {
  1906. MediaState *ms = this->m_state;
  1907. Audio &audio = ms->audio;
  1908. return ms->syncType == SYNC_AUDIO_MASTER ? audio.stream.clockOffset : 0.0;
  1909. }
  1910. double MediaPresenter::frameNumberToClock(int number) const
  1911. {
  1912. return (number * this->getDuration()) / this->m_detail.videoTotalFrame;
  1913. }
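// Creates (or recreates when the size changed) a frame buffer object with linear
// filtering and edge clamping on its texture.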
  1914. void MediaPresenter::initFrameBufferObject(QOpenGLFramebufferObject **object, int width, int height)
  1915. {
  1916. if (*object)
  1917. {
  1918. if ((*object)->width() != width || (*object)->height() != height)
  1919. delete *object;
  1920. else
  1921. return;
  1922. }
  1923. *object = new QOpenGLFramebufferObject(width, height);
  1924. GL_PREFIX glBindTexture(GL_TEXTURE_2D, (*object)->texture());
  1925. GL_PREFIX glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  1926. GL_PREFIX glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  1927. GL_PREFIX glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  1928. GL_PREFIX glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  1929. (*object)->release();
  1930. }
  1931. void MediaPresenter::destroyFrameBufferObject(QOpenGLFramebufferObject **object)
  1932. {
  1933. if (*object)
  1934. {
  1935. delete *object;
  1936. *object = nullptr;
  1937. }
  1938. }
  1939. bool MediaPresenter::isValid() const
  1940. {
  1941. return this->m_state != nullptr;
  1942. }
  1943. int64_t MediaPresenter::getAbsoluteClock() const
  1944. {
  1945. return av_gettime();
  1946. }
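// When audio is not the master clock, nudges the amount of audio handed to the output
// toward the master clock by shrinking the buffer or duplicating the final sample,
// clamped to SAMPLE_CORRECTION_PERCENT_MAX of the original size.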
  1947. int MediaPresenter::synchronizeAudio(short *samples, int samplesSize)
  1948. {
  1949. MediaState *ms = this->m_state;
  1950. if (ms->syncType != SYNC_AUDIO_MASTER)
  1951. {
  1952. Audio &audio = ms->audio;
  1953. AVCodecContext *codec = audio.stream.ctx;
  1954. int n = av_get_bytes_per_sample(codec->sample_fmt) * audio.spec.channelCount;
  1955. double diff = this->getAudioClock() - this->getMasterClock();
  1956. if (diff < NOSYNC_THRESHOLD)
  1957. {
  1958. audio.diffComputation = diff + audio.diffAvgCoef * audio.diffComputation;
  1959. if (audio.diffAvgCount < AUDIO_DIFF_AVG_NB)
  1960. {
  1961. audio.diffAvgCount++;
  1962. }
  1963. else
  1964. {
  1965. double avgDiff = audio.diffComputation * (1.0 - audio.diffAvgCoef);
  1966. if (fabs(avgDiff) >= audio.diffThreshold)
  1967. {
  1968. int wantedSize = samplesSize + ((int)(diff * codec->sample_rate) * n);
1969. int minSize = samplesSize * (100 - SAMPLE_CORRECTION_PERCENT_MAX) / 100;
1970. int maxSize = samplesSize * (100 + SAMPLE_CORRECTION_PERCENT_MAX) / 100;
  1971. if (wantedSize < minSize)
  1972. wantedSize = minSize;
  1973. else if (wantedSize > maxSize)
  1974. wantedSize = maxSize;
  1975. if (wantedSize < samplesSize)
  1976. {
  1977. samplesSize = wantedSize;
  1978. }
  1979. else if (wantedSize > samplesSize)
  1980. {
1981. int nb = wantedSize - samplesSize;
  1982. uint8_t *samplesEnd = (uint8_t *)samples + samplesSize - n;
  1983. uint8_t *q = samplesEnd + n;
  1984. while (nb > 0)
  1985. {
  1986. memcpy(q, samplesEnd, n);
  1987. q += n;
  1988. nb -= n;
  1989. }
  1990. samplesSize = wantedSize;
  1991. }
  1992. }
  1993. }
  1994. }
  1995. else
  1996. {
  1997. audio.diffAvgCount = 0;
  1998. audio.diffComputation = 0;
  1999. }
  2000. }
  2001. return samplesSize;
  2002. }
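// Decodes one audio packet into the temporary buffer and converts the T2 samples
// (32/64 bit integer or double) into T1 (float) output by dividing by maximum.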
  2003. template <typename T1, typename T2>
  2004. int MediaPresenter::downSampleDecode(AVCodecContext *codec, uint8_t *audioBuffer, int bufSize, double maximum, PacketQueue::Packet &packet, int *dataSize) const
  2005. {
  2006. T1 *targetBuffer = (T1*)audioBuffer;
  2007. uint8_t *tmpBuffer = this->m_state->audio.tmpBuffer;
  2008. int decodedSize = bufSize;
  2009. int len = this->decodeAudio(codec, &tmpBuffer, &decodedSize, &packet);
  2010. int count = decodedSize / (int)sizeof(T2);
  2011. for (int i = 0; i < count; i++)
  2012. targetBuffer[i] = ((T2*)tmpBuffer)[i] / maximum;
  2013. *dataSize = decodedSize / (sizeof(T2) / sizeof(T1));
  2014. return len;
  2015. }
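// S/PDIF output with re-encoding: decoded PCM is buffered in an audio FIFO, re-encoded
// one frame at a time (AC3 or DTS, per the configured S/PDIF encoding method) and
// written to the S/PDIF packetizer.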
  2016. int MediaPresenter::decodeAudioAsSPDIFEncoding(uint8_t *audioBuffer, int bufSize, AVCodecContext *codec, int *dataSize, PacketQueue::Packet *tmpPacket)
  2017. {
  2018. SPDIFEncoding &encoding = this->m_state->audio.spdifEncoding;
  2019. AVCodecContext *encodingCodec = encoding.encoder;
  2020. int sampleCount = 0;
  2021. int len = 0;
  2022. if (!encodingCodec)
  2023. return len;
  2024. if (av_audio_fifo_size(encoding.fifo) < encodingCodec->frame_size)
  2025. {
  2026. len = this->decodeAudioAndSampleCount(codec, encoding.tmpBuffers, dataSize, &sampleCount, tmpPacket);
  2027. if (len > 0 && *dataSize > 0)
  2028. av_audio_fifo_write(encoding.fifo, (void**)encoding.tmpBuffers, sampleCount);
  2029. }
  2030. *dataSize = 0;
  2031. if (av_audio_fifo_size(encoding.fifo) >= encodingCodec->frame_size)
  2032. {
  2033. int ret = 0;
  2034. int gotPacket = 0;
  2035. int wroteSize = 0;
  2036. AVPacket packet;
  2037. av_init_packet(&packet);
  2038. packet.data = nullptr;
  2039. packet.size = 0;
  2040. av_audio_fifo_read(encoding.fifo, (void**)encoding.buffers, encodingCodec->frame_size);
  2041. ret = Utils::encodeFrame(encodingCodec, &packet, encoding.frame, &gotPacket);
  2042. if (ret >= 0 && gotPacket)
  2043. {
  2044. this->m_spdif.setAudioBuffer(audioBuffer, bufSize);
  2045. wroteSize = this->m_spdif.writePacket(packet);
  2046. *dataSize += wroteSize;
  2047. av_packet_unref(&packet);
  2048. }
  2049. }
  2050. return len;
  2051. }
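// Delay between the previous and the current video frame based on their PTS values;
// falls back to the last known delay when the difference is out of range.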
  2052. double MediaPresenter::calFrameDelay(double pts)
  2053. {
  2054. FrameTimer &timer = this->m_state->frameTimer;
  2055. double delay = pts - timer.lastPTS;
  2056. if (delay <= 0.0 || delay >= 1.0)
  2057. delay = timer.lastDelay;
  2058. timer.lastDelay = delay;
  2059. timer.lastPTS = pts;
  2060. return delay;
  2061. }
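// Signals an empty audio buffer through the empty buffer callback while buffering a
// remote stream.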
  2062. void MediaPresenter::processEmptyAudio()
  2063. {
  2064. MediaState *ms = this->m_state;
  2065. Audio &audio = ms->audio;
  2066. if (!ms->pause.pause && !ms->willBeEnd && this->isRemoteProtocol() && !audio.isEmpty)
  2067. {
  2068. audio.isEmpty = true;
  2069. this->callEmptyCallback(true);
  2070. }
  2071. }
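// Pulls packets from the audio queue and decodes them into audioBuffer. Handles flush
// and discard packets, S/PDIF output, the audio clock update from packet PTS and the
// lyrics (audio subtitle) callback. Returns the decoded size, or -1 on failure or quit.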
  2072. int MediaPresenter::decodeAudioFrame(uint8_t *audioBuffer, int bufSize)
  2073. {
  2074. MediaState *ms = this->m_state;
  2075. Audio &audio = ms->audio;
  2076. PacketQueue::Packet *packet = &audio.packet;
  2077. AVCodecContext *codec = audio.stream.ctx;
  2078. QVector<bool*> quits;
  2079. quits.append(&ms->quit);
  2080. while (true)
  2081. {
  2082. while (audio.packetSize > 0)
  2083. {
  2084. int dataSize = bufSize;
  2085. PacketQueue::Packet tmpPacket;
  2086. int len = 0;
  2087. tmpPacket.discard = audio.packet.discard;
  2088. tmpPacket.packet.data = audio.packetData;
  2089. tmpPacket.packet.size = audio.packetSize;
  2090. if (this->m_spdif.isOpened())
  2091. {
  2092. if (this->isUsingSPDIFEncoding())
  2093. {
  2094. len = this->decodeAudioAsSPDIFEncoding(audioBuffer, bufSize, codec, &dataSize, &tmpPacket);
  2095. }
  2096. else
  2097. {
  2098. this->m_spdif.setAudioBuffer(audioBuffer, bufSize);
  2099. dataSize = this->m_spdif.writePacket(tmpPacket.packet);
  2100. len = tmpPacket.packet.size;
  2101. }
  2102. if (dataSize < 0)
  2103. len = -1;
  2104. }
  2105. else
  2106. {
  2107. switch (audio.spec.format)
  2108. {
  2109. case AV_SAMPLE_FMT_S32:
  2110. case AV_SAMPLE_FMT_S32P:
  2111. len = this->downSampleDecode<float, int32_t>(codec, audioBuffer, bufSize, numeric_limits<int32_t>::max(), tmpPacket, &dataSize);
  2112. break;
  2113. case AV_SAMPLE_FMT_DBL:
  2114. case AV_SAMPLE_FMT_DBLP:
  2115. len = this->downSampleDecode<float, double>(codec, audioBuffer, bufSize, 1.0, tmpPacket, &dataSize);
  2116. break;
  2117. case AV_SAMPLE_FMT_S64:
  2118. case AV_SAMPLE_FMT_S64P:
  2119. len = this->downSampleDecode<float, int64_t>(codec, audioBuffer, bufSize, numeric_limits<int64_t>::max(), tmpPacket, &dataSize);
  2120. break;
  2121. default:
  2122. len = this->decodeAudio(codec, &audioBuffer, &dataSize, &tmpPacket);
  2123. break;
  2124. }
  2125. }
  2126. if (len < 0)
  2127. {
  2128. audio.packetSize = 0;
  2129. break;
  2130. }
  2131. audio.packetData += len;
  2132. audio.packetSize -= len;
  2133. if (dataSize <= 0)
  2134. continue;
  2135. if (!ms->seek.firstAudioAfterFlush)
  2136. {
  2137. ms->seek.firstAudioAfterFlush = true;
  2138. if (ms->syncType == SYNC_AUDIO_MASTER)
  2139. ms->seek.readable = true;
  2140. }
  2141. this->m_detail.audioOutputByteCount.fetchAndAddOrdered(dataSize);
  2142. return dataSize;
  2143. }
  2144. av_packet_unref(&packet->packet);
  2145. if (ms->quit)
  2146. return -1;
  2147. if (this->isUseBufferingMode() && !this->isRemoteFile() && !this->m_isLive)
  2148. {
  2149. if (!ms->willBeEnd && (!audio.stream.queue.hasPacket() || (this->isVideo() && !ms->video.stream.queue.hasPacket())))
  2150. {
  2151. this->processEmptyAudio();
  2152. return -1;
  2153. }
  2154. }
  2155. bool block = false;
  2156. if (!audio.stream.queue.get(packet, quits, &block))
  2157. return -1;
  2158. if (block)
  2159. {
  2160. if (audio.isEmpty)
  2161. {
  2162. audio.isEmpty = false;
  2163. this->callEmptyCallback(false);
  2164. }
  2165. }
  2166. else
  2167. {
  2168. this->processEmptyAudio();
  2169. return -1;
  2170. }
  2171. if (audio.stream.queue.isFlushPacket(packet))
  2172. {
  2173. avcodec_flush_buffers(audio.stream.ctx);
  2174. ms->seek.firstAudioAfterFlush = false;
  2175. continue;
  2176. }
  2177. if (packet->discard)
  2178. {
  2179. packet->discard = false;
  2180. continue;
  2181. }
  2182. this->m_detail.audioInputByteCount.fetchAndAddOrdered(packet->packet.size);
  2183. audio.packetData = packet->packet.data;
  2184. audio.packetSize = packet->packet.size;
  2185. if (packet->packet.pts != AV_NOPTS_VALUE)
  2186. {
  2187. double pts = av_q2d(audio.stream.stream->time_base) * packet->packet.pts;
  2188. QMutexLocker locker(&audio.stream.clockLock);
  2189. audio.stream.clock = pts - audio.stream.clockOffset;
  2190. }
  2191. if (this->m_audioSubtitleCallback.callback && this->existAudioSubtitle())
  2192. {
  2193. if (this->m_showSubtitle)
  2194. {
  2195. QMutexLocker locker(&audio.stream.clockLock);
  2196. uint32_t time = (int32_t)((audio.stream.clock + this->m_subtitleSync) * 1000);
  2197. bool found = true;
  2198. QVector<Lyrics> lines;
  2199. Lyrics line;
  2200. if (this->m_lrcParser.isExist())
  2201. {
  2202. if (this->m_lrcParser.get(time, &line))
  2203. {
  2204. lines.append(line);
  2205. if (this->m_lrcParser.getNext(time, 1, &line))
  2206. {
  2207. lines.append(line);
  2208. if (this->m_lrcParser.getNext(time, 2, &line))
  2209. lines.append(line);
  2210. }
  2211. }
  2212. }
  2213. else
  2214. {
  2215. found = false;
  2216. }
  2217. if (found && lines.count() > 0)
  2218. this->m_audioSubtitleCallback.callback(this->m_audioSubtitleCallback.userData, lines);
  2219. }
  2220. else
  2221. {
  2222. this->m_audioSubtitleCallback.callback(this->m_audioSubtitleCallback.userData, QVector<Lyrics>());
  2223. }
  2224. }
  2225. }
  2226. }
  2227. int MediaPresenter::audioSPDIFCallback(void *buffer, int length, void *user)
  2228. {
  2229. MediaPresenter *parent = (MediaPresenter*)user;
  2230. return (int)parent->audioCallback(HSTREAM(), buffer, (DWORD)length, user);
  2231. }
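// BASS stream callback: fills the output buffer from decoded audio, advances the audio
// clock by the written duration and emits silence while no data is available yet.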
  2232. DWORD CALLBACK MediaPresenter::audioCallback(HSTREAM, void *buffer, DWORD length, void *user)
  2233. {
  2234. MediaPresenter *parent = (MediaPresenter*)user;
  2235. MediaState *ms = parent->m_state;
  2236. Audio &audio = ms->audio;
  2237. DWORD oriLen = length;
  2238. DWORD wrote = 0;
  2239. uint8_t *stream = (uint8_t*)buffer;
  2240. if (!ms->pause.pause)
  2241. {
  2242. while (length > 0)
  2243. {
  2244. if (audio.bufferIndex >= audio.bufferSize)
  2245. {
  2246. int audioSize = parent->decodeAudioFrame(audio.audioBuffer, sizeof(audio.audioBuffer));
  2247. if (audioSize < 0)
  2248. {
  2249. break;
  2250. }
  2251. else
  2252. {
  2253. if (!parent->m_spdif.isOpened())
  2254. audioSize = parent->synchronizeAudio((int16_t*)audio.audioBuffer, audioSize);
  2255. audio.bufferSize = audioSize;
  2256. }
  2257. audio.bufferIndex = 0;
  2258. }
  2259. DWORD size = min(audio.bufferSize - audio.bufferIndex, (unsigned int)length);
  2260. memcpy(stream, &audio.audioBuffer[audio.bufferIndex], size);
  2261. length -= size;
  2262. stream += size;
  2263. audio.bufferIndex += size;
  2264. }
  2265. wrote = oriLen - length;
  2266. double amount = (double)wrote / audio.spec.bytesPerSec;
  2267. audio.stream.clockLock.lock();
  2268. audio.stream.clock += amount;
  2269. audio.stream.clockLock.unlock();
  2270. }
  2271. if (wrote <= 0 && !ms->willBeEnd)
  2272. {
  2273. AVCodecContext *ctx = audio.stream.ctx;
  2274. if (ctx)
  2275. {
  2276. AVSampleFormat format = av_get_packed_sample_fmt(ctx->sample_fmt);
  2277. int sampleCount = length / av_get_bytes_per_sample(format) / ctx->channels;
  2278. av_samples_set_silence(&stream, 0, sampleCount, ctx->channels, format);
  2279. wrote = length;
  2280. }
  2281. }
  2282. return wrote;
  2283. }
  2284. void MediaPresenter::refreshSchedule(int delay)
  2285. {
  2286. this->m_refreshThread.refreshTimer(delay);
  2287. }
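// Computes the on-screen rectangle of the video frame, taking the screen offset and
// the device pixel ratio into account.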
  2288. bool MediaPresenter::getPictureRect(QRect *rect)
  2289. {
  2290. if (this->m_state)
  2291. {
  2292. const FrameSize &frameSize = this->m_state->frameSize;
  2293. QPoint screenOffset;
  2294. this->m_cameraLock.lock();
  2295. screenOffset = this->m_screenOffset;
  2296. this->m_cameraLock.unlock();
  2297. rect->setX((screenOffset.x() * this->m_devicePixelRatio) + ((this->m_width - frameSize.width) / 2));
  2298. rect->setY((screenOffset.y() * this->m_devicePixelRatio) + ((this->m_height - frameSize.height) / 2));
  2299. rect->setWidth(frameSize.width + 1);
  2300. rect->setHeight(frameSize.height + 1);
  2301. return true;
  2302. }
  2303. return false;
  2304. }
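// Renders the playback information overlay (file, position, codecs, bit rates, frames,
// CPU usage, DTV signal and subtitle details) line by line as outlined text.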
  2305. void MediaPresenter::drawDetail(ShaderCompositer &shader, const VideoPicture *vp)
  2306. {
  2307. QString text;
  2308. QString currentTime;
  2309. QString totalTime;
  2310. QString timeFormat = Utils::TIME_HH_MM_SS;
  2311. QColor headerColor(255, 255, 255);
  2312. QColor color(255, 173, 114);
  2313. int initX = 10 * this->m_devicePixelRatio;
  2314. int x = initX;
  2315. int y = 10 * this->m_devicePixelRatio;
  2316. QString subtitle;
  2317. float opaque = 1.0f;
  2318. int lineHeight = this->m_fontSize + (5 * this->m_devicePixelRatio);
  2319. int optionDescYGap = 10;
  2320. if (this->m_vrInputSource != AnyVODEnums::VRI_NONE)
  2321. {
  2322. x /= 2;
  2323. y /= 2;
  2324. initX /= 2;
  2325. lineHeight /= 2;
  2326. optionDescYGap /= 2;
  2327. }
  2328. if (this->existSubtitle())
  2329. {
  2330. if (this->isAudio())
  2331. subtitle = tr("가사 있음");
  2332. else
  2333. subtitle = tr("자막 있음");
  2334. }
  2335. else
  2336. {
  2337. if (this->isAudio())
  2338. subtitle = tr("가사 없음");
  2339. else
  2340. subtitle = tr("자막 없음");
  2341. }
  2342. if (this->m_showingOptionDesc && !this->m_captureMode)
  2343. y += this->m_fontSize + ((this->m_optionDescY + optionDescYGap) * this->m_devicePixelRatio);
  2344. text = tr("파일 이름 : ");
  2345. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2346. text = this->m_detail.fileName;
  2347. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2348. text = QString(" (%1)").arg(subtitle);
  2349. this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2350. y += lineHeight;
  2351. x = initX;
  2352. #if !defined Q_OS_ANDROID && !defined Q_OS_IOS
  2353. if (!Utils::determinDevice(this->m_filePath))
  2354. #endif
  2355. {
  2356. text = tr("재생 위치 : ");
  2357. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2358. text = QString("%1 / %2 (%3%)")
  2359. .arg(*Utils::getTimeString(this->m_detail.currentTime, timeFormat, &currentTime))
  2360. .arg(*Utils::getTimeString(this->m_detail.totalTime, timeFormat, &totalTime))
  2361. .arg(this->m_detail.timePercentage, 0, 'f', 2);
  2362. this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2363. y += lineHeight;
  2364. y += lineHeight;
  2365. x = initX;
  2366. }
  2367. text = tr("파일 포맷 : ");
  2368. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2369. text = this->m_detail.fileFormat;
  2370. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2371. y += lineHeight;
  2372. x = initX;
  2373. text = tr("CPU 사용률 : ");
  2374. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2375. text = QString("%1%").arg(this->m_detail.cpuUsage, 0, 'f', 2);
  2376. this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2377. y += lineHeight;
  2378. x = initX;
  2379. if (DTV_READER.isOpened())
  2380. {
  2381. if (this->m_detail.dtvSignal)
  2382. {
  2383. this->m_detail.dtvSignalStrength = DTV_READER.getSignalStrength();
  2384. this->m_detail.dtvSignal = false;
  2385. }
  2386. text = tr("DTV 신호 감도 : ");
  2387. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2388. text = QString("%1%").arg(this->m_detail.dtvSignalStrength, 0, 'f', 2);
  2389. this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2390. y += lineHeight;
  2391. x = initX;
  2392. }
  2393. y += lineHeight;
  2394. if (this->isEnabledVideo())
  2395. {
  2396. AVCodecContext *ctx = this->m_state->video.stream.ctx;
  2397. text = tr("비디오 코덱 : ");
  2398. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2399. text = this->m_detail.videoCodec;
  2400. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2401. text = QString(" (Type %1, Threads : %2)").arg(ctx->active_thread_type).arg(ctx->thread_count);
  2402. this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2403. y += lineHeight;
  2404. x = initX;
  2405. if (this->m_hwDecoder.isOpened())
  2406. {
  2407. text = tr("하드웨어 디코더 : ");
  2408. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2409. text = this->m_detail.videoHWDecoder;
  2410. this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2411. y += lineHeight;
  2412. x = initX;
  2413. }
  2414. text = tr("입력 : ");
  2415. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2416. text = QString("%1 (%2bits), %3x%4%5, %L6kbps (%L7KiBps / %L8KiB)")
  2417. .arg(this->m_detail.videoInputType)
  2418. .arg(this->m_detail.videoInputBits)
  2419. .arg(this->m_detail.videoInputSize.width())
  2420. .arg(this->m_detail.videoInputSize.height())
  2421. .arg(this->m_detail.videoInterlaced ? "i" : "p")
  2422. .arg(this->m_detail.videoInputByteRate / 1000.0 * 8.0, 0, 'f', 2)
  2423. .arg(this->m_detail.videoInputByteRate / 1024.0, 0, 'f', 2)
  2424. .arg(this->m_state->video.stream.queue.getBufferSizeInByte() / 1024.0, 0, 'f', 2);
  2425. this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2426. y += lineHeight;
  2427. x = initX;
  2428. text = tr("출력 : ");
  2429. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2430. text = QString("%1%2 (%3bits), %4x%5%6, %7, %L8kbps (%L9KiBps)")
  2431. .arg(this->m_detail.videoOutputType)
  2432. .arg(this->isUsingPBO(this->m_format) ? " PBO" : "")
  2433. .arg(this->m_detail.videoOutputBits)
  2434. .arg(this->m_detail.videoOutputSize.width())
  2435. .arg(this->m_detail.videoOutputSize.height())
  2436. .arg(this->m_detail.videoInterlaced && !this->m_detail.videoDeinterlaced ? "i" : "p")
  2437. .arg(this->m_detail.videoFPS, 0, 'f', 2)
  2438. .arg(this->m_detail.videoOutputByteRate / 1000.0 * 8.0, 0, 'f', 2)
  2439. .arg(this->m_detail.videoOutputByteRate / 1024.0, 0, 'f', 2);
  2440. this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2441. y += lineHeight;
  2442. x = initX;
  2443. #if !defined Q_OS_ANDROID && !defined Q_OS_IOS
  2444. if (Utils::determinDevice(this->m_filePath))
  2445. {
  2446. if (this->m_useFrameDrop)
  2447. {
  2448. text = tr("프레임 : ");
  2449. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2450. text = QString("%1 drops").arg(this->m_detail.videoFrameDropCount.fetchAndAddOrdered(0));
  2451. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2452. y += lineHeight;
  2453. x = initX;
  2454. }
  2455. }
  2456. else
  2457. #endif
  2458. {
  2459. text = tr("프레임 : ");
  2460. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2461. text = QString("%1 / %2")
  2462. .arg(this->m_detail.videoCurrentFrame.fetchAndAddOrdered(0))
  2463. .arg(this->m_detail.videoTotalFrame);
  2464. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2465. if (this->m_useFrameDrop)
  2466. {
  2467. text = QString(" (%1 drops)").arg(this->m_detail.videoFrameDropCount.fetchAndAddOrdered(0));
  2468. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2469. }
  2470. y += lineHeight;
  2471. x = initX;
  2472. }
  2473. y += lineHeight;
  2474. }
  2475. if (this->getCurrentAudioStreamIndex() != -1)
  2476. {
  2477. text = tr("오디오 코덱 : ");
  2478. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2479. text = this->m_detail.audioCodec;
  2480. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2481. text = QString(" (Type %1)").arg(this->m_state->audio.stream.ctx->active_thread_type);
  2482. this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2483. y += lineHeight;
  2484. x = initX;
  2485. if (this->m_spdif.isOpened())
  2486. {
  2487. text = tr("S/PDIF 오디오 장치 : ");
  2488. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2489. text = this->m_detail.audioSPDIFOutputDevice;
  2490. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2491. if (this->isUsingSPDIFEncoding())
  2492. {
  2493. text = ", " + tr("인코딩 사용");
  2494. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2495. QString encoding = " (%1)";
  2496. switch (this->m_SPIDFEncodingMethod)
  2497. {
  2498. case AnyVODEnums::SEM_AC3:
  2499. encoding = encoding.arg("AC3");
  2500. break;
  2501. case AnyVODEnums::SEM_DTS:
  2502. encoding = encoding.arg("DTS");
  2503. break;
  2504. default:
  2505. encoding = encoding.arg("Unknown");
  2506. break;
  2507. }
  2508. text = encoding;
  2509. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2510. }
  2511. y += lineHeight;
  2512. x = initX;
  2513. }
  2514. text = tr("입력 : ");
  2515. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2516. text = QString("%1, %2Hz, %3Ch, %4bits, %L5kbps (%L6KiBps / %L7KiB)")
  2517. .arg(this->m_detail.audioInputType)
  2518. .arg(this->m_detail.audioInputSampleRate)
  2519. .arg(this->m_detail.audioInputChannels)
  2520. .arg(this->m_detail.audioInputBits)
  2521. .arg(this->m_detail.audioInputByteRate / 1000.0 * 8.0, 0, 'f', 2)
  2522. .arg(this->m_detail.audioInputByteRate / 1024.0, 0, 'f', 2)
  2523. .arg(this->m_state->audio.stream.queue.getBufferSizeInByte() / 1024.0, 0, 'f', 2);
  2524. this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2525. y += lineHeight;
  2526. x = initX;
  2527. text = tr("출력 : ");
  2528. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2529. text = QString("%1, %2Hz, %3Ch, %4bits, %L5kbps (%L6KiBps)")
  2530. .arg(this->m_detail.audioOutputType)
  2531. .arg(this->m_detail.audioOutputSampleRate)
  2532. .arg(this->m_detail.audioOutputChannels)
  2533. .arg(this->m_detail.audioOutputBits)
  2534. .arg(this->m_detail.audioOutputByteRate / 1000.0 * 8.0, 0, 'f', 2)
  2535. .arg(this->m_detail.audioOutputByteRate / 1024.0, 0, 'f', 2);
  2536. this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2537. y += lineHeight;
  2538. y += lineHeight;
  2539. x = initX;
  2540. }
  2541. if (this->existSubtitle())
  2542. {
  2543. if (this->isAudio())
  2544. text = tr("가사 코덱 : ");
  2545. else
  2546. text = tr("자막 코덱 : ");
  2547. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, headerColor, opaque, vp);
  2548. text = this->m_detail.subtitleCodec;
  2549. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2550. if (this->m_avParser.isExist())
  2551. {
  2552. QString desc;
  2553. if (this->m_avParser.getDesc(&desc))
  2554. {
  2555. text = QString(" (%1)").arg(desc);
  2556. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2557. }
  2558. }
  2559. else if (this->m_youtubeParser.isExist())
  2560. {
  2561. QString lang;
  2562. this->m_youtubeParser.getDefaultLanguage(&lang);
  2563. text = QString(" (%1)").arg(lang);
  2564. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2565. }
  2566. if (this->m_detail.subtitleBitmap)
  2567. {
  2568. if (!this->m_detail.subtitleValidColor)
  2569. {
  2570. text = QString(" Invalid");
  2571. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2572. }
  2573. text = QString(" Colors (%1)").arg(this->m_detail.subtitleColorCount);
  2574. x += this->drawOutlined(shader, this->m_font, QPoint(x, y), text, this->m_fontOutlineSize, color, opaque, vp);
  2575. }
  2576. }
  2577. }
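// Draws outlined text. For 3D output the text is drawn twice (or once per eye)
// according to the selected 3D subtitle method and shifted by the 3D subtitle offset.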
  2578. int MediaPresenter::drawOutlined(ShaderCompositer &shader, Font &font, const QPoint &pos, const QString &text,
  2579. int outline, const Font::Context &context, float opaque, const VideoPicture *vp)
  2580. {
  2581. QColor outlineColor(0, 0, 0);
  2582. int len;
  2583. if (this->m_3dSubtitleMethod == AnyVODEnums::S3M_NONE)
  2584. {
  2585. shader.startSimple();
  2586. len = font.renderText(shader, ShaderCompositer::ST_SIMPLE, pos, text, context, outline, outlineColor, opaque, AnyVODEnums::SD_NONE, 1.0f, false);
  2587. shader.endSimple();
  2588. }
  2589. else
  2590. {
  2591. QPoint firstPos;
  2592. QPoint secondPos;
  2593. AnyVODEnums::ScaleDirection direction;
  2594. QRect frameRect;
  2595. bool colorBlend = false;
  2596. float scale = 0.5f;
  2597. Font::Context firstContext = context;
  2598. Font::Context secondContext = context;
  2599. this->getPictureRect(&frameRect);
  2600. switch (this->m_3dSubtitleMethod)
  2601. {
  2602. case AnyVODEnums::S3M_TOP_BOTTOM:
  2603. {
  2604. firstPos.setX(pos.x());
  2605. firstPos.setY(pos.y() / 2 + frameRect.y());
  2606. secondPos.setX(pos.x());
  2607. secondPos.setY(frameRect.y() + (pos.y() + frameRect.height()) / 2);
  2608. direction = AnyVODEnums::SD_HEIGHT;
  2609. break;
  2610. }
  2611. case AnyVODEnums::S3M_LEFT_RIGHT:
  2612. {
  2613. firstPos.setX(pos.x() / 2 + frameRect.x());
  2614. firstPos.setY(pos.y() + frameRect.y());
  2615. secondPos.setX(frameRect.x() + (pos.x() + frameRect.width()) / 2);
  2616. secondPos.setY(firstPos.y());
  2617. direction = AnyVODEnums::SD_WIDTH;
  2618. break;
  2619. }
  2620. case AnyVODEnums::S3M_PAGE_FLIP:
  2621. {
  2622. firstPos = pos;
  2623. secondPos = pos;
  2624. direction = AnyVODEnums::SD_HEIGHT;
  2625. scale = 1.0f;
  2626. break;
  2627. }
  2628. case AnyVODEnums::S3M_INTERLACED:
  2629. case AnyVODEnums::S3M_CHECKER_BOARD:
  2630. {
  2631. firstPos = pos;
  2632. secondPos = pos;
  2633. direction = AnyVODEnums::SD_HEIGHT;
  2634. scale = 1.0f;
  2635. break;
  2636. }
  2637. case AnyVODEnums::S3M_ANAGLYPH:
  2638. {
  2639. firstPos = pos;
  2640. secondPos = pos;
  2641. direction = AnyVODEnums::SD_HEIGHT;
  2642. colorBlend = true;
  2643. scale = 1.0f;
  2644. break;
  2645. }
  2646. default:
  2647. {
  2648. direction = AnyVODEnums::SD_NONE;
  2649. break;
  2650. }
  2651. }
  2652. if (this->m_vrInputSource != AnyVODEnums::VRI_NONE)
  2653. {
  2654. scale = 0.5f;
  2655. direction = AnyVODEnums::SD_ALL;
  2656. }
  2657. firstPos -= this->m_3dSubtitleOffset * this->m_devicePixelRatio;
  2658. secondPos += this->m_3dSubtitleOffset * this->m_devicePixelRatio;
  2659. switch (this->m_3dSubtitleMethod)
  2660. {
  2661. case AnyVODEnums::S3M_PAGE_FLIP:
  2662. {
  2663. shader.startSimple();
  2664. if (vp->leftOrTop3D)
  2665. len = font.renderText(shader, ShaderCompositer::ST_SIMPLE, firstPos, text, firstContext, outline, outlineColor, opaque, direction, scale, colorBlend);
  2666. else
  2667. len = font.renderText(shader, ShaderCompositer::ST_SIMPLE, secondPos, text, secondContext, outline, outlineColor, opaque, direction, scale, colorBlend);
  2668. shader.endSimple();
  2669. break;
  2670. }
  2671. case AnyVODEnums::S3M_INTERLACED:
  2672. {
  2673. shader.startSubtitleInterlace(shader.getLeftOrTop());
  2674. font.renderText(shader, ShaderCompositer::ST_SUBTITLE_INTERLACE, firstPos, text, firstContext, outline, outlineColor, opaque, direction, scale, colorBlend);
  2675. shader.endSubtitleInterlace();
  2676. shader.startSubtitleInterlace(!shader.getLeftOrTop());
  2677. len = font.renderText(shader, ShaderCompositer::ST_SUBTITLE_INTERLACE, secondPos, text, secondContext, outline, outlineColor, opaque, direction, scale, colorBlend);
  2678. shader.endSubtitleInterlace();
  2679. break;
  2680. }
  2681. case AnyVODEnums::S3M_CHECKER_BOARD:
  2682. {
  2683. shader.startSubtitleCheckerBoard(shader.getLeftOrTop());
  2684. font.renderText(shader, ShaderCompositer::ST_SUBTITLE_CHECKER_BOARD, firstPos, text, firstContext, outline, outlineColor, opaque, direction, scale, colorBlend);
  2685. shader.endSubtitleCheckerBoard();
  2686. shader.startSubtitleCheckerBoard(!shader.getLeftOrTop());
  2687. len = font.renderText(shader, ShaderCompositer::ST_SUBTITLE_CHECKER_BOARD, secondPos, text, secondContext, outline, outlineColor, opaque, direction, scale, colorBlend);
  2688. shader.endSubtitleCheckerBoard();
  2689. break;
  2690. }
  2691. case AnyVODEnums::S3M_ANAGLYPH:
  2692. {
  2693. shader.startSubtitleAnaglyph(shader.getLeftOrTop());
  2694. font.renderText(shader, ShaderCompositer::ST_SUBTITLE_ANAGLYPH, firstPos, text, firstContext, outline, outlineColor, opaque, direction, scale, colorBlend);
  2695. shader.endSubtitleAnaglyph();
  2696. shader.startSubtitleAnaglyph(!shader.getLeftOrTop());
  2697. len = font.renderText(shader, ShaderCompositer::ST_SUBTITLE_ANAGLYPH, secondPos, text, secondContext, outline, outlineColor, opaque, direction, scale, colorBlend);
  2698. shader.endSubtitleAnaglyph();
  2699. break;
  2700. }
  2701. default:
  2702. {
  2703. shader.startSimple();
  2704. font.renderText(shader, ShaderCompositer::ST_SIMPLE, firstPos, text, firstContext, outline, outlineColor, opaque, direction, scale, colorBlend);
  2705. len = font.renderText(shader, ShaderCompositer::ST_SIMPLE, secondPos, text, secondContext, outline, outlineColor, opaque, direction, scale, colorBlend);
  2706. shader.endSimple();
  2707. break;
  2708. }
  2709. }
  2710. }
  2711. return len;
  2712. }
  2713. void MediaPresenter::drawOptionDesc(ShaderCompositer &shader, const VideoPicture *vp)
  2714. {
  2715. QMutexLocker locker(&this->m_optionDescMutex);
  2716. const QColor color(0, 148, 255);
  2717. const int outline = this->m_fontOutlineSize;
  2718. QFontMetrics fm(this->m_font.getQFont());
  2719. QString optionDesc = fm.elidedText(this->m_optionDesc, Qt::ElideMiddle, this->m_width - (10 * 2 * this->m_devicePixelRatio));
  2720. this->drawOutlined(shader, this->m_font, QPoint(10 * this->m_devicePixelRatio, (this->m_optionDescY + 10) * this->m_devicePixelRatio), optionDesc, outline, Font::Context(color), 1.0f, vp);
  2721. }
  2722. void MediaPresenter::getSubtitleSize(const QFontMetrics &fm, const QString &text, QSize *ret) const
  2723. {
  2724. int heightDiv = 1;
  2725. if (this->m_vrInputSource != AnyVODEnums::VRI_NONE)
  2726. heightDiv = 2;
  2727. ret->setWidth(fm.width(text));
  2728. ret->setHeight(fm.height() / heightDiv);
  2729. }
  2730. bool MediaPresenter::isLeftAlignLine(const QString &text) const
  2731. {
  2732. if (this->m_halign != AnyVODEnums::HAM_AUTO)
  2733. return false;
  2734. if (text.startsWith("-"))
  2735. return true;
  2736. else
  2737. return false;
  2738. }
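// Binary-searches the longest prefix that still fits the view width, then backs up to
// the nearest whitespace so the wrap lands on a word boundary when possible.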
  2739. int MediaPresenter::findWordWrapPos(const QFontMetrics &fm, const QString &text) const
  2740. {
  2741. QSize len;
  2742. int pos = -1;
  2743. int left = 0;
  2744. int right = text.count() - 1;
  2745. int mid = right / 2;
  2746. int maxWidth = this->m_width - fm.maxWidth();
  2747. while (true)
  2748. {
  2749. if (left < right - 1)
  2750. {
  2751. QString sub = text.mid(0, mid);
  2752. this->getSubtitleSize(fm, sub, &len);
  2753. if (len.width() >= maxWidth)
  2754. right = mid;
  2755. else
  2756. left = mid;
  2757. mid = (left + right) / 2;
  2758. continue;
  2759. }
  2760. bool found = false;
  2761. for (int k = mid; k >= 0; k--)
  2762. {
  2763. if (text[k].isSpace())
  2764. {
  2765. pos = k + 1;
  2766. found = true;
  2767. break;
  2768. }
  2769. }
  2770. if (found)
  2771. break;
  2772. pos = mid;
  2773. break;
  2774. }
  2775. return pos;
  2776. }
  2777. void MediaPresenter::applyWordWrap(const QFontMetrics &fm, SAMIParser::Paragraph *ret) const
  2778. {
  2779. int maxWidth = this->m_width - fm.maxWidth();
  2780. for (int i = 0; i < ret->lines.count(); i++)
  2781. {
  2782. SAMIParser::Line &line = ret->lines[i];
  2783. QString totalText;
  2784. QSize len;
  2785. for (int j = 0; j < line.subtitles.count(); j++)
  2786. totalText += line.subtitles[j].text;
  2787. if (totalText.isEmpty())
  2788. continue;
  2789. this->getSubtitleSize(fm, totalText, &len);
  2790. if (len.width() < maxWidth)
  2791. continue;
  2792. int pos = this->findWordWrapPos(fm, totalText);
  2793. int sum = 0;
  2794. int prevSum = 0;
  2795. int toMove = -1;
  2796. if (pos <= 0)
  2797. continue;
  2798. for (int j = 0; j < line.subtitles.count(); j++)
  2799. {
  2800. int count = line.subtitles[j].text.count();
  2801. sum += count;
  2802. if (sum >= pos)
  2803. {
  2804. toMove = j;
  2805. break;
  2806. }
  2807. prevSum = sum;
  2808. }
  2809. SAMIParser::Line newLine;
  2810. for (int j = toMove; j < line.subtitles.count(); j++)
  2811. newLine.subtitles.append(line.subtitles[j]);
  2812. line.subtitles = line.subtitles.mid(0, toMove + 1);
  2813. SAMIParser::Text &oldText = line.subtitles.last();
  2814. SAMIParser::Text &newText = newLine.subtitles.first();
  2815. if (prevSum <= pos)
  2816. pos -= prevSum;
  2817. oldText.text = oldText.text.mid(0, pos).trimmed();
  2818. newText.text = newText.text.mid(pos).trimmed();
  2819. ret->lines.insert(i + 1, newLine);
  2820. }
  2821. }
  2822. void MediaPresenter::applyWordWrap(const QFontMetrics &fm, SRTParser::Item *ret) const
  2823. {
  2824. int maxWidth = this->m_width - fm.maxWidth();
  2825. for (int i = 0; i < ret->texts.count(); i++)
  2826. {
  2827. QString &line = ret->texts[i];
  2828. QSize len;
  2829. if (line.isEmpty())
  2830. continue;
  2831. this->getSubtitleSize(fm, line, &len);
  2832. if (len.width() < maxWidth)
  2833. continue;
  2834. int pos = this->findWordWrapPos(fm, line);
  2835. if (pos <= 0)
  2836. continue;
  2837. QString newLine = line.mid(pos).trimmed();
  2838. line = line.mid(0, pos).trimmed();
  2839. ret->texts.insert(i + 1, newLine);
  2840. }
  2841. }
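// Draws a single subtitle line, positioned according to the horizontal and vertical
// alignment settings, the 3D subtitle method and the configured subtitle offset.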
  2842. int MediaPresenter::drawSubtitleLine(ShaderCompositer &shader, int lineNum, int totalLineCount, const QString &text,
  2843. const QPoint &margin, const Font::Context &context, const QString &totalText, int maxWidth,
  2844. bool forcedLeft, const VideoPicture *vp)
  2845. {
  2846. int defaultLineMargin = 5;
  2847. QPoint point;
  2848. int defaultVMargin;
  2849. int currentLine;
  2850. QSize size;
  2851. int left = 0;
  2852. int base;
  2853. int dir;
  2854. QFontMetrics fm(this->m_subtitleFont.getQFont());
  2855. int baseWidth;
  2856. this->getSubtitleSize(fm, totalText, &size);
  2857. defaultVMargin = -size.height() / 2;
  2858. if (this->m_3dSubtitleMethod == AnyVODEnums::S3M_LEFT_RIGHT)
  2859. baseWidth = this->m_state->frameSize.width;
  2860. else
  2861. baseWidth = this->m_width;
  2862. switch (this->m_halign)
  2863. {
  2864. case AnyVODEnums::HAM_LEFT:
  2865. {
  2866. left = (baseWidth - maxWidth) / 2;
  2867. break;
  2868. }
  2869. case AnyVODEnums::HAM_MIDDLE:
  2870. {
  2871. left = (baseWidth - size.width()) / 2;
  2872. break;
  2873. }
  2874. case AnyVODEnums::HAM_RIGHT:
  2875. {
  2876. left = (baseWidth - maxWidth) / 2 + maxWidth - size.width();
  2877. break;
  2878. }
  2879. default:
  2880. {
  2881. if (forcedLeft)
  2882. left = (baseWidth - maxWidth) / 2;
  2883. else
  2884. left = (baseWidth - size.width()) / 2;
  2885. break;
  2886. }
  2887. }
  2888. point.setX(left + margin.x());
  2889. if (this->m_valign == AnyVODEnums::VAM_TOP)
  2890. {
  2891. currentLine = lineNum;
  2892. if (this->m_vrInputSource != AnyVODEnums::VRI_NONE || this->m_3dSubtitleMethod == AnyVODEnums::S3M_NONE)
  2893. {
  2894. base = 0;
  2895. }
  2896. else
  2897. {
  2898. QRect frameRect;
  2899. this->getPictureRect(&frameRect);
  2900. base = frameRect.y();
  2901. }
  2902. dir = 1;
  2903. }
  2904. else
  2905. {
  2906. currentLine = totalLineCount - lineNum - 1;
  2907. if (this->m_3dSubtitleMethod == AnyVODEnums::S3M_NONE)
  2908. {
  2909. base = this->m_height;
  2910. }
  2911. else if (this->m_3dSubtitleMethod == AnyVODEnums::S3M_LEFT_RIGHT)
  2912. {
  2913. base = this->m_state->frameSize.height;
  2914. }
  2915. else
  2916. {
  2917. QRect frameRect;
  2918. this->getPictureRect(&frameRect);
  2919. base = frameRect.y() + frameRect.height() - 1;
  2920. }
  2921. dir = -1;
  2922. }
  2923. point.setY(base + dir * (size.height() + currentLine * size.height()) + defaultVMargin + margin.y());
  2924. QPoint posOffset;
  2925. this->getSubtitlePositionOffset(&posOffset);
  2926. point -= posOffset;
  2927. point.ry() += defaultLineMargin * dir;
  2928. return this->drawOutlined(shader, this->m_subtitleFont, point, text, this->m_subtitleOutlineSize, context, this->m_subtitleOpaque, vp);
  2929. }
  2930. bool MediaPresenter::needMaxWidth() const
  2931. {
  2932. return this->m_halign != AnyVODEnums::HAM_MIDDLE && this->m_halign != AnyVODEnums::HAM_NONE;
  2933. }
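// Renders the subtitles for the current frame time from whichever source is active:
// SAMI, ASS, SRT/YouTube, the separate AV parser or the embedded subtitle stream.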
  2934. void MediaPresenter::drawSubtitles(ShaderCompositer &shader, const VideoPicture *vp)
  2935. {
  2936. int maxWidth = 0;
  2937. bool forcedLeft = false;
  2938. QSize size;
  2939. QFontMetrics fm(this->m_subtitleFont.getQFont());
  2940. int32_t time = vp->time;
  2941. int32_t delay = this->m_state->frameTimer.lastDelay * 1000;
  2942. if (this->m_hwDecoder.isOpened())
  2943. time -= this->m_hwDecoder.getSurfaceQueueCount() * delay;
  2944. if (this->m_filterGraph.hasFilters())
  2945. time -= this->m_filterGraph.getDelayCount() * delay;
  2946. if (this->m_samiParser.isExist())
  2947. {
  2948. SAMIParser::Paragraph para;
  2949. QString className;
  2950. this->m_samiParser.getDefaultClassName(&className);
  2951. if (this->m_samiParser.get(className, time, &para))
  2952. {
  2953. this->applyWordWrap(fm, &para);
  2954. if (this->needMaxWidth())
  2955. {
  2956. for (int i = 0; i < para.lines.count(); i++)
  2957. {
  2958. SAMIParser::Line &line = para.lines[i];
  2959. QString totalText;
  2960. for (int j = 0; j < line.subtitles.count(); j++)
  2961. totalText += line.subtitles[j].text;
  2962. this->getSubtitleSize(fm, totalText, &size);
  2963. maxWidth = max(maxWidth, size.width());
  2964. forcedLeft |= this->isLeftAlignLine(totalText);
  2965. }
  2966. }
  2967. for (int i = 0; i < para.lines.count(); i++)
  2968. {
  2969. SAMIParser::Line &line = para.lines[i];
  2970. int left = 0;
  2971. QString totalText;
  2972. for (int j = 0; j < line.subtitles.count(); j++)
  2973. totalText += line.subtitles[j].text;
  2974. for (int j = 0; j < line.subtitles.count(); j++)
  2975. {
  2976. SAMIParser::Text &text = line.subtitles[j];
  2977. Font::Context context(text.color, text.bold, text.underline, text.italic, text.strike);
  2978. left += this->drawSubtitleLine(shader, i, para.lines.count(), text.text, QPoint(left, 0), context, totalText, maxWidth, forcedLeft, vp);
  2979. }
  2980. }
  2981. }
  2982. }
  2983. else if (this->m_assParser.isExist())
  2984. {
  2985. this->drawASS(ASSRM_FILE, time, shader, vp);
  2986. }
  2987. else if (this->m_srtParser.isExist() || this->m_youtubeParser.isExist())
  2988. {
  2989. SRTParser::Item item;
  2990. bool exist = false;
  2991. if (this->m_srtParser.isExist())
  2992. exist = this->m_srtParser.get(time, &item);
  2993. else if (this->m_youtubeParser.isExist())
  2994. exist = this->m_youtubeParser.get(time, &item);
  2995. if (exist)
  2996. {
  2997. this->applyWordWrap(fm, &item);
  2998. if (this->needMaxWidth())
  2999. {
  3000. for (int i = 0; i < item.texts.count(); i++)
  3001. {
  3002. this->getSubtitleSize(fm, item.texts[i], &size);
  3003. maxWidth = max(maxWidth, size.width());
  3004. forcedLeft |= this->isLeftAlignLine(item.texts[i]);
  3005. }
  3006. }
  3007. for (int i = 0; i < item.texts.count(); i++)
  3008. this->drawSubtitleLine(shader, i, item.texts.count(), item.texts[i], QPoint(0, 0), Font::Context(Qt::white), item.texts[i], maxWidth, forcedLeft, vp);
  3009. }
  3010. }
  3011. else if (this->m_avParser.isExist())
  3012. {
  3013. AVSubtitle *sp;
  3014. if (this->m_avParser.get(time, &sp))
  3015. {
  3016. if (this->drawAVSubtitle(shader, fm, sp, vp))
  3017. this->drawASS(ASSRM_AV, time, shader, vp);
  3018. }
  3019. }
  3020. else
  3021. {
  3022. MediaState *ms = this->m_state;
  3023. Subtitle &subtitle = ms->subtitle;
  3024. if (subtitle.stream.stream == nullptr)
  3025. return;
  3026. if (subtitle.isASS)
  3027. {
  3028. this->drawASS(ASSRM_SINGLE, time, shader, vp);
  3029. }
  3030. else
  3031. {
  3032. SubtitleFrames &frames = ms->subtitleFrames;
  3033. QMutexLocker locker(&frames.lock);
  3034. int count = frames.items.count();
  3035. if (count <= 0)
  3036. return;
  3037. AVSubtitle *sp = nullptr;
  3038. for (int i = count - 1; i >= 0; i--)
  3039. {
  3040. SubtitleElement &item = frames.items[i];
  3041. AVSubtitle &spItem = item.subtitle;
  3042. uint64_t base = (uint64_t)(item.pts * 1000);
  3043. if (base + spItem.start_display_time <= (uint64_t)time && (uint64_t)time <= base + spItem.end_display_time)
  3044. {
  3045. sp = &spItem;
  3046. break;
  3047. }
  3048. }
  3049. if (sp)
  3050. this->drawAVSubtitle(shader, fm, sp, vp);
  3051. }
  3052. }
  3053. }
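// Renders an FFmpeg AVSubtitle: bitmap rects are converted to BGR32 and drawn as
// textures, text rects are drawn as outlined lines. Returns true when the subtitle
// contains ASS rects that still have to be rendered through libass.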
  3054. bool MediaPresenter::drawAVSubtitle(ShaderCompositer &shader, const QFontMetrics &fm, const AVSubtitle *sp, const VideoPicture *vp)
  3055. {
  3056. MediaState *ms = this->m_state;
  3057. int currentCount = 0;
  3058. int maxWidth = 0;
  3059. bool forcedLeft = false;
  3060. bool isASS = false;
  3061. QSize size;
  3062. TextureInfo &texInfo = this->m_texInfo[TEX_FFMPEG_SUBTITLE];
  3063. for (unsigned int i = 0; i < sp->num_rects; i++)
  3064. {
  3065. AVSubtitleRect *rect = sp->rects[i];
  3066. if (rect->type == SUBTITLE_BITMAP)
  3067. {
  3068. QRect rc;
  3069. QRect picRect;
  3070. Surface *subtitleImg;
  3071. AVPixelFormat srcFMT;
  3072. QPoint posOffset;
  3073. QPoint newPos(rect->x, rect->y);
  3074. this->getSubtitlePositionOffsetByFrame(QSize(vp->orgWidth, vp->orgHeight), &posOffset);
  3075. this->getPictureRect(&picRect);
  3076. if (rect->x + rect->w > vp->orgWidth)
  3077. newPos.rx() = vp->orgWidth - rect->w;
  3078. if (rect->y + rect->h > vp->orgHeight)
  3079. newPos.ry() = vp->orgHeight - rect->h;
  3080. if (newPos.x() < 0)
  3081. {
  3082. rc.setX(posOffset.x() + picRect.x());
  3083. rc.setWidth(ms->frameSize.width);
  3084. }
  3085. else
  3086. {
  3087. rc.setX(Utils::mapTo(vp->orgWidth, ms->frameSize.width, newPos.x()) - posOffset.x() + picRect.x());
  3088. rc.setWidth(Utils::mapTo(vp->orgWidth, ms->frameSize.width, rect->w));
  3089. }
  3090. if (newPos.y() < 0)
  3091. {
  3092. rc.setY(posOffset.y() + picRect.y());
  3093. rc.setHeight(ms->frameSize.height);
  3094. }
  3095. else
  3096. {
  3097. rc.setY(Utils::mapTo(vp->orgHeight, ms->frameSize.height, newPos.y()) - posOffset.y() + picRect.y());
  3098. rc.setHeight(Utils::mapTo(vp->orgHeight, ms->frameSize.height, rect->h));
  3099. }
  3100. if (rect->nb_colors == 2)
  3101. {
  3102. srcFMT = AV_PIX_FMT_MONOWHITE;
  3103. }
  3104. else if (rect->nb_colors >= 4 && rect->nb_colors <= 256)
  3105. {
  3106. srcFMT = AV_PIX_FMT_PAL8;
  3107. }
  3108. else if (rect->nb_colors == 65536)
  3109. {
  3110. srcFMT = AV_PIX_FMT_RGB555;
  3111. }
  3112. else if (rect->nb_colors == 16777216)
  3113. {
  3114. srcFMT = AV_PIX_FMT_RGB24;
  3115. }
  3116. else
  3117. {
  3118. this->m_detail.subtitleValidColor = false;
  3119. return false;
  3120. }
  3121. this->m_detail.subtitleBitmap = true;
  3122. this->m_detail.subtitleColorCount = rect->nb_colors;
  3123. subtitleImg = this->createSurface(rect->w, rect->h, AV_PIX_FMT_BGR32);
  3124. if (subtitleImg)
  3125. {
  3126. ms->imageRGBConverter = sws_getCachedContext(
  3127. ms->imageRGBConverter,
  3128. rect->w, rect->h, srcFMT,
  3129. rect->w, rect->h, AV_PIX_FMT_BGR32,
  3130. SWS_POINT, nullptr, nullptr, nullptr);
  3131. AVFrame pict;
  3132. memset(&pict, 0, sizeof(pict));
  3133. pict.data[0] = subtitleImg->pixels[0];
  3134. pict.linesize[0] = subtitleImg->lineSize[0];
  3135. if (ms->imageRGBConverter)
  3136. sws_scale(ms->imageRGBConverter, rect->data, rect->linesize, 0, rect->h, pict.data, pict.linesize);
  3137. bool enabledBlend = GL_PREFIX glIsEnabled(GL_BLEND);
  3138. GL_PREFIX glEnable(GL_BLEND);
  3139. QRectF renderRect;
  3140. renderRect = QRectF(0.0, 0.0, 1.0, 1.0);
3141. for (unsigned int t = 0; t < texInfo.textureCount; t++)
3142. texInfo.init[t] = false;
  3143. if (this->m_3dSubtitleMethod == AnyVODEnums::S3M_NONE)
  3144. {
  3145. QColor color = Qt::white;
  3146. color.setAlphaF(this->m_subtitleOpaque);
  3147. shader.startSimple();
  3148. GL_PREFIX glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  3149. this->renderTexture(shader, ShaderCompositer::ST_SIMPLE, rc, texInfo, *subtitleImg, renderRect, color, QMatrix4x4(), true);
  3150. shader.endSimple();
  3151. }
  3152. else
  3153. {
  3154. QColor firstColorValue = Qt::white;
  3155. QColor secondColorValue = Qt::white;
  3156. QRect firstRect = rc;
  3157. QRect secondRect = rc;
  3158. firstColorValue.setAlphaF(this->m_subtitleOpaque);
  3159. secondColorValue.setAlphaF(this->m_subtitleOpaque);
  3160. switch (this->m_3dSubtitleMethod)
  3161. {
  3162. case AnyVODEnums::S3M_TOP_BOTTOM:
  3163. {
  3164. firstRect.setTop((rc.y() + picRect.y()) / 2);
  3165. firstRect.setHeight(rc.height() / 2);
  3166. secondRect.setTop(firstRect.y() + (picRect.height() - 1) / 2);
  3167. secondRect.setHeight(rc.height() / 2);
  3168. break;
  3169. }
  3170. case AnyVODEnums::S3M_LEFT_RIGHT:
  3171. {
  3172. firstRect.setLeft((rc.x() + picRect.x()) / 2);
  3173. firstRect.setWidth(rc.width() / 2);
  3174. secondRect.setLeft(firstRect.x() + (picRect.width() - 1) / 2);
  3175. secondRect.setWidth(rc.width() / 2);
  3176. break;
  3177. }
  3178. default:
  3179. {
  3180. break;
  3181. }
  3182. }
  3183. firstRect.translate(-this->m_3dSubtitleOffset * this->m_devicePixelRatio);
  3184. secondRect.translate(this->m_3dSubtitleOffset * this->m_devicePixelRatio);
  3185. switch (this->m_3dSubtitleMethod)
  3186. {
  3187. case AnyVODEnums::S3M_PAGE_FLIP:
  3188. {
  3189. shader.startSimple();
  3190. GL_PREFIX glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  3191. if (vp->leftOrTop3D)
  3192. this->renderTexture(shader, ShaderCompositer::ST_SIMPLE, firstRect, texInfo, *subtitleImg, renderRect, firstColorValue, QMatrix4x4(), true);
  3193. else
  3194. this->renderTexture(shader, ShaderCompositer::ST_SIMPLE, secondRect, texInfo, *subtitleImg, renderRect, firstColorValue, QMatrix4x4(), true);
  3195. shader.endSimple();
  3196. break;
  3197. }
  3198. case AnyVODEnums::S3M_INTERLACED:
  3199. {
  3200. GL_PREFIX glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  3201. shader.startSubtitleInterlace(shader.getLeftOrTop());
  3202. this->renderTexture(shader, ShaderCompositer::ST_SUBTITLE_INTERLACE, firstRect, texInfo, *subtitleImg, renderRect, firstColorValue, QMatrix4x4(), true);
  3203. shader.endSubtitleInterlace();
  3204. shader.startSubtitleInterlace(!shader.getLeftOrTop());
  3205. this->renderTexture(shader, ShaderCompositer::ST_SUBTITLE_INTERLACE, secondRect, texInfo, *subtitleImg, renderRect, firstColorValue, QMatrix4x4(), false);
  3206. shader.endSubtitleInterlace();
  3207. break;
  3208. }
  3209. case AnyVODEnums::S3M_CHECKER_BOARD:
  3210. {
  3211. GL_PREFIX glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  3212. shader.startSubtitleCheckerBoard(shader.getLeftOrTop());
  3213. this->renderTexture(shader, ShaderCompositer::ST_SUBTITLE_CHECKER_BOARD, firstRect, texInfo, *subtitleImg, renderRect, firstColorValue, QMatrix4x4(), true);
  3214. shader.endSubtitleCheckerBoard();
  3215. shader.startSubtitleCheckerBoard(!shader.getLeftOrTop());
  3216. this->renderTexture(shader, ShaderCompositer::ST_SUBTITLE_CHECKER_BOARD, secondRect, texInfo, *subtitleImg, renderRect, firstColorValue, QMatrix4x4(), false);
  3217. shader.endSubtitleCheckerBoard();
  3218. break;
  3219. }
  3220. case AnyVODEnums::S3M_ANAGLYPH:
  3221. {
  3222. GL_PREFIX glBlendFunc(GL_SRC_COLOR, GL_ONE);
  3223. shader.startSubtitleAnaglyph(shader.getLeftOrTop());
  3224. this->renderTexture(shader, ShaderCompositer::ST_SUBTITLE_ANAGLYPH, firstRect, texInfo, *subtitleImg, renderRect, firstColorValue, QMatrix4x4(), true);
  3225. shader.endSubtitleAnaglyph();
  3226. shader.startSubtitleAnaglyph(!shader.getLeftOrTop());
  3227. this->renderTexture(shader, ShaderCompositer::ST_SUBTITLE_ANAGLYPH, secondRect, texInfo, *subtitleImg, renderRect, secondColorValue, QMatrix4x4(), false);
  3228. shader.endSubtitleAnaglyph();
  3229. break;
  3230. }
  3231. default:
  3232. {
  3233. GL_PREFIX glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  3234. shader.startSimple();
  3235. this->renderTexture(shader, ShaderCompositer::ST_SIMPLE, firstRect, texInfo, *subtitleImg, renderRect, firstColorValue, QMatrix4x4(), true);
  3236. this->renderTexture(shader, ShaderCompositer::ST_SIMPLE, secondRect, texInfo, *subtitleImg, renderRect, secondColorValue, QMatrix4x4(), false);
  3237. shader.endSimple();
  3238. break;
  3239. }
  3240. }
  3241. }
  3242. if (!enabledBlend)
  3243. GL_PREFIX glDisable(GL_BLEND);
  3244. this->deleteSurface(subtitleImg);
  3245. }
  3246. }
  3247. else if (rect->type == SUBTITLE_TEXT)
  3248. {
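// Plain text subtitle: strip carriage returns, split into lines, measure each line when a
// common maximum width is needed, then hand every line to drawSubtitleLine().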
  3249. QString text = QString::fromUtf8(rect->text);
  3250. QStringList texts;
  3251. text.remove('\r');
  3252. texts = text.split('\n');
  3253. if (this->needMaxWidth())
  3254. {
  3255. for (int i = 0; i < texts.count(); i++)
  3256. {
  3257. this->getSubtitleSize(fm, texts[i], &size);
  3258. maxWidth = max(maxWidth, size.width());
  3259. forcedLeft |= this->isLeftAlignLine(texts[i]);
  3260. }
  3261. }
  3262. for (int j = 0; j < texts.count(); j++, currentCount++)
  3263. this->drawSubtitleLine(shader, currentCount, texts.count() * sp->num_rects, texts[j], QPoint(0, 0), Font::Context(Qt::white), texts[j], maxWidth, forcedLeft, vp);
  3264. }
  3265. else if (rect->type == SUBTITLE_ASS)
  3266. {
  3267. isASS = true;
  3268. }
  3269. }
  3270. return isASS;
  3271. }
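// Fetches the libass image list for the given timestamp from the selected source
// (external subtitle file, a single rendered line, or the embedded AV stream) and
// renders it when retrieval succeeds.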
  3272. void MediaPresenter::drawASS(const ASS_RENDER_METHOD method, const int32_t time, ShaderCompositer &shader, const VideoPicture *vp)
  3273. {
  3274. ASS_Image *images = nullptr;
  3275. bool changed = false;
  3276. bool success = false;
  3277. switch (method)
  3278. {
  3279. case ASSRM_FILE:
  3280. success = this->m_assParser.get(time, &images, &changed);
  3281. break;
  3282. case ASSRM_SINGLE:
  3283. success = this->m_assParser.getSingle(time, &images, &changed);
  3284. break;
  3285. case ASSRM_AV:
  3286. success = this->m_avParser.getASSImage(time, &images, &changed);
  3287. break;
  3288. }
  3289. if (success)
  3290. this->renderASS(images, changed, shader, vp);
  3291. }
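// Blends the libass image chain into the shared RGBA frame when it has changed, then
// draws that frame either flat or with the active 3D subtitle method (top/bottom,
// left/right, page flip, interlaced, checker board, anaglyph), applying the per-eye offset.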
  3292. void MediaPresenter::renderASS(ASS_Image *ass, bool blend, ShaderCompositer &shader, const VideoPicture *vp)
  3293. {
  3294. if (ass->w <= 0 || ass->h <= 0)
  3295. return;
  3296. Surface *frame = this->m_state->video.assFrame;
  3297. if (frame == nullptr)
  3298. return;
  3299. if (blend)
  3300. {
  3301. memset(frame->pixels[0], 0, frame->height * frame->lineSize[0]);
  3302. while (ass)
  3303. {
  3304. this->blendASS(ass, frame);
  3305. ass = ass->next;
  3306. }
  3307. }
  3308. QRect rect;
  3309. QPoint posOffset;
  3310. this->getPictureRect(&rect);
  3311. this->getSubtitlePositionOffsetByFrame(QSize(frame->width, frame->height), &posOffset);
  3312. rect.translate(-posOffset);
  3313. bool enabledBlend = GL_PREFIX glIsEnabled(GL_BLEND);
  3314. GL_PREFIX glEnable(GL_BLEND);
  3315. if (this->m_3dSubtitleMethod == AnyVODEnums::S3M_NONE)
  3316. {
  3317. QColor color = Qt::white;
  3318. color.setAlphaF(this->m_subtitleOpaque);
  3319. GL_PREFIX glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  3320. shader.startSimple();
  3321. this->renderASSSub(shader, ShaderCompositer::ST_SIMPLE, rect, *frame, blend, color, true);
  3322. shader.endSimple();
  3323. }
  3324. else
  3325. {
  3326. QColor firstColorValue = Qt::white;
  3327. QColor secondColorValue = Qt::white;
  3328. QRect firstRect = rect;
  3329. QRect secondRect = rect;
  3330. firstColorValue.setAlphaF(this->m_subtitleOpaque);
  3331. secondColorValue.setAlphaF(this->m_subtitleOpaque);
  3332. switch (this->m_3dSubtitleMethod)
  3333. {
  3334. case AnyVODEnums::S3M_TOP_BOTTOM:
  3335. {
  3336. firstRect.setHeight(firstRect.height() / 2);
  3337. secondRect.setY(secondRect.y() + firstRect.height());
  3338. break;
  3339. }
  3340. case AnyVODEnums::S3M_LEFT_RIGHT:
  3341. {
  3342. firstRect.setWidth(firstRect.width() / 2);
  3343. secondRect.setX(secondRect.x() + firstRect.width());
  3344. break;
  3345. }
  3346. default:
  3347. {
  3348. break;
  3349. }
  3350. }
  3351. firstRect.translate(-this->m_3dSubtitleOffset * this->m_devicePixelRatio);
  3352. secondRect.translate(this->m_3dSubtitleOffset * this->m_devicePixelRatio);
  3353. switch (this->m_3dSubtitleMethod)
  3354. {
  3355. case AnyVODEnums::S3M_PAGE_FLIP:
  3356. {
  3357. GL_PREFIX glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  3358. shader.startSimple();
  3359. if (vp->leftOrTop3D)
  3360. this->renderASSSub(shader, ShaderCompositer::ST_SIMPLE, firstRect, *frame, blend, firstColorValue, true);
  3361. else
  3362. this->renderASSSub(shader, ShaderCompositer::ST_SIMPLE, secondRect, *frame, blend, firstColorValue, true);
  3363. shader.endSimple();
  3364. break;
  3365. }
  3366. case AnyVODEnums::S3M_INTERLACED:
  3367. {
  3368. GL_PREFIX glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  3369. shader.startSubtitleInterlace(shader.getLeftOrTop());
  3370. this->renderASSSub(shader, ShaderCompositer::ST_SUBTITLE_INTERLACE, firstRect, *frame, blend, firstColorValue, true);
  3371. shader.endSubtitleInterlace();
  3372. shader.startSubtitleInterlace(!shader.getLeftOrTop());
  3373. this->renderASSSub(shader, ShaderCompositer::ST_SUBTITLE_INTERLACE, secondRect, *frame, blend, firstColorValue, false);
  3374. shader.endSubtitleInterlace();
  3375. break;
  3376. }
  3377. case AnyVODEnums::S3M_CHECKER_BOARD:
  3378. {
  3379. GL_PREFIX glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  3380. shader.startSubtitleCheckerBoard(shader.getLeftOrTop());
  3381. this->renderASSSub(shader, ShaderCompositer::ST_SUBTITLE_CHECKER_BOARD, firstRect, *frame, blend, firstColorValue, true);
  3382. shader.endSubtitleCheckerBoard();
  3383. shader.startSubtitleCheckerBoard(!shader.getLeftOrTop());
  3384. this->renderASSSub(shader, ShaderCompositer::ST_SUBTITLE_CHECKER_BOARD, secondRect, *frame, blend, firstColorValue, false);
  3385. shader.endSubtitleCheckerBoard();
  3386. break;
  3387. }
  3388. case AnyVODEnums::S3M_ANAGLYPH:
  3389. {
  3390. GL_PREFIX glBlendFunc(GL_SRC_COLOR, GL_ONE_MINUS_SRC_COLOR);
  3391. shader.startSubtitleAnaglyph(shader.getLeftOrTop());
  3392. this->renderASSSub(shader, ShaderCompositer::ST_SUBTITLE_ANAGLYPH, firstRect, *frame, blend, firstColorValue, true);
  3393. shader.endSubtitleAnaglyph();
  3394. shader.startSubtitleAnaglyph(!shader.getLeftOrTop());
  3395. this->renderASSSub(shader, ShaderCompositer::ST_SUBTITLE_ANAGLYPH, secondRect, *frame, blend, secondColorValue, false);
  3396. shader.endSubtitleAnaglyph();
  3397. break;
  3398. }
  3399. default:
  3400. {
  3401. GL_PREFIX glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  3402. shader.startSimple();
  3403. this->renderASSSub(shader, ShaderCompositer::ST_SIMPLE, firstRect, *frame, blend, firstColorValue, true);
  3404. this->renderASSSub(shader, ShaderCompositer::ST_SIMPLE, secondRect, *frame, blend, secondColorValue, false);
  3405. shader.endSimple();
  3406. break;
  3407. }
  3408. }
  3409. }
  3410. if (!enabledBlend)
  3411. GL_PREFIX glDisable(GL_BLEND);
  3412. }
  3413. void MediaPresenter::renderASSSub(ShaderCompositer &shader, ShaderCompositer::ShaderType type, const QRect &rect,
  3414. const Surface &frame, bool blend, const QColor &color, bool updateTexture) const
  3415. {
  3416. QRectF renderRect(0.0, 0.0, 1.0, 1.0);
  3417. if (blend)
  3418. {
  3419. this->renderTexture(shader, type, rect, this->m_texInfo[TEX_ASS_SUBTITLE], frame, renderRect, color, QMatrix4x4(), updateTexture);
  3420. }
  3421. else
  3422. {
  3423. Surface tmp = frame;
  3424. tmp.pixels[0] = nullptr;
  3425. this->renderTexture(shader, type, rect, this->m_texInfo[TEX_ASS_SUBTITLE], tmp, renderRect, color, QMatrix4x4(), updateTexture);
  3426. }
  3427. }
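// Alpha-blends a single libass glyph bitmap into the RGBA frame. Each coverage byte is
// scaled by the style opacity, k = src * a / 255 with a = 255 - the ASS colour's alpha
// (ASS stores transparency), and every channel is mixed as
// dst = (k * colour + (255 - k) * dst) / 255; the alpha channel, when present,
// accumulates as dst_a = (k * 255 + (255 - k) * dst_a) / 255.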
  3428. void MediaPresenter::blendASS(ASS_Image *single, Surface *frame) const
  3429. {
  3430. int pixelSize = GET_PIXEL_SIZE(true);
  3431. uint8_t r = GET_RED_VALUE(single->color);
  3432. uint8_t g = GET_GREEN_VALUE(single->color);
  3433. uint8_t b = GET_BLUE_VALUE(single->color);
  3434. uint8_t a = 255 - GET_ALPHA_VALUE(single->color);
  3435. uint8_t *src = single->bitmap;
  3436. uint8_t *dst = frame->pixels[0] + single->dst_y * frame->lineSize[0] + single->dst_x * pixelSize;
  3437. for (int y = 0; y < single->h; ++y)
  3438. {
  3439. for (int x = 0; x < single->w; ++x)
  3440. {
  3441. uint8_t k = src[x] * a / 255;
  3442. uint8_t revOp = 255 - k;
  3443. int xOffset = x * pixelSize;
  3444. if (pixelSize == 4)
  3445. {
  3446. int aOffset = xOffset + 3;
  3447. uint8_t alphaDst = dst[aOffset];
  3448. if (k == 0 && alphaDst == 0)
  3449. continue;
  3450. dst[aOffset] = (k * 255 + revOp * alphaDst) / 255;
  3451. }
  3452. int rOffset = xOffset + 0;
  3453. int gOffset = xOffset + 1;
  3454. int bOffset = xOffset + 2;
  3455. dst[rOffset] = (k * r + revOp * dst[rOffset]) / 255;
  3456. dst[gOffset] = (k * g + revOp * dst[gOffset]) / 255;
  3457. dst[bOffset] = (k * b + revOp * dst[bOffset]) / 255;
  3458. }
  3459. src += single->stride;
  3460. dst += frame->lineSize[0];
  3461. }
  3462. }
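// Allocates a Surface for the given size and format: one zero-initialized buffer per
// plane, sized lineSize * height, with the line size queried from FFmpeg.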
  3463. Surface* MediaPresenter::createSurface(int width, int height, AVPixelFormat format) const
  3464. {
  3465. Surface *surface;
  3466. int planes = av_pix_fmt_count_planes(format);
  3467. if (planes <= 0)
  3468. return nullptr;
  3469. surface = new Surface;
  3470. if (!surface)
  3471. return nullptr;
  3472. for (int i = 0; i < planes; i++)
  3473. {
  3474. int lineSize = av_image_get_linesize(format, width, i);
  3475. if (lineSize > 0)
  3476. {
  3477. surface->lineSize[i] = lineSize;
  3478. surface->pixels[i] = (uint8_t*)av_mallocz(surface->lineSize[i] * height);
  3479. }
  3480. }
  3481. surface->height = height;
  3482. surface->width = width;
  3483. surface->format = format;
  3484. surface->plane = planes;
  3485. return surface;
  3486. }
  3487. void MediaPresenter::deleteSurface(Surface *surface) const
  3488. {
  3489. for (int i = 0; i < surface->plane; i++)
  3490. {
  3491. if (surface->pixels[i])
  3492. av_freep(&surface->pixels[i]);
  3493. }
  3494. delete surface;
  3495. }
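// GPU YUV->RGB conversion is used only when the option is enabled, the format is one of
// the supported YUV layouts and, on Windows, the multitexture entry point is available.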
  3496. bool MediaPresenter::isUseGPUConvert(AVPixelFormat format) const
  3497. {
  3498. bool use = this->m_useGPUConvert;
  3499. #ifdef Q_OS_WIN
  3500. use &= GL_PREFIX glActiveTextureARB != nullptr;
  3501. #endif
  3502. return use && this->isYUV(format);
  3503. }
  3504. bool MediaPresenter::isYUV(AVPixelFormat format) const
  3505. {
  3506. switch (format)
  3507. {
  3508. case AV_PIX_FMT_YUV420P:
  3509. case AV_PIX_FMT_YUYV422:
  3510. case AV_PIX_FMT_UYVY422:
  3511. case AV_PIX_FMT_YVYU422:
  3512. case AV_PIX_FMT_NV12:
  3513. case AV_PIX_FMT_NV21:
  3514. return true;
  3515. default:
  3516. return false;
  3517. }
  3518. }
  3519. AVPixelFormat MediaPresenter::getCompatibleFormat(AVPixelFormat format) const
  3520. {
  3521. if (!this->m_useLowQualityMode)
  3522. return DEFAULT_PIX_FORMAT;
  3523. #if defined Q_OS_MOBILE
  3524. switch (format)
  3525. {
  3526. case AV_PIX_FMT_YUV420P9:
  3527. case AV_PIX_FMT_YUV420P10:
  3528. case AV_PIX_FMT_YUV420P12:
  3529. case AV_PIX_FMT_YUV420P14:
  3530. case AV_PIX_FMT_YUV420P16:
  3531. return AV_PIX_FMT_YUV420P;
  3532. default:
  3533. return DEFAULT_PIX_FORMAT;
  3534. }
  3535. #else
  3536. (void)format;
  3537. return DEFAULT_PIX_FORMAT;
  3538. #endif
  3539. }
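// Rough average luminance of an 8-bit plane, sampled every 4th byte; the value is fed to
// the screen shader normalized to 0..1 and falls back to 127 for other formats.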
  3540. uint8_t MediaPresenter::getLuminanceAvg(uint8_t *data, int size, AVPixelFormat format) const
  3541. {
  3542. if (Utils::is8bitFormat(format) && size)
  3543. {
  3544. uint64_t avg = 0;
  3545. int step = 4;
  3546. for (int i = 0; i < size; i += step)
  3547. avg += data[i];
  3548. return (uint8_t)(avg / (size / step));
  3549. }
  3550. return 127;
  3551. }
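// Rescales src into dest at the current frame size through a cached swscale context
// (fast bilinear), reallocating dest's surface whenever the dimensions change.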
  3552. void MediaPresenter::resizePicture(const VideoPicture *src, VideoPicture *dest) const
  3553. {
  3554. MediaState *ms = this->m_state;
  3555. int width = 0;
  3556. int height = 0;
  3557. this->getFrameSize(&width, &height);
  3558. if (src->surface)
  3559. {
  3560. if (width != dest->width || height != dest->height)
  3561. {
  3562. if (dest->surface)
  3563. this->deleteSurface(dest->surface);
  3564. *dest = *src;
  3565. dest->surface = this->createSurface(width, height, src->surface->format);
  3566. dest->height = height;
  3567. dest->width = width;
  3568. }
  3569. dest->lumAvg = src->lumAvg;
  3570. dest->pts = src->pts;
  3571. dest->time = src->time;
  3572. if (dest->surface)
  3573. {
  3574. ms->imageRGBConverter = sws_getCachedContext(
  3575. ms->imageRGBConverter,
  3576. src->width, src->height, src->surface->format,
  3577. width, height, dest->surface->format,
  3578. SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
  3579. AVFrame pict;
  3580. uint8_t *data[PICTURE_MAX_PLANE];
  3581. int lineSize[PICTURE_MAX_PLANE];
  3582. memset(&pict, 0, sizeof(pict));
  3583. memcpy(data, src->surface->pixels, sizeof(data));
  3584. memcpy(lineSize, src->surface->lineSize, sizeof(lineSize));
  3585. memcpy(pict.data, dest->surface->pixels, sizeof(data));
  3586. memcpy(pict.linesize, dest->surface->lineSize, sizeof(lineSize));
  3587. if (ms->imageRGBConverter)
  3588. sws_scale(ms->imageRGBConverter, data, lineSize, 0, src->height, pict.data, pict.linesize);
  3589. }
  3590. }
  3591. }
  3592. void MediaPresenter::copyPicture(const VideoPicture *src, VideoPicture *dest) const
  3593. {
  3594. int width = 0;
  3595. int height = 0;
  3596. this->getFrameSize(&width, &height);
  3597. if (src->surface)
  3598. {
  3599. if (width != dest->width || height != dest->height)
  3600. {
  3601. if (dest->surface)
  3602. this->deleteSurface(dest->surface);
  3603. *dest = *src;
  3604. dest->surface = this->createSurface(width, height, src->surface->format);
  3605. dest->height = height;
  3606. dest->width = width;
  3607. }
  3608. dest->lumAvg = src->lumAvg;
  3609. dest->pts = src->pts;
  3610. dest->time = src->time;
  3611. if (dest->surface)
  3612. {
  3613. AVFrame picDest;
  3614. AVFrame picSrc;
  3615. memcpy(picDest.data, dest->surface->pixels, sizeof(dest->surface->pixels));
  3616. memcpy(picDest.linesize, dest->surface->lineSize, sizeof(dest->surface->lineSize));
  3617. memcpy(picSrc.data, src->surface->pixels, sizeof(src->surface->pixels));
  3618. memcpy(picSrc.linesize, src->surface->lineSize, sizeof(src->surface->lineSize));
  3619. av_image_copy(picDest.data, picDest.linesize, (const uint8_t**)picSrc.data, picSrc.linesize, dest->surface->format, width, height);
  3620. }
  3621. }
  3622. }
  3623. bool MediaPresenter::getFrameSize(int *width, int *height) const
  3624. {
  3625. MediaState *ms = this->m_state;
  3626. if (ms && this->isEnabledVideo())
  3627. {
  3628. AVCodecContext *codec = ms->video.stream.ctx;
  3629. if (codec)
  3630. {
  3631. *width = codec->width;
  3632. *height = codec->height;
  3633. return true;
  3634. }
  3635. }
  3636. return false;
  3637. }
  3638. void MediaPresenter::getSubtitlePositionOffset(QPoint *ret) const
  3639. {
  3640. QRect desktopRect;
  3641. #if defined Q_OS_MOBILE
  3642. desktopRect = QRect(0, 0, this->m_width, this->m_height);
  3643. #else
  3644. QDesktopWidget *desktop = QApplication::desktop();
  3645. desktopRect = desktop->screenGeometry(desktop->primaryScreen());
  3646. #endif
  3647. ret->setX(Utils::mapTo(desktopRect.width(), this->m_width, this->m_horiPosition * MediaPresenter::DEFAULT_HORI_SUBTITLE_RATIO * this->m_devicePixelRatio));
  3648. ret->setY(Utils::mapTo(desktopRect.height(), this->m_height, this->m_vertPosition * MediaPresenter::DEFAULT_VIRT_SUBTITLE_RATIO * this->m_devicePixelRatio));
  3649. }
  3650. void MediaPresenter::getSubtitlePositionOffsetByFrame(const QSize &org, QPoint *ret) const
  3651. {
  3652. const FrameSize &frame = this->m_state->frameSize;
  3653. ret->setX(Utils::mapTo(org.width(), frame.width, this->m_horiPosition * MediaPresenter::DEFAULT_HORI_SUBTITLE_RATIO * this->m_devicePixelRatio));
  3654. ret->setY(Utils::mapTo(org.height(), frame.height, this->m_vertPosition * MediaPresenter::DEFAULT_VIRT_SUBTITLE_RATIO * this->m_devicePixelRatio));
  3655. }
  3656. void MediaPresenter::renderTexture2D(ShaderCompositer &shader, ShaderCompositer::ShaderType type, const QRect &rect,
  3657. AVPixelFormat format, const QMatrix4x4 &modelView, const QRectF &texCoord, const QColor &color) const
  3658. {
  3659. QVector4D vColor(color.redF(), color.greenF(), color.blueF(), color.alphaF());
  3660. QVector3D vertices[] =
  3661. {
  3662. QVector3D(rect.left(), rect.bottom(), 0.0f),
  3663. QVector3D(rect.right(), rect.bottom(), 0.0f),
  3664. QVector3D(rect.left(), rect.top(), 0.0f),
  3665. QVector3D(rect.right(), rect.top(), 0.0f)
  3666. };
  3667. QVector2D texCoords[] =
  3668. {
  3669. QVector2D(texCoord.left(), texCoord.bottom()),
  3670. QVector2D(texCoord.right(), texCoord.bottom()),
  3671. QVector2D(texCoord.left(), texCoord.top()),
  3672. QVector2D(texCoord.right(), texCoord.top())
  3673. };
  3674. shader.setRenderData(type, this->m_ortho, modelView, vertices, texCoords, vColor, format);
  3675. GL_PREFIX glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
  3676. }
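// Draws the 360-degree sphere. Texture coordinates are picked from the VR input layout
// (side-by-side or top-and-bottom, left or right eye first); with an active VR source the
// sphere is rendered into the per-eye viewport described by rect and the previous
// viewport is restored afterwards.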
  3677. void MediaPresenter::renderTexture3D(ShaderCompositer &shader, ShaderCompositer::ShaderType type, const QRect &rect, AVPixelFormat format, const QMatrix4x4 &modelView, const QColor &color, bool leftOrTop) const
  3678. {
  3679. QVector4D vColor(color.redF(), color.greenF(), color.blueF(), color.alphaF());
  3680. const QVector2D *texCoords;
  3681. switch (this->m_vrInputSource)
  3682. {
  3683. case AnyVODEnums::VRI_LEFT_RIGHT_LEFT_PRIOR:
  3684. texCoords = Sphere::getSBSTexCoords(leftOrTop);
  3685. break;
  3686. case AnyVODEnums::VRI_LEFT_RIGHT_RIGHT_PRIOR:
  3687. texCoords = Sphere::getSBSTexCoords(!leftOrTop);
  3688. break;
  3689. case AnyVODEnums::VRI_TOP_BOTTOM_TOP_PRIOR:
  3690. texCoords = Sphere::getTABTexCoords(leftOrTop);
  3691. break;
  3692. case AnyVODEnums::VRI_TOP_BOTTOM_BOTTOM_PRIOR:
  3693. texCoords = Sphere::getTABTexCoords(!leftOrTop);
  3694. break;
  3695. default:
  3696. texCoords = Sphere::getTexCoords();
  3697. break;
  3698. }
  3699. QMatrix4x4 perspective;
  3700. perspective.setToIdentity();
  3701. if (this->m_vrInputSource == AnyVODEnums::VRI_NONE)
  3702. {
  3703. perspective.perspective(PERSPECTIVE_FOV_Y / 2.0f, (float)this->m_width / this->m_height, PERSPECTIVE_ZNEAR, PERSPECTIVE_ZFAR);
  3704. shader.setRenderData(type, perspective, modelView, Sphere::getVertices(), texCoords, vColor, format);
  3705. GL_PREFIX glDrawElements(GL_TRIANGLES, Sphere::getIndexCount(), GL_UNSIGNED_SHORT, Sphere::getIndices());
  3706. }
  3707. else
  3708. {
  3709. GLint viewport[4];
  3710. perspective.perspective(PERSPECTIVE_FOV_Y, (float)rect.width() / this->m_height, PERSPECTIVE_ZNEAR, PERSPECTIVE_ZFAR);
  3711. GL_PREFIX glGetIntegerv(GL_VIEWPORT, viewport);
  3712. GL_PREFIX glViewport(rect.x(), 0, rect.width(), this->m_height);
  3713. shader.setRenderData(type, perspective, modelView, Sphere::getVertices(), texCoords, vColor, format);
  3714. GL_PREFIX glDrawElements(GL_TRIANGLES, Sphere::getIndexCount(), GL_UNSIGNED_SHORT, Sphere::getIndices());
  3715. GL_PREFIX glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
  3716. }
  3717. }
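// Core texture draw. Binds either the per-plane YUV textures (GPU conversion path) or the
// single RGBA texture, uploads new pixels when updateTexture is set (through a PBO on
// desktop builds), then draws the calibration grid (distortion adjust mode), the
// 360-degree sphere or a plain 2D quad, and finally unbinds and, on the GPU conversion
// path, advances the per-plane texture ring indices after a fresh upload.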
  3718. void MediaPresenter::renderTexture(ShaderCompositer &shader, ShaderCompositer::ShaderType type,
  3719. const QRect &rect, TextureInfo &texInfo, const Surface &surface,
  3720. const QRectF &renderRect, const QColor &color, const QMatrix4x4 &modelView,
  3721. bool updateTexture) const
  3722. {
  3723. if (this->isUseGPUConvert(surface.format) && type == ShaderCompositer::ST_SCREEN)
  3724. {
  3725. for (int i = 0; i < surface.plane; i++)
  3726. {
  3727. const TextureInfo &info = this->m_texInfo[TEX_YUV_0 + i];
  3728. #ifdef Q_OS_WIN
  3729. GL_PREFIX glActiveTextureARB(GL_TEXTURE0 + i);
  3730. #else
  3731. GL_PREFIX glActiveTexture(GL_TEXTURE0 + i);
  3732. #endif
  3733. GL_PREFIX glBindTexture(GL_TEXTURE_2D, info.id[info.index]);
  3734. shader.setTextureSampler(type, i, surface.format);
  3735. }
  3736. }
  3737. else
  3738. {
  3739. GL_PREFIX glBindTexture(GL_TEXTURE_2D, texInfo.id[texInfo.index]);
  3740. }
  3741. if (surface.pixels[0])
  3742. {
  3743. bool uploadable = true;
  3744. GLvoid *pixels[PICTURE_MAX_PLANE] = {nullptr, };
  3745. bool usePBO = this->isUsingPBO(surface.format);
  3746. if (updateTexture)
  3747. {
  3748. if (usePBO)
  3749. {
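// Desktop PBO path: orphan the pixel-unpack buffer, map it, copy the frame in and unmap;
// the later glTexSubImage2D call then reads from the bound buffer (pixels[0] == 0 is an
// offset into the PBO). The index cycles through MAX_PBO_COUNT buffers so uploads can
// overlap with the previous draw.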
  3750. #if !defined Q_OS_MOBILE
  3751. size_t bufSize = surface.lineSize[0] * surface.height;
  3752. GLubyte *buf;
  3753. GL_PREFIX glBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, texInfo.idPBO[texInfo.indexPBO]);
  3754. GL_PREFIX glBufferDataARB(GL_PIXEL_UNPACK_BUFFER_ARB, bufSize, nullptr, GL_STREAM_DRAW_ARB);
  3755. buf = (GLubyte*)GL_PREFIX glMapBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, GL_WRITE_ONLY_ARB);
  3756. if (buf)
  3757. {
  3758. memcpy(buf, surface.pixels[0], bufSize);
  3759. GL_PREFIX glUnmapBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB);
  3760. }
  3761. else
  3762. {
  3763. uploadable = false;
  3764. }
  3765. texInfo.indexPBO = (texInfo.indexPBO + 1) % MAX_PBO_COUNT;
  3766. pixels[0] = (GLvoid*)0;
  3767. #endif
  3768. }
  3769. else
  3770. {
  3771. for (int i = 0; i < surface.plane; i++)
  3772. pixels[i] = surface.pixels[i];
  3773. }
  3774. }
  3775. else
  3776. {
  3777. uploadable = false;
  3778. }
  3779. if (uploadable)
  3780. {
  3781. if (surface.plane > 0 && this->isUseGPUConvert(surface.format) && type == ShaderCompositer::ST_SCREEN)
  3782. {
  3783. GLsizei widths[surface.plane];
  3784. GLsizei heights[surface.plane];
  3785. GLenum formats[surface.plane];
  3786. GLint aligns[surface.plane];
  3787. switch (surface.format)
  3788. {
  3789. case AV_PIX_FMT_YUV420P:
  3790. {
  3791. widths[0] = surface.width;
  3792. widths[1] = surface.width / 2;
  3793. widths[2] = surface.width / 2;
  3794. heights[0] = surface.height;
  3795. heights[1] = surface.height / 2;
  3796. heights[2] = surface.height / 2;
  3797. formats[0] = GL_LUMINANCE;
  3798. formats[1] = GL_LUMINANCE;
  3799. formats[2] = GL_LUMINANCE;
  3800. aligns[0] = 1;
  3801. aligns[1] = 1;
  3802. aligns[2] = 1;
  3803. break;
  3804. }
  3805. case AV_PIX_FMT_NV12:
  3806. case AV_PIX_FMT_NV21:
  3807. {
  3808. widths[0] = surface.width;
  3809. widths[1] = surface.width;
  3810. heights[0] = surface.height;
  3811. heights[1] = surface.height / 2;
  3812. formats[0] = GL_LUMINANCE;
  3813. formats[1] = GL_LUMINANCE_ALPHA;
  3814. aligns[0] = 1;
  3815. aligns[1] = 2;
  3816. break;
  3817. }
  3818. case AV_PIX_FMT_YUYV422:
  3819. case AV_PIX_FMT_UYVY422:
  3820. case AV_PIX_FMT_YVYU422:
  3821. {
  3822. widths[0] = surface.width / 2;
  3823. heights[0] = surface.height;
  3824. formats[0] = GL_RGBA;
  3825. aligns[0] = 4;
  3826. break;
  3827. }
  3828. default:
  3829. {
  3830. break;
  3831. }
  3832. }
  3833. for (int i = 0; i < surface.plane; i++)
  3834. {
  3835. TextureInfo &info = this->m_texInfo[TEX_YUV_0 + i];
  3836. GL_PREFIX glPixelStorei(GL_UNPACK_ALIGNMENT, aligns[i]);
  3837. GL_PREFIX glBindTexture(GL_TEXTURE_2D, info.id[info.index]);
  3838. if (info.init[info.index])
  3839. {
  3840. GL_PREFIX glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, widths[i], heights[i], formats[i], GL_UNSIGNED_BYTE, pixels[i]);
  3841. }
  3842. else
  3843. {
  3844. GL_PREFIX glTexImage2D(GL_TEXTURE_2D, 0, formats[i], widths[i], heights[i], 0, formats[i], GL_UNSIGNED_BYTE, pixels[i]);
  3845. info.init[info.index] = true;
  3846. }
  3847. }
  3848. }
  3849. else
  3850. {
  3851. GL_PREFIX glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
  3852. if (texInfo.init[texInfo.index])
  3853. {
  3854. GL_PREFIX glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, surface.width, surface.height, GL_RGBA, GL_UNSIGNED_BYTE, pixels[0]);
  3855. }
  3856. else
  3857. {
  3858. GL_PREFIX glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, surface.width, surface.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels[0]);
  3859. texInfo.init[texInfo.index] = true;
  3860. }
  3861. }
  3862. }
  3863. if (updateTexture)
  3864. {
  3865. #if !defined Q_OS_MOBILE
  3866. if (usePBO)
  3867. GL_PREFIX glBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
  3868. #endif
  3869. }
  3870. }
  3871. if (this->m_distortionAdjustMode != AnyVODEnums::DAM_NONE)
  3872. {
  3873. float ratio = this->m_width / 2560.0f;
  3874. int verticalGap = 50 * ratio;
3875. int horizontalGap = 50 * ratio;
3876. int verticalCount = this->m_width / verticalGap + 1;
3877. int horizontalCount = this->m_height / horizontalGap + 1;
  3878. bool enabledBlend = GL_PREFIX glIsEnabled(GL_BLEND);
  3879. GLfloat lineWidth = 0.0f;
  3880. GL_PREFIX glGetFloatv(GL_LINE_WIDTH, &lineWidth);
  3881. GL_PREFIX glDisable(GL_BLEND);
  3882. GL_PREFIX glLineWidth(2.0f);
  3883. QVector4D vColor(1.0f, 1.0f, 1.0f, 1.0f);
  3884. QVector<QVector3D> vertices;
  3885. for (int i = 0; i < verticalCount; i++)
  3886. {
  3887. vertices.append(QVector3D(i * verticalGap, 0, 0));
  3888. vertices.append(QVector3D(i * verticalGap, this->m_height, 0));
  3889. }
3890. for (int i = 0; i < horizontalCount; i++)
3891. {
3892. vertices.append(QVector3D(0, i * horizontalGap, 0));
3893. vertices.append(QVector3D(this->m_width, i * horizontalGap, 0));
  3894. }
  3895. shader.startLine();
  3896. shader.setRenderData(ShaderCompositer::ST_LINE, this->m_ortho, QMatrix4x4(), vertices.data(), nullptr, vColor, AV_PIX_FMT_NONE);
  3897. GL_PREFIX glDrawArrays(GL_LINES, 0, vertices.size());
  3898. shader.endLine();
  3899. if (enabledBlend)
  3900. GL_PREFIX glEnable(GL_BLEND);
  3901. GL_PREFIX glLineWidth(lineWidth);
  3902. }
  3903. else
  3904. {
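// updateTexture doubles as the left/top selector on the sphere path: callers pass true
// for the first (left/top) render and false for the second, so the flag is forwarded to
// renderTexture3D as leftOrTop.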
  3905. if (shader.is360Degree())
  3906. this->renderTexture3D(shader, type, rect, surface.format, modelView, color, updateTexture);
  3907. else
  3908. this->renderTexture2D(shader, type, rect, surface.format, modelView, renderRect, color);
  3909. }
  3910. if (this->isUseGPUConvert(surface.format) && type == ShaderCompositer::ST_SCREEN)
  3911. {
  3912. const MediaState *ms = this->m_state;
  3913. const Pause &pause = ms->pause;
  3914. const Seek &seek = ms->seek;
  3915. bool incIndex = updateTexture && !pause.pause && !seek.requestPauseOnRender && surface.pixels[0];
  3916. for (int i = surface.plane - 1; i >= 0 ; i--)
  3917. {
  3918. #ifdef Q_OS_WIN
  3919. GL_PREFIX glActiveTextureARB(GL_TEXTURE0 + i);
  3920. #else
  3921. GL_PREFIX glActiveTexture(GL_TEXTURE0 + i);
  3922. #endif
  3923. GL_PREFIX glBindTexture(GL_TEXTURE_2D, 0);
  3924. TextureInfo &info = this->m_texInfo[TEX_YUV_0 + i];
  3925. if (incIndex)
  3926. info.index = (info.index + 1) % info.textureCount;
  3927. }
  3928. }
  3929. else
  3930. {
  3931. GL_PREFIX glBindTexture(GL_TEXTURE_2D, 0);
  3932. }
  3933. }
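// Clears a rectangular region of the subtitle textures by uploading a fully transparent
// image of the same size.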
  3934. void MediaPresenter::clearTexture(const QRect &rect, const TextureInfo &texInfo) const
  3935. {
  3936. QImage clear(rect.width(), rect.height(), QImage::Format_ARGB32);
  3937. clear.fill(Qt::transparent);
  3938. for (unsigned int i = 0; i < texInfo.textureCount; i++)
  3939. {
3940. GL_PREFIX glBindTexture(GL_TEXTURE_2D, texInfo.id[i]);
  3941. GL_PREFIX glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
  3942. GL_PREFIX glTexSubImage2D(GL_TEXTURE_2D, 0, rect.x(), rect.y(), rect.width(), rect.height(), GL_RGBA, GL_UNSIGNED_BYTE, clear.constBits());
  3943. }
  3944. GL_PREFIX glBindTexture(GL_TEXTURE_2D, 0);
  3945. }
  3946. bool MediaPresenter::isSideBySide() const
  3947. {
  3948. switch (this->m_3dMethod)
  3949. {
  3950. case AnyVODEnums::V3M_HALF_LEFT:
  3951. case AnyVODEnums::V3M_HALF_RIGHT:
  3952. case AnyVODEnums::V3M_FULL_LEFT_RIGHT:
  3953. case AnyVODEnums::V3M_PAGE_FLIP_LEFT_RIGHT_LEFT_PRIOR:
  3954. case AnyVODEnums::V3M_PAGE_FLIP_LEFT_RIGHT_RIGHT_PRIOR:
  3955. case AnyVODEnums::V3M_ROW_LEFT_RIGHT_LEFT_PRIOR:
  3956. case AnyVODEnums::V3M_ROW_LEFT_RIGHT_RIGHT_PRIOR:
  3957. case AnyVODEnums::V3M_COL_LEFT_RIGHT_LEFT_PRIOR:
  3958. case AnyVODEnums::V3M_COL_LEFT_RIGHT_RIGHT_PRIOR:
  3959. case AnyVODEnums::V3M_RED_CYAN_LEFT_RIGHT_LEFT_PRIOR:
  3960. case AnyVODEnums::V3M_RED_CYAN_LEFT_RIGHT_RIGHT_PRIOR:
  3961. case AnyVODEnums::V3M_GREEN_MAGENTA_LEFT_RIGHT_LEFT_PRIOR:
  3962. case AnyVODEnums::V3M_GREEN_MAGENTA_LEFT_RIGHT_RIGHT_PRIOR:
  3963. case AnyVODEnums::V3M_YELLOW_BLUE_LEFT_RIGHT_LEFT_PRIOR:
  3964. case AnyVODEnums::V3M_YELLOW_BLUE_LEFT_RIGHT_RIGHT_PRIOR:
  3965. case AnyVODEnums::V3M_RED_BLUE_LEFT_RIGHT_LEFT_PRIOR:
  3966. case AnyVODEnums::V3M_RED_BLUE_LEFT_RIGHT_RIGHT_PRIOR:
  3967. case AnyVODEnums::V3M_RED_GREEN_LEFT_RIGHT_LEFT_PRIOR:
  3968. case AnyVODEnums::V3M_RED_GREEN_LEFT_RIGHT_RIGHT_PRIOR:
  3969. case AnyVODEnums::V3M_CHECKER_BOARD_LEFT_RIGHT_LEFT_PRIOR:
  3970. case AnyVODEnums::V3M_CHECKER_BOARD_LEFT_RIGHT_RIGHT_PRIOR:
  3971. return true;
  3972. default:
  3973. return false;
  3974. }
  3975. }
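// Derives the normalized texture sub-rectangle for the configured 3D input layout and,
// for the interleaved/anaglyph/checker-board variants, the left-or-top-first flag. The
// adjust term widens the right/bottom half by one texel, presumably to avoid sampling
// exactly on the seam. Returns false when no 3D method is active.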
  3976. bool MediaPresenter::get3DParameters(bool leftOrTop3D, const QSizeF &adjust, const QSizeF &surfaceSize, QRectF *renderRect, bool *sideBySide, bool *leftOrTop)
  3977. {
  3978. switch (this->m_3dMethod)
  3979. {
  3980. case AnyVODEnums::V3M_HALF_LEFT:
  3981. {
  3982. *renderRect = QRectF(0.0, 0.0, surfaceSize.width() / 2.0, surfaceSize.height());
  3983. break;
  3984. }
  3985. case AnyVODEnums::V3M_HALF_RIGHT:
  3986. {
  3987. *renderRect = QRectF(surfaceSize.width() / 2.0, 0.0, surfaceSize.width() / 2.0 + adjust.width(), surfaceSize.height());
  3988. break;
  3989. }
  3990. case AnyVODEnums::V3M_HALF_TOP:
  3991. {
  3992. *renderRect = QRectF(0.0, 0.0, surfaceSize.width(), surfaceSize.height() / 2.0);
  3993. break;
  3994. }
  3995. case AnyVODEnums::V3M_HALF_BOTTOM:
  3996. {
  3997. *renderRect = QRectF(0.0, surfaceSize.height() / 2.0, surfaceSize.width(), surfaceSize.height() / 2.0 + adjust.height());
  3998. break;
  3999. }
  4000. case AnyVODEnums::V3M_FULL_LEFT_RIGHT:
  4001. case AnyVODEnums::V3M_FULL_TOP_BOTTOM:
  4002. {
  4003. *renderRect = QRectF(0.0, 0.0, surfaceSize.width(), surfaceSize.height());
  4004. break;
  4005. }
  4006. case AnyVODEnums::V3M_PAGE_FLIP_LEFT_RIGHT_LEFT_PRIOR:
  4007. case AnyVODEnums::V3M_PAGE_FLIP_LEFT_RIGHT_RIGHT_PRIOR:
  4008. {
  4009. if (leftOrTop3D)
  4010. *renderRect = QRectF(0.0, 0.0, surfaceSize.width() / 2.0, surfaceSize.height());
  4011. else
  4012. *renderRect = QRectF(surfaceSize.width() / 2.0, 0.0, surfaceSize.width() / 2.0 + adjust.width(), surfaceSize.height());
  4013. break;
  4014. }
  4015. case AnyVODEnums::V3M_PAGE_FLIP_TOP_BOTTOM_TOP_PRIOR:
  4016. case AnyVODEnums::V3M_PAGE_FLIP_TOP_BOTTOM_BOTTOM_PRIOR:
  4017. {
  4018. if (leftOrTop3D)
  4019. *renderRect = QRectF(0.0, 0.0, surfaceSize.width(), surfaceSize.height() / 2.0);
  4020. else
  4021. *renderRect = QRectF(0.0, surfaceSize.height() / 2.0, surfaceSize.width(), surfaceSize.height() / 2.0 + adjust.height());
  4022. break;
  4023. }
  4024. case AnyVODEnums::V3M_ROW_LEFT_RIGHT_LEFT_PRIOR:
  4025. case AnyVODEnums::V3M_ROW_TOP_BOTTOM_TOP_PRIOR:
  4026. case AnyVODEnums::V3M_COL_LEFT_RIGHT_LEFT_PRIOR:
  4027. case AnyVODEnums::V3M_COL_TOP_BOTTOM_TOP_PRIOR:
  4028. case AnyVODEnums::V3M_RED_CYAN_LEFT_RIGHT_LEFT_PRIOR:
  4029. case AnyVODEnums::V3M_RED_CYAN_TOP_BOTTOM_TOP_PRIOR:
  4030. case AnyVODEnums::V3M_GREEN_MAGENTA_LEFT_RIGHT_LEFT_PRIOR:
  4031. case AnyVODEnums::V3M_GREEN_MAGENTA_TOP_BOTTOM_TOP_PRIOR:
  4032. case AnyVODEnums::V3M_YELLOW_BLUE_LEFT_RIGHT_LEFT_PRIOR:
  4033. case AnyVODEnums::V3M_YELLOW_BLUE_TOP_BOTTOM_TOP_PRIOR:
  4034. case AnyVODEnums::V3M_RED_BLUE_LEFT_RIGHT_LEFT_PRIOR:
  4035. case AnyVODEnums::V3M_RED_BLUE_TOP_BOTTOM_TOP_PRIOR:
  4036. case AnyVODEnums::V3M_RED_GREEN_LEFT_RIGHT_LEFT_PRIOR:
  4037. case AnyVODEnums::V3M_RED_GREEN_TOP_BOTTOM_TOP_PRIOR:
  4038. case AnyVODEnums::V3M_CHECKER_BOARD_LEFT_RIGHT_LEFT_PRIOR:
  4039. case AnyVODEnums::V3M_CHECKER_BOARD_TOP_BOTTOM_TOP_PRIOR:
  4040. {
  4041. *leftOrTop = true;
  4042. *renderRect = QRectF(0.0, 0.0, surfaceSize.width(), surfaceSize.height());
  4043. break;
  4044. }
  4045. case AnyVODEnums::V3M_ROW_LEFT_RIGHT_RIGHT_PRIOR:
  4046. case AnyVODEnums::V3M_ROW_TOP_BOTTOM_BOTTOM_PRIOR:
  4047. case AnyVODEnums::V3M_COL_LEFT_RIGHT_RIGHT_PRIOR:
  4048. case AnyVODEnums::V3M_COL_TOP_BOTTOM_BOTTOM_PRIOR:
  4049. case AnyVODEnums::V3M_RED_CYAN_LEFT_RIGHT_RIGHT_PRIOR:
  4050. case AnyVODEnums::V3M_RED_CYAN_TOP_BOTTOM_BOTTOM_PRIOR:
  4051. case AnyVODEnums::V3M_GREEN_MAGENTA_LEFT_RIGHT_RIGHT_PRIOR:
  4052. case AnyVODEnums::V3M_GREEN_MAGENTA_TOP_BOTTOM_BOTTOM_PRIOR:
  4053. case AnyVODEnums::V3M_YELLOW_BLUE_LEFT_RIGHT_RIGHT_PRIOR:
  4054. case AnyVODEnums::V3M_YELLOW_BLUE_TOP_BOTTOM_BOTTOM_PRIOR:
  4055. case AnyVODEnums::V3M_RED_BLUE_LEFT_RIGHT_RIGHT_PRIOR:
  4056. case AnyVODEnums::V3M_RED_BLUE_TOP_BOTTOM_BOTTOM_PRIOR:
  4057. case AnyVODEnums::V3M_RED_GREEN_LEFT_RIGHT_RIGHT_PRIOR:
  4058. case AnyVODEnums::V3M_RED_GREEN_TOP_BOTTOM_BOTTOM_PRIOR:
  4059. case AnyVODEnums::V3M_CHECKER_BOARD_LEFT_RIGHT_RIGHT_PRIOR:
  4060. case AnyVODEnums::V3M_CHECKER_BOARD_TOP_BOTTOM_BOTTOM_PRIOR:
  4061. {
  4062. *leftOrTop = false;
  4063. *renderRect = QRectF(0.0, 0.0, surfaceSize.width(), surfaceSize.height());
  4064. break;
  4065. }
  4066. default:
  4067. {
  4068. return false;
  4069. }
  4070. }
  4071. *sideBySide = this->isSideBySide();
  4072. return true;
  4073. }
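// Draws subtitles, the detail overlay and the option description. For anaglyph subtitles
// everything is rendered into a dedicated framebuffer first (temporarily releasing the
// distortion FBO when it is bound) and then composited back over the scene.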
  4074. void MediaPresenter::drawFonts(ShaderCompositer &shader, QRect rect, const VideoPicture *vp, bool distortionBound)
  4075. {
  4076. if (this->m_3dSubtitleMethod == AnyVODEnums::S3M_ANAGLYPH)
  4077. {
  4078. if (distortionBound)
  4079. this->m_distortionFrameBuffer->release();
  4080. this->m_anaglyphFrameBuffer->bind();
  4081. GL_PREFIX glClear(GL_COLOR_BUFFER_BIT);
  4082. }
  4083. if (this->m_showSubtitle)
  4084. this->drawSubtitles(shader, vp);
  4085. if (this->m_showDetail)
  4086. this->drawDetail(shader, vp);
  4087. if (this->m_showingOptionDesc && !this->m_captureMode)
  4088. this->drawOptionDesc(shader, vp);
  4089. if (this->m_3dSubtitleMethod == AnyVODEnums::S3M_ANAGLYPH)
  4090. {
  4091. this->m_anaglyphFrameBuffer->release();
  4092. if (distortionBound)
  4093. this->m_distortionFrameBuffer->bind();
  4094. GLfloat width = this->m_width / (GLfloat)this->m_anaglyphFrameBuffer->width();
  4095. GLfloat height = this->m_height / (GLfloat)this->m_anaglyphFrameBuffer->height();
  4096. bool enabledBlend = GL_PREFIX glIsEnabled(GL_BLEND);
  4097. shader.startSimple();
  4098. GL_PREFIX glEnable(GL_BLEND);
  4099. GL_PREFIX glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  4100. GL_PREFIX glBindTexture(GL_TEXTURE_2D, this->m_anaglyphFrameBuffer->texture());
  4101. QVector4D vColor(1.0f, 1.0f, 1.0f, 1.0f);
  4102. QVector3D vertices[] =
  4103. {
  4104. QVector3D(rect.left(), rect.bottom(), 0.0f),
  4105. QVector3D(rect.right(), rect.bottom(), 0.0f),
  4106. QVector3D(rect.left(), rect.top(), 0.0f),
  4107. QVector3D(rect.right(), rect.top(), 0.0f)
  4108. };
  4109. QVector2D texCoords[] =
  4110. {
  4111. QVector2D(0.0f, 0.0f),
  4112. QVector2D(width, 0.0f),
  4113. QVector2D(0.0f, height),
  4114. QVector2D(width, height)
  4115. };
  4116. shader.setRenderData(ShaderCompositer::ST_SIMPLE, this->m_ortho, QMatrix4x4(), vertices, texCoords, vColor, AV_PIX_FMT_NONE);
  4117. GL_PREFIX glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
  4118. GL_PREFIX glBindTexture(GL_TEXTURE_2D, 0);
  4119. shader.endSimple();
  4120. if (!enabledBlend)
  4121. GL_PREFIX glDisable(GL_BLEND);
  4122. }
  4123. }
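// Per-frame presentation: set up the 3D/VR parameters, upload and draw the video surface
// (per eye where required), draw the overlays, and for VR or 360-degree output run the
// extra pass that splits the frame into left/right eye buffers and applies the optional
// lens distortion.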
  4124. void MediaPresenter::displayVideo(ShaderCompositer &shader, const VideoPicture *vp)
  4125. {
  4126. Surface *surface = vp->surface;
  4127. QRect rect;
  4128. this->getPictureRect(&rect);
  4129. if (this->m_vrInputSource != AnyVODEnums::VRI_NONE || shader.is360Degree())
  4130. {
  4131. this->m_distortionFrameBuffer->bind();
  4132. GL_PREFIX glClear(GL_COLOR_BUFFER_BIT);
  4133. }
  4134. if (surface)
  4135. {
  4136. QRectF renderRect;
  4137. TextureInfo &texInfo = this->m_texInfo[TEX_MOVIE_FRAME];
  4138. bool leftOrTop = false;
  4139. bool sideBySide = false;
  4140. QSizeF surfaceSize(surface->width, surface->height);
  4141. QSizeF screenSize(1.0, 1.0);
  4142. QSizeF adjust(1.0 / surface->width, 1.0 / surface->height);
  4143. if (!this->get3DParameters(vp->leftOrTop3D, adjust, screenSize, &renderRect, &sideBySide, &leftOrTop))
  4144. renderRect = QRectF(QPointF(0.0, 0.0), screenSize);
  4145. shader.setup3D(sideBySide, leftOrTop);
  4146. shader.startScreen(surfaceSize, screenSize, QSize(texInfo.maxSize, texInfo.maxSize), this->getMasterClock(), vp->lumAvg / 255.0, surface->format);
  4147. GLfloat degree;
  4148. this->m_cameraLock.lock();
  4149. degree = (float)(this->m_rotation + this->m_sensorRotation);
  4150. this->m_cameraLock.unlock();
  4151. switch (this->m_screenRotationDegree)
  4152. {
  4153. case AnyVODEnums::SRD_90:
  4154. degree += 90.0f;
  4155. break;
  4156. case AnyVODEnums::SRD_180:
  4157. degree += 180.0f;
  4158. break;
  4159. case AnyVODEnums::SRD_270:
  4160. degree += 270.0f;
  4161. break;
  4162. default:
  4163. break;
  4164. }
  4165. QMatrix4x4 modelViewLeft;
  4166. QMatrix4x4 modelViewRight;
  4167. if (shader.is360Degree())
  4168. {
  4169. if (this->m_vrInputSource == AnyVODEnums::VRI_VIRTUAL_3D)
  4170. {
  4171. modelViewLeft.setToIdentity();
  4172. modelViewLeft.rotate(-degree, 1.0f, 0.0f, 0.0f);
  4173. modelViewRight = modelViewLeft;
  4174. this->m_cameraLock.lock();
  4175. this->m_camera.moveLeftRight(-this->m_virtual3DDepth);
  4176. modelViewLeft = this->m_camera.getMatrix() * modelViewLeft;
  4177. this->m_camera.moveLeftRight(this->m_virtual3DDepth);
  4178. modelViewRight = this->m_camera.getMatrix() * modelViewRight;
  4179. this->m_camera.resetPosition();
  4180. this->m_cameraLock.unlock();
  4181. }
  4182. else
  4183. {
  4184. this->m_cameraLock.lock();
  4185. modelViewLeft = this->m_camera.getMatrix();
  4186. this->m_cameraLock.unlock();
  4187. modelViewLeft.rotate(-degree, 1.0f, 0.0f, 0.0f);
  4188. }
  4189. }
  4190. else
  4191. {
  4192. if (this->m_vrInputSource == AnyVODEnums::VRI_VIRTUAL_3D)
  4193. {
  4194. modelViewLeft.setToIdentity();
  4195. modelViewLeft.translate((this->m_width / 2.0f), (this->m_height / 2.0f), 0.0f);
  4196. modelViewLeft.rotate(degree, 0.0f, 0.0f, 1.0f);
  4197. modelViewLeft.translate(-(this->m_width / 2.0f), -(this->m_height / 2.0f), 0.0f);
  4198. modelViewRight = modelViewLeft;
  4199. modelViewLeft.translate(-80.0f, 0.0f);
  4200. modelViewRight.translate(80.0f, 0.0f);
  4201. }
  4202. else
  4203. {
  4204. modelViewLeft.setToIdentity();
  4205. modelViewLeft.translate((this->m_width / 2.0f), (this->m_height / 2.0f), 0.0f);
  4206. modelViewLeft.rotate(degree, 0.0f, 0.0f, 1.0f);
  4207. modelViewLeft.translate(-(this->m_width / 2.0f), -(this->m_height / 2.0f), 0.0f);
  4208. }
  4209. }
  4210. QColor color = Qt::white;
  4211. color.setAlphaF(1.0);
  4212. switch (this->m_vrInputSource)
  4213. {
  4214. case AnyVODEnums::VRI_LEFT_RIGHT_LEFT_PRIOR:
  4215. case AnyVODEnums::VRI_LEFT_RIGHT_RIGHT_PRIOR:
  4216. case AnyVODEnums::VRI_TOP_BOTTOM_TOP_PRIOR:
  4217. case AnyVODEnums::VRI_TOP_BOTTOM_BOTTOM_PRIOR:
  4218. {
  4219. QRect firstRect = rect;
  4220. QRect secondRect = rect;
  4221. firstRect.setWidth(rect.width() / 2);
  4222. secondRect.setLeft(rect.x() + (rect.width() - 1) / 2);
  4223. secondRect.setWidth(rect.width() / 2);
  4224. QRectF firstTexRect;
  4225. QRectF secondTexRect;
  4226. if (!shader.is360Degree())
  4227. {
  4228. firstTexRect = renderRect;
  4229. secondTexRect = renderRect;
  4230. if (this->m_vrInputSource == AnyVODEnums::VRI_LEFT_RIGHT_LEFT_PRIOR ||
  4231. this->m_vrInputSource == AnyVODEnums::VRI_LEFT_RIGHT_RIGHT_PRIOR)
  4232. {
  4233. firstTexRect.setWidth(renderRect.width() / 2.0);
  4234. secondTexRect.setLeft(renderRect.right() / 2.0);
  4235. }
  4236. else
  4237. {
  4238. firstTexRect.setHeight(renderRect.height() / 2.0);
  4239. secondTexRect.setTop(renderRect.bottom() / 2.0);
  4240. }
  4241. }
  4242. this->renderTexture(shader, ShaderCompositer::ST_SCREEN, firstRect, texInfo, *surface, firstTexRect, color, modelViewLeft, true);
  4243. this->renderTexture(shader, ShaderCompositer::ST_SCREEN, secondRect, texInfo, *surface, secondTexRect, color, modelViewLeft, false);
  4244. break;
  4245. }
  4246. case AnyVODEnums::VRI_COPY:
  4247. {
  4248. QRect firstRect = rect;
  4249. QRect secondRect = rect;
  4250. firstRect.setWidth(rect.width() / 2);
  4251. secondRect.setLeft(rect.x() + (rect.width() - 1) / 2);
  4252. secondRect.setWidth(rect.width() / 2);
  4253. this->renderTexture(shader, ShaderCompositer::ST_SCREEN, firstRect, texInfo, *surface, renderRect, color, modelViewLeft, true);
  4254. this->renderTexture(shader, ShaderCompositer::ST_SCREEN, secondRect, texInfo, *surface, renderRect, color, modelViewLeft, false);
  4255. break;
  4256. }
  4257. case AnyVODEnums::VRI_VIRTUAL_3D:
  4258. {
  4259. QRect firstRect = rect;
  4260. QRect secondRect = rect;
  4261. firstRect.setWidth(rect.width() / 2);
  4262. secondRect.setLeft(rect.x() + (rect.width() - 1) / 2);
  4263. secondRect.setWidth(rect.width() / 2);
  4264. this->renderTexture(shader, ShaderCompositer::ST_SCREEN, firstRect, texInfo, *surface, renderRect, color, modelViewLeft, true);
  4265. this->renderTexture(shader, ShaderCompositer::ST_SCREEN, secondRect, texInfo, *surface, renderRect, color, modelViewRight, false);
  4266. break;
  4267. }
  4268. default:
  4269. {
  4270. this->renderTexture(shader, ShaderCompositer::ST_SCREEN, rect, texInfo, *surface, renderRect, color, modelViewLeft, true);
  4271. break;
  4272. }
  4273. }
  4274. shader.endScreen(surface->format);
  4275. this->m_detail.videoFrameCount.fetchAndAddOrdered(1);
  4276. }
  4277. if (!shader.is360Degree())
  4278. this->drawFonts(shader, rect, vp, this->m_distortionFrameBuffer->isBound());
  4279. if (this->m_vrInputSource != AnyVODEnums::VRI_NONE || shader.is360Degree())
  4280. {
  4281. this->m_distortionFrameBuffer->release();
  4282. QRect renderRect = QRect(QPoint(0, 0), QSize(this->m_width, this->m_height));
  4283. GLfloat width = this->m_width / (GLfloat)this->m_distortionFrameBuffer->width();
  4284. GLfloat height = this->m_height / (GLfloat)this->m_distortionFrameBuffer->height();
  4285. bool enabledBlend = GL_PREFIX glIsEnabled(GL_BLEND);
  4286. QVector4D vColor(1.0f, 1.0f, 1.0f, 1.0f);
  4287. GL_PREFIX glDisable(GL_BLEND);
  4288. GL_PREFIX glBindTexture(GL_TEXTURE_2D, this->m_distortionFrameBuffer->texture());
  4289. QVector3D vertices[] =
  4290. {
  4291. QVector3D(0.0f, renderRect.bottom(), 0.0f),
  4292. QVector3D(renderRect.right(), renderRect.bottom(), 0.0f),
  4293. QVector3D(renderRect.left(), renderRect.top(), 0.0f),
  4294. QVector3D(renderRect.right(), renderRect.top(), 0.0f)
  4295. };
  4296. if (this->m_vrInputSource != AnyVODEnums::VRI_NONE)
  4297. {
  4298. QVector2D leftTexCoords[] =
  4299. {
  4300. QVector2D(0.0f, 0.0f),
  4301. QVector2D(width / 2.0f, 0.0f),
  4302. QVector2D(0.0f, height),
  4303. QVector2D(width / 2.0f, height)
  4304. };
  4305. QVector2D rightTexCoords[] =
  4306. {
  4307. QVector2D(0.0f + (width / 2.0f), 0.0f),
  4308. QVector2D(width, 0.0f),
  4309. QVector2D(0.0f + (width / 2.0f), height),
  4310. QVector2D(width, height)
  4311. };
  4312. shader.startSimple();
  4313. this->m_leftDistortionFrameBuffer->bind();
  4314. shader.setRenderData(ShaderCompositer::ST_SIMPLE, this->m_ortho, QMatrix4x4(), vertices, leftTexCoords, vColor, AV_PIX_FMT_NONE);
  4315. GL_PREFIX glClear(GL_COLOR_BUFFER_BIT);
  4316. GL_PREFIX glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
  4317. this->m_leftDistortionFrameBuffer->release();
  4318. this->m_rightDistortionFrameBuffer->bind();
  4319. shader.setRenderData(ShaderCompositer::ST_SIMPLE, this->m_ortho, QMatrix4x4(), vertices, rightTexCoords, vColor, AV_PIX_FMT_NONE);
  4320. GL_PREFIX glClear(GL_COLOR_BUFFER_BIT);
  4321. GL_PREFIX glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
  4322. this->m_rightDistortionFrameBuffer->release();
  4323. GL_PREFIX glBindTexture(GL_TEXTURE_2D, 0);
  4324. shader.endSimple();
  4325. if (enabledBlend)
  4326. GL_PREFIX glEnable(GL_BLEND);
  4327. width = this->m_width / (GLfloat)this->m_leftDistortionFrameBuffer->width();
  4328. height = this->m_height / (GLfloat)this->m_leftDistortionFrameBuffer->height();
  4329. enabledBlend = GL_PREFIX glIsEnabled(GL_BLEND);
  4330. ShaderCompositer::ShaderType shaderType;
  4331. if (this->m_useDistortion)
  4332. {
  4333. if (shader.is360Degree())
  4334. {
  4335. shaderType = ShaderCompositer::ST_PINCUSHION_DISTORTION;
  4336. shader.startPincushionDistortion(QVector2D(1.0f, 1.0f), this->m_distortionLensCenter,
  4337. this->m_pincushionDistortionCoefficients, 1.0f);
  4338. }
  4339. else
  4340. {
  4341. shaderType = ShaderCompositer::ST_BARREL_DISTORTION;
  4342. shader.startBarrelDistortion(QVector2D(1.0f, 1.0f), this->m_distortionLensCenter,
  4343. this->m_barrelDistortionCoefficients, 1.0f);
  4344. }
  4345. }
  4346. else
  4347. {
  4348. shaderType = ShaderCompositer::ST_SIMPLE;
  4349. shader.startSimple();
  4350. }
  4351. GL_PREFIX glDisable(GL_BLEND);
  4352. QVector3D leftVertices[] =
  4353. {
  4354. QVector3D(0.0f, renderRect.bottom(), 0.0f),
  4355. QVector3D(renderRect.right() / 2, renderRect.bottom(), 0.0f),
  4356. QVector3D(renderRect.left(), renderRect.top(), 0.0f),
  4357. QVector3D(renderRect.right() / 2, renderRect.top(), 0.0f)
  4358. };
  4359. QVector3D rightVertices[] =
  4360. {
  4361. QVector3D(renderRect.left() + (renderRect.right() / 2), renderRect.bottom(), 0.0f),
  4362. QVector3D(renderRect.right(), renderRect.bottom(), 0.0f),
  4363. QVector3D(renderRect.left() + (renderRect.right() / 2), renderRect.top(), 0.0f),
  4364. QVector3D(renderRect.right(), renderRect.top(), 0.0f)
  4365. };
  4366. QVector2D distortionTexCoords[] =
  4367. {
  4368. QVector2D(0.0f, 0.0f),
  4369. QVector2D(width, 0.0f),
  4370. QVector2D(0.0f, height),
  4371. QVector2D(width, height)
  4372. };
  4373. shader.setRenderData(shaderType, this->m_ortho, QMatrix4x4(), leftVertices, distortionTexCoords, vColor, AV_PIX_FMT_NONE);
  4374. GL_PREFIX glBindTexture(GL_TEXTURE_2D, this->m_leftDistortionFrameBuffer->texture());
  4375. GL_PREFIX glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
  4376. shader.setRenderData(shaderType, this->m_ortho, QMatrix4x4(), rightVertices, distortionTexCoords, vColor, AV_PIX_FMT_NONE);
  4377. GL_PREFIX glBindTexture(GL_TEXTURE_2D, this->m_rightDistortionFrameBuffer->texture());
  4378. GL_PREFIX glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
  4379. if (this->m_useDistortion)
  4380. {
  4381. if (shader.is360Degree())
  4382. shader.endPincushionDistortion();
  4383. else
  4384. shader.endBarrelDistortion();
  4385. }
  4386. else
  4387. {
  4388. shader.endSimple();
  4389. }
  4390. }
  4391. else if (shader.is360Degree())
  4392. {
  4393. QVector2D texCoords[] =
  4394. {
  4395. QVector2D(0.0f, 0.0f),
  4396. QVector2D(width, 0.0f),
  4397. QVector2D(0.0f, height),
  4398. QVector2D(width, height)
  4399. };
  4400. ShaderCompositer::ShaderType shaderType;
  4401. if (this->m_useDistortion)
  4402. {
  4403. shaderType = ShaderCompositer::ST_PINCUSHION_DISTORTION;
  4404. shader.startPincushionDistortion(QVector2D(1.0f, 1.0f), this->m_distortionLensCenter,
  4405. this->m_pincushionDistortionCoefficients, 1.0f);
  4406. }
  4407. else
  4408. {
  4409. shaderType = ShaderCompositer::ST_SIMPLE;
  4410. shader.startSimple();
  4411. }
  4412. shader.setRenderData(shaderType, this->m_ortho, QMatrix4x4(), vertices, texCoords, vColor, AV_PIX_FMT_NONE);
  4413. GL_PREFIX glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
  4414. if (this->m_useDistortion)
  4415. shader.endPincushionDistortion();
  4416. else
  4417. shader.endSimple();
  4418. }
  4419. GL_PREFIX glBindTexture(GL_TEXTURE_2D, 0);
  4420. if (enabledBlend)
  4421. GL_PREFIX glEnable(GL_BLEND);
  4422. }
  4423. if (shader.is360Degree())
  4424. this->drawFonts(shader, rect, vp, this->m_distortionFrameBuffer->isBound());
  4425. }
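// Video refresh step. The frame delay comes from the PTS difference and, when video is
// not the master clock, is corrected by diff = pts - (masterClock + incClock) with
// syncThreshold = max(delay * SYNC_THRESHOLD_MULTI, SYNC_THRESHOLD): the delay is zeroed
// and a frame-drop count requested when diff <= -syncThreshold, and doubled when
// diff >= syncThreshold. The resulting delay schedules the next refresh; the picture is
// displayed and kept as prevPicture.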
  4426. void MediaPresenter::updateVideoRefreshTimer(ShaderCompositer &shader)
  4427. {
  4428. MediaState *ms = this->m_state;
  4429. VideoFrames &frames = ms->videoFrames;
  4430. FrameTimer &timer = ms->frameTimer;
  4431. Pause &pause = ms->pause;
  4432. Seek &seek = ms->seek;
  4433. Video &video = ms->video;
  4434. const VideoPicture *vp = &frames.queue[frames.data.rIndex];
  4435. double pts = vp->pts;
  4436. double delay = this->calFrameDelay(pts);
  4437. int frameDrop = 0;
  4438. if (ms->syncType != SYNC_VIDEO_MASTER)
  4439. {
  4440. double incClock = 0.0;
  4441. if (this->m_hwDecoder.isOpened())
  4442. incClock += this->m_hwDecoder.getSurfaceQueueCount() * delay;
  4443. if (this->m_filterGraph.hasFilters())
  4444. incClock += this->m_filterGraph.getDelayCount() * delay;
  4445. double diff = pts - (this->getMasterClock() + incClock);
  4446. double syncThreshold = max(delay * SYNC_THRESHOLD_MULTI, SYNC_THRESHOLD);
  4447. if (fabs(diff) < NOSYNC_THRESHOLD)
  4448. {
  4449. if (diff <= -syncThreshold)
  4450. {
  4451. if (frameDrop <= 0)
  4452. {
  4453. if (timer.lowFrameCounter > FRAME_LOW_WARNING_THRESHOLD / 4)
  4454. frameDrop = (int)fabs(diff / delay);
  4455. else
  4456. frameDrop = 1;
  4457. }
  4458. delay = 0.0;
  4459. }
  4460. else if (diff >= syncThreshold)
  4461. {
  4462. delay *= 2.0;
  4463. }
  4464. }
  4465. }
  4466. else
  4467. {
  4468. if (video.tempo > 0.0)
  4469. delay /= 1.0 + video.tempo / 100.0;
  4470. else if (video.tempo < 0.0)
  4471. delay *= (150.0 - video.tempo) / 100.0;
  4472. }
  4473. timer.timer += delay * MICRO_SECOND;
  4474. int64_t driftTime = pause.driftTime + video.driftTime +
  4475. seek.videoDiscardDriftTime + seek.readDiscardDriftTime + ms->streamChangeDriftTime;
  4476. double actualDelay = (timer.timer - (this->getAbsoluteClock() - driftTime)) / MICRO_SECOND;
  4477. if (delay <= 0.0 && timer.lowFrameCounter <= FRAME_LOW_WARNING_THRESHOLD)
  4478. timer.lowFrameCounter++;
  4479. else
  4480. timer.lowFrameCounter = 0;
  4481. if (timer.lowFrameCounter > FRAME_LOW_WARNING_THRESHOLD && !this->m_showingOptionDesc)
4482. this->showOptionDesc(tr("프레임 저하가 일어나고 있습니다. 성능에 영향을 미치는 옵션 또는 수직 동기화를 꺼주세요.")); // "Frame drops are occurring. Please turn off options that affect performance, or disable vertical sync."
  4483. int refresh = (int)(actualDelay * 1000 + 0.5);
  4484. this->refreshSchedule(refresh);
  4485. this->displayVideo(shader, vp);
  4486. this->copyPicture(vp, &frames.prevPicture);
  4487. if (this->m_useFrameDrop)
  4488. video.frameDrop = frameDrop;
  4489. else
  4490. video.frameDrop = 0;
  4491. if (seek.requestPauseOnRender)
  4492. {
  4493. if (--seek.pauseOnRenderCount <= 0)
  4494. {
  4495. seek.requestPauseOnRender = false;
  4496. this->pause();
  4497. }
  4498. }
  4499. }
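// Render-tick entry point: recomputes subtitle sizes when scheduled, shows the album art
// or the previous frame while the picture queue is empty, redraws the current picture
// while paused, and otherwise presents the next queued picture and advances the ring
// buffer.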
  4500. bool MediaPresenter::update(ShaderCompositer &shader)
  4501. {
  4502. MediaState *ms = this->m_state;
  4503. if (!ms)
  4504. return false;
  4505. if (this->m_scheduleRecomputeSubtitleSize)
  4506. {
  4507. this->computeSubtitleSize();
  4508. this->m_scheduleRecomputeSubtitleSize = false;
  4509. }
  4510. Video &video = ms->video;
  4511. VideoFrames &frames = ms->videoFrames;
  4512. if (!video.stream.stream)
  4513. {
  4514. #if defined Q_OS_MOBILE
  4515. GL_PREFIX glClear(GL_COLOR_BUFFER_BIT);
  4516. #endif
  4517. this->refreshSchedule(NO_AUDIO_ALBUM_JACKET_DELAY);
  4518. #if defined Q_OS_MOBILE
  4519. VideoPicture &audioPicture = frames.audioPicture;
  4520. if (!audioPicture.surface)
  4521. audioPicture.surface = this->createSurface(this->m_width, this->m_height, this->m_format);
  4522. this->displayVideo(shader, &audioPicture);
  4523. return true;
  4524. #else
  4525. return false;
  4526. #endif
  4527. }
  4528. int size;
  4529. frames.data.lock.mutex.lock();
  4530. size = frames.data.size;
  4531. frames.data.lock.mutex.unlock();
  4532. if (size == 0)
  4533. {
  4534. VideoPicture &audioPicture = frames.audioPicture;
  4535. GL_PREFIX glClear(GL_COLOR_BUFFER_BIT);
  4536. if (!this->isRemoteFile() && this->isAudio() && this->isEnabledVideo() && !audioPicture.surface)
  4537. {
  4538. FrameExtractor ex;
  4539. FrameExtractor::FRAME_ITEM item;
  4540. ex.setPixFormat(DEFAULT_PIX_FORMAT);
  4541. if (ex.open(this->m_filePath) && ex.getFrame(0.0, true, &item))
  4542. {
  4543. VideoPicture vp;
  4544. vp.height = item.frame.height();
  4545. vp.width = item.frame.width();
  4546. vp.surface = this->createSurface(vp.width, vp.height, DEFAULT_PIX_FORMAT);
  4547. const uint8_t *buffers[4] = {item.buffer, };
  4548. const int lineSizes[4] = {av_image_get_linesize(DEFAULT_PIX_FORMAT, vp.width, 0), };
  4549. av_image_copy(vp.surface->pixels, vp.surface->lineSize,
  4550. buffers, lineSizes, DEFAULT_PIX_FORMAT, vp.width, vp.height);
  4551. this->resizePicture(&vp, &audioPicture);
  4552. this->deleteSurface(vp.surface);
  4553. }
  4554. if (!audioPicture.surface)
  4555. audioPicture.surface = this->createSurface(this->m_width, this->m_height, this->m_format);
  4556. }
  4557. if (!video.stream.queue.hasPacket() && this->isAudio())
  4558. {
  4559. this->refreshSchedule(DEFAULT_REFRESH_DELAY);
  4560. this->displayVideo(shader, &frames.audioPicture);
  4561. }
  4562. else
  4563. {
  4564. this->refreshSchedule(EMPTY_BUFFER_WAIT_DELAY);
  4565. this->displayVideo(shader, &frames.prevPicture);
  4566. }
  4567. return true;
  4568. }
  4569. GL_PREFIX glClear(GL_COLOR_BUFFER_BIT);
  4570. if (ms->pause.pause)
  4571. {
  4572. const VideoPicture *vp = &frames.queue[frames.data.rIndex];
  4573. this->refreshSchedule(DEFAULT_REFRESH_DELAY);
  4574. this->displayVideo(shader, vp);
  4575. return true;
  4576. }
  4577. this->updateVideoRefreshTimer(shader);
  4578. if (ms->seek.flushed && ms->seek.pauseSeeking)
  4579. {
  4580. int maxCount;
  4581. #if defined Q_OS_WIN
  4582. maxCount = 3;
  4583. #else
  4584. maxCount = 1;
  4585. #endif
  4586. if (ms->seek.discardCount++ >= maxCount)
  4587. {
  4588. ms->seek.flushed = false;
  4589. ms->seek.pauseSeeking = false;
  4590. ms->seek.discardCount = 0;
  4591. this->pause();
  4592. }
  4593. }
  4594. if (!ms->seek.firstFrameAfterFlush)
  4595. ms->seek.firstFrameAfterFlush = true;
  4596. if (++frames.data.rIndex >= VIDEO_PICTURE_QUEUE_SIZE)
  4597. frames.data.rIndex = 0;
  4598. frames.data.lock.mutex.lock();
  4599. if (frames.data.size-- == 0)
  4600. frames.data.size = 0;
  4601. frames.data.lock.cond.wakeOne();
  4602. frames.data.lock.mutex.unlock();
  4603. return true;
  4604. }
  4605. void MediaPresenter::allocPicture()
  4606. {
  4607. MediaState *ms = this->m_state;
  4608. VideoFrames &frames = ms->videoFrames;
  4609. VideoPicture *vp = &frames.queue[frames.data.wIndex];
  4610. if (vp->surface)
  4611. this->deleteSurface(vp->surface);
  4612. AVCodecContext *codec = ms->video.stream.ctx;
  4613. int width = 0;
  4614. int height = 0;
  4615. this->getFrameSize(&width, &height);
  4616. vp->surface = this->createSurface(width, height, this->m_format);
  4617. vp->height = height;
  4618. vp->width = width;
  4619. vp->orgHeight = codec->height;
  4620. vp->orgWidth = codec->width;
  4621. }
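// Converts a decoded frame into the next picture-queue slot: waits for free space, runs
// the optional filter graph, then either copies the planes untouched (GPU YUV path) or
// converts them to the presentation format with swscale; finally records output byte
// statistics, stamps pts/time/luminance and publishes the slot.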
  4622. bool MediaPresenter::convertPicture(AVFrame &inFrame, double pts, AVPixelFormat format, bool leftOrTop3D)
  4623. {
  4624. MediaState *ms = this->m_state;
  4625. VideoFrames &frames = ms->videoFrames;
  4626. frames.data.lock.mutex.lock();
  4627. while (frames.data.size >= VIDEO_PICTURE_QUEUE_SIZE && !ms->quit && !ms->video.threadQuit)
  4628. frames.data.lock.cond.wait(&frames.data.lock.mutex);
  4629. frames.data.lock.mutex.unlock();
  4630. if (ms->quit)
  4631. return false;
  4632. if (ms->video.threadQuit)
  4633. return false;
  4634. int destw = 0;
  4635. int desth = 0;
  4636. this->getFrameSize(&destw, &desth);
  4637. VideoPicture *vp = &frames.queue[frames.data.wIndex];
  4638. if (!vp->surface || vp->width != destw || vp->height != desth)
  4639. {
  4640. this->allocPicture();
  4641. if (ms->quit)
  4642. return false;
  4643. }
  4644. if (vp->surface)
  4645. {
  4646. AVCodecContext *codec = ms->video.stream.ctx;
  4647. int w = codec->width;
  4648. int h = codec->height;
  4649. AVFrame *frame;
  4650. bool hasFilters = this->m_filterGraph.hasFilters();
4651. AVFrame filtered;
  4652. if (hasFilters)
  4653. {
4654. if (!this->m_filterGraph.getFrame(w, h, format, inFrame, &filtered, &format))
  4655. return true;
4656. frame = &filtered;
  4657. }
  4658. else
  4659. {
  4660. frame = &inFrame;
  4661. }
  4662. AVPixelFormat realFormat;
  4663. int realLineSize[AV_NUM_DATA_POINTERS] = {0, };
  4664. int realHeight;
  4665. if (this->isUseGPUConvert(format))
  4666. {
  4667. AVFrame dst;
  4668. AVFrame src;
  4669. memcpy(dst.data, vp->surface->pixels, sizeof(vp->surface->pixels));
  4670. memcpy(dst.linesize, vp->surface->lineSize, sizeof(vp->surface->lineSize));
  4671. memcpy(src.data, frame->data, sizeof(frame->data));
  4672. memcpy(src.linesize, frame->linesize, sizeof(frame->linesize));
  4673. av_image_copy(dst.data, dst.linesize, (const uint8_t**)src.data, src.linesize, format, w, h);
  4674. realFormat = format;
  4675. realHeight = h;
  4676. memcpy(realLineSize, frame->linesize, sizeof(frame->linesize));
  4677. }
  4678. else
  4679. {
  4680. ms->imageYUV420PConverter = sws_getCachedContext(
  4681. ms->imageYUV420PConverter,
  4682. w, h, format,
  4683. destw, desth, this->m_format,
  4684. SWS_POINT, nullptr, nullptr, nullptr);
  4685. AVFrame pict;
  4686. memset(&pict, 0, sizeof(pict));
  4687. for (int i = 0; i < vp->surface->plane; i++)
  4688. {
  4689. pict.data[i] = (uint8_t*)vp->surface->pixels[i];
  4690. pict.linesize[i] = vp->surface->lineSize[i];
  4691. realLineSize[i] = pict.linesize[i];
  4692. }
  4693. if (ms->imageYUV420PConverter)
  4694. sws_scale(ms->imageYUV420PConverter, frame->data, frame->linesize, 0, h, pict.data, pict.linesize);
  4695. realFormat = this->m_format;
  4696. realHeight = desth;
  4697. }
  4698. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(realFormat);
  4699. if (desc)
  4700. {
  4701. int height2 = AV_CEIL_RSHIFT(realHeight, desc->log2_chroma_h);
  4702. this->m_detail.videoOutputByteCount.fetchAndAddOrdered(realLineSize[0] * realHeight);
  4703. for (int i = 1; i < vp->surface->plane; i++)
  4704. this->m_detail.videoOutputByteCount.fetchAndAddOrdered(realLineSize[i] * height2);
  4705. }
  4706. vp->pts = pts;
  4707. vp->time = (pts + this->m_subtitleSync) * 1000;
  4708. vp->lumAvg = this->getLuminanceAvg(frame->data[0], frame->linesize[0] * h, format);
  4709. vp->leftOrTop3D = leftOrTop3D;
  4710. if (++frames.data.wIndex >= VIDEO_PICTURE_QUEUE_SIZE)
  4711. frames.data.wIndex = 0;
  4712. frames.data.lock.mutex.lock();
  4713. frames.data.size++;
  4714. frames.data.lock.mutex.unlock();
  4715. if (hasFilters)
4716. av_freep(&filtered.data[0]);
  4717. }
  4718. return true;
  4719. }
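// Illustrative sketch (not part of AnyVOD, names are hypothetical): the software branch of
// convertPicture() above boils down to the usual libswscale pattern shown here.
//
//     // Reuse a cached context; it is only recreated when any parameter changes.
//     SwsContext *swsCtx = sws_getCachedContext(swsCtx,
//                                               srcW, srcH, srcFormat,
//                                               dstW, dstH, dstFormat,
//                                               SWS_POINT, nullptr, nullptr, nullptr);
//     if (swsCtx)
//     {
//         // Convert/scale the whole frame; dstData/dstLinesize describe the target surface.
//         sws_scale(swsCtx, srcFrame->data, srcFrame->linesize, 0, srcH, dstData, dstLinesize);
//     }
//
// SWS_POINT trades quality for speed, which fits the real-time decode path used here.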
  4720. double MediaPresenter::synchronizeVideo(AVFrame *srcFrame, double pts, const AVRational timeBase)
  4721. {
  4722. Video &video = this->m_state->video;
  4723. Seek &seek = this->m_state->seek;
  4724. QMutexLocker locker(&video.stream.clockLock);
  4725. if (pts == 0.0)
  4726. {
  4727. pts = video.stream.clock;
  4728. }
  4729. else
  4730. {
  4731. pts -= this->getAudioClockOffset();
  4732. video.stream.clock = pts;
  4733. }
  4734. #if defined Q_OS_ANDROID
  4735. if (!this->m_hwDecoder.isOpened())
  4736. {
  4737. #endif
  4738. double frameDelay = av_q2d(timeBase);
  4739. frameDelay = srcFrame->repeat_pict * (frameDelay * 0.5);
  4740. video.stream.clock += frameDelay;
  4741. #if defined Q_OS_ANDROID
  4742. }
  4743. #endif
  4744. if (this->m_state->syncType == SYNC_VIDEO_MASTER && seek.firstFrameAfterFlush)
  4745. seek.readable = true;
  4746. return pts;
  4747. }
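// Hedged summary of synchronizeVideo() above, written out in isolation (local names are
// illustrative, not additional AnyVOD code):
//
//     double clock = videoClock;                       // running predicted video clock
//     if (pts == 0.0)
//         pts = clock;                                 // no PTS: fall back to the prediction
//     else
//         clock = pts;                                 // PTS present: resynchronize the prediction
//     double frameDelay = av_q2d(timeBase);            // one frame duration, in seconds
//     clock += srcFrame->repeat_pict * (frameDelay * 0.5);  // each repeated field adds half a frame
//     videoClock = clock;
//
// The audio clock offset subtracted above keeps streams that do not start at zero aligned.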
  4748. int MediaPresenter::getBuffer(AVCodecContext *ctx, AVFrame *pic, int flag)
  4749. {
  4750. int ret;
  4751. MediaPresenter *parent = (MediaPresenter*)ctx->opaque;
  4752. if (parent->m_hwDecoder.isOpened())
  4753. ret = parent->m_hwDecoder.getBuffer(pic) ? 0 : -1;
  4754. else
  4755. ret = avcodec_default_get_buffer2(ctx, pic, flag);
  4756. return ret;
  4757. }
  4758. AVPixelFormat MediaPresenter::getFormat(struct AVCodecContext *ctx, const AVPixelFormat *fmt)
  4759. {
  4760. MediaPresenter *parent = (MediaPresenter*)ctx->opaque;
  4761. bool hwOpened = parent->m_hwDecoder.isOpened();
  4762. for (int i = 0; fmt[i] != AV_PIX_FMT_NONE; i++)
  4763. {
  4764. if (hwOpened && parent->m_hwDecoder.isDecodable(fmt[i]))
  4765. {
  4766. if (parent->m_hwDecoder.prepare(ctx))
  4767. return fmt[i];
  4768. }
  4769. }
  4770. if (hwOpened)
  4771. {
  4772. if (parent->m_disableHWDecoder.callback)
  4773. parent->m_disableHWDecoder.callback(parent->m_disableHWDecoder.userData);
  4774. }
  4775. return avcodec_default_get_format(ctx, fmt);
  4776. }
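// Illustrative sketch, not the project's API: AVCodecContext::get_format receives the decoder's
// candidate pixel formats in preference order and returns the one to use. A minimal
// hardware-first callback looks like this (isHWFormat() is a hypothetical helper):
//
//     static AVPixelFormat pickFormat(AVCodecContext *ctx, const AVPixelFormat *fmt)
//     {
//         for (int i = 0; fmt[i] != AV_PIX_FMT_NONE; i++)
//         {
//             if (isHWFormat(fmt[i]))    // e.g. AV_PIX_FMT_DXVA2_VLD, AV_PIX_FMT_VAAPI, ...
//                 return fmt[i];         // accept the first usable hardware format
//         }
//         return avcodec_default_get_format(ctx, fmt); // otherwise defer to FFmpeg
//     }
//
// Falling back to avcodec_default_get_format(), as the method above does, keeps software
// decoding working whenever the hardware path cannot be prepared.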
  4777. int MediaPresenter::decodeAudio(AVCodecContext *ctx, uint8_t **samples, int *frameSize, PacketQueue::Packet *pkt) const
  4778. {
  4779. int sampleCount;
  4780. return this->decodeAudioAndSampleCount(ctx, samples, frameSize, &sampleCount, pkt);
  4781. }
  4782. int MediaPresenter::decodeAudioAndSampleCount(AVCodecContext *ctx, uint8_t **samples, int *frameSize, int *sampleCount, PacketQueue::Packet *pkt) const
  4783. {
  4784. const AudioSpec &spec = this->m_state->audio.spec;
  4785. AVFrame *frame = av_frame_alloc();
  4786. int ret;
  4787. int gotFrame = 0;
  4788. int totalSize = *frameSize;
  4789. bool isDelayed = false;
  4790. ret = Utils::decodeFrame(ctx, &pkt->packet, frame, &gotFrame);
  4791. if (pkt->isNullPacket() && ctx->codec->capabilities & CODEC_CAP_DELAY)
  4792. isDelayed = true;
  4793. *frameSize = 0;
  4794. if ((ret >= 0 || isDelayed) && gotFrame)
  4795. {
  4796. if (ctx->channels == spec.channelCount)
  4797. {
  4798. int dataSize;
  4799. int outCount = totalSize / ctx->channels / av_get_bytes_per_sample(ctx->sample_fmt);
  4800. *sampleCount = swr_convert(this->m_state->audio.audioConverter,
  4801. samples,
  4802. outCount,
  4803. (const uint8_t**)frame->extended_data,
  4804. frame->nb_samples);
  4805. if (*sampleCount < 0)
  4806. dataSize = 0;
  4807. else
  4808. dataSize = av_samples_get_buffer_size(nullptr, ctx->channels, frame->nb_samples, ctx->sample_fmt, 1);
  4809. *frameSize = dataSize;
  4810. }
  4811. }
  4812. if (frame)
  4813. av_frame_free(&frame);
  4814. return ret;
  4815. }
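// Hedged sketch (not additional AnyVOD code): the resampling step above is the standard
// libswresample call. Given a configured SwrContext and a decoded AVFrame:
//
//     // outSamples: caller-provided output buffer(s); outCount: capacity in samples per channel
//     int converted = swr_convert(swrCtx,
//                                 outSamples, outCount,
//                                 (const uint8_t**)frame->extended_data, frame->nb_samples);
//     if (converted >= 0)
//     {
//         int bytes = av_samples_get_buffer_size(nullptr, channels,
//                                                frame->nb_samples, sampleFmt, 1);
//         // bytes is what the caller treats as the decoded frame size
//     }
//
// A negative return value from swr_convert() is treated as "no data" here.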
  4816. void MediaPresenter::flushPictureQueue()
  4817. {
  4818. MediaState *ms = this->m_state;
  4819. QMutexLocker locker(&ms->videoFrames.data.lock.mutex);
  4820. ms->videoFrames.data.rIndex = 0;
  4821. ms->videoFrames.data.wIndex = 0;
  4822. ms->videoFrames.data.size = 0;
  4823. }
  4824. void MediaPresenter::closeStreamComponent(unsigned int index, bool isAudio)
  4825. {
  4826. MediaState *ms = this->m_state;
  4827. AVFormatContext *format = isAudio ? ms->audioFormat : ms->format;
  4828. if (index >= format->nb_streams)
  4829. return;
  4830. AVCodecParameters *context = format->streams[index]->codecpar;
  4831. switch (context->codec_type)
  4832. {
  4833. case AVMEDIA_TYPE_AUDIO:
  4834. {
  4835. Audio &audio = ms->audio;
  4836. SPDIFEncoding &encoding = audio.spdifEncoding;
  4837. audio.stream.queue.unlock();
  4838. if (this->m_spdif.isOpened())
  4839. {
  4840. this->m_spdif.close();
  4841. }
  4842. else
  4843. {
  4844. BASS_Stop();
  4845. BASS_Free();
  4846. }
  4847. audio.stream.queue.end();
  4848. audio.stream.stream = nullptr;
  4849. audio.stream.index = -1;
  4850. audio.handle = 0;
  4851. audio.tempo = 0;
  4852. if (audio.audioConverter)
  4853. swr_free(&audio.audioConverter);
  4854. if (encoding.frame)
  4855. av_frame_free(&encoding.frame);
  4856. if (encoding.encoder)
  4857. {
  4858. avcodec_close(encoding.encoder);
  4859. encoding.encoder = nullptr;
  4860. }
  4861. if (encoding.fifo)
  4862. {
  4863. av_audio_fifo_free(encoding.fifo);
  4864. encoding.fifo = nullptr;
  4865. }
  4866. if (encoding.buffers)
  4867. {
  4868. av_freep(&encoding.buffers[0]);
  4869. av_freep(&encoding.buffers);
  4870. encoding.bufferSize = 0;
  4871. }
  4872. if (encoding.tmpBuffers)
  4873. {
  4874. av_freep(&encoding.tmpBuffers[0]);
  4875. av_freep(&encoding.tmpBuffers);
  4876. encoding.tmpBufferSize = 0;
  4877. }
  4878. if (audio.stream.ctx)
  4879. avcodec_free_context(&audio.stream.ctx);
  4880. break;
  4881. }
  4882. case AVMEDIA_TYPE_VIDEO:
  4883. {
  4884. VideoFrames &videoFrames = ms->videoFrames;
  4885. Video &video = ms->video;
  4886. video.threadQuit = true;
  4887. videoFrames.data.lock.mutex.lock();
  4888. videoFrames.data.lock.cond.wakeOne();
  4889. videoFrames.data.lock.mutex.unlock();
  4890. video.stream.queue.unlock();
  4891. if (this->m_videoThread.isRunning())
  4892. this->m_videoThread.wait();
  4893. video.stream.queue.end();
  4894. video.stream.stream = nullptr;
  4895. video.stream.index = -1;
  4896. if (video.assFrame)
  4897. {
  4898. this->deleteSurface(video.assFrame);
  4899. video.assFrame = nullptr;
  4900. }
  4901. this->releasePictures();
  4902. if (this->m_hwDecoder.isOpened())
  4903. this->m_hwDecoder.close();
  4904. if (video.stream.ctx)
  4905. avcodec_free_context(&video.stream.ctx);
  4906. break;
  4907. }
  4908. case AVMEDIA_TYPE_SUBTITLE:
  4909. {
  4910. Subtitle &subtitle = ms->subtitle;
  4911. subtitle.threadQuit = true;
  4912. subtitle.stream.queue.unlock();
  4913. if (this->m_subtitleThread.isRunning())
  4914. this->m_subtitleThread.wait();
  4915. subtitle.stream.queue.end();
  4916. subtitle.stream.stream = nullptr;
  4917. subtitle.stream.index = -1;
  4918. this->releaseSubtitles();
  4919. if (subtitle.stream.ctx)
  4920. avcodec_free_context(&subtitle.stream.ctx);
  4921. break;
  4922. }
  4923. default:
  4924. {
  4925. break;
  4926. }
  4927. }
  4928. }
  4929. Stream* MediaPresenter::getStream(int index)
  4930. {
  4931. Stream *stream = nullptr;
  4932. if (index == this->m_state->video.stream.index)
  4933. stream = &this->m_state->video.stream;
  4934. else if (index == this->m_state->audio.stream.index)
  4935. stream = &this->m_state->audio.stream;
  4936. else if (index == this->m_state->subtitle.stream.index)
  4937. stream = &this->m_state->subtitle.stream;
  4938. return stream;
  4939. }
  4940. AVHWAccel* MediaPresenter::existHWAccel(AVCodecID codecID) const
  4941. {
  4942. AVHWAccel *hwaccel = nullptr;
  4943. while ((hwaccel = av_hwaccel_next(hwaccel)))
  4944. {
  4945. if (hwaccel->id == codecID)
  4946. return hwaccel;
  4947. }
  4948. return nullptr;
  4949. }
  4950. AVHWAccel* MediaPresenter::findHWAccel(AVCodecID codecID) const
  4951. {
  4952. AVHWAccel *hwaccel = nullptr;
  4953. while ((hwaccel = av_hwaccel_next(hwaccel)))
  4954. {
  4955. if (hwaccel->id == codecID && this->m_hwDecoder.isDecodable(hwaccel->pix_fmt))
  4956. return hwaccel;
  4957. }
  4958. return nullptr;
  4959. }
  4960. const char* MediaPresenter::findProfileName(const AVProfile *profiles, int profile) const
  4961. {
  4962. if (profiles)
  4963. {
  4964. for (int i = 0; profiles[i].profile != FF_PROFILE_UNKNOWN; i++)
  4965. {
  4966. if (profiles[i].profile == profile)
  4967. return profiles[i].name;
  4968. }
  4969. }
  4970. return nullptr;
  4971. }
  4972. AVCodec* MediaPresenter::tryInternalHWDecoder(AVCodecContext *context, const QString &postFix) const
  4973. {
  4974. AVCodec *codec = av_codec_next(nullptr);
  4975. while (codec)
  4976. {
  4977. if (codec->id == context->codec_id && QString::fromLatin1(codec->name).contains("_" + postFix))
  4978. {
  4979. if (avcodec_open2(context, codec, nullptr) == 0)
  4980. return codec;
  4981. else
  4982. break;
  4983. }
  4984. codec = av_codec_next(codec);
  4985. }
  4986. return nullptr;
  4987. }
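// Illustrative alternative, not used by this file: when the exact decoder name is known,
// FFmpeg can look it up directly instead of scanning the whole codec list:
//
//     AVCodec *codec = avcodec_find_decoder_by_name("h264_mediacodec");  // example name
//     if (codec && avcodec_open2(context, codec, nullptr) == 0)
//     {
//         // hardware-backed decoder is ready
//     }
//
// The scan above is kept because it matches any codec ID carrying the given "_suffix".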
  4988. AVCodec* MediaPresenter::tryCrystalHDDecoder(AVCodecContext *context) const
  4989. {
  4990. return this->tryInternalHWDecoder(context, "crystalhd");
  4991. }
  4992. #if defined Q_OS_RASPBERRY_PI
  4993. AVCodec *MediaPresenter::tryMMALDecoder(AVCodecContext *context) const
  4994. {
  4995. return this->tryInternalHWDecoder(context, "mmal");
  4996. }
  4997. #elif defined Q_OS_ANDROID
  4998. AVCodec *MediaPresenter::tryMediaCodecDecoder(AVCodecContext *context) const
  4999. {
  5000. return this->tryInternalHWDecoder(context, "mediacodec");
  5001. }
  5002. #endif
  5003. void MediaPresenter::getSPDIFParams(const AVCodecContext *context, int *sampleRate, int *channelCount, AVSampleFormat *format)
  5004. {
  5005. this->m_spdif.getParams(context, sampleRate, channelCount, format);
  5006. if (this->m_userSPDIFSampleRate > 0)
  5007. *sampleRate = this->m_userSPDIFSampleRate;
  5008. }
  5009. bool MediaPresenter::initAudio(const AVCodecContext *context)
  5010. {
  5011. MediaState *ms = this->m_state;
  5012. Audio &audio = ms->audio;
  5013. int sampleRate;
  5014. int channelCount;
  5015. AVSampleFormat format;
  5016. if (this->m_spdif.isOpened())
  5017. {
  5018. this->getSPDIFParams(context, &sampleRate, &channelCount, &format);
  5019. if (context->codec_id == AV_CODEC_ID_DTS)
  5020. {
  5021. if (context->profile == FF_PROFILE_DTS_HD_MA || context->profile == FF_PROFILE_DTS_HD_HRA)
  5022. this->m_spdif.setHDRate(sampleRate * channelCount / 2);
  5023. }
  5024. this->m_spdif.setInterval(30);
  5025. audio.spec.latency = this->m_spdif.getLatency();
  5026. audio.spec.currentChannelCount = channelCount;
  5027. this->m_spdif.getDeviceName(this->m_spdif.getDevice(), &this->m_detail.audioSPDIFOutputDevice);
  5028. this->m_detail.audioOutputSampleRate = sampleRate;
  5029. this->m_detail.audioOutputChannels = channelCount;
  5030. this->m_detail.audioOutputBits = 16;
  5031. this->m_detail.audioOutputType = "S/PDIF";
  5032. }
  5033. else
  5034. {
  5035. sampleRate = context->sample_rate;
  5036. channelCount = context->channels;
  5037. format = context->sample_fmt;
  5038. if (!BASS_Init(this->m_audioDevice, sampleRate, BASS_DEVICE_LATENCY, nullptr, nullptr))
  5039. return false;
  5040. if (!BASS_SetConfig(BASS_CONFIG_UPDATEPERIOD, 30))
  5041. return false;
  5042. if (!BASS_SetConfig(BASS_CONFIG_BUFFER, 200))
  5043. return false;
  5044. #ifdef Q_OS_IOS
  5045. if (!BASS_SetConfig(BASS_CONFIG_IOS_MIXAUDIO, 0))
  5046. return false;
  5047. if (!BASS_SetConfig(BASS_CONFIG_IOS_NOCATEGORY, 1))
  5048. return false;
  5049. if (!BASS_SetConfigPtr(BASS_CONFIG_IOS_NOTIFY, (void*)this->m_iosNotify))
  5050. return false;
  5051. #endif
  5052. DWORD flag;
  5053. switch (format)
  5054. {
  5055. case AV_SAMPLE_FMT_U8:
  5056. case AV_SAMPLE_FMT_U8P:
  5057. flag = BASS_SAMPLE_8BITS;
  5058. break;
  5059. case AV_SAMPLE_FMT_S16:
  5060. case AV_SAMPLE_FMT_S16P:
  5061. flag = 0;
  5062. break;
  5063. case AV_SAMPLE_FMT_S32:
  5064. case AV_SAMPLE_FMT_FLT:
  5065. case AV_SAMPLE_FMT_DBL:
  5066. case AV_SAMPLE_FMT_S32P:
  5067. case AV_SAMPLE_FMT_FLTP:
  5068. case AV_SAMPLE_FMT_DBLP:
  5069. case AV_SAMPLE_FMT_S64:
  5070. case AV_SAMPLE_FMT_S64P:
  5071. flag = BASS_SAMPLE_FLOAT;
  5072. break;
  5073. default:
  5074. return false;
  5075. }
  5076. BASS_INFO info;
  5077. if (!BASS_GetInfo(&info))
  5078. return false;
  5079. audio.handle = BASS_Mixer_StreamCreate(sampleRate, info.speakers, flag);
  5080. if (audio.handle == 0)
  5081. return false;
  5082. flag |= BASS_STREAM_DECODE;
  5083. HSTREAM sourceStream = BASS_StreamCreate(sampleRate, channelCount, flag,
  5084. MediaPresenter::audioCallback, this);
  5085. if (sourceStream == 0)
  5086. return false;
  5087. BASS_FX_GetVersion();
  5088. HSTREAM tempo = BASS_FX_TempoCreate(sourceStream, BASS_STREAM_DECODE | BASS_FX_FREESOURCE);
  5089. if (tempo)
  5090. {
  5091. sourceStream = tempo;
  5092. audio.tempo = sourceStream;
  5093. }
  5094. if (!BASS_Mixer_StreamAddChannel(audio.handle, sourceStream, BASS_MIXER_DOWNMIX | BASS_STREAM_AUTOFREE))
  5095. return false;
  5096. BASS_CHANNELINFO cinfo;
  5097. if (!BASS_ChannelGetInfo(audio.handle, &cinfo))
  5098. return false;
  5099. audio.spec.latency = (info.latency * 2 + BASS_GetConfig(BASS_CONFIG_BUFFER) + BASS_GetConfig(BASS_CONFIG_UPDATEPERIOD)) / 1000.0;
  5100. audio.spec.currentChannelCount = info.speakers;
  5101. this->m_detail.audioInputSampleRate = sampleRate;
  5102. this->m_detail.audioOutputSampleRate = cinfo.freq;
  5103. this->m_detail.audioOutputChannels = cinfo.chans;
  5104. if (IS_BIT_SET(cinfo.flags, BASS_SAMPLE_8BITS))
  5105. {
  5106. this->m_detail.audioOutputBits = 8;
  5107. this->m_detail.audioOutputType = "U8";
  5108. }
  5109. else if (IS_BIT_SET(cinfo.flags, BASS_SAMPLE_FLOAT))
  5110. {
  5111. this->m_detail.audioOutputBits = 32;
  5112. this->m_detail.audioOutputType = "FLT";
  5113. }
  5114. else
  5115. {
  5116. this->m_detail.audioOutputBits = 16;
  5117. this->m_detail.audioOutputType = "S16";
  5118. }
  5119. }
  5120. audio.spec.bytesPerSec = channelCount * sampleRate * av_get_bytes_per_sample(format);
  5121. audio.spec.format = format;
  5122. this->m_detail.audioInputSampleRate = sampleRate;
  5123. return true;
  5124. }
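// Hedged summary of the PCM branch of initAudio() above, with error handling omitted
// (this is a reading aid, not additional code used by AnyVOD):
//
//     BASS_Init(device, sampleRate, BASS_DEVICE_LATENCY, nullptr, nullptr);
//     HSTREAM mixer  = BASS_Mixer_StreamCreate(sampleRate, speakers, flags);
//     HSTREAM source = BASS_StreamCreate(sampleRate, channels,
//                                        flags | BASS_STREAM_DECODE,
//                                        MediaPresenter::audioCallback, this);
//     HSTREAM tempo  = BASS_FX_TempoCreate(source, BASS_STREAM_DECODE | BASS_FX_FREESOURCE);
//     BASS_Mixer_StreamAddChannel(mixer, tempo ? tempo : source,
//                                 BASS_MIXER_DOWNMIX | BASS_STREAM_AUTOFREE);
//
// Chain: callback-driven decode source -> optional tempo filter -> mixer. The mixer handle is
// what gets played, while the source callback pulls samples from the FFmpeg side.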
  5125. bool MediaPresenter::openStreamComponent(unsigned int index, bool isAudio)
  5126. {
  5127. MediaState *ms = this->m_state;
  5128. AVFormatContext *format = isAudio ? ms->audioFormat : ms->format;
  5129. if (index >= format->nb_streams)
  5130. return false;
  5131. AVCodecContext *context = avcodec_alloc_context3(nullptr);
  5132. if (avcodec_parameters_to_context(context, format->streams[index]->codecpar) < 0)
  5133. {
  5134. avcodec_free_context(&context);
  5135. return false;
  5136. }
  5137. av_codec_set_pkt_timebase(context, format->streams[index]->time_base);
  5138. AVCodec *codec = nullptr;
  5139. AVCodecID codecID = context->codec_id;
  5140. bool internalHWAccel = false;
  5141. if (this->m_useHWDecoder && context->codec_type == AVMEDIA_TYPE_VIDEO)
  5142. {
  5143. codec = this->tryCrystalHDDecoder(context);
  5144. if (!codec)
  5145. {
  5146. #if defined Q_OS_RASPBERRY_PI
  5147. codec = this->tryMMALDecoder(context);
  5148. #elif defined Q_OS_ANDROID
  5149. codec = this->tryMediaCodecDecoder(context);
  5150. #endif
  5151. }
  5152. if (codec)
  5153. {
  5154. internalHWAccel = true;
  5155. }
  5156. else
  5157. {
  5158. if (avcodec_parameters_to_context(context, format->streams[index]->codecpar) < 0)
  5159. {
  5160. avcodec_free_context(&context);
  5161. return false;
  5162. }
  5163. av_codec_set_pkt_timebase(context, format->streams[index]->time_base);
  5164. codecID = context->codec_id;
  5165. }
  5166. }
  5167. if (!internalHWAccel)
  5168. {
  5169. codec = avcodec_find_decoder(codecID);
  5170. if (!codec)
  5171. return false;
  5172. if (context->codec_type == AVMEDIA_TYPE_VIDEO)
  5173. {
  5174. context->get_buffer2 = MediaPresenter::getBuffer;
  5175. context->get_format = MediaPresenter::getFormat;
  5176. context->thread_safe_callbacks = 1;
  5177. context->thread_type = FF_THREAD_SLICE | FF_THREAD_FRAME;
  5178. context->thread_count = 0;
  5179. context->opaque = this;
  5180. bool hwAccel = false;
  5181. #if defined Q_OS_ANDROID
  5182. hwAccel = true;
  5183. #else
  5184. hwAccel = this->existHWAccel(codecID) ? true : false;
  5185. #endif
  5186. if (this->m_useHWDecoder && hwAccel && this->m_hwDecoder.open(context))
  5187. {
  5188. context->thread_safe_callbacks = 0;
  5189. context->thread_type &= ~FF_THREAD_FRAME;
  5190. context->slice_flags |= SLICE_FLAG_ALLOW_FIELD;
  5191. context->strict_std_compliance = FF_COMPLIANCE_STRICT;
  5192. }
  5193. }
  5194. #if defined Q_OS_ANDROID
  5195. if (!(context->codec_type == AVMEDIA_TYPE_VIDEO && this->m_hwDecoder.isOpened()))
  5196. {
  5197. #endif
  5198. if (avcodec_open2(context, codec, nullptr) < 0)
  5199. return false;
  5200. #if defined Q_OS_ANDROID
  5201. }
  5202. #endif
  5203. }
  5204. const int bufSize = 128;
  5205. char buf[bufSize] = {0, };
  5206. QString codecName;
  5207. QString profile = this->findProfileName(codec->profiles, context->profile);
  5208. profile = profile.isEmpty() ? "" : " " + profile;
  5209. codecName = QString("%1 (%2%3)").arg(QString(codec->name).toUpper()).arg(codec->long_name).arg(profile);
  5210. switch (context->codec_type)
  5211. {
  5212. case AVMEDIA_TYPE_AUDIO:
  5213. {
  5214. Audio &audio = ms->audio;
  5215. SPDIFEncoding &encoding = audio.spdifEncoding;
  5216. AVCodecContext *audioCodec = context;
  5217. QStringList sampleFMT = QString(av_get_sample_fmt_string(buf, bufSize, context->sample_fmt)).split(" ", QString::SkipEmptyParts);
  5218. this->m_detail.audioCodec = codecName;
  5219. this->m_detail.audioCodecSimple = QString(codec->name).toUpper() + profile;
  5220. this->m_detail.audioInputType = QString(sampleFMT[0]).toUpper();
  5221. this->m_detail.audioInputBits = sampleFMT[1].toInt();
  5222. if (this->m_useSPDIF)
  5223. {
  5224. int sampleRate;
  5225. int channelCount;
  5226. AVSampleFormat format;
  5227. bool opened = false;
  5228. AVCodecContext *spdifCodec;
  5229. AVCodecID spdifCodecID;
  5230. if (this->isUsingSPDIFEncoding())
  5231. {
  5232. AVCodec *encodingCodec;
  5233. uint64_t channelLayout;
  5234. switch (this->m_SPIDFEncodingMethod)
  5235. {
  5236. case AnyVODEnums::SEM_AC3:
  5237. spdifCodecID = AV_CODEC_ID_AC3;
  5238. break;
  5239. case AnyVODEnums::SEM_DTS:
  5240. spdifCodecID = AV_CODEC_ID_DTS;
  5241. break;
  5242. default:
  5243. return false;
  5244. }
  5245. encodingCodec = avcodec_find_encoder(spdifCodecID);
  5246. encoding.encoder = avcodec_alloc_context3(encodingCodec);
  5247. switch (this->m_SPIDFEncodingMethod)
  5248. {
  5249. case AnyVODEnums::SEM_AC3:
  5250. encoding.encoder->bit_rate = 640000;
5251. channelLayout = av_get_default_channel_layout(6); // encoder->channels is only assigned (to 6) below, so query the 6-channel layout directly
  5252. break;
  5253. case AnyVODEnums::SEM_DTS:
  5254. encoding.encoder->bit_rate = 768000;
  5255. encoding.encoder->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
  5256. channelLayout = AV_CH_LAYOUT_5POINT1;
  5257. break;
  5258. default:
5259. channelLayout = av_get_default_channel_layout(6); // 6 channels, matching the configuration below
  5260. break;
  5261. }
  5262. encoding.encoder->sample_fmt = encodingCodec->sample_fmts[0];
  5263. encoding.encoder->sample_rate = 48000;
  5264. encoding.encoder->channels = 6;
  5265. encoding.encoder->channel_layout = channelLayout;
  5266. encoding.encoder->time_base = (AVRational){ 1, encoding.encoder->sample_rate };
  5267. if (avcodec_open2(encoding.encoder, encodingCodec, nullptr) < 0)
  5268. return false;
  5269. spdifCodec = encoding.encoder;
  5270. audioCodec = spdifCodec;
  5271. }
  5272. else
  5273. {
  5274. spdifCodec = context;
  5275. spdifCodecID = codecID;
  5276. }
  5277. this->getSPDIFParams(spdifCodec, &sampleRate, &channelCount, &format);
  5278. opened = this->m_spdif.open(spdifCodecID, MediaPresenter::audioSPDIFCallback, sampleRate, channelCount, 500, this);
  5279. if (this->m_useSPDIF != opened)
5280. this->showOptionDesc(tr("Failed to initialize S/PDIF output. Switching to PCM output."));
  5281. this->m_useSPDIF = opened;
  5282. }
  5283. if (!this->initAudio(audioCodec))
  5284. {
  5285. if (this->m_spdif.isOpened())
  5286. this->m_spdif.close();
  5287. else
  5288. BASS_Free();
  5289. return false;
  5290. }
  5291. audio.spec.channelCount = context->channels;
  5292. this->m_detail.audioInputChannels = audio.spec.channelCount;
  5293. this->volume(this->m_volume);
  5294. this->mute(this->m_isMute);
  5295. if (this->m_spdif.isOpened())
  5296. {
  5297. if (this->isUsingSPDIFEncoding())
  5298. {
  5299. uint64_t inputLayout = av_get_default_channel_layout(context->channels);
  5300. uint64_t outputLayout = audioCodec->channel_layout;
  5301. AVSampleFormat srcFormat = context->sample_fmt;
  5302. AVSampleFormat destFormat = audioCodec->sample_fmt;
  5303. int inputSampleRate = context->sample_rate;
  5304. int outputSampleRate = audioCodec->sample_rate;
  5305. int outputChannels = audioCodec->channels;
  5306. int frameSize = audioCodec->frame_size;
  5307. audio.audioConverter = swr_alloc_set_opts(nullptr, outputLayout, destFormat, outputSampleRate, inputLayout, srcFormat, inputSampleRate, 0, nullptr);
  5308. if (audio.audioConverter == nullptr)
  5309. return false;
  5310. if (swr_init(audio.audioConverter) < 0)
  5311. return false;
  5312. encoding.fifo = av_audio_fifo_alloc(destFormat, outputChannels, 1);
  5313. if (encoding.fifo == nullptr)
  5314. return false;
  5315. encoding.bufferSize = av_samples_alloc_array_and_samples(&encoding.buffers, nullptr, outputChannels, frameSize, destFormat, 1);
  5316. if (encoding.bufferSize < 0)
  5317. return false;
  5318. encoding.tmpBufferSize = av_samples_alloc_array_and_samples(&encoding.tmpBuffers, nullptr, outputChannels, frameSize * SPDIF_ENCODING_TMP_BUFFER_SIZE, destFormat, 1);
  5319. if (encoding.tmpBufferSize < 0)
  5320. return false;
  5321. encoding.frame = av_frame_alloc();
  5322. if (encoding.frame == nullptr)
  5323. return false;
  5324. encoding.frame->format = destFormat;
  5325. encoding.frame->nb_samples = frameSize;
  5326. encoding.frame->channel_layout = audioCodec->channel_layout;
  5327. avcodec_fill_audio_frame(encoding.frame, outputChannels, destFormat, encoding.buffers[0], encoding.bufferSize, 1);
  5328. }
  5329. }
  5330. else
  5331. {
  5332. uint64_t layout = av_get_default_channel_layout(context->channels);
  5333. AVSampleFormat srcFormat = context->sample_fmt;
  5334. AVSampleFormat destFormat = av_get_packed_sample_fmt(context->sample_fmt);
  5335. int sampleRate = context->sample_rate;
  5336. audio.audioConverter = swr_alloc_set_opts(nullptr, layout, destFormat, sampleRate, layout, srcFormat, sampleRate, 0, nullptr);
  5337. if (audio.audioConverter == nullptr)
  5338. return false;
  5339. if (swr_init(audio.audioConverter) < 0)
  5340. return false;
  5341. if (this->m_audioEffect.useNormalizer)
  5342. this->initNormalizer();
  5343. if (this->m_audioEffect.useEqualizer)
  5344. this->initEqualizer();
  5345. if (this->m_audioEffect.useLowerMusic)
  5346. this->initLowerMusic();
  5347. if (this->m_audioEffect.useLowerVoice)
  5348. this->initLowerVoice();
  5349. if (this->m_audioEffect.useHigherVoice)
  5350. this->initHigherVoice();
  5351. }
  5352. audio.stream.index = index;
  5353. audio.stream.stream = format->streams[index];
  5354. audio.stream.ctx = context;
  5355. audio.bufferSize = 0;
  5356. audio.bufferIndex = 0;
5357. audio.diffAvgCoef = exp(log(0.01) / AUDIO_DIFF_AVG_NB); // coefficient so the running average decays to 1% over AUDIO_DIFF_AVG_NB samples
  5358. audio.diffAvgCount = 0;
  5359. audio.diffThreshold = 2.0 * THRESHOLD_FACTOR / context->sample_rate;
  5360. memset(&audio.packet, 0, sizeof(audio.packet));
  5361. audio.stream.queue.init();
  5362. if (audio.stream.stream->start_time != AV_NOPTS_VALUE)
  5363. audio.stream.clockOffset = av_q2d(audio.stream.stream->time_base) * audio.stream.stream->start_time;
  5364. break;
  5365. }
  5366. case AVMEDIA_TYPE_VIDEO:
  5367. {
  5368. Video &video = ms->video;
  5369. FrameTimer &timer = ms->frameTimer;
  5370. AVPixelFormat pixFormat;
  5371. if (this->m_hwDecoder.isOpened())
  5372. {
  5373. AVHWAccel *hwaccel = this->findHWAccel(codecID);
  5374. QString desc;
  5375. QString decoder;
  5376. this->m_hwDecoder.getDecoderDesc(&desc);
  5377. if (hwaccel)
  5378. decoder = QString("%1 (%2)").arg(hwaccel->name).arg(desc);
  5379. else
  5380. decoder = QString("%1").arg(desc);
  5381. this->m_detail.videoHWDecoder = decoder;
  5382. pixFormat = this->m_hwDecoder.getFormat();
  5383. }
  5384. else
  5385. {
  5386. pixFormat = context->pix_fmt;
  5387. }
  5388. QStringList pixFMT = QString(av_get_pix_fmt_string(buf, bufSize, pixFormat)).split(" ", QString::SkipEmptyParts);
  5389. this->m_detail.videoCodec = codecName;
  5390. #if defined Q_OS_ANDROID || defined Q_OS_MAC
  5391. if (!this->m_hwDecoder.isOpened())
  5392. {
  5393. #endif
  5394. this->m_detail.videoInputType = pixFMT[0].toUpper();
  5395. this->m_detail.videoInputBits = pixFMT[2].toInt();
  5396. if (this->isUseGPUConvert(pixFormat))
  5397. this->m_format = pixFormat;
  5398. else
  5399. this->m_format = this->getCompatibleFormat(pixFormat);
  5400. #if defined Q_OS_ANDROID || defined Q_OS_MAC
  5401. }
  5402. #endif
  5403. this->m_detail.videoInputSize = QSize(context->width, context->height);
  5404. pixFMT = QString(av_get_pix_fmt_string(buf, bufSize, this->m_format)).split(" ", QString::SkipEmptyParts);
  5405. this->m_detail.videoOutputType = pixFMT[0].toUpper();
  5406. this->m_detail.videoOutputBits = pixFMT[2].toInt();
  5407. if (this->m_playData.totalFrame <= 0)
  5408. this->m_playData.totalFrame = (int)format->streams[index]->nb_frames;
  5409. video.stream.index = index;
  5410. video.stream.stream = format->streams[index];
  5411. video.stream.ctx = context;
  5412. timer.timer = this->getAbsoluteClock();
  5413. timer.lastDelay = 0.04;
  5414. video.stream.queue.init();
  5415. video.threadQuit = false;
  5416. this->m_videoThread.start();
  5417. video.assFrame = this->createSurface(context->width, context->height, AV_PIX_FMT_BGR32);
  5418. this->m_assParser.setFrameSize(context->width, context->height);
  5419. this->m_avParser.setFrameSize(QSize(context->width, context->height));
  5420. this->m_deinterlacer.setCodec(video.stream.ctx, pixFormat, video.stream.stream->time_base);
  5421. this->m_filterGraph.setCodec(video.stream.ctx, pixFormat, this->m_format, video.stream.stream->time_base);
  5422. video.pixFormat = pixFormat;
  5423. #ifndef Q_OS_MAC
  5424. int maxThreads = QThread::idealThreadCount() + 1;
  5425. if (maxThreads <= 0)
  5426. maxThreads = 1;
  5427. omp_set_num_threads(maxThreads);
  5428. #endif
  5429. break;
  5430. }
  5431. case AVMEDIA_TYPE_SUBTITLE:
  5432. {
  5433. Subtitle &subtitle = ms->subtitle;
  5434. if (!this->existSubtitle())
  5435. this->m_detail.subtitleCodec = QString(SUBTITLE_CODEC_FORMAT).arg(QString(codec->name).toUpper()).arg(codec->long_name);
  5436. subtitle.stream.index = index;
  5437. subtitle.stream.stream = format->streams[index];
  5438. subtitle.stream.ctx = context;
  5439. subtitle.stream.queue.init();
  5440. QString header;
  5441. if (context->subtitle_header_size)
  5442. header = QString::fromUtf8((char*)context->subtitle_header, context->subtitle_header_size);
  5443. this->m_assParser.setHeader(header);
  5444. subtitle.threadQuit = false;
  5445. this->m_subtitleThread.start();
  5446. break;
  5447. }
  5448. default:
  5449. {
  5450. break;
  5451. }
  5452. }
  5453. return true;
  5454. }
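// Minimal sketch, assuming only FFmpeg's public API (none of the AnyVOD specifics): the
// decoder setup performed above reduces to this sequence for a single stream:
//
//     AVStream *st = format->streams[index];
//     AVCodecContext *ctx = avcodec_alloc_context3(nullptr);
//     if (avcodec_parameters_to_context(ctx, st->codecpar) < 0)   // copy stream parameters
//         { avcodec_free_context(&ctx); /* fail */ }
//     AVCodec *dec = avcodec_find_decoder(ctx->codec_id);         // pick a decoder
//     if (!dec || avcodec_open2(ctx, dec, nullptr) < 0)           // open it
//         { avcodec_free_context(&ctx); /* fail */ }
//
// The extra branches above only layer hardware decoders, threading flags, and the S/PDIF
// passthrough/encoding paths on top of this core.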
  5455. double MediaPresenter::getAspectRatioByVR(float widthScale, int codecWidth, float heightScale, int codecHeight) const
  5456. {
  5457. if (this->m_vrInputSource == AnyVODEnums::VRI_NONE)
  5458. return ((double)codecWidth * widthScale) / ((double)codecHeight * heightScale);
  5459. else
  5460. return ((double)codecHeight * heightScale) / ((double)codecWidth * widthScale);
  5461. }
  5462. void MediaPresenter::computeFrameSize()
  5463. {
  5464. MediaState *ms = this->m_state;
  5465. if (ms)
  5466. {
  5467. FrameSize &frameSize = ms->frameSize;
  5468. AVCodecContext *codec = ms->video.stream.ctx;
  5469. int orgHeight = this->m_height;
  5470. int orgWidth = this->m_width;
  5471. int codecHeight = codec->height;
  5472. int codecWidth = codec->width;
  5473. int height = 0;
  5474. int width = 0;
  5475. double rot = 0.0;
  5476. switch (this->m_screenRotationDegree)
  5477. {
  5478. case AnyVODEnums::SRD_90:
  5479. rot = 90.0;
  5480. break;
  5481. case AnyVODEnums::SRD_270:
  5482. rot = 270.0;
  5483. break;
  5484. default:
  5485. break;
  5486. }
  5487. if (Utils::isPortrait(this->m_rotation + rot))
  5488. std::swap(orgWidth, orgHeight);
  5489. if (this->m_userRatio.use && this->m_userRatio.fullscreen)
  5490. {
  5491. height = orgHeight;
  5492. width = orgWidth;
  5493. }
  5494. else
  5495. {
  5496. double aspectRatio = 0.0;
  5497. float widthScale = 1.0f;
  5498. float heightScale = 1.0f;
  5499. if (this->m_use3DFull && this->m_3dMethod != AnyVODEnums::V3M_NONE)
  5500. {
  5501. if (this->isSideBySide())
  5502. {
  5503. widthScale = 0.5f;
  5504. heightScale = 1.0f;
  5505. }
  5506. else
  5507. {
  5508. widthScale = 1.0f;
  5509. heightScale = 0.5f;
  5510. }
  5511. }
  5512. if (this->m_vrInputSource != AnyVODEnums::VRI_NONE)
  5513. widthScale *= 2.0f;
  5514. if (this->m_userRatio.use)
  5515. aspectRatio = this->m_userRatio.getRatio();
  5516. else if (codec->sample_aspect_ratio.num == 0)
  5517. aspectRatio = 0.0;
  5518. else
  5519. aspectRatio = av_q2d(codec->sample_aspect_ratio) * this->getAspectRatioByVR(widthScale, codecWidth, heightScale, codecHeight);
  5520. if (aspectRatio <= 0.0)
  5521. aspectRatio = this->getAspectRatioByVR(widthScale, codecWidth, heightScale, codecHeight);
  5522. if (this->m_vrInputSource == AnyVODEnums::VRI_NONE)
  5523. {
  5524. height = orgHeight;
  5525. width = ((int)rint(height * aspectRatio)) & -3;
  5526. if (width > orgWidth)
  5527. {
  5528. width = orgWidth;
  5529. height = ((int)rint(width / aspectRatio)) & -3;
  5530. }
  5531. }
  5532. else
  5533. {
  5534. width = orgWidth;
  5535. height = ((int)rint(width * aspectRatio)) & -3;
  5536. if (height > orgHeight)
  5537. {
  5538. height = orgHeight;
  5539. width = ((int)rint(height / aspectRatio)) & -3;
  5540. }
  5541. }
  5542. }
  5543. frameSize.height = height;
  5544. frameSize.width = width;
  5545. this->m_detail.videoOutputSize = QSize(frameSize.width, frameSize.height);
  5546. #if defined Q_OS_MOBILE
  5547. if (!this->m_captureMode &&
  5548. this->m_vrInputSource == AnyVODEnums::VRI_NONE &&
  5549. this->m_useSubtitleCacheMode &&
  5550. this->m_font.willBeInvalidate(this->m_fontOutlineSize))
  5551. {
  5552. ShaderCompositer dummyShader;
  5553. VideoPicture dummyVP;
  5554. bool isPaused = ms->pause.pause;
  5555. if (!isPaused)
  5556. this->pause();
  5557. this->m_font.setCacheMode(true);
  5558. this->drawDetail(dummyShader, &dummyVP);
  5559. this->m_font.setCacheMode(false);
  5560. if (!isPaused)
  5561. this->resume();
  5562. }
  5563. #endif
  5564. if (!this->m_scheduleRecomputeSubtitleSize)
  5565. this->computeSubtitleSize();
  5566. }
  5567. }
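// Worked example of the fit logic above (numbers are hypothetical): with a 1920x1080 window
// and an aspect ratio of 4/3, the non-VR branch first tries to keep the full height:
//
//     height = 1080;
//     width  = (int)rint(height * aspectRatio);   // 1440, which fits inside 1920
//
// Only if that width exceeded the window would it pin the width instead and recompute the
// height, so the frame is always letterboxed/pillarboxed rather than cropped. The VR branch
// applies the same idea with the roles of width and height swapped.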
  5568. void MediaPresenter::computeSubtitleSize()
  5569. {
  5570. MediaState *ms = this->m_state;
  5571. if (ms)
  5572. {
  5573. int height = ms->frameSize.height;
  5574. int prevSize = this->m_subtitleFontSize;
  5575. this->m_subtitleFontSize = height * 0.064f * this->m_subtitleSize;
  5576. if (this->m_subtitleFontSize > 0 && this->m_subtitleFontSize != prevSize)
  5577. {
  5578. int fontRatio;
  5579. #if defined Q_OS_MOBILE
  5580. fontRatio = 24;
  5581. if (this->m_vrInputSource != AnyVODEnums::VRI_NONE)
  5582. this->m_subtitleFontSize *= 2;
  5583. #else
  5584. fontRatio = 12;
  5585. #endif
  5586. this->m_subtitleFont.getQFont().setFamily(this->m_fontFamily);
  5587. this->m_subtitleFont.getQFont().setPixelSize(this->m_subtitleFontSize);
  5588. this->m_subtitleOutlineSize = this->m_subtitleFontSize / fontRatio;
  5589. if (this->m_subtitleOutlineSize <= 0)
  5590. this->m_subtitleOutlineSize = 1 * this->m_devicePixelRatio;
  5591. else if (this->m_subtitleOutlineSize > this->m_subtitleMaxOutlineSize)
  5592. this->m_subtitleOutlineSize = this->m_subtitleMaxOutlineSize;
  5593. #if defined Q_OS_MOBILE
  5594. uint32_t duration = this->getDuration() * KILO / 3;
  5595. if (!this->m_captureMode &&
  5596. this->m_vrInputSource == AnyVODEnums::VRI_NONE &&
  5597. this->m_useSubtitleCacheMode && duration > 0 && this->existSubtitle() &&
  5598. this->m_subtitleFont.willBeInvalidate(this->m_subtitleOutlineSize))
  5599. {
  5600. ShaderCompositer dummyShader;
  5601. VideoPicture dummyVP;
  5602. const int gap = 500;
  5603. bool isPaused = ms->pause.pause;
  5604. if (!isPaused)
  5605. this->pause();
  5606. this->m_subtitleFont.setCacheMode(true);
  5607. for (dummyVP.time = 0; dummyVP.time < duration; dummyVP.time += gap)
  5608. this->drawSubtitles(dummyShader, &dummyVP);
  5609. this->m_subtitleFont.setCacheMode(false);
  5610. if (!isPaused)
  5611. this->resume();
  5612. }
  5613. #endif
  5614. }
  5615. }
  5616. }
  5617. const QVector<ChapterInfo>& MediaPresenter::getChapters() const
  5618. {
  5619. return this->m_chapters;
  5620. }
  5621. int MediaPresenter::decodingInterruptCallback(void *userData)
  5622. {
  5623. MediaState *state = (MediaState*)userData;
  5624. return state && state->quit;
  5625. }
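// Illustrative sketch mirroring how this callback is wired further below: AVFormatContext
// lets blocking I/O be aborted through AVIOInterruptCB; returning non-zero from the callback
// makes the pending open/read fail with AVERROR_EXIT.
//
//     AVFormatContext *fmt = avformat_alloc_context();
//     fmt->interrupt_callback.callback = MediaPresenter::decodingInterruptCallback;
//     fmt->interrupt_callback.opaque   = ms;   // checked as ms->quit inside the callback
//     avformat_open_input(&fmt, path, nullptr, nullptr);
//
// This is what lets a quit request break out of a stalled network read.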
  5626. void MediaPresenter::seek()
  5627. {
  5628. MediaState *ms = this->m_state;
  5629. Seek &seek = ms->seek;
  5630. Video &video = ms->video;
  5631. Audio &audio = ms->audio;
  5632. Subtitle &subtitle = ms->subtitle;
  5633. int index = -1;
  5634. int64_t seekTarget = seek.pos;
  5635. int64_t seekTargetAudio = seek.pos;
  5636. if (video.stream.index >= 0 && !this->isAudio())
  5637. index = video.stream.index;
  5638. else if (audio.stream.index >= 0)
  5639. index = audio.stream.index;
  5640. else if (subtitle.stream.index >= 0)
  5641. index = subtitle.stream.index;
  5642. if (index >= 0)
  5643. {
  5644. AVRational r = {1, AV_TIME_BASE};
  5645. seekTarget = av_rescale_q(seekTarget, r, ms->format->streams[index]->time_base);
  5646. if (ms->audioFormat)
  5647. seekTargetAudio = av_rescale_q(seekTargetAudio, r, ms->audioFormat->streams[audio.stream.index]->time_base);
  5648. }
  5649. int ret = av_seek_frame(ms->format, index, seekTarget, seek.flags);
  5650. if (ret >= 0)
  5651. {
  5652. if (ms->audioFormat)
  5653. av_seek_frame(ms->audioFormat, audio.stream.index, seekTargetAudio, seek.flags);
  5654. if (audio.stream.index >= 0)
  5655. {
  5656. audio.stream.queue.flush();
  5657. audio.stream.queue.putFlushPacket();
  5658. }
  5659. if (video.stream.index >= 0)
  5660. {
  5661. video.stream.queue.flush();
  5662. video.stream.queue.putFlushPacket();
  5663. }
  5664. if (subtitle.stream.index >= 0)
  5665. {
  5666. subtitle.stream.queue.flush();
  5667. subtitle.stream.queue.putFlushPacket();
  5668. }
  5669. }
  5670. seek.request = false;
  5671. if (this->m_audioSubtitleCallback.callback && this->existAudioSubtitle())
  5672. {
  5673. if (this->m_showSubtitle)
  5674. {
  5675. Lyrics tmp;
  5676. bool found = false;
  5677. if (this->m_lrcParser.isExist())
  5678. found = this->m_lrcParser.get(seek.time * 1000, &tmp);
  5679. if (!found)
  5680. this->m_audioSubtitleCallback.callback(this->m_audioSubtitleCallback.userData, QVector<Lyrics>());
  5681. }
  5682. else
  5683. {
  5684. this->m_audioSubtitleCallback.callback(this->m_audioSubtitleCallback.userData, QVector<Lyrics>());
  5685. }
  5686. }
  5687. if (seek.pauseSeeking && ret >= 0)
  5688. this->resume();
  5689. }
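// Hedged sketch of the seek recipe used above, with hypothetical names:
//
//     // 1. Convert the request from AV_TIME_BASE units into the stream's time base.
//     AVRational tb = {1, AV_TIME_BASE};
//     int64_t target = av_rescale_q(posInAvTimeBase, tb, fmt->streams[index]->time_base);
//
//     // 2. Ask the demuxer to jump there (AVSEEK_FLAG_BACKWARD lands on/before the target).
//     if (av_seek_frame(fmt, index, target, AVSEEK_FLAG_BACKWARD) >= 0)
//     {
//         // 3. Drop everything already queued and push a flush packet for the decoders.
//         flushQueuesAndPushFlushPacket();
//     }
//
// Flushing matters because packets demuxed before the seek would otherwise still be decoded
// and briefly shown after the jump.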
  5690. bool MediaPresenter::recover(double clock)
  5691. {
  5692. QMutexLocker locker(&this->m_controlLocker);
  5693. bool isPaused;
  5694. QString subtitlePath = this->m_subtitleFilePath;
  5695. QString audioPath = this->m_audioPath;
  5696. ExtraPlayData playData = this->m_playData;
  5697. Range repeatRange = this->m_repeatRange;
  5698. float subtitleSync = this->m_subtitleSync;
  5699. bool result = false;
  5700. bool isTmpSubtitle = false;
  5701. SUBTITLE_TYPE prevSubtitleType = this->m_subtitleType;
  5702. bool isRemoteFile = this->m_isRemoteFile;
  5703. if (this->m_state)
  5704. isPaused = this->m_state->pause.pause;
  5705. else
  5706. isPaused = false;
  5707. if (this->m_isRemoteProtocol)
  5708. {
  5709. if (this->m_subtitleType != ST_YOUTUBE && this->m_subtitleType != ST_AV && this->m_subtitleType != ST_ASS)
  5710. {
  5711. QString tmpFilePath = QDir::tempPath();
  5712. QString ext = QFileInfo(subtitlePath).suffix();
  5713. Utils::appendDirSeparator(&tmpFilePath);
  5714. tmpFilePath += "XXXXXX.";
  5715. tmpFilePath += ext;
  5716. QTemporaryFile tmpFile(tmpFilePath);
  5717. tmpFile.setAutoRemove(false);
  5718. if (tmpFile.open())
  5719. {
  5720. QString filePath = tmpFile.fileName();
  5721. tmpFile.close();
  5722. switch (this->m_subtitleType)
  5723. {
  5724. case ST_SAMI:
  5725. this->m_samiParser.save(filePath, 0.0);
  5726. break;
  5727. case ST_SRT:
  5728. this->m_srtParser.save(filePath, 0.0);
  5729. break;
  5730. case ST_LRC:
  5731. this->m_lrcParser.save(filePath, 0.0);
  5732. break;
  5733. default:
  5734. break;
  5735. }
  5736. isTmpSubtitle = true;
  5737. subtitlePath = filePath;
  5738. }
  5739. }
  5740. }
  5741. this->stop();
  5742. this->m_playData = playData;
  5743. this->m_repeatRange = repeatRange;
  5744. this->m_subtitleSync = subtitleSync;
  5745. this->m_audioPath = audioPath;
  5746. this->m_isRemoteFile = isRemoteFile;
  5747. if (this->play())
  5748. {
  5749. if (!subtitlePath.isEmpty())
  5750. this->openSubtitle(subtitlePath, prevSubtitleType == ST_YOUTUBE);
  5751. if (isPaused)
  5752. this->pause();
  5753. if (isTmpSubtitle)
  5754. {
  5755. QFile f(subtitlePath);
  5756. f.remove();
  5757. }
  5758. this->seekStream(clock, 0.0, AVSEEK_FLAG_ANY | AVSEEK_FLAG_BACKWARD);
  5759. result = true;
  5760. }
  5761. if (this->m_recoverCallback.callback)
  5762. this->m_recoverCallback.callback(this->m_recoverCallback.userData);
  5763. return result;
  5764. }
  5765. bool MediaPresenter::isUseAudioPath() const
  5766. {
  5767. return !this->m_audioPath.isEmpty();
  5768. }
  5769. void MediaPresenter::closeStream()
  5770. {
  5771. MediaState *ms = this->m_state;
  5772. if (ms)
  5773. {
  5774. if (this->m_readThread.isRunning())
  5775. this->m_readThread.wait();
  5776. if (ms->video.stream.index >= 0)
  5777. {
  5778. this->closeStreamComponent(ms->video.stream.index, false);
  5779. this->m_subtitleFontSize = 0;
  5780. }
  5781. if (ms->audio.stream.index >= 0)
  5782. this->closeStreamComponent(ms->audio.stream.index, this->isUseAudioPath());
  5783. if (ms->subtitle.stream.index >= 0)
  5784. this->closeStreamComponent(ms->subtitle.stream.index, false);
  5785. if (ms->imageYUV420PConverter)
  5786. sws_freeContext(ms->imageYUV420PConverter);
  5787. if (ms->imageRGBConverter)
  5788. sws_freeContext(ms->imageRGBConverter);
  5789. if (ms->format)
  5790. avformat_close_input(&ms->format);
  5791. if (ms->audioFormat)
  5792. avformat_close_input(&ms->audioFormat);
  5793. this->m_audioPath.clear();
  5794. this->m_assParser.deInit();
  5795. this->m_chapters.clear();
  5796. this->m_cueParser.close();
  5797. this->m_refreshThread.stop();
  5798. delete this->m_state;
  5799. this->m_state = nullptr;
  5800. }
  5801. this->m_subtitleSync = 0.0;
  5802. }
  5803. void MediaPresenter::useHWDecoder(bool enable)
  5804. {
  5805. bool prev = this->m_useHWDecoder;
  5806. this->changeUseHWDecoder(enable);
  5807. if (prev != enable)
  5808. {
  5809. if (this->isEnabledVideo())
  5810. this->recover(this->getMasterClock());
  5811. }
  5812. }
  5813. bool MediaPresenter::isUseHWDecoder() const
  5814. {
  5815. return this->m_useHWDecoder;
  5816. }
  5817. bool MediaPresenter::isOpenedHWDecoder() const
  5818. {
  5819. return this->m_hwDecoder.isOpened();
  5820. }
  5821. void MediaPresenter::changeUseHWDecoder(bool enable)
  5822. {
  5823. this->m_useHWDecoder = enable;
  5824. }
  5825. void MediaPresenter::useLowQualityMode(bool enable)
  5826. {
  5827. bool prev = this->m_useLowQualityMode;
  5828. this->m_useLowQualityMode = enable;
  5829. if (prev != enable)
  5830. {
  5831. if (this->isEnabledVideo())
  5832. this->recover(this->getMasterClock());
  5833. }
  5834. }
  5835. bool MediaPresenter::isUseLowQualityMode() const
  5836. {
  5837. return this->m_useLowQualityMode;
  5838. }
  5839. void MediaPresenter::useSPDIF(bool enable)
  5840. {
  5841. this->m_useSPDIF = enable;
  5842. this->resetAudioStream();
  5843. }
  5844. bool MediaPresenter::isUseSPDIF() const
  5845. {
  5846. return this->m_useSPDIF;
  5847. }
  5848. bool MediaPresenter::isOpenedSPDIF() const
  5849. {
  5850. return this->m_spdif.isOpened();
  5851. }
  5852. bool MediaPresenter::isSPDIFAvailable() const
  5853. {
  5854. return this->m_spdif.isAvailable();
  5855. }
  5856. void MediaPresenter::setSPDIFEncodingMethod(AnyVODEnums::SPDIFEncodingMethod method)
  5857. {
  5858. this->m_SPIDFEncodingMethod = method;
  5859. if (this->m_spdif.isOpened())
  5860. this->resetAudioStream();
  5861. }
  5862. AnyVODEnums::SPDIFEncodingMethod MediaPresenter::getSPDIFEncodingMethod() const
  5863. {
  5864. return this->m_SPIDFEncodingMethod;
  5865. }
  5866. bool MediaPresenter::isUsingSPDIFEncoding() const
  5867. {
  5868. return this->m_SPIDFEncodingMethod != AnyVODEnums::SEM_NONE;
  5869. }
  5870. void MediaPresenter::setScreenRotationDegree(AnyVODEnums::ScreenRotationDegree degree)
  5871. {
  5872. this->m_screenRotationDegree = degree;
  5873. this->computeFrameSize();
  5874. }
  5875. AnyVODEnums::ScreenRotationDegree MediaPresenter::getScreenRotationDegree() const
  5876. {
  5877. return this->m_screenRotationDegree;
  5878. }
  5879. void MediaPresenter::use3DFull(bool enable)
  5880. {
  5881. this->m_use3DFull = enable;
  5882. this->computeFrameSize();
  5883. }
  5884. bool MediaPresenter::isUse3DFull() const
  5885. {
  5886. return this->m_use3DFull;
  5887. }
  5888. void MediaPresenter::usePBO(bool enable)
  5889. {
  5890. this->m_usePBO = enable;
  5891. }
  5892. bool MediaPresenter::isUsePBO() const
  5893. {
  5894. return this->m_usePBO;
  5895. }
  5896. bool MediaPresenter::isUsingPBO(AVPixelFormat format) const
  5897. {
  5898. return this->isUsePBO() && this->isUsablePBO(format);
  5899. }
  5900. QSize MediaPresenter::getSize() const
  5901. {
  5902. return QSize(this->m_width, this->m_height);
  5903. }
  5904. #pragma GCC diagnostic push
  5905. #pragma GCC diagnostic ignored "-Waddress"
  5906. bool MediaPresenter::isUsablePBO(AVPixelFormat format) const
  5907. {
  5908. #if defined Q_OS_MOBILE
  5909. (void)format;
  5910. return false;
  5911. #else
  5912. return !this->isUseGPUConvert(format) && (GL_PREFIX glBindBufferARB != nullptr) && (GL_PREFIX glBufferDataARB != nullptr) && (GL_PREFIX glMapBufferARB != nullptr) && (GL_PREFIX glUnmapBufferARB != nullptr);
  5913. #endif
  5914. }
  5915. #pragma GCC diagnostic pop
  5916. void MediaPresenter::setUserSPDIFSampleRate(int sampleRate)
  5917. {
  5918. this->m_userSPDIFSampleRate = sampleRate;
  5919. if (this->m_useSPDIF)
  5920. this->useSPDIF(this->m_useSPDIF);
  5921. }
  5922. int MediaPresenter::getUserSPDIFSampleRate() const
  5923. {
  5924. return this->m_userSPDIFSampleRate;
  5925. }
  5926. void MediaPresenter::releaseSubtitles()
  5927. {
  5928. MediaState *ms = this->m_state;
  5929. SubtitleFrames &frames = ms->subtitleFrames;
  5930. frames.lock.lock();
  5931. for (int i = 0; i < frames.items.count(); i++)
  5932. avsubtitle_free(&frames.items[i].subtitle);
  5933. frames.items.clear();
  5934. frames.lock.unlock();
  5935. }
  5936. void MediaPresenter::releasePictures()
  5937. {
  5938. MediaState *ms = this->m_state;
  5939. if (!ms)
  5940. return;
  5941. VideoFrames &frames = ms->videoFrames;
  5942. for (int i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++)
  5943. {
  5944. VideoPicture *vp = &frames.queue[i];
  5945. if (vp && vp->surface)
  5946. {
  5947. this->deleteSurface(vp->surface);
  5948. *vp = VideoPicture();
  5949. }
  5950. }
  5951. if (frames.audioPicture.surface)
  5952. {
  5953. this->deleteSurface(frames.audioPicture.surface);
  5954. frames.audioPicture = VideoPicture();
  5955. }
  5956. if (frames.prevPicture.surface)
  5957. {
  5958. this->deleteSurface(frames.prevPicture.surface);
  5959. frames.prevPicture = VideoPicture();
  5960. }
  5961. }
  5962. SyncType MediaPresenter::getRecommandSyncType() const
  5963. {
  5964. if (this->m_state->audio.stream.index >= 0)
  5965. return SYNC_AUDIO_MASTER;
  5966. else if (this->m_state->video.stream.index >= 0)
  5967. return SYNC_VIDEO_MASTER;
  5968. else
  5969. return SYNC_EXTERNAL_MASTER;
  5970. }
  5971. bool MediaPresenter::openAudioStream(int *ret)
  5972. {
  5973. AVFormatContext *format = nullptr;
  5974. MediaState *ms = this->m_state;
  5975. QString filePath = this->m_audioPath;
  5976. int audioIndex = -1;
  5977. format = avformat_alloc_context();
  5978. if (!format)
  5979. return false;
  5980. format->interrupt_callback.callback = MediaPresenter::decodingInterruptCallback;
  5981. format->interrupt_callback.opaque = ms;
  5982. if (avformat_open_input(&format, Utils::convertPathToFileSystemRepresentation(filePath), nullptr, nullptr) != 0)
  5983. {
  5984. avformat_close_input(&format);
  5985. return false;
  5986. }
  5987. ms->audioFormat = format;
  5988. if (avformat_find_stream_info(format, nullptr) < 0)
  5989. return false;
  5990. this->m_audioStreamInfo.clear();
  5991. for (unsigned int i = 0; i < format->nb_streams; i++)
  5992. {
  5993. AVCodecParameters *context = format->streams[i]->codecpar;
  5994. AVDictionary *meta = format->streams[i]->metadata;
  5995. AVDictionaryEntry *entry;
  5996. QString desc;
  5997. QString lang;
  5998. entry = av_dict_get(meta, "language", nullptr, AV_DICT_IGNORE_SUFFIX);
  5999. if (entry)
  6000. lang = QString::fromLocal8Bit(entry->value).toUpper();
  6001. AVCodec *codec = avcodec_find_decoder(context->codec_id);
  6002. if (codec)
  6003. desc = QString("%1 %2").arg(codec->long_name).arg(this->findProfileName(codec->profiles, context->profile));
  6004. if (!lang.isEmpty())
  6005. desc = lang + ", " + desc;
  6006. switch (context->codec_type)
  6007. {
  6008. case AVMEDIA_TYPE_AUDIO:
  6009. {
  6010. AudioStreamInfo info;
  6011. info.index = i;
  6012. info.name = QString("%1, %2 Channels").arg(desc).arg(context->channels);
  6013. this->m_audioStreamInfo.append(info);
  6014. break;
  6015. }
  6016. default:
  6017. {
  6018. break;
  6019. }
  6020. }
  6021. }
  6022. if (this->m_audioStreamInfo.count() > 0)
  6023. {
  6024. if (this->m_lastAudioStream == -1 || this->m_lastAudioStream >= (int)format->nb_streams ||
  6025. format->streams[this->m_lastAudioStream]->codecpar->codec_type != AVMEDIA_TYPE_AUDIO)
  6026. {
  6027. audioIndex = this->m_audioStreamInfo[0].index;
  6028. this->m_lastAudioStream = audioIndex;
  6029. }
  6030. else
  6031. {
  6032. audioIndex = this->m_lastAudioStream;
  6033. }
  6034. }
  6035. *ret = audioIndex;
  6036. return true;
  6037. }
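// Illustrative alternative, not what this file does: FFmpeg can also choose a default audio
// stream directly instead of enumerating codec parameters by hand:
//
//     int idx = av_find_best_stream(format, AVMEDIA_TYPE_AUDIO,
//                                   -1 /* wanted */, -1 /* related */,
//                                   nullptr /* decoder out */, 0);
//     if (idx >= 0)
//     {
//         // idx is FFmpeg's pick for the "best" audio stream
//     }
//
// The manual loop above is kept because it also builds the user-visible stream list
// (language + codec description) and honours the previously selected stream index.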
  6038. bool MediaPresenter::openStream()
  6039. {
  6040. MediaState *ms = new MediaState;
  6041. int videoIndex = -1;
  6042. int audioIndex = -1;
  6043. int subtitleIndex = -1;
  6044. AVFormatContext *format = nullptr;
  6045. QString filePath;
  6046. QFileInfo fileInfo(this->m_filePath);
  6047. this->m_state = ms;
  6048. ms->video.stream.index = -1;
  6049. ms->audio.stream.index = -1;
  6050. ms->subtitle.stream.index = -1;
  6051. this->m_assParser.init();
  6052. if (fileInfo.suffix().toLower() == "cue" && this->m_cueParser.open(this->m_filePath))
  6053. {
  6054. this->m_cueParser.getFilePath(&filePath);
  6055. this->m_cueParser.getChapters(&this->m_chapters);
  6056. filePath = fileInfo.absolutePath() + QDir::separator() + filePath;
  6057. }
  6058. else
  6059. {
  6060. filePath = this->m_filePath;
  6061. }
  6062. filePath = filePath.replace("mms://", "mmst://", Qt::CaseInsensitive);
  6063. AVInputFormat *inputFormat = nullptr;
  6064. #if !defined Q_OS_ANDROID && !defined Q_OS_IOS
  6065. if (Utils::determinDevice(filePath))
  6066. {
  6067. QString format = Utils::getDeviceType(filePath);
  6068. inputFormat = av_find_input_format(format.toUtf8());
  6069. if (inputFormat)
  6070. filePath = Utils::getDevicePath(filePath);
  6071. }
  6072. #endif
  6073. format = avformat_alloc_context();
  6074. if (!format)
  6075. return false;
  6076. format->interrupt_callback.callback = MediaPresenter::decodingInterruptCallback;
  6077. format->interrupt_callback.opaque = ms;
  6078. if (avformat_open_input(&format, Utils::convertPathToFileSystemRepresentation(filePath), inputFormat, nullptr) != 0)
  6079. {
  6080. avformat_close_input(&format);
  6081. return false;
  6082. }
  6083. ms->format = format;
  6084. if (avformat_find_stream_info(format, nullptr) < 0)
  6085. return false;
  6086. AVInputFormat *iformat = format->iformat;
  6087. if (iformat)
  6088. this->m_detail.fileFormat = QString("%1 (%2)").arg(QString(iformat->name).toUpper(), iformat->long_name);
  6089. for (unsigned int i = 0; i < format->nb_chapters; i++)
  6090. {
  6091. AVChapter *chapter = format->chapters[i];
  6092. double base = av_q2d(chapter->time_base);
  6093. ChapterInfo info;
  6094. AVDictionaryEntry *entry = av_dict_get(chapter->metadata, "title", nullptr, AV_DICT_IGNORE_SUFFIX);
  6095. info.start = base * chapter->start;
  6096. info.end = base * chapter->end;
  6097. if (entry)
  6098. info.desc = QString::fromUtf8(entry->value);
  6099. this->m_chapters.push_back(info);
  6100. }
  6101. int subtitleNum = 1;
  6102. for (unsigned int i = 0; i < format->nb_streams; i++)
  6103. {
  6104. AVCodecParameters *context = format->streams[i]->codecpar;
  6105. AVDictionary *meta = format->streams[i]->metadata;
  6106. AVDictionaryEntry *entry;
  6107. QString desc;
  6108. QString lang;
  6109. entry = av_dict_get(meta, "language", nullptr, AV_DICT_IGNORE_SUFFIX);
  6110. if (entry)
  6111. lang = QString::fromLocal8Bit(entry->value).toUpper();
  6112. AVCodec *codec = avcodec_find_decoder(context->codec_id);
  6113. if (codec)
  6114. desc = QString("%1 %2").arg(codec->long_name).arg(this->findProfileName(codec->profiles, context->profile));
  6115. if (!lang.isEmpty())
  6116. desc = lang + ", " + desc;
  6117. switch (context->codec_type)
  6118. {
  6119. case AVMEDIA_TYPE_VIDEO:
  6120. {
  6121. if (videoIndex < 0)
  6122. videoIndex = i;
  6123. if (this->isAudio() && !this->m_showAlbumJacket)
  6124. videoIndex = -1;
  6125. if (Utils::zeroDouble(this->m_rotation) <= 0.0)
  6126. {
  6127. QString rotation;
  6128. entry = av_dict_get(meta, "rotate", nullptr, AV_DICT_MATCH_CASE);
  6129. if (entry)
  6130. rotation = QString::fromLocal8Bit(entry->value);
  6131. this->m_rotation = rotation.toDouble();
  6132. }
  6133. break;
  6134. }
  6135. case AVMEDIA_TYPE_AUDIO:
  6136. {
  6137. AudioStreamInfo info;
  6138. info.index = i;
  6139. info.name = QString("%1, %2 Channels").arg(desc).arg(context->channels);
  6140. this->m_audioStreamInfo.append(info);
  6141. break;
  6142. }
  6143. case AVMEDIA_TYPE_SUBTITLE:
  6144. {
  6145. SubtitleStreamInfo info;
  6146. info.index = i;
  6147. info.name = QString("%1 (%2)").arg(desc).arg(subtitleNum);
  6148. subtitleNum++;
  6149. this->m_subtitleStreamInfo.append(info);
  6150. break;
  6151. }
  6152. case AVMEDIA_TYPE_ATTACHMENT:
  6153. {
  6154. switch (context->codec_id)
  6155. {
  6156. case AV_CODEC_ID_OTF:
  6157. case AV_CODEC_ID_TTF:
  6158. {
  6159. AVDictionaryEntry *fileName = av_dict_get(meta, "filename", nullptr, 0);
  6160. if (fileName && fileName->value)
  6161. this->m_assParser.addFont(fileName->value, context->extradata, context->extradata_size);
  6162. break;
  6163. }
  6164. default:
  6165. {
  6166. break;
  6167. }
  6168. }
  6169. break;
  6170. }
  6171. default:
  6172. {
  6173. break;
  6174. }
  6175. }
  6176. }
  6177. if (this->m_audioStreamInfo.count() > 0)
  6178. {
  6179. if (this->m_lastAudioStream == -1 || this->m_lastAudioStream >= (int)format->nb_streams ||
  6180. format->streams[this->m_lastAudioStream]->codecpar->codec_type != AVMEDIA_TYPE_AUDIO)
  6181. {
  6182. audioIndex = this->m_audioStreamInfo[0].index;
  6183. this->m_lastAudioStream = audioIndex;
  6184. }
  6185. else
  6186. {
  6187. audioIndex = this->m_lastAudioStream;
  6188. }
  6189. }
  6190. if (this->m_subtitleStreamInfo.count() > 0)
  6191. {
  6192. if (this->m_lastSubtitleStream == -1 || this->m_lastSubtitleStream >= (int)format->nb_streams ||
  6193. format->streams[this->m_lastSubtitleStream]->codecpar->codec_type != AVMEDIA_TYPE_SUBTITLE)
  6194. {
  6195. subtitleIndex = this->m_subtitleStreamInfo[0].index;
  6196. this->m_lastSubtitleStream = subtitleIndex;
  6197. }
  6198. else
  6199. {
  6200. subtitleIndex = this->m_lastSubtitleStream;
  6201. }
  6202. }
  6203. if (this->isUseAudioPath())
  6204. {
  6205. int retry = 3;
  6206. bool success = false;
6207. while (retry-- > 0)
  6208. {
  6209. if (this->openAudioStream(&audioIndex))
  6210. {
  6211. success = true;
  6212. break;
  6213. }
  6214. }
  6215. if (!success)
  6216. return false;
  6217. }
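    // Apply the configured subtitle font, then open the selected video/audio/subtitle stream
    // components; a failed audio component is fatal only for audio-only media.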
    this->m_font.getQFont().setFamily(this->m_fontFamily);
    this->m_font.getQFont().setPixelSize(this->m_fontSize);

    if (this->isRemoteFile() && this->isAudio())
        videoIndex = -1;

    if (videoIndex >= 0)
    {
        if (this->openStreamComponent(videoIndex, false))
            this->computeFrameSize();
        else
            return false;
    }

    if (audioIndex >= 0)
    {
        bool success = this->openStreamComponent(audioIndex, this->isUseAudioPath());

        if (!success)
        {
            audioIndex = -1;

            if (this->isAudio())
                return false;
        }
    }

    if (ms->video.stream.index < 0 && ms->audio.stream.index < 0)
        return false;

    if (subtitleIndex >= 0)
        this->openStreamComponent(subtitleIndex, false);

    if (this->m_assParser.isExist() && !this->m_subtitleFilePath.isEmpty())
    {
        QFileInfo f(this->m_subtitleFilePath);

        if (f.exists())
            this->m_assParser.open(this->m_subtitleFilePath);
    }

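    // Initialize the clocks and sync type, derive the duration and total frame count from the
    // container, then start audio playback and the read/refresh threads.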
    this->m_assParser.setDefaultFont(this->m_assFontFamily);

    ms->externalClock.base = this->getAbsoluteClock();
    ms->syncType = this->getRecommandSyncType();

    bool calDuration = true;

    if (this->isRemoteProtocol())
    {
        char path[MAX_FILEPATH_CHAR_SIZE];
        QString pathPart;

        av_url_split(nullptr, 0, nullptr, 0, nullptr, 0, nullptr, path, sizeof(path), this->m_realFilePath.toUtf8().constData());
        pathPart = QString::fromUtf8(path);

        calDuration = !QFileInfo(pathPart).fileName().isEmpty();
    }

    if (calDuration && this->m_playData.duration == 0.0)
        this->m_playData.duration = format->duration / (double)AV_TIME_BASE;

    if (this->m_playData.duration < 0.0)
        this->m_playData.duration = 0.0;

    if (this->m_playData.userData.isEmpty())
        this->m_detail.fileName = QFileInfo(this->m_realFilePath).fileName();
    else
        this->m_detail.fileName = this->m_title;

    this->m_detail.totalTime = this->getDuration();

    if (this->m_playData.totalFrame <= 0 && videoIndex >= 0)
    {
        double total = av_q2d(format->streams[videoIndex]->avg_frame_rate);

        this->m_playData.totalFrame = this->getDuration() * total;
    }

    this->m_detail.videoTotalFrame = this->m_playData.totalFrame;

    if (videoIndex >= 0)
        this->computeSubtitleSize();

    if (audioIndex >= 0)
    {
        if (this->m_spdif.isOpened())
        {
            if (!this->m_spdif.play())
                return false;
        }
        else
        {
            if (!BASS_ChannelPlay(ms->audio.handle, true))
                return false;
        }
    }

    this->startReadThread();
    this->refreshSchedule(FIRST_REFRESH_DELAY);
    this->m_refreshThread.start();

    return true;
}

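// Clears the read-thread quit flag and starts the packet reading thread.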
void MediaPresenter::startReadThread()
{
    this->m_state->readThreadQuit = false;
    this->m_readThread.start();
}

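// Requests an asynchronous seek: converts the target position into an AV_TIME_BASE timestamp,
// adjusts the seek flags for direction and pause state, and, for AVSEEK_FLAG_ANY seeks, marks
// every open stream so that packets before the target are discarded.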
void MediaPresenter::seekStream(double pos, double dir, int flag)
{
    MediaState *ms = this->m_state;
    Seek &seek = ms->seek;

    if (seek.discard || seek.request || (!seek.readable && !ms->pause.pause))
        return;

    seek.flags = dir < 0.0 ? AVSEEK_FLAG_BACKWARD : 0;
    seek.flags |= flag;

    double dur = this->getDuration();

    if (pos > dur)
    {
        pos = dur;
        seek.flags |= AVSEEK_FLAG_BACKWARD;
    }

    if (IS_BIT_SET(seek.flags, AVSEEK_FLAG_BACKWARD))
        ms->willBeEnd = false;

    seek.time = pos + this->getAudioClockOffset();
    seek.pos = (int64_t)(seek.time * AV_TIME_BASE);

    if (ms->pause.pause)
    {
        if (this->isAudio())
        {
            seek.pauseSeeking = false;
            seek.inc += dir;
        }
        else
        {
            seek.pauseSeeking = true;
            seek.flushed = false;
        }
    }

    bool discard = IS_BIT_SET(seek.flags, AVSEEK_FLAG_ANY);

    if (discard)
    {
        if (ms->video.stream.index != -1)
        {
            if (!this->isAudio())
                ms->video.stream.discard = true;

            ms->video.stream.discardCount = 0;
        }

        if (ms->audio.stream.index != -1)
        {
            ms->audio.stream.discard = true;
            ms->audio.stream.discardCount = 0;

            if (this->m_spdif.isOpened())
                this->m_spdif.pause();
            else
                BASS_Pause();
        }

        if (ms->subtitle.stream.index != -1)
        {
            ms->subtitle.stream.discard = true;
            ms->subtitle.stream.discardCount = 0;
        }

        seek.flags &= ~AVSEEK_FLAG_ANY;
        seek.discardTime = pos;
        seek.readDiscardStartTime = this->getAbsoluteClock();
    }

    seek.request = true;

    if (discard)
        seek.discard = true;

    ms->seek.readable = false;
    ms->subtitle.seekFlags = seek.flags;
}

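// Normalizer: installs a DAMP and a COMPRESSOR2 BASS_FX effect on the audio channel;
// if either effect cannot be created, the partially built chain is removed again.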
bool MediaPresenter::initNormalizer()
{
    this->closeNormalizer();

    this->m_audioEffect.damp = BASS_ChannelSetFX(this->m_state->audio.handle, BASS_FX_BFX_DAMP, 4);

    if (!this->m_audioEffect.damp)
        return false;

    this->m_audioEffect.compressor = BASS_ChannelSetFX(this->m_state->audio.handle, BASS_FX_BFX_COMPRESSOR2, 3);

    if (!this->m_audioEffect.compressor)
    {
        this->closeNormalizer();
        return false;
    }

    return true;
}

void MediaPresenter::closeNormalizer()
{
    BASS_ChannelRemoveFX(this->m_state->audio.handle, this->m_audioEffect.damp);
    BASS_ChannelRemoveFX(this->m_state->audio.handle, this->m_audioEffect.compressor);

    this->m_audioEffect.damp = 0;
    this->m_audioEffect.compressor = 0;
}

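// Equalizer: installs a PEAKEQ effect plus a VOLUME effect used as pre-amp, then re-applies
// the stored pre-amp value and the per-band gains; any failure tears the chain down again.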
bool MediaPresenter::initEqualizer()
{
    this->closeEqualizer();

    this->m_audioEffect.eqaulizer = BASS_ChannelSetFX(this->m_state->audio.handle, BASS_FX_BFX_PEAKEQ, 5);

    if (!this->m_audioEffect.eqaulizer)
        return false;

    this->m_audioEffect.preamp = BASS_ChannelSetFX(this->m_state->audio.handle, BASS_FX_BFX_VOLUME, 6);

    if (!this->m_audioEffect.preamp)
    {
        this->closeEqualizer();
        return false;
    }

    if (!this->setPreAmp(this->m_audioEffect.preampValue))
    {
        this->closeEqualizer();
        return false;
    }

    QVector<Equalizer> eqValues = this->m_audioEffect.equalizerValues;

    for (int i = 0; i < eqValues.count(); i++)
    {
        if (!this->setEqualizerGain(i, eqValues[i].gain))
        {
            this->closeEqualizer();
            return false;
        }
    }

    return true;
}

void MediaPresenter::closeEqualizer()
{
    BASS_ChannelRemoveFX(this->m_state->audio.handle, this->m_audioEffect.eqaulizer);
    BASS_ChannelRemoveFX(this->m_state->audio.handle, this->m_audioEffect.preamp);

    this->m_audioEffect.eqaulizer = 0;
    this->m_audioEffect.preamp = 0;
}

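// "Lower voice": a BQF notch filter centered at 531 Hz applied to all channels, cutting
// the configured band.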
bool MediaPresenter::initLowerVoice()
{
    this->closeLowerVoice();

    this->m_audioEffect.lowerVoice = BASS_ChannelSetFX(this->m_state->audio.handle, BASS_FX_BFX_BQF, 1);

    if (!this->m_audioEffect.lowerVoice)
        return false;

    BASS_BFX_BQF param;

    param.lFilter = BASS_BFX_BQF_NOTCH;
    param.fCenter = 531.0f;
    param.fBandwidth = 4.0f;
    param.lChannel = BASS_BFX_CHANALL;
    param.fGain = 0.0f;
    param.fS = 0.0f;
    param.fQ = 0.0f;

    if (BASS_FXSetParameters(this->m_audioEffect.lowerVoice, &param) == FALSE)
    {
        this->closeLowerVoice();
        return false;
    }

    return true;
}

void MediaPresenter::closeLowerVoice()
{
    BASS_ChannelRemoveFX(this->m_state->audio.handle, this->m_audioEffect.lowerVoice);
    this->m_audioEffect.lowerVoice = 0;
}

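// "Higher voice": a single-band PEAKEQ whose center, bandwidth and gain come from the stored
// higherVoiceValue settings.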
bool MediaPresenter::initHigherVoice()
{
    this->closeHigherVoice();

    this->m_audioEffect.higherVoice = BASS_ChannelSetFX(this->m_state->audio.handle, BASS_FX_BFX_PEAKEQ, 2);

    if (!this->m_audioEffect.higherVoice)
        return false;

    BASS_BFX_PEAKEQ value;

    value.fBandwidth = this->m_audioEffect.higherVoiceValue.octave;
    value.fCenter = this->m_audioEffect.higherVoiceValue.center;
    value.fGain = this->m_audioEffect.higherVoiceValue.gain;
    value.lBand = 0;
    value.lChannel = BASS_BFX_CHANALL;

    if (BASS_FXSetParameters(this->m_audioEffect.higherVoice, &value) == FALSE)
    {
        this->closeHigherVoice();
        return false;
    }

    return true;
}

void MediaPresenter::closeHigherVoice()
{
    BASS_ChannelRemoveFX(this->m_state->audio.handle, this->m_audioEffect.higherVoice);
    this->m_audioEffect.higherVoice = 0;
}

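// "Lower music": the complementary BQF band-pass filter at the same 531 Hz center, applied
// to all channels.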
bool MediaPresenter::initLowerMusic()
{
    this->closeLowerMusic();

    this->m_audioEffect.lowerMusic = BASS_ChannelSetFX(this->m_state->audio.handle, BASS_FX_BFX_BQF, 0);

    if (!this->m_audioEffect.lowerMusic)
    {
        this->closeLowerMusic();
        return false;
    }

    BASS_BFX_BQF param;

    param.lFilter = BASS_BFX_BQF_BANDPASS;
    param.fCenter = 531.0f;
    param.fBandwidth = 4.0f;
    param.lChannel = BASS_BFX_CHANALL;
    param.fGain = 0.0f;
    param.fS = 0.0f;
    param.fQ = 0.0f;

    if (BASS_FXSetParameters(this->m_audioEffect.lowerMusic, &param) == FALSE)
    {
        this->closeLowerMusic();
        return false;
    }

    return true;
}

void MediaPresenter::closeLowerMusic()
{
    BASS_ChannelRemoveFX(this->m_state->audio.handle, this->m_audioEffect.lowerMusic);
    this->m_audioEffect.lowerMusic = 0;
}

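// Walks the configured skip ranges: a range with a negative start is treated as an opening
// to skip, a range with a negative end as an ending (playback pauses and the ended callback
// fires), and an ordinary range is jumped over with a discard seek.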
void MediaPresenter::processSkipRange()
{
    if (!this->m_state || !this->m_state->seek.readable || this->m_state->seek.discard || !this->hasDuration())
        return;

    for (int i = 0; i < this->m_skipRanges.count(); i++)
    {
        Range &range = this->m_skipRanges[i];

        if (range.end <= range.start)
        {
            if (range.start > 0.0 && range.end > 0.0)
            {
                range.enable = false;
                continue;
            }
        }

        double curTime = this->getCurrentPosition();
        QString startTime;
        QString endTime;
        double destTime = -1.0;
        QString desc;

        if (range.start < 0.0 && curTime < range.end)
        {
            if (!this->m_skipOpening || range.end <= 0.0)
                continue;

            destTime = range.end;

            Utils::getTimeString(range.end, Utils::TIME_HH_MM_SS, &endTime);
            desc = tr("오프닝 스킵 : %1").arg(endTime); // "Skip opening : %1"
        }
        else if (this->m_playData.duration - range.start <= curTime && range.end < 0.0)
        {
            if (!this->m_skipEnding)
                continue;

            if (this->m_ended.callback)
                this->m_ended.callback(this->m_ended.userData);

            this->pause();
        }
        else if (range.start <= curTime && curTime < range.end)
        {
            if (!range.enable || !this->m_useSkipRange)
                continue;

            destTime = range.end;

            Utils::getTimeString(range.start, Utils::TIME_HH_MM_SS, &startTime);
            Utils::getTimeString(range.end, Utils::TIME_HH_MM_SS, &endTime);

            desc = tr("재생 스킵 : %1 ~ %2").arg(startTime).arg(endTime); // "Skip playback : %1 ~ %2"
        }

        if (destTime >= 0.0 && destTime < this->getDuration())
        {
            const double offset = destTime > 0.0 ? 0.5 : -0.5;
            QMutexLocker locker(&this->m_controlLocker);

            this->seekStream(destTime + offset, destTime - curTime, AVSEEK_FLAG_ANY | AVSEEK_FLAG_BACKWARD);

            if (!desc.isEmpty())
                this->showOptionDesc(desc);

            break;
        }
    }
}

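// Presenter housekeeping thread: updates playback details (position, FPS, byte rates, CPU
// usage), drives the playing/option-description callbacks, enforces A-B repeat and skip
// ranges, and falls back from S/PDIF to PCM after repeated pass-through failures.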
void MediaPresenter::run()
{
    const int playTime = 100;
    const int detailTime = 1000;
    const int cpuUsageTime = 1000;
    const int showOptionDescTime = OPTION_DESC_TIME;
    QTime playingTimer;
    QTime showOptionDescTimer;
    QTime detailTimer;
    QTime cpuUsageTimer;
    MediaState *ms = this->m_state;

    playingTimer.start();
    showOptionDescTimer.start();
    detailTimer.start();
    cpuUsageTimer.start();

    while (!this->m_forceExit)
    {
        this->m_detail.currentTime = this->getCurrentPosition();
        this->m_detail.timePercentage = (this->m_detail.currentTime / this->m_detail.totalTime) * 100.0;

        if (std::isnan(this->m_detail.timePercentage))
            this->m_detail.timePercentage = 0.0;

        this->m_detail.videoCurrentFrame.fetchAndStoreOrdered((this->m_detail.timePercentage * this->m_detail.videoTotalFrame) / 100.0);

        if (detailTimer.elapsed() >= detailTime)
        {
            float elapsed = detailTimer.elapsed() / 1000.0f;

            detailTimer.restart();

            this->m_detail.videoFPS = this->m_detail.videoFrameCount.fetchAndStoreOrdered(0) / elapsed;
            this->m_detail.videoInputByteRate = this->m_detail.videoInputByteCount.fetchAndStoreOrdered(0) / elapsed;
            this->m_detail.videoOutputByteRate = this->m_detail.videoOutputByteCount.fetchAndStoreOrdered(0) / elapsed;
            this->m_detail.audioInputByteRate = this->m_detail.audioInputByteCount.fetchAndStoreOrdered(0) / elapsed;
            this->m_detail.audioOutputByteRate = this->m_detail.audioOutputByteCount.fetchAndStoreOrdered(0) / elapsed;
            this->m_detail.dtvSignal = true;
        }

        if (cpuUsageTimer.elapsed() >= cpuUsageTime)
        {
            cpuUsageTimer.restart();
            this->m_detail.cpuUsage = this->getCPUUsage();
        }

        if (playingTimer.elapsed() >= playTime)
        {
            if (ms && this->m_readThread.isRunning() && ms->seek.readable)
            {
                if (this->m_playing.callback)
                    this->m_playing.callback(this->m_playing.userData);
            }

            playingTimer.restart();
        }

        if (this->m_showOptionDesc)
        {
            this->m_showOptionDesc = false;
            this->m_showingOptionDesc = true;

            showOptionDescTimer.restart();
        }

        if (this->m_showingOptionDesc && showOptionDescTimer.elapsed() > showOptionDescTime)
        {
            this->m_showingOptionDesc = false;

            if (this->m_showAudioOptionDescCallback.callback && this->isAudio())
                this->m_showAudioOptionDescCallback.callback(this->m_showAudioOptionDescCallback.userData, QString(), false);
        }

        if (this->m_repeatRange.enable && this->m_state->seek.readable && !this->m_state->seek.discard)
        {
            if (this->m_repeatRange.end <= this->m_repeatRange.start)
            {
                this->m_repeatRange.enable = false;
            }
            else
            {
                double curTime = this->getCurrentPosition();

                if (curTime > this->m_repeatRange.end)
                {
                    QString startTime;
                    QString endTime;
                    QMutexLocker locker(&this->m_controlLocker);

                    Utils::getTimeString(this->getRepeatStart(), Utils::TIME_HH_MM_SS_ZZZ, &startTime);
                    Utils::getTimeString(this->getRepeatEnd(), Utils::TIME_HH_MM_SS_ZZZ, &endTime);

                    this->seekStream(this->m_repeatRange.start, this->m_repeatRange.start - curTime, AVSEEK_FLAG_ANY);
                    this->showOptionDesc(tr("구간 반복 : %1 ~ %2").arg(startTime).arg(endTime)); // "Repeat section : %1 ~ %2"
                }
            }
        }

        if (this->m_spdif.isOpened() && this->m_spdif.getFailCount() > SPDIF_MAX_FAIL_COUNT)
        {
            this->useSPDIF(false);
            this->showOptionDesc(tr("S/PDIF 출력을 지원하지 않은 포맷이므로 PCM 출력으로 전환합니다")); // "Switching to PCM output because this format does not support S/PDIF output"
        }

        this->processSkipRange();
        this->msleep(READ_CONTINUE_DELAY);
    }
}