/libavcodec/fraps.c

http://github.com/FFmpeg/FFmpeg

/*
 * Fraps FPS1 decoder
 * Copyright (c) 2005 Roine Gustafsson
 * Copyright (c) 2006 Konstantin Shishkov
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Lossless Fraps 'FPS1' decoder
 * @author Roine Gustafsson (roine at users sf net)
 * @author Konstantin Shishkov
 *
 * Codec algorithm for version 0 is taken from Transcode <www.transcoding.org>
 *
 * Version 2 files support by Konstantin Shishkov
 */

#include "avcodec.h"
#include "get_bits.h"
#include "huffman.h"
#include "bytestream.h"
#include "bswapdsp.h"
#include "internal.h"
#include "thread.h"
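
/* FPS_TAG marks the start of the plane-offset table in version >= 2 packets;
 * VLC_BITS is the number of bits of the primary VLC table used by get_vlc2() */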
#define FPS_TAG MKTAG('F', 'P', 'S', 'x')
#define VLC_BITS 11

/**
 * local variable storage
 */
typedef struct FrapsContext {
    AVCodecContext *avctx;
    BswapDSPContext bdsp;
    uint8_t *tmpbuf;
    int tmpbuf_size;
} FrapsContext;
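
/* tmpbuf is scratch space for the byteswapped Huffman bitstream of one plane;
 * it is grown as needed with av_fast_padded_malloc() in decode_frame() and
 * released in decode_end() */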

/**
 * initializes decoder
 * @param avctx codec context
 * @return 0 on success or negative if fails
 */
static av_cold int decode_init(AVCodecContext *avctx)
{
    FrapsContext * const s = avctx->priv_data;

    s->avctx  = avctx;
    s->tmpbuf = NULL;

    ff_bswapdsp_init(&s->bdsp);

    return 0;
}

/**
 * Comparator - our nodes should ascend by count
 * but with preserved symbol order
 */
static int huff_cmp(const void *va, const void *vb)
{
    const Node *a = va, *b = vb;
    return (a->count - b->count)*256 + a->sym - b->sym;
}
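
/* the *256 weighting above makes the symbol value the tie-breaker: nodes with
 * equal counts keep their original symbol order, so the rebuilt tree is
 * deterministic */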

/**
 * decode Fraps v2 packed plane
 */
static int fraps2_decode_plane(FrapsContext *s, uint8_t *dst, int stride, int w,
                               int h, const uint8_t *src, int size, int Uoff,
                               const int step)
{
    int i, j, ret;
    GetBitContext gb;
    VLC vlc;
    Node nodes[512];

    for (i = 0; i < 256; i++)
        nodes[i].count = bytestream_get_le32(&src);
    size -= 1024;
    if ((ret = ff_huff_build_tree(s->avctx, &vlc, 256, VLC_BITS,
                                  nodes, huff_cmp,
                                  FF_HUFFMAN_FLAG_ZERO_COUNT)) < 0)
        return ret;
    /* we have built Huffman table and are ready to decode plane */
    /* convert bits so they may be used by standard bitreader */
    s->bdsp.bswap_buf((uint32_t *) s->tmpbuf,
                      (const uint32_t *) src, size >> 2);
    if ((ret = init_get_bits8(&gb, s->tmpbuf, size)) < 0)
        return ret;
    for (j = 0; j < h; j++) {
        for (i = 0; i < w*step; i += step) {
            dst[i] = get_vlc2(&gb, vlc.table, VLC_BITS, 3);
            /* lines are stored as deltas between previous lines
             * and we need to add 0x80 to the first lines of chroma planes
             */
            if (j)
                dst[i] += dst[i - stride];
            else if (Uoff)
                dst[i] += 0x80;
            if (get_bits_left(&gb) < 0) {
                ff_free_vlc(&vlc);
                return AVERROR_INVALIDDATA;
            }
        }
        dst += stride;
    }
    ff_free_vlc(&vlc);
    return 0;
}
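
/**
 * decodes a Fraps frame
 * The low byte of the packet header selects the coding version: 0 is
 * reordered raw YUV420, 1 is raw upside-down BGR24 (or PAL8), 2/4 are
 * Huffman-coded YUV420 planes and 3/5 are Huffman-coded RGB planes with
 * B and R stored as deltas from G.
 * @return number of consumed bytes or a negative error code
 */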
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *got_frame,
                        AVPacket *avpkt)
{
    FrapsContext * const s = avctx->priv_data;
    const uint8_t *buf     = avpkt->data;
    int buf_size           = avpkt->size;
    ThreadFrame frame = { .f = data };
    AVFrame * const f = data;
    uint32_t header;
    unsigned int version, header_size;
    unsigned int x, y;
    const uint32_t *buf32;
    uint32_t *luma1, *luma2, *cb, *cr;
    uint32_t offs[4];
    int i, j, ret, is_chroma;
    const int planes = 3;
    int is_pal;
    uint8_t *out;

    if (buf_size < 4) {
        av_log(avctx, AV_LOG_ERROR, "Packet is too short\n");
        return AVERROR_INVALIDDATA;
    }

    header      = AV_RL32(buf);
    version     = header & 0xff;
    is_pal      = buf[1] == 2 && version == 1;
    header_size = (header & (1 << 30)) ? 8 : 4; /* bit 30 means pad to 8 bytes */

    if (version > 5) {
        avpriv_report_missing_feature(avctx, "Fraps version %u", version);
        return AVERROR_PATCHWELCOME;
    }

    buf += header_size;

    if (is_pal) {
        unsigned needed_size = avctx->width * avctx->height + 1024;
        needed_size += header_size;
        if (buf_size != needed_size) {
            av_log(avctx, AV_LOG_ERROR,
                   "Invalid frame length %d (should be %d)\n",
                   buf_size, needed_size);
            return AVERROR_INVALIDDATA;
        }
    } else if (version < 2) {
        unsigned needed_size = avctx->width * avctx->height * 3;
        if (version == 0) needed_size /= 2;
        needed_size += header_size;
        /* bit 31 means same as previous pic */
        if (header & (1U << 31)) {
            *got_frame = 0;
            return buf_size;
        }
        if (buf_size != needed_size) {
            av_log(avctx, AV_LOG_ERROR,
                   "Invalid frame length %d (should be %d)\n",
                   buf_size, needed_size);
            return AVERROR_INVALIDDATA;
        }
    } else {
        /* skip frame */
        if (buf_size == 8) {
            *got_frame = 0;
            return buf_size;
        }
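
        /* a non-skipped v2+ packet: the 'FPSx' tag is followed by three
         * little-endian 32-bit plane offsets; each plane starts with a
         * 1024-byte (256 x LE32) symbol frequency table followed by its
         * Huffman-coded data */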
        if (AV_RL32(buf) != FPS_TAG || buf_size < planes*1024 + 24) {
            av_log(avctx, AV_LOG_ERROR, "error in data stream\n");
            return AVERROR_INVALIDDATA;
        }
        for (i = 0; i < planes; i++) {
            offs[i] = AV_RL32(buf + 4 + i * 4);
            if (offs[i] >= buf_size - header_size || (i && offs[i] <= offs[i - 1] + 1024)) {
                av_log(avctx, AV_LOG_ERROR, "plane %i offset is out of bounds\n", i);
                return AVERROR_INVALIDDATA;
            }
        }
        offs[planes] = buf_size - header_size;
        for (i = 0; i < planes; i++) {
            av_fast_padded_malloc(&s->tmpbuf, &s->tmpbuf_size, offs[i + 1] - offs[i] - 1024);
            if (!s->tmpbuf)
                return AVERROR(ENOMEM);
        }
    }

    f->pict_type = AV_PICTURE_TYPE_I;
    f->key_frame = 1;

    avctx->pix_fmt = version & 1 ? is_pal ? AV_PIX_FMT_PAL8 : AV_PIX_FMT_BGR24 : AV_PIX_FMT_YUVJ420P;
    avctx->color_range = version & 1 ? AVCOL_RANGE_UNSPECIFIED
                                     : AVCOL_RANGE_JPEG;
    avctx->colorspace = version & 1 ? AVCOL_SPC_UNSPECIFIED : AVCOL_SPC_BT709;

    if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
        return ret;

    switch (version) {
    case 0:
    default:
        /* Fraps v0 is a reordered YUV420 */
        if (((avctx->width % 8) != 0) || ((avctx->height % 2) != 0)) {
            av_log(avctx, AV_LOG_ERROR, "Invalid frame size %dx%d\n",
                   avctx->width, avctx->height);
            return AVERROR_INVALIDDATA;
        }
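
        /* each iteration of the inner loop below copies one 8x2 pixel block:
         * eight luma samples for each of the two rows, then four Cr and four
         * Cb samples, all moved as 32-bit words */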
        buf32 = (const uint32_t*)buf;
        for (y = 0; y < avctx->height / 2; y++) {
            luma1 = (uint32_t*)&f->data[0][  y * 2      * f->linesize[0] ];
            luma2 = (uint32_t*)&f->data[0][ (y * 2 + 1) * f->linesize[0] ];
            cr    = (uint32_t*)&f->data[1][  y          * f->linesize[1] ];
            cb    = (uint32_t*)&f->data[2][  y          * f->linesize[2] ];
            for (x = 0; x < avctx->width; x += 8) {
                *luma1++ = *buf32++;
                *luma1++ = *buf32++;
                *luma2++ = *buf32++;
                *luma2++ = *buf32++;
                *cr++    = *buf32++;
                *cb++    = *buf32++;
            }
        }
        break;

    case 1:
        if (is_pal) {
            uint32_t *pal = (uint32_t *)f->data[1];

            for (y = 0; y < 256; y++) {
                pal[y] = AV_RL32(buf) | 0xFF000000;
                buf += 4;
            }

            for (y = 0; y < avctx->height; y++)
                memcpy(&f->data[0][y * f->linesize[0]],
                       &buf[y * avctx->width],
                       avctx->width);
        } else {
            /* Fraps v1 is an upside-down BGR24 */
            for (y = 0; y < avctx->height; y++)
                memcpy(&f->data[0][(avctx->height - y - 1) * f->linesize[0]],
                       &buf[y * avctx->width * 3],
                       3 * avctx->width);
        }
        break;

    case 2:
    case 4:
        /**
         * Fraps v2 is Huffman-coded YUV420 planes
         * Fraps v4 is virtually the same
         */
        for (i = 0; i < planes; i++) {
            is_chroma = !!i;
            if ((ret = fraps2_decode_plane(s, f->data[i], f->linesize[i],
                                           avctx->width  >> is_chroma,
                                           avctx->height >> is_chroma,
                                           buf + offs[i], offs[i + 1] - offs[i],
                                           is_chroma, 1)) < 0) {
                av_log(avctx, AV_LOG_ERROR, "Error decoding plane %i\n", i);
                return ret;
            }
        }
        break;
    case 3:
    case 5:
        /* Virtually the same as version 4, but is for RGB24 */
        for (i = 0; i < planes; i++) {
            if ((ret = fraps2_decode_plane(s, f->data[0] + i + (f->linesize[0] * (avctx->height - 1)),
                                           -f->linesize[0], avctx->width, avctx->height,
                                           buf + offs[i], offs[i + 1] - offs[i], 0, 3)) < 0) {
                av_log(avctx, AV_LOG_ERROR, "Error decoding plane %i\n", i);
                return ret;
            }
        }
        out = f->data[0];
        // convert pseudo-YUV into real RGB
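        // (G is stored verbatim while B and R are coded as deltas from G,
        //  so adding the G value back restores the original samples)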
        for (j = 0; j < avctx->height; j++) {
            uint8_t *line_end = out + 3*avctx->width;
            while (out < line_end) {
                out[0] += out[1];
                out[2] += out[1];
                out    += 3;
            }
            out += f->linesize[0] - 3*avctx->width;
        }
        break;
    }

    *got_frame = 1;

    return buf_size;
}

/**
 * closes decoder
 * @param avctx codec context
 * @return 0 on success or negative if fails
 */
static av_cold int decode_end(AVCodecContext *avctx)
{
    FrapsContext *s = (FrapsContext*)avctx->priv_data;

    av_freep(&s->tmpbuf);
    return 0;
}

AVCodec ff_fraps_decoder = {
    .name           = "fraps",
    .long_name      = NULL_IF_CONFIG_SMALL("Fraps"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_FRAPS,
    .priv_data_size = sizeof(FrapsContext),
    .init           = decode_init,
    .close          = decode_end,
    .decode         = decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
    .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE,
};