PageRenderTime 61ms CodeModel.GetById 18ms RepoModel.GetById 0ms app.codeStats 1ms

/decoders/image/png.d

http://github.com/wilkie/djehuty
D | 2722 lines | 380 code | 163 blank | 2179 comment | 66 complexity | c1b69102854802f699bc98aacc78bb5e MD5 | raw file
  1. module decoders.image.png;
  2. import graphics.bitmap;
  3. import core.stream;
  4. import core.string;
  5. import core.endian;
  6. import core.definitions;
  7. import decoders.image.decoder;
  8. import decoders.decoder;
  9. import decoders.binary.zlib;
  10. import io.console;
  11. // Section: Codecs/Image
  12. // Description: The PNG Codec
  13. class PNGDecoder : ImageDecoder {
  14. override string name() {
  15. return "Portable Network Graphics";
  16. }
  17. StreamData decode(Stream stream, ref Bitmap view) {
  18. ImageFrameDescription imageDesc;
  19. bool hasMultipleFrames;
  20. Stream streamToDecode;
  21. streamToDecode = new Stream();
  22. uint* ptr_start; //ptr of the first pixel
  23. uint* ptr; //current ptr in image data
  24. uint* ptr_max_line; //ptr of the next line
  25. uint* ptr_max_page; //ptr outside of image bounds
  26. ulong ptr_len;
  27. uint psamp;
  28. uint nsamp;
  29. ubyte curByte, recon;
  30. uint palIndex;
  31. int p;
  32. int pa;
  33. int pb;
  34. int pc;
  35. float a;
  36. for (;;) {
  37. switch(decoderState) {
  38. case PNG_STATE_INIT_PROGRESS:
  39. ptrPos = 0;
  40. ptrLine = 0;
  41. pngUncompressedData = new Stream();
  42. decoderNextState = PNG_STATE_DECODE_READ_FILTER_TYPE;
  43. // READ HEADERS //
  44. case PNG_STATE_INIT:
  45. if(!(stream.read(&pngHeader, 8))) {
  46. return StreamData.Required;
  47. }
  48. // DETERMINE VALIDITY OF FILE //
  49. if (!(pngHeader[0] == 0x89 &&
  50. pngHeader[1] == 0x50 &&
  51. pngHeader[2] == 0x4E &&
  52. pngHeader[3] == 0x47 &&
  53. pngHeader[4] == 0x0D &&
  54. pngHeader[5] == 0x0A &&
  55. pngHeader[6] == 0x1A &&
  56. pngHeader[7] == 0x0a)) {
  57. //Header is corrupt
  58. // Console.putln("header corrupt");
  59. // Console.putln("png - header corrupt\n");
  60. return StreamData.Invalid;
  61. }
  62. pngPaletteCount = 0;
  63. decoderState = PNG_STATE_READ_CHUNK_HEADER;
  64. // READ CHUNK HEADER //
  65. case PNG_STATE_READ_CHUNK_HEADER:
  66. //Console.putln("png - reading chunk\n");
  67. if(!(stream.read(&pngChunkHeader, _djehuty_image_png_chunk_header.sizeof))) {
  68. return StreamData.Required;
  69. }
  70. pngChunkHeader.pngChunkLength = FromBigEndian32(pngChunkHeader.pngChunkLength);
  71. // Console.putln(toString(pngChunkHeader.pngChunkLength) ~ "\n");
  72. switch(pngChunkHeader.pngChunkType) {
  73. case PNG_CHUNK_IHDR:
  74. //////////Console.putln("png - IHDR\n");
  75. decoderState = PNG_STATE_READ_IHDR;
  76. continue;
  77. case PNG_CHUNK_PLTE:
  78. //////////Console.putln("png - PLTE\n");
  79. decoderState = PNG_STATE_READ_PLTE;
  80. continue;
  81. case PNG_CHUNK_IDAT:
  82. //Console.putln("png - IDAT\n");
  83. decoderState = PNG_STATE_READ_IDAT;
  84. continue;
  85. case PNG_CHUNK_IEND:
  86. //Console.putln("png - IEND\n");
  87. // Console.putln("IEND"); //, streamToDecode);
  88. streamToDecode.rewind();
  89. //Console.putln("rewound");
  90. if (zlibDecompressor is null) {
  91. zlibDecompressor = new ZLIBDecoder();
  92. }
  93. if (zlibDecompressor.decode(streamToDecode, pngUncompressedData) == StreamData.Complete) {
  94. //Console.putln("zlib");
  95. //while (pngUncompressedData.getRemaining())
  96. //{
  97. // pngUncompressedData.Readubyte(curByte);
  98. ////////////Console.putln((":") + toString(curByte) + ("\n"));
  99. //}
  100. //pngUncompressedData.rewind();
  101. view.lockBuffer(cast(void**)&ptr_start, ptr_len);
  102. ptr = ptr_start;
  103. ptr += (pngIHDR.pngWidth * ptrLine);
  104. ptr_max_line = ptr + pngIHDR.pngWidth;
  105. ptr_max_page = ptr_start + (ptr_len / 4);
  106. ptr += ptrPos;
  107. //Console.putln("png - reading filter type!!!\n");
  108. decoderState = PNG_STATE_DECODE_READ_FILTER_TYPE;
  109. }
  110. else {
  111. //Console.putln("zlib");
  112. return StreamData.Complete;
  113. }
  114. continue;
  115. default:
  116. // Console.putln("png - unknown chunk\n");
  117. decoderState = PNG_STATE_SKIP_CHUNK;
  118. }
  119. continue;
  120. // SKIPS CHUNK AND CRC INFO
  121. case PNG_STATE_SKIP_CHUNK:
  122. //////////Console.putln(("png - skipping chunk of length: ") + toString(pngChunkHeader.pngChunkLength) + ("\n"));
  123. if(!(stream.skip(pngChunkHeader.pngChunkLength + 4))) {
  124. return StreamData.Required;
  125. }
  126. decoderState = PNG_STATE_READ_CHUNK_HEADER;
  127. continue;
  128. case PNG_STATE_READ_CHUNK_CRC:
  129. //////////Console.putln("png - checking CRC\n");
  130. if(!(stream.read(&pngChunkCRC, 4))) {
  131. return StreamData.Required;
  132. }
  133. // CHECK CRC
  134. // READ ANOTHER CHUNK
  135. decoderState = PNG_STATE_READ_CHUNK_HEADER;
  136. continue;
  137. case PNG_STATE_READ_IHDR:
  138. if(!(stream.read(&pngIHDR, _djehuty_image_png_ihdr.sizeof))) {
  139. return StreamData.Required;
  140. }
  141. pngIHDR.pngWidth = FromBigEndian32(pngIHDR.pngWidth);
  142. pngIHDR.pngHeight = FromBigEndian32(pngIHDR.pngHeight);
  143. pngPaletteCount = 0;
  144. // determine whether png is valid by this header
  145. switch(pngIHDR.pngColorType) {
  146. //Greyscale
  147. case 0:
  148. switch (pngIHDR.pngBitDepth) {
  149. case 1:
  150. case 2:
  151. case 4:
  152. case 8:
  153. case 16:
  154. // Console.putln(("png - greyscale - ") ~ toString(pngIHDR.pngBitDepth) ~ (" bpp\n"));
  155. break;
  156. default:
  157. // Console.putln("png - invalid color, bit depth combination\n");
  158. break;
  159. }
  160. break;
  161. //Truecolour
  162. case 2:
  163. switch (pngIHDR.pngBitDepth)
  164. {
  165. case 8:
  166. case 16:
  167. // Console.putln(("png - truecolour - ") ~ toString(pngIHDR.pngBitDepth) ~ (" bpp\n"));
  168. break;
  169. default:
  170. // Console.putln("png - invalid color, bit depth combination\n");
  171. break;
  172. }
  173. break;
  174. //Indexed-colour
  175. case 3:
  176. switch (pngIHDR.pngBitDepth) {
  177. case 1:
  178. case 2:
  179. case 4:
  180. case 8:
  181. // Console.putln(("png - Indexed-colour - ") ~ toString(pngIHDR.pngBitDepth) ~ (" bpp\n"));
  182. break;
  183. default:
  184. // Console.putln("png - invalid color, bit depth combination\n");
  185. break;
  186. }
  187. break;
  188. //Greyscale with alpha
  189. case 4:
  190. switch (pngIHDR.pngBitDepth) {
  191. case 8:
  192. case 16:
  193. // Console.putln(("png - greyscale with alpha - ") ~ toString(pngIHDR.pngBitDepth) ~ (" bpp\n"));
  194. break;
  195. default:
  196. // Console.putln("png - invalid color, bit depth combination\n");
  197. break;
  198. }
  199. break;
  200. //Truecolour with alpha
  201. case 6:
  202. switch (pngIHDR.pngBitDepth) {
  203. case 8:
  204. case 16:
  205. // Console.putln(("png - truecolour with alpha - ") ~ toString(pngIHDR.pngBitDepth) ~ (" bpp\n"));
  206. break;
  207. default:
  208. // Console.putln("png - invalid color, bit depth combination\n");
  209. break;
  210. }
  211. break;
  212. default:
  213. // Console.putln("png - invalid color type\n");
  214. return StreamData.Invalid;
  215. }
  216. if (pngIHDR.pngFilterMethod != 0) {
  217. //////Console.putln("png - unsupported filter method\n");
  218. return StreamData.Invalid;
  219. }
  220. if (pngIHDR.pngCompressionMethod != 0) {
  221. //////Console.putln("png - unsupported compression method\n");
  222. return StreamData.Invalid;
  223. }
  224. if (pngIHDR.pngInterlaceMethod) {
  225. ////Console.putln("png - Adam7 interlacing\n");
  226. // SET UP INTERLACE PASS DIMENSIONS
  227. // THAT IS, HOW MUCH DATA WILL BE IN EACH PASS, HOW MUCH
  228. // WILL BE IN EACH SCANLINE FOR EACH PASS
  229. pngInterlacePass = 0;
  230. pngInterlaceCurLine = 0;
  231. // EQUATION FOR INTERLACE WIDTH: (width, height refer to dimensions of final image)
  232. // 1st pass: ceiling(width / 8)
  233. // 2nd pass: ceiling((width - 4) / 8)
  234. // 3rd pass: ceiling(width / 4)
  235. // 4th pass: ceiling((width - 2) / 4)
  236. // 5th pass: ceiling(width / 2)
  237. // 6th pass: ceiling((width - 1) / 2)
  238. // 7th pass: width
  239. // EQUATION FOR INTERLACE HEIGHT:
  240. // 1st, 2nd pass: ceiling(height / 8)
  241. // 3rd pass: ceiling((height - 4) / 8)
  242. // 4th pass: ceiling(height / 4)
  243. // 5th pass: ceiling((height - 2) / 4)
  244. // 6th pass: ceiling(height / 2)
  245. // 7th pass: ceiling((height - 1) / 2)
  246. pngInterlaceWidths[0] = cast(uint)(cast(float)pngIHDR.pngWidth / 8);
  247. if (pngIHDR.pngWidth % 8) {
  248. pngInterlaceWidths[0]++;
  249. }
  250. if (pngIHDR.pngWidth <= 4) {
  251. pngInterlaceWidths[1] = 0;
  252. }
  253. else {
  254. pngInterlaceWidths[1] = cast(uint)((cast(float)pngIHDR.pngWidth - 4) / 8);
  255. if ((pngIHDR.pngWidth - 4) % 8) {
  256. pngInterlaceWidths[1]++;
  257. }
  258. }
  259. pngInterlaceWidths[2] = cast(uint)(cast(float)pngIHDR.pngWidth / 4);
  260. if (pngIHDR.pngWidth % 4) {
  261. pngInterlaceWidths[2]++;
  262. }
  263. if (pngIHDR.pngWidth <= 2) {
  264. pngInterlaceWidths[3] = 0;
  265. }
  266. else {
  267. pngInterlaceWidths[3] = cast(uint)((cast(float)pngIHDR.pngWidth - 2) / 4);
  268. if ((pngIHDR.pngWidth - 2) % 4) {
  269. pngInterlaceWidths[3]++;
  270. }
  271. }
  272. pngInterlaceWidths[4] = cast(uint)(cast(float)pngIHDR.pngWidth / 2);
  273. if (pngIHDR.pngWidth % 2) {
  274. pngInterlaceWidths[4]++;
  275. }
  276. if (pngIHDR.pngWidth <= 1) {
  277. pngInterlaceWidths[5] = 0;
  278. }
  279. else {
  280. pngInterlaceWidths[5] = cast(uint)((cast(float)pngIHDR.pngWidth - 1) / 2);
  281. if ((pngIHDR.pngWidth - 1) % 2) {
  282. pngInterlaceWidths[5]++;
  283. }
  284. }
  285. pngInterlaceWidths[6] = pngIHDR.pngWidth;
  286. pngInterlaceHeights[0] = cast(uint)(cast(float)pngIHDR.pngHeight / 8);
  287. if (pngIHDR.pngHeight % 8) {
  288. pngInterlaceHeights[0]++;
  289. }
  290. pngInterlaceHeights[1] = pngInterlaceHeights[0];
  291. if (pngIHDR.pngHeight <= 4) {
  292. pngInterlaceWidths[2] = 0;
  293. pngInterlaceHeights[2] = 0;
  294. }
  295. else {
  296. pngInterlaceHeights[2] = cast(uint)((cast(float)pngIHDR.pngHeight - 4) / 8);
  297. if ((pngIHDR.pngHeight - 4) % 8) {
  298. pngInterlaceHeights[2]++;
  299. }
  300. }
  301. pngInterlaceHeights[3] = cast(uint)(cast(float)pngIHDR.pngHeight / 4);
  302. if (pngIHDR.pngHeight % 4) {
  303. pngInterlaceHeights[3]++;
  304. }
  305. if (pngIHDR.pngHeight <= 2) {
  306. pngInterlaceWidths[4] = 0;
  307. pngInterlaceHeights[4] = 0;
  308. }
  309. else {
  310. pngInterlaceHeights[4] = cast(uint)((cast(float)pngIHDR.pngHeight - 2) / 4);
  311. if ((pngIHDR.pngHeight - 2) % 4) {
  312. pngInterlaceHeights[4]++;
  313. }
  314. }
  315. pngInterlaceHeights[5] = cast(uint)(cast(float)pngIHDR.pngHeight / 2);
  316. if (pngIHDR.pngHeight % 2) {
  317. pngInterlaceHeights[5]++;
  318. }
  319. if (pngIHDR.pngHeight <= 1) {
  320. pngInterlaceWidths[6] = 0;
  321. pngInterlaceHeights[6] = 0;
  322. }
  323. else {
  324. pngInterlaceHeights[6] = cast(uint)((cast(float)pngIHDR.pngHeight - 1) / 2);
  325. if ((pngIHDR.pngHeight - 1) % 2) {
  326. pngInterlaceHeights[6]++;
  327. }
  328. }
  329. for (p=0; p < 7; p++) {
  330. // Console.putln(toString(p) + (": ") + toString(pngInterlaceWidths[p]) + (" x ") + toString(pngInterlaceHeights[p]) + ("\n"));
  331. }
  332. }
  333. else {
  334. // Console.putln("png - no interlacing\n");
  335. }
  336. // calculate quick reference 'image type' //
  337. pngImageType = (((pngIHDR.pngColorType + 1) << 16) + pngIHDR.pngBitDepth);
  338. // set the image renderer state in the decoder process //
  339. if (pngIHDR.pngInterlaceMethod) {
  340. pngRenderState = PNG_STATE_RENDER_INTERLACED_STATE_BASE + pngImageType;
  341. }
  342. else {
  343. pngRenderState = PNG_STATE_RENDER_STATE_BASE + pngImageType;
  344. // Console.putln("eh!", pngRenderState, PNG_STATE_RENDER_STATE_BASE + PNG_TRUECOLOUR_ALPHA_8BPP);
  345. }
  346. //Console.putln(("png - ") + toString(pngIHDR.pngWidth) + (" x ") + toString(pngIHDR.pngHeight) + ("\n"));
  347. // printf("type: %d\n", pngIHDR.pngWidth);
  348. view.create(pngIHDR.pngWidth, pngIHDR.pngHeight);
  349. // CALCULATE THE NUMBER OF BYTES WE WILL BE READING
  350. switch(pngIHDR.pngColorType) {
  351. case 3: // INDEXED_COLOUR (also 1 sample per pixel, samples are indices)
  352. case 0: // GREYSCALE (1 sample per pixel)
  353. switch(pngIHDR.pngBitDepth) {
  354. case 1:
  355. pngExpectedBytes = 1 + cast(uint)((cast(float)pngIHDR.pngWidth / 8) + 0.5);
  356. pngNumSamples = 1;
  357. break;
  358. case 2:
  359. pngExpectedBytes = 1 + cast(uint)((cast(float)pngIHDR.pngWidth / 4) + 0.5);
  360. pngNumSamples = 1;
  361. break;
  362. case 4:
  363. pngExpectedBytes = 1 + cast(uint)((cast(float)pngIHDR.pngWidth / 2) + 0.5);
  364. pngNumSamples = 1;
  365. break;
  366. case 8:
  367. pngExpectedBytes = pngIHDR.pngWidth;
  368. pngNumSamples = 1;
  369. break;
  370. case 16:
  371. pngExpectedBytes = pngIHDR.pngWidth * 2;
  372. pngNumSamples = 2;
  373. break;
  374. default: break;
  375. }
  376. break;
  377. case 2: // TRUE_COLOUR
  378. switch(pngIHDR.pngBitDepth) {
  379. case 8:
  380. pngExpectedBytes = pngIHDR.pngWidth * 3;
  381. pngNumSamples = 3;
  382. break;
  383. case 16:
  384. pngExpectedBytes = pngIHDR.pngWidth * (3 * 2);
  385. pngNumSamples = 6;
  386. break;
  387. default: break;
  388. }
  389. break;
  390. case 4: // GREYSCALE_ALPHA
  391. switch(pngIHDR.pngBitDepth) {
  392. case 8:
  393. pngExpectedBytes = pngIHDR.pngWidth * 2;
  394. pngNumSamples = 2;
  395. break;
  396. case 16:
  397. pngExpectedBytes = pngIHDR.pngWidth * (2 * 2);
  398. pngNumSamples = 4;
  399. break;
  400. default: break;
  401. }
  402. break;
  403. case 6: // TRUE_COLOUR_ALPHA
  404. switch(pngIHDR.pngBitDepth) {
  405. case 8:
  406. pngExpectedBytes = pngIHDR.pngWidth * 4;
  407. pngNumSamples = 4;
  408. break;
  409. case 16:
  410. pngExpectedBytes = pngIHDR.pngWidth * (4 * 2);
  411. pngNumSamples = 8;
  412. break;
  413. default: break;
  414. }
  415. break;
  416. default: break;
  417. }
  418. // INIT DECODER DATA
  419. for ( pngCounter = 0; pngCounter < 8; pngCounter++) {
  420. ////Console.putln(("png - ") + toString(pngIHDR.pngWidth) + (" x ") + toString(pngIHDR.pngHeight) + ("\n"));
  421. pngBytes[pngCounter] = new ubyte[pngExpectedBytes];
  422. pngBytes[pngCounter][0..pngExpectedBytes] = 0;
  423. }
  424. nsamp = 0;
  425. psamp = 0;
  426. pngCounter = -1;
  427. decoderState = PNG_STATE_READ_CHUNK_CRC;
  428. continue;
  429. case PNG_STATE_READ_PLTE:
  430. // GET NUMBER OF PALETTE ENTRIES
  431. // LOOK AT CHUNK DATA LENGTH, DIVIDE BY THREE
  432. // IF THERE IS A REMAINDER, THIS PNG IS INVALID
  433. pngPaletteCount = (pngChunkHeader.pngChunkLength % 3);
  434. if (pngPaletteCount) {
  435. //////////Console.putln("png - PLTE - invalid palette chunk\n");
  436. }
  437. pngPaletteCount = pngChunkHeader.pngChunkLength / 3;
  438. if (pngPaletteCount > 256) {
  439. //////////Console.putln("png - PLTE - too many entries in palette\n");
  440. return StreamData.Invalid;
  441. }
  442. if (pngPaletteCount == 0) {
  443. //////////Console.putln("png - PLTE - empty palette, proceeding anyway\n");
  444. decoderState = PNG_STATE_READ_CHUNK_CRC;
  445. continue;
  446. }
  447. case PNG_STATE_READ_PLTE_ENTRIES:
  448. if(!(stream.read(&pngPalette, pngPaletteCount*3))) {
  449. return StreamData.Required;
  450. }
  451. // build pngPaletteRealized //
  452. for (palIndex = 0; palIndex < pngPaletteCount; palIndex++) {
  453. pngPaletteRealized[palIndex] = (0xFF000000) | (pngPalette[palIndex].blue) | (pngPalette[palIndex].green << 8) | (pngPalette[palIndex].red << 16);
  454. }
  455. for ( ; palIndex < 256; palIndex++) {
  456. pngPaletteRealized[palIndex] = 0;
  457. }
  458. decoderState = PNG_STATE_READ_CHUNK_CRC;
  459. continue;
  460. case PNG_STATE_READ_IDAT:
  461. // GET THE CONTENTS OF THE CHUNK DATA
  462. //////////Console.putln(toString(pngChunkHeader.pngChunkLength) + ("chunk header\n"));
  463. //streamToDecode.clear();
  464. decoderState = PNG_STATE_FILL_IDAT;
  465. case PNG_STATE_FILL_IDAT:
  466. // Console.putln("fill idat");
  467. // if(!(stream.read(streamToDecode, pngChunkHeader.pngChunkLength)))
  468. if(!(streamToDecode.append(stream, pngChunkHeader.pngChunkLength))) {
  469. return StreamData.Required;
  470. }
  471. // Console.putln("fill idat");
  472. ubyte b;
  473. //Console.putln("pos: ", streamToDecode.getPosition());
  474. //////////Console.putln(toString(streamToDecode.getLength()) + ("\n"));
  475. //streamToDecode.rewind();
  476. //////////Console.putln(toString(byte) + ("oo\n"));
  477. //if (stream.PushRestriction(stream.getPosition(), pngChunkHeader.pngChunkLength))
  478. //{
  479. //////////Console.putln(toString(stream.getLength()) + ("!!!!\n"));
  480. //}
  481. //////////Console.putln(toString(stream.getLength()) + ("oo\n"));
  482. /*
  483. if (zlibCodec.decode(streamToDecode, pngUncompressedData, pngCompressionProgress) == StreamData.Complete)
  484. {
  485. //while (pngUncompressedData.getRemaining())
  486. //{
  487. // pngUncompressedData.Readubyte(curByte);
  488. ////////////Console.putln((":") + toString(curByte) + ("\n"));
  489. //}
  490. //pngUncompressedData.rewind();
  491. view.lockBuffer((void**)&ptr_start, ptr_len);
  492. ptr = ptr_start;
  493. ptr += (pngIHDR.pngWidth * ptrLine);
  494. ptr_max_line = ptr + pngIHDR.pngWidth;
  495. ptr_max_page = ptr_start + (ptr_len / 4);
  496. ptr += ptrPos;
  497. //if (decoderNextState == PNG_STATE_DECODE_READ_FILTER_TYPE)
  498. //{
  499. //////////Console.putln("png - reading filter type\n");
  500. //}
  501. //else
  502. //{
  503. //////////Console.putln("png - returning to scanline render\n");
  504. //}
  505. decoderState = decoderNextState;
  506. }
  507. else
  508. {
  509. decoderState = PNG_STATE_READ_CHUNK_CRC;
  510. }//*/ decoderState = PNG_STATE_READ_CHUNK_CRC;
  511. //stream.PopRestriction();
  512. //////////Console.putln(toString(stream.getLength()) + ("xx\n"));
  513. //////////Console.putln(toString(pngUncompressedData.getRemaining()) + ("\n") + toString(pngChunkHeader.pngChunkLength) + ("\n"));
  514. //stream.skip(pngChunkHeader.pngChunkLength);
  515. continue;
  516. case PNG_STATE_DECODE_READ_FILTER_TYPE:
  517. //Console.putln("read filter type");
  518. // pngUncompressedData.rewind();
  519. if (!(pngUncompressedData.read(&pngFilterType, 1))) {
  520. // need more compress data from IDAT blocks
  521. //////////Console.putln("IDAT empty\n");
  522. decoderState = PNG_STATE_READ_CHUNK_HEADER;
  523. continue;
  524. }
  525. //Console.putln("done filter type");
  526. switch (pngFilterType) {
  527. case 0:
  528. // Console.putln("\npng - filter type - none\n");
  529. decoderState = PNG_STATE_UNFILTER_NONE;
  530. decoderSubState = PNG_STATE_UNFILTER_NONE;
  531. break;
  532. case 1:
  533. // Console.putln("\npng - filter type - Sub\n");
  534. decoderState = PNG_STATE_UNFILTER_SUB;
  535. decoderSubState = PNG_STATE_UNFILTER_SUB;
  536. break;
  537. case 2:
  538. // Console.putln("\npng - filter type - Up\n");
  539. decoderState = PNG_STATE_UNFILTER_UP;
  540. decoderSubState = PNG_STATE_UNFILTER_UP;
  541. break;
  542. case 3:
  543. // Console.putln("\npng - filter type - Average\n");
  544. decoderState = PNG_STATE_UNFILTER_AVERAGE;
  545. decoderSubState = PNG_STATE_UNFILTER_AVERAGE;
  546. break;
  547. case 4:
  548. // Console.putln("\npng - filter type - Paeth\n");
  549. decoderState = PNG_STATE_UNFILTER_PAETH;
  550. decoderSubState = PNG_STATE_UNFILTER_PAETH;
  551. break;
  552. default:
  553. // Console.putln(("\npng - invalid filter type") ~ toString(pngFilterType) ~ ("\n"));
  554. view.unlockBuffer();
  555. return StreamData.Invalid;
  556. }
  557. // set the filter state in the decoder process, should we be interrupted
  558. pngFilterState = decoderState;
  559. for (p = 0; p < pngNumSamples; p++) {
  560. pngPriorPixel[p] = 0;
  561. pngPriorScannedByte[p] = 0;
  562. }
  563. nsamp = 0;
  564. psamp = 0;
  565. pngCounter = -1;
  566. continue;
  567. // FILTER STATES //
  568. case PNG_STATE_UNFILTER_NONE:
  569. // check for decoder termination for the current scanline //
  570. if (ptrPos >= pngIHDR.pngWidth) {
  571. // WE ARE DONE
  572. if (pngIHDR.pngInterlaceMethod) {
  573. pngInterlaceCurLine++;
  574. if (pngInterlaceCurLine == pngInterlaceHeights[pngInterlacePass]) {
  575. pngInterlaceCurLine = 0;
  576. // we are entering a new interlace pass
  577. //////Console.putln("png - interlaced - entering new interlace pass\n");
  578. // reset the prior scanline array
  579. for ( pngCounter = 0; pngCounter < 8; pngCounter++) {
  580. pngBytes[pngCounter][0..pngExpectedBytes] = 0;
  581. }
  582. do {
  583. pngInterlacePass++;
  584. } while ((pngInterlacePass < 7) && (pngInterlaceWidths[pngInterlacePass] == 0));
  585. if (pngInterlacePass >= 7) {
  586. // We are done decoding
  587. view.unlockBuffer();
  588. return StreamData.Complete;
  589. }
  590. ptrLine = pngInterlaceStartsY[pngInterlacePass];
  591. }
  592. else {
  593. ptrLine+=pngInterlaceIncrementsY[pngInterlacePass];
  594. }
  595. ptrPos = pngInterlaceStartsX[pngInterlacePass];
  596. ptr = ptr_start + ((pngIHDR.pngWidth * ptrLine) + ptrPos);
  597. }
  598. else {
  599. ptrPos = 0;
  600. ptrLine++;
  601. }
  602. pngCounter = -1;
  603. decoderState = PNG_STATE_DECODE_READ_FILTER_TYPE;
  604. if (ptrLine >= pngIHDR.pngHeight) {
  605. // Console.putln("done?\n");
  606. view.unlockBuffer();
  607. // Console.putln("done!\n");
  608. return StreamData.Complete;
  609. }
  610. continue;
  611. }
  612. // READ IN DECODED BYTE
  613. if (!(pngUncompressedData.read(&curByte, 1))) {
  614. decoderNextState = decoderState;
  615. //////////Console.putln("png - requiring more data in IDAT\n");
  616. view.unlockBuffer();
  617. decoderState = PNG_STATE_READ_CHUNK_CRC;
  618. continue;
  619. }
  620. // UNFILTER
  621. pngCounter++;
  622. pngCurComponent[psamp] = curByte;
  623. pngBytes[psamp][nsamp] = curByte;
  624. // Console.put(pngCurComponent[psamp], " ");
  625. psamp++;
  626. if (psamp == pngNumSamples) {
  627. nsamp++;
  628. psamp = 0;
  629. // Console.putln("renderstate");
  630. decoderState = pngRenderState;
  631. }
  632. // go to the next state
  633. continue;
  634. // SUB FILTER //
  635. case PNG_STATE_UNFILTER_SUB:
  636. // check for decoder termination for the current scanline //
  637. if (ptrPos >= pngIHDR.pngWidth) {
  638. // WE ARE DONE
  639. if (pngIHDR.pngInterlaceMethod) {
  640. pngInterlaceCurLine++;
  641. if (pngInterlaceCurLine == pngInterlaceHeights[pngInterlacePass]) {
  642. pngInterlaceCurLine = 0;
  643. // we are entering a new interlace pass
  644. //////Console.putln("png - interlaced - entering new interlace pass\n");
  645. // reset the prior scanline array
  646. for ( pngCounter = 0; pngCounter < 8; pngCounter++) {
  647. pngBytes[pngCounter][0..pngExpectedBytes] = 0;
  648. }
  649. pngInterlacePass++;
  650. if (pngInterlacePass == 7) {
  651. // We are done decoding
  652. view.unlockBuffer();
  653. return StreamData.Complete;
  654. }
  655. ptrLine = pngInterlaceStartsY[pngInterlacePass];
  656. }
  657. else {
  658. ptrLine+=pngInterlaceIncrementsY[pngInterlacePass];
  659. }
  660. ptrPos = pngInterlaceStartsX[pngInterlacePass];
  661. ptr = ptr_start + ((pngIHDR.pngWidth * ptrLine) + ptrPos);
  662. }
  663. else {
  664. ptrPos = 0;
  665. ptrLine++;
  666. }
  667. pngCounter = -1;
  668. decoderState = PNG_STATE_DECODE_READ_FILTER_TYPE;
  669. if (ptrLine >= pngIHDR.pngHeight) {
  670. view.unlockBuffer();
  671. return StreamData.Complete;
  672. }
  673. continue;
  674. }
  675. // decode a scanline using SUB filter
  676. if (!(pngUncompressedData.read(&curByte, 1))) {
  677. decoderNextState = decoderState;
  678. //////////Console.putln("png - requiring more data in IDAT\n");
  679. view.unlockBuffer();
  680. decoderState = PNG_STATE_READ_CHUNK_CRC;
  681. continue;
  682. }
  683. pngCounter++;
  684. pngPriorPixel[psamp] += curByte;
  685. pngCurComponent[psamp] = pngPriorPixel[psamp];
  686. pngBytes[psamp][nsamp] = cast(ubyte)pngCurComponent[psamp];
  687. // Console.put(pngCurComponent[psamp], " ");
  688. psamp++;
  689. if (psamp == pngNumSamples) {
  690. nsamp++;
  691. psamp = 0;
  692. // Console.putln("renderstate");
  693. decoderState = pngRenderState;
  694. }
  695. // go to the next state
  696. continue;
  697. case PNG_STATE_UNFILTER_UP:
  698. // check for decoder termination for the current scanline //
  699. if (ptrPos >= pngIHDR.pngWidth) {
  700. // WE ARE DONE
  701. if (pngIHDR.pngInterlaceMethod) {
  702. pngInterlaceCurLine++;
  703. if (pngInterlaceCurLine == pngInterlaceHeights[pngInterlacePass]) {
  704. pngInterlaceCurLine = 0;
  705. // we are entering a new interlace pass
  706. //////Console.putln("png - interlaced - entering new interlace pass\n");
  707. // reset the prior scanline array
  708. for ( pngCounter = 0; pngCounter < 8; pngCounter++) {
  709. pngBytes[pngCounter][0..pngExpectedBytes] = 0;
  710. }
  711. pngInterlacePass++;
  712. if (pngInterlacePass == 7) {
  713. // We are done decoding
  714. view.unlockBuffer();
  715. return StreamData.Complete;
  716. }
  717. ptrLine = pngInterlaceStartsY[pngInterlacePass];
  718. }
  719. else {
  720. ptrLine+=pngInterlaceIncrementsY[pngInterlacePass];
  721. }
  722. ptrPos = pngInterlaceStartsX[pngInterlacePass];
  723. ptr = ptr_start + ((pngIHDR.pngWidth * ptrLine) + ptrPos);
  724. }
  725. else {
  726. ptrPos = 0;
  727. ptrLine++;
  728. }
  729. pngCounter = -1;
  730. decoderState = PNG_STATE_DECODE_READ_FILTER_TYPE;
  731. if (ptrLine >= pngIHDR.pngHeight) {
  732. view.unlockBuffer();
  733. return StreamData.Complete;
  734. }
  735. continue;
  736. }
  737. // decode a scanline using UP filter
  738. if (!(pngUncompressedData.read(&curByte, 1))) {
  739. decoderNextState = decoderState;
  740. //////////Console.putln("png - requiring more data in IDAT\n");
  741. view.unlockBuffer();
  742. decoderState = PNG_STATE_READ_CHUNK_CRC;
  743. continue;
  744. }
  745. pngCounter++;
  746. // Console.put(pngBytes[psamp][nsamp], "+", curByte, "=");
  747. pngBytes[psamp][nsamp] += curByte;
  748. // Console.put(pngBytes[psamp][nsamp], " ");
  749. pngCurComponent[psamp] = pngBytes[psamp][nsamp];
  750. psamp++;
  751. if (psamp == pngNumSamples) {
  752. nsamp++;
  753. psamp = 0;
  754. // Console.putln("renderstate");
  755. decoderState = pngRenderState;
  756. }
  757. // go to the next state
  758. continue;
  759. case PNG_STATE_UNFILTER_AVERAGE:
  760. // check for decoder termination for the current scanline //
  761. if (ptrPos >= pngIHDR.pngWidth) {
  762. // WE ARE DONE
  763. if (pngIHDR.pngInterlaceMethod) {
  764. pngInterlaceCurLine++;
  765. if (pngInterlaceCurLine == pngInterlaceHeights[pngInterlacePass]) {
  766. pngInterlaceCurLine = 0;
  767. // we are entering a new interlace pass
  768. //////Console.putln("png - interlaced - entering new interlace pass\n");
  769. // reset the prior scanline array
  770. for ( pngCounter = 0; pngCounter < 8; pngCounter++) {
  771. pngBytes[pngCounter][0..pngExpectedBytes] = 0;
  772. }
  773. pngInterlacePass++;
  774. if (pngInterlacePass == 7) {
  775. // We are done decoding
  776. view.unlockBuffer();
  777. return StreamData.Complete;
  778. }
  779. ptrLine = pngInterlaceStartsY[pngInterlacePass];
  780. }
  781. else {
  782. ptrLine+=pngInterlaceIncrementsY[pngInterlacePass];
  783. }
  784. ptrPos = pngInterlaceStartsX[pngInterlacePass];
  785. ptr = ptr_start + ((pngIHDR.pngWidth * ptrLine) + ptrPos);
  786. }
  787. else {
  788. ptrPos = 0;
  789. ptrLine++;
  790. }
  791. pngCounter = -1;
  792. decoderState = PNG_STATE_DECODE_READ_FILTER_TYPE;
  793. if (ptrLine >= pngIHDR.pngHeight) {
  794. view.unlockBuffer();
  795. return StreamData.Complete;
  796. }
  797. continue;
  798. }
  799. // decode a scanline using AVERAGE filter
  800. if (!(pngUncompressedData.read(&curByte, 1))) {
  801. decoderNextState = decoderState;
  802. //////////Console.putln("png - requiring more data in IDAT\n");
  803. view.unlockBuffer();
  804. decoderState = PNG_STATE_READ_CHUNK_CRC;
  805. continue;
  806. }
  807. pngCounter++;
  808. pngCurComponent[psamp] = ((cast(uint)pngPriorPixel[psamp] + cast(uint)pngBytes[psamp][nsamp]) / 2);
  809. pngCurComponent[psamp] += curByte;
  810. pngBytes[psamp][nsamp] = cast(ubyte)pngCurComponent[psamp];
  811. pngCurComponent[psamp] = pngBytes[psamp][nsamp];
  812. pngPriorPixel[psamp] = cast(ubyte)pngCurComponent[psamp];
  813. // Console.put(pngCurComponent[psamp], " ");
  814. psamp++;
  815. if (psamp == pngNumSamples) {
  816. nsamp++;
  817. psamp = 0;
  818. // Console.putln("renderstate");
  819. decoderState = pngRenderState;
  820. }
  821. // go to the next state
  822. continue;
  823. case PNG_STATE_UNFILTER_PAETH:
  824. // UNFILTER A SCANLINE
  825. // READ IN DECODED BYTE
  826. if (ptrPos >= pngIHDR.pngWidth) {
  827. // WE ARE DONE
  828. if (pngIHDR.pngInterlaceMethod) {
  829. pngInterlaceCurLine++;
  830. if (pngInterlaceCurLine == pngInterlaceHeights[pngInterlacePass]) {
  831. // we are entering a new interlace pass
  832. pngInterlaceCurLine = 0;
  833. // reset the prior scanline array
  834. for ( pngCounter = 0; pngCounter < 8; pngCounter++) {
  835. pngBytes[pngCounter][0..pngExpectedBytes] = 0;
  836. }
  837. pngInterlacePass++;
  838. if (pngInterlacePass == 7) {
  839. // We are done decoding
  840. view.unlockBuffer();
  841. return StreamData.Complete;
  842. }
  843. ptrLine = pngInterlaceStartsY[pngInterlacePass];
  844. }
  845. else {
  846. ptrLine+=pngInterlaceIncrementsY[pngInterlacePass];
  847. }
  848. ptrPos = pngInterlaceStartsX[pngInterlacePass];
  849. ptr = ptr_start + ((pngIHDR.pngWidth * ptrLine) + ptrPos);
  850. }
  851. else {
  852. ptrPos = 0;
  853. ptrLine++;
  854. }
  855. pngCounter = -1;
  856. decoderState = PNG_STATE_DECODE_READ_FILTER_TYPE;
  857. if (ptrLine >= pngIHDR.pngHeight) {
  858. view.unlockBuffer();
  859. return StreamData.Complete;
  860. }
  861. continue;
  862. }
  863. if (!(pngUncompressedData.read(&curByte, 1))) {
  864. decoderNextState = decoderState;
  865. //////////Console.putln("png - requiring more data in IDAT\n");
  866. view.unlockBuffer();
  867. decoderState = PNG_STATE_READ_CHUNK_CRC;
  868. continue;
  869. }
  870. // UNFILTER
  871. pngCounter++;
  872. pngPriorScannedComp = pngPriorScannedByte[psamp];
  873. pngPriorScannedByte[psamp] = pngBytes[psamp][nsamp];
  874. p = cast(int)pngPriorPixel[psamp] + cast(int)pngPriorScannedByte[psamp] - cast(int)pngPriorScannedComp;
  875. if (p > cast(int)pngPriorPixel[psamp]) {
  876. pa = p - cast(int)pngPriorPixel[psamp];
  877. }
  878. else {
  879. pa = cast(int)pngPriorPixel[psamp] - p;
  880. }
  881. if (p > cast(int)pngPriorScannedByte[psamp]) {
  882. pb = p - cast(int)pngPriorScannedByte[psamp];
  883. }
  884. else {
  885. pb = cast(int)pngPriorScannedByte[psamp] - p;
  886. }
  887. if (p > cast(int)pngPriorScannedComp) {
  888. pc = p - cast(int)pngPriorScannedComp;
  889. }
  890. else {
  891. pc = cast(int)pngPriorScannedComp - p;
  892. }
  893. if (pa <= pb && pa <= pc) {
  894. pngPaethPredictor = pngPriorPixel[psamp];
  895. }
  896. else if (pb <= pc) {
  897. pngPaethPredictor = pngPriorScannedByte[psamp];
  898. }
  899. else {
  900. pngPaethPredictor = pngPriorScannedComp;
  901. }
  902. recon = cast(ubyte)(curByte + pngPaethPredictor);
  903. ////////////Console.putln(("pr: ") + toString(pngPaethPredictor) + (" f(x): ") + toString(recon) + ("\n"));
  904. pngPriorPixel[psamp] = recon;
  905. pngCurComponent[psamp] = recon;
  906. pngBytes[psamp][nsamp] = cast(ubyte)pngCurComponent[psamp];
  907. // Console.put(pngCurComponent[psamp], " ");
  908. ////////////Console.putln(toString(curByte) + (" --> "));
  909. ////////////Console.putln(toString(pngCurComponent[psamp]) + (" a:") + toString(a) + (" b:") + toString(b) + (" c:") + toString(c) + (" pa:") + toString(pa) + (" pb:") + toString(pb) + (" pc:") + toString(pc) + (" p:") + toString(pngPaethPredictor) + ("\n"));
  910. ////////////Console.putln(("result->") + toString(pngCurComponent[psamp]) + ("\n"));
  911. psamp++;
  912. if (psamp == pngNumSamples)
  913. {
  914. nsamp++;
  915. psamp = 0;
  916. // Console.putln("renderstate");
  917. decoderState = pngRenderState;
  918. }
  919. // go to the next state
  920. continue;
  921. // RENDER STATES //
  922. case PNG_STATE_RENDER_STATE_BASE + PNG_GREYSCALE_1BPP:
  923. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  924. decoderState = pngFilterState;
  925. // WE WILL ADD 8 PIXELS, IF WE CAN
  926. pngCurSample = png1BPP[pngCurComponent[0] >> 7];
  927. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  928. ptr++;
  929. ptrPos++;
  930. if (ptrPos == pngIHDR.pngWidth) {
  931. continue;
  932. }
  933. pngCurSample = png1BPP[(pngCurComponent[0] >> 6) & 1];
  934. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  935. ptr++;
  936. ptrPos++;
  937. if (ptrPos == pngIHDR.pngWidth) {
  938. continue;
  939. }
  940. pngCurSample = png1BPP[(pngCurComponent[0] >> 5) & 1];
  941. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  942. ptr++;
  943. ptrPos++;
  944. if (ptrPos == pngIHDR.pngWidth) {
  945. continue;
  946. }
  947. pngCurSample= png1BPP[(pngCurComponent[0] >> 4) & 1];
  948. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  949. ptr++;
  950. ptrPos++;
  951. if (ptrPos == pngIHDR.pngWidth) {
  952. continue;
  953. }
  954. pngCurSample = png1BPP[(pngCurComponent[0] >> 3) & 1];
  955. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  956. ptr++;
  957. ptrPos++;
  958. if (ptrPos == pngIHDR.pngWidth) {
  959. continue;
  960. }
  961. pngCurSample = png1BPP[(pngCurComponent[0] >> 2) & 1];
  962. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  963. ptr++;
  964. ptrPos++;
  965. if (ptrPos == pngIHDR.pngWidth) {
  966. continue;
  967. }
  968. pngCurSample = png1BPP[(pngCurComponent[0] >> 1) & 1];
  969. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  970. ptr++;
  971. ptrPos++;
  972. if (ptrPos == pngIHDR.pngWidth) {
  973. continue;
  974. }
  975. pngCurSample = png1BPP[pngCurComponent[0] & 1];
  976. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  977. ptr++;
  978. ptrPos++;
  979. continue;
  980. case PNG_STATE_RENDER_STATE_BASE + PNG_GREYSCALE_2BPP:
  981. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  982. decoderState = pngFilterState;
  983. // WE WILL ADD 4 PIXELS, IF WE CAN
  984. pngCurSample = png2BPP[pngCurComponent[0] >> 6];
  985. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  986. ptr++;
  987. ptrPos++;
  988. if (ptrPos == pngIHDR.pngWidth) {
  989. continue;
  990. }
  991. pngCurSample = png2BPP[(pngCurComponent[0] >> 4) & 0x3];
  992. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  993. ptr++;
  994. ptrPos++;
  995. if (ptrPos == pngIHDR.pngWidth) {
  996. continue;
  997. }
  998. pngCurSample = png2BPP[(pngCurComponent[0] >> 2) & 0x3];
  999. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1000. ptr++;
  1001. ptrPos++;
  1002. if (ptrPos == pngIHDR.pngWidth) {
  1003. continue;
  1004. }
  1005. pngCurSample = png2BPP[pngCurComponent[0] & 0x3];
  1006. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1007. ptr++;
  1008. ptrPos++;
  1009. continue;
  1010. case PNG_STATE_RENDER_STATE_BASE + PNG_GREYSCALE_4BPP:
  1011. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1012. decoderState = pngFilterState;
  1013. // WE WILL ADD 2 PIXELS, IF WE CAN
  1014. pngCurSample = png4BPP[pngCurComponent[0] >> 4];
  1015. ptr[0] = 0xFF000000 | (pngCurSample) | (pngCurSample << 8) | (pngCurSample << 16);
  1016. ptr++;
  1017. ptrPos++;
  1018. if (ptrPos == pngIHDR.pngWidth) {
  1019. continue;
  1020. }
  1021. pngCurSample = png4BPP[pngCurComponent[0] & 0xF];
  1022. ptr[0] = 0xFF000000 | (pngCurSample) | (pngCurSample << 8) | (pngCurSample << 16);
  1023. ptr++;
  1024. ptrPos++;
  1025. continue;
  1026. case PNG_STATE_RENDER_STATE_BASE + PNG_GREYSCALE_8BPP:
  1027. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1028. decoderState = pngFilterState;
  1029. // JUST ADD THE PIXEL
  1030. ptr[0] = view.rgbTouint(pngCurComponent[0], pngCurComponent[0], pngCurComponent[0]);
  1031. ptr++;
  1032. ptrPos++;
  1033. continue;
  1034. case PNG_STATE_RENDER_STATE_BASE + PNG_GREYSCALE_16BPP:
  1035. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1036. decoderState = pngFilterState;
  1037. // JUST ADD THE PIXEL
  1038. pngCurComponent[0] = cast(ubyte)(((cast(float)((pngCurComponent[0] << 8) | pngCurComponent[1]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1039. ptr[0] = view.rgbTouint(pngCurComponent[0], pngCurComponent[0], pngCurComponent[0]);
  1040. ptr++;
  1041. ptrPos++;
  1042. continue;
  1043. case PNG_STATE_RENDER_STATE_BASE + PNG_TRUECOLOUR_8BPP:
  1044. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1045. decoderState = pngFilterState;
  1046. // JUST ADD THE PIXEL
  1047. ptr[0] = view.rgbTouint(pngCurComponent[0], pngCurComponent[1], pngCurComponent[2]);
  1048. ptr++;
  1049. ptrPos++;
  1050. continue;
  1051. case PNG_STATE_RENDER_STATE_BASE + PNG_TRUECOLOUR_16BPP:
  1052. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1053. decoderState = pngFilterState;
  1054. pngCurComponent[0] = cast(ubyte)(((cast(float)((pngCurComponent[0] << 8) | pngCurComponent[1]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1055. pngCurComponent[1] = cast(ubyte)(((cast(float)((pngCurComponent[2] << 8) | pngCurComponent[3]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1056. pngCurComponent[2] = cast(ubyte)(((cast(float)((pngCurComponent[4] << 8) | pngCurComponent[5]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1057. ptr[0] = view.rgbTouint(pngCurComponent[0], pngCurComponent[1], pngCurComponent[2]);
  1058. ptr++;
  1059. ptrPos++;
  1060. continue;
  1061. case PNG_STATE_RENDER_STATE_BASE + PNG_INDEXED_COLOUR_1BPP:
  1062. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1063. // Console.putln("index?\n");
  1064. decoderState = pngFilterState;
  1065. // WE WILL ADD 8 PIXELS, IF WE CAN
  1066. pngCurSample = pngCurComponent[0] >> 7;
  1067. if (pngCurSample >= pngPaletteCount) {
  1068. pngCurSample = 0;
  1069. }
  1070. ptr[0] = pngPaletteRealized[pngCurSample];
  1071. ptr++;
  1072. ptrPos++;
  1073. if (ptrPos == pngIHDR.pngWidth) {
  1074. continue;
  1075. }
  1076. pngCurSample = (pngCurComponent[0] >> 6) & 1;
  1077. if (pngCurSample >= pngPaletteCount) {
  1078. pngCurSample = 0;
  1079. }
  1080. ptr[0] = pngPaletteRealized[pngCurSample];
  1081. ptr++;
  1082. ptrPos++;
  1083. if (ptrPos == pngIHDR.pngWidth) {
  1084. continue;
  1085. }
  1086. pngCurSample = (pngCurComponent[0] >> 5) & 1;
  1087. if (pngCurSample >= pngPaletteCount) {
  1088. pngCurSample = 0;
  1089. }
  1090. ptr[0] = pngPaletteRealized[pngCurSample];
  1091. ptr++;
  1092. ptrPos++;
  1093. if (ptrPos == pngIHDR.pngWidth) {
  1094. continue;
  1095. }
  1096. pngCurSample = (pngCurComponent[0] >> 4) & 1;
  1097. if (pngCurSample >= pngPaletteCount) {
  1098. pngCurSample = 0;
  1099. }
  1100. ptr[0] = pngPaletteRealized[pngCurSample];
  1101. ptr++;
  1102. ptrPos++;
  1103. if (ptrPos == pngIHDR.pngWidth) {
  1104. continue;
  1105. }
  1106. pngCurSample = (pngCurComponent[0] >> 3) & 1;
  1107. if (pngCurSample >= pngPaletteCount) {
  1108. pngCurSample = 0;
  1109. }
  1110. ptr[0] = pngPaletteRealized[pngCurSample];
  1111. ptr++;
  1112. ptrPos++;
  1113. if (ptrPos == pngIHDR.pngWidth) {
  1114. continue;
  1115. }
  1116. pngCurSample = (pngCurComponent[0] >> 2) & 1;
  1117. if (pngCurSample >= pngPaletteCount) {
  1118. pngCurSample = 0;
  1119. }
  1120. ptr[0] = pngPaletteRealized[pngCurSample];
  1121. ptr++;
  1122. ptrPos++;
  1123. if (ptrPos == pngIHDR.pngWidth) {
  1124. continue;
  1125. }
  1126. pngCurSample = (pngCurComponent[0] >> 1) & 1;
  1127. if (pngCurSample >= pngPaletteCount) {
  1128. pngCurSample = 0;
  1129. }
  1130. ptr[0] = pngPaletteRealized[pngCurSample];
  1131. ptr++;
  1132. ptrPos++;
  1133. if (ptrPos == pngIHDR.pngWidth) {
  1134. continue;
  1135. }
  1136. pngCurSample = pngCurComponent[0] & 1;
  1137. if (pngCurSample >= pngPaletteCount) {
  1138. pngCurSample = 0;
  1139. }
  1140. ptr[0] = pngPaletteRealized[pngCurSample];
  1141. ptr++;
  1142. ptrPos++;
  1143. continue;
  1144. case PNG_STATE_RENDER_STATE_BASE + PNG_INDEXED_COLOUR_2BPP:
  1145. // Console.putln("hey!\n");
  1146. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1147. decoderState = pngFilterState;
  1148. // WE WILL ADD 4 PIXELS, IF WE CAN
  1149. pngCurSample = pngCurComponent[0] >> 6;
  1150. if (pngCurSample >= pngPaletteCount) {
  1151. pngCurSample = 0;
  1152. }
  1153. // Console.putln("1st pixel...\n");
  1154. ptr[0] = pngPaletteRealized[pngCurSample];
  1155. ptr++;
  1156. ptrPos++;
  1157. // Console.putln("1st pixel!\n");
  1158. if (ptrPos >= pngIHDR.pngWidth) {
  1159. continue;
  1160. }
  1161. pngCurSample = (pngCurComponent[0] >> 4) & 0x3;
  1162. if (pngCurSample >= pngPaletteCount) {
  1163. pngCurSample = 0;
  1164. }
  1165. // Console.putln("2nd pixel...\n");
  1166. ptr[0] = pngPaletteRealized[pngCurSample];
  1167. ptr++;
  1168. ptrPos++;
  1169. // Console.putln("2nd pixel!\n");
  1170. if (ptrPos >= pngIHDR.pngWidth) {
  1171. continue;
  1172. }
  1173. pngCurSample = (pngCurComponent[0] >> 2) & 0x3;
  1174. if (pngCurSample >= pngPaletteCount) {
  1175. pngCurSample = 0;
  1176. }
  1177. // Console.putln("3rd pixel...\n");
  1178. ptr[0] = pngPaletteRealized[pngCurSample];
  1179. ptr++;
  1180. ptrPos++;
  1181. // Console.putln("3rd pixel!\n");
  1182. if (ptrPos >= pngIHDR.pngWidth) {
  1183. continue;
  1184. }
  1185. pngCurSample = pngCurComponent[0] & 0x3;
  1186. if (pngCurSample >= pngPaletteCount) {
  1187. pngCurSample = 0;
  1188. }
  1189. // Console.putln("4th pixel...\n");
  1190. ptr[0] = pngPaletteRealized[pngCurSample];
  1191. ptr++;
  1192. ptrPos++;
  1193. // Console.putln("4th pixel!\n");
  1194. continue;
  1195. case PNG_STATE_RENDER_STATE_BASE + PNG_INDEXED_COLOUR_4BPP:
  1196. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1197. decoderState = pngFilterState;
  1198. // WE WILL ADD 2 PIXELS, IF WE CAN
  1199. pngCurSample = pngCurComponent[0] >> 4;
  1200. if (pngCurSample >= pngPaletteCount) {
  1201. pngCurSample = 0;
  1202. }
  1203. ptr[0] = pngPaletteRealized[pngCurSample];
  1204. ptr++;
  1205. ptrPos++;
  1206. if (ptrPos == pngIHDR.pngWidth) {
  1207. continue;
  1208. }
  1209. pngCurSample = pngCurComponent[0] & 0xF;
  1210. if (pngCurSample >= pngPaletteCount) {
  1211. pngCurSample = 0;
  1212. }
  1213. ptr[0] = pngPaletteRealized[pngCurSample];
  1214. ptr++;
  1215. ptrPos++;
  1216. continue;
  1217. case PNG_STATE_RENDER_STATE_BASE + PNG_INDEXED_COLOUR_8BPP:
  1218. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1219. decoderState = pngFilterState;
  1220. if (pngCurComponent[0] >= pngPaletteCount) {
  1221. pngCurComponent[0] = 0;
  1222. }
  1223. ptr[0] = pngPaletteRealized[pngCurComponent[0]];
  1224. ptr++;
  1225. ptrPos++;
  1226. continue;
  1227. case PNG_STATE_RENDER_STATE_BASE + PNG_GREYSCALE_ALPHA_8BPP:
  1228. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1229. decoderState = pngFilterState;
  1230. // JUST ADD THE PIXEL
  1231. // PRE MULTIPLY ALPHA WITH R, G, B
  1232. a = cast(float)pngCurComponent[1];
  1233. a /= cast(float)0xFF;
  1234. a *= pngCurComponent[0];
  1235. pngCurComponent[0] = cast(ubyte)a;
  1236. pngCurComponent[0] %= 256;
  1237. ptr[0] = view.rgbaTouint(pngCurComponent[0], pngCurComponent[0], pngCurComponent[0], pngCurComponent[1]);
  1238. ptr++;
  1239. ptrPos++;
  1240. continue;
  1241. case PNG_STATE_RENDER_STATE_BASE + PNG_GREYSCALE_ALPHA_16BPP:
  1242. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1243. decoderState = pngFilterState;
  1244. pngCurComponent[0] = cast(ubyte)(((cast(float)((pngCurComponent[0] << 8) | pngCurComponent[1]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1245. pngCurComponent[1] = cast(ubyte)(((cast(float)((pngCurComponent[2] << 8) | pngCurComponent[3]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1246. // JUST ADD THE PIXEL
  1247. // PRE MULTIPLY ALPHA WITH R, G, B
  1248. a = cast(float)pngCurComponent[1];
  1249. a /= cast(float)0xFF;
  1250. a *= pngCurComponent[0];
  1251. pngCurComponent[0] = cast(ubyte)a;
  1252. pngCurComponent[0] %= 256;
  1253. ptr[0] = view.rgbaTouint(pngCurComponent[0], pngCurComponent[0], pngCurComponent[0], pngCurComponent[1]);
  1254. ptr++;
  1255. ptrPos++;
  1256. continue;
  1257. case PNG_STATE_RENDER_STATE_BASE + PNG_TRUECOLOUR_ALPHA_8BPP:
  1258. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1259. decoderState = pngFilterState;
  1260. // PRE MULTIPLY ALPHA WITH R, G, B
  1261. ptr[0] = view.rgbaTouint(pngCurComponent[0], pngCurComponent[1], pngCurComponent[2], pngCurComponent[3]);
  1262. ptr++;
  1263. ptrPos++;
  1264. continue;
  1265. case PNG_STATE_RENDER_STATE_BASE + PNG_TRUECOLOUR_ALPHA_16BPP:
  1266. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1267. decoderState = pngFilterState;
  1268. // combine the components and place them in 0, 1, 2, and 3 only
  1269. // that is, truncate every two bytes into 4 bytes
  1270. pngCurComponent[0] = cast(ubyte)(((cast(float)((pngCurComponent[0] << 8) | pngCurComponent[1]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1271. pngCurComponent[1] = cast(ubyte)(((cast(float)((pngCurComponent[2] << 8) | pngCurComponent[3]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1272. pngCurComponent[2] = cast(ubyte)(((cast(float)((pngCurComponent[4] << 8) | pngCurComponent[5]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1273. pngCurComponent[3] = cast(ubyte)(((cast(float)((pngCurComponent[6] << 8) | pngCurComponent[7]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1274. ptr[0] = view.rgbaTouint(pngCurComponent[0], pngCurComponent[1], pngCurComponent[2], pngCurComponent[3]);
  1275. ptr++;
  1276. ptrPos++;
  1277. continue;
  1278. // INTERLACED RENDERING ... THESE ARE SEPARATE STATES //
  1279. case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_GREYSCALE_1BPP:
  1280. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1281. decoderState = pngFilterState;
  1282. // WE WILL ADD 8 PIXELS, IF WE CAN
  1283. pngCurSample = png1BPP[pngCurComponent[0] >> 7];
  1284. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1285. // INCREMENT WITH RESPECT TO INTERLACING
  1286. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1287. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1288. if (ptrPos == pngIHDR.pngWidth) {
  1289. continue;
  1290. }
  1291. pngCurSample = png1BPP[(pngCurComponent[0] >> 6) & 1];
  1292. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1293. // INCREMENT WITH RESPECT TO INTERLACING
  1294. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1295. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1296. if (ptrPos == pngIHDR.pngWidth) {
  1297. continue;
  1298. }
  1299. pngCurSample = png1BPP[(pngCurComponent[0] >> 5) & 1];
  1300. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1301. // INCREMENT WITH RESPECT TO INTERLACING
  1302. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1303. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1304. if (ptrPos == pngIHDR.pngWidth) {
  1305. continue;
  1306. }
  1307. pngCurSample= png1BPP[(pngCurComponent[0] >> 4) & 1];
  1308. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1309. // INCREMENT WITH RESPECT TO INTERLACING
  1310. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1311. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1312. if (ptrPos == pngIHDR.pngWidth) {
  1313. continue;
  1314. }
  1315. pngCurSample = png1BPP[(pngCurComponent[0] >> 3) & 1];
  1316. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1317. // INCREMENT WITH RESPECT TO INTERLACING
  1318. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1319. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1320. if (ptrPos == pngIHDR.pngWidth) {
  1321. continue;
  1322. }
  1323. pngCurSample = png1BPP[(pngCurComponent[0] >> 2) & 1];
  1324. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1325. // INCREMENT WITH RESPECT TO INTERLACING
  1326. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1327. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1328. if (ptrPos == pngIHDR.pngWidth) {
  1329. continue;
  1330. }
  1331. pngCurSample = png1BPP[(pngCurComponent[0] >> 1) & 1];
  1332. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1333. // INCREMENT WITH RESPECT TO INTERLACING
  1334. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1335. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1336. if (ptrPos == pngIHDR.pngWidth) {
  1337. continue;
  1338. }
  1339. pngCurSample = png1BPP[pngCurComponent[0] & 1];
  1340. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1341. // INCREMENT WITH RESPECT TO INTERLACING
  1342. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1343. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1344. continue;
  1345. case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_GREYSCALE_2BPP:
  1346. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1347. decoderState = pngFilterState;
  1348. // WE WILL ADD 4 PIXELS, IF WE CAN
  1349. pngCurSample = png2BPP[pngCurComponent[0] >> 6];
  1350. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1351. // INCREMENT WITH RESPECT TO INTERLACING
  1352. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1353. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1354. if (ptrPos == pngIHDR.pngWidth) {
  1355. continue;
  1356. }
  1357. pngCurSample = png2BPP[(pngCurComponent[0] >> 4) & 0x3];
  1358. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1359. // INCREMENT WITH RESPECT TO INTERLACING
  1360. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1361. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1362. if (ptrPos == pngIHDR.pngWidth) {
  1363. continue;
  1364. }
  1365. pngCurSample = png2BPP[(pngCurComponent[0] >> 2) & 0x3];
  1366. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1367. // INCREMENT WITH RESPECT TO INTERLACING
  1368. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1369. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1370. if (ptrPos == pngIHDR.pngWidth) {
  1371. continue;
  1372. }
  1373. pngCurSample = png2BPP[pngCurComponent[0] & 0x3];
  1374. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1375. // INCREMENT WITH RESPECT TO INTERLACING
  1376. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1377. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1378. continue;
  1379. case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_GREYSCALE_4BPP:
  1380. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1381. decoderState = pngFilterState;
  1382. // WE WILL ADD 2 PIXELS, IF WE CAN
  1383. pngCurSample = png4BPP[pngCurComponent[0] >> 4];
  1384. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1385. // INCREMENT WITH RESPECT TO INTERLACING
  1386. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1387. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1388. if (ptrPos == pngIHDR.pngWidth) {
  1389. continue;
  1390. }
  1391. pngCurSample = png4BPP[pngCurComponent[0] & 0xF];
  1392. ptr[0] = view.rgbTouint(pngCurSample, pngCurSample, pngCurSample);
  1393. // INCREMENT WITH RESPECT TO INTERLACING
  1394. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1395. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1396. continue;
  1397. case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_GREYSCALE_8BPP:
  1398. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1399. decoderState = pngFilterState;
  1400. // JUST ADD THE PIXEL
  1401. ptr[0] = view.rgbTouint(pngCurComponent[0], pngCurComponent[0], pngCurComponent[0]);
  1402. // INCREMENT WITH RESPECT TO INTERLACING
  1403. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1404. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1405. continue;
  1406. case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_GREYSCALE_16BPP:
  1407. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1408. decoderState = pngFilterState;
  1409. // JUST OUTPUT THE PIXEL
  1410. pngCurComponent[0] = cast(ubyte)(((cast(float)((pngCurComponent[0] << 8) | pngCurComponent[1]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1411. ptr[0] = view.rgbTouint(pngCurComponent[0], pngCurComponent[0], pngCurComponent[0]);
  1412. // INCREMENT WITH RESPECT TO INTERLACING
  1413. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1414. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1415. continue;
  1416. case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_TRUECOLOUR_8BPP:
  1417. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1418. decoderState = pngFilterState;
  1419. // JUST OUTPUT THE PIXEL
  1420. ptr[0] = view.rgbTouint(pngCurComponent[0], pngCurComponent[1], pngCurComponent[2]);
  1421. // INCREMENT WITH RESPECT TO INTERLACING
  1422. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1423. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1424. continue;
  1425. case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_TRUECOLOUR_16BPP:
  1426. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1427. decoderState = pngFilterState;
  1428. // JUST OUTPUT THE PIXEL
  1429. pngCurComponent[0] = cast(ubyte)(((cast(float)((pngCurComponent[0] << 8) | pngCurComponent[1]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1430. pngCurComponent[1] = cast(ubyte)(((cast(float)((pngCurComponent[2] << 8) | pngCurComponent[3]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1431. pngCurComponent[2] = cast(ubyte)(((cast(float)((pngCurComponent[4] << 8) | pngCurComponent[5]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
  1432. ptr[0] = view.rgbTouint(pngCurComponent[0], pngCurComponent[1], pngCurComponent[2]);
  1433. // INCREMENT WITH RESPECT TO INTERLACING
  1434. ptr += pngInterlaceIncrementsX[pngInterlacePass];
  1435. ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
  1436. continue;
  1437. case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_INDEXED_COLOUR_1BPP:
  1438. // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
  1439. decoderState = pngFilterState;
  1440. // WE WILL ADD 8 PIXELS, IF WE CAN
    pngCurSample = pngCurComponent[0] >> 7;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    if (ptrPos == pngIHDR.pngWidth) {
        continue;
    }
    pngCurSample = (pngCurComponent[0] >> 6) & 1;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    if (ptrPos == pngIHDR.pngWidth) {
        continue;
    }
    pngCurSample = (pngCurComponent[0] >> 5) & 1;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    if (ptrPos == pngIHDR.pngWidth) {
        continue;
    }
    pngCurSample = (pngCurComponent[0] >> 4) & 1;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    if (ptrPos == pngIHDR.pngWidth) {
        continue;
    }
    pngCurSample = (pngCurComponent[0] >> 3) & 1;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    if (ptrPos == pngIHDR.pngWidth) {
        continue;
    }
    pngCurSample = (pngCurComponent[0] >> 2) & 1;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    if (ptrPos == pngIHDR.pngWidth) {
        continue;
    }
    pngCurSample = (pngCurComponent[0] >> 1) & 1;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    if (ptrPos == pngIHDR.pngWidth) {
        continue;
    }
    pngCurSample = pngCurComponent[0] & 1;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    continue;
case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_INDEXED_COLOUR_2BPP:
    // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
    decoderState = pngFilterState;
    // WE WILL ADD 4 PIXELS, IF WE CAN
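    // Same scheme at 2 bits per pixel: four indices per byte, highest pair of
    // bits first, each masked with 0x3 and clamped against the palette size.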
    pngCurSample = pngCurComponent[0] >> 6;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    if (ptrPos == pngIHDR.pngWidth) {
        continue;
    }
    pngCurSample = (pngCurComponent[0] >> 4) & 0x3;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    if (ptrPos == pngIHDR.pngWidth) {
        continue;
    }
    pngCurSample = (pngCurComponent[0] >> 2) & 0x3;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    if (ptrPos == pngIHDR.pngWidth) {
        continue;
    }
    pngCurSample = pngCurComponent[0] & 0x3;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    continue;
case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_INDEXED_COLOUR_4BPP:
    // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
    decoderState = pngFilterState;
    // WE WILL ADD 2 PIXELS, IF WE CAN
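    // At 4 bits per pixel a byte holds two indices: the high nibble first,
    // then the low nibble, each clamped against the palette size.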
    pngCurSample = pngCurComponent[0] >> 4;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    if (ptrPos == pngIHDR.pngWidth) {
        continue;
    }
    pngCurSample = pngCurComponent[0] & 0xF;
    if (pngCurSample >= pngPaletteCount) {
        pngCurSample = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurSample];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    continue;
case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_INDEXED_COLOUR_8BPP:
    // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
    decoderState = pngFilterState;
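    // 8 bits per pixel: the byte is the palette index itself; out-of-range
    // indices fall back to entry 0 before the realized colour is written.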
    if (pngCurComponent[0] >= pngPaletteCount) {
        pngCurComponent[0] = 0;
    }
    ptr[0] = pngPaletteRealized[pngCurComponent[0]];
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    continue;
case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_GREYSCALE_ALPHA_8BPP:
    // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
    decoderState = pngFilterState;
    // JUST ADD THE PIXEL
    // PRE MULTIPLY ALPHA WITH R, G, B
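    // The bitmap stores premultiplied alpha: the grey value is scaled by
    // alpha / 255 before being replicated into the R, G and B channels, with
    // the original alpha kept in the top byte.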
    a = cast(float)pngCurComponent[1];
    a /= cast(float)0xFF;
    a *= pngCurComponent[0];
    pngCurComponent[0] = cast(ubyte)a;
    pngCurComponent[0] %= 256;
    ptr[0] = (pngCurComponent[0]) | (pngCurComponent[0] << 8) | (pngCurComponent[0] << 16) | (pngCurComponent[1] << 24);
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    continue;
case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_GREYSCALE_ALPHA_16BPP:
    // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
    decoderState = pngFilterState;
    pngCurComponent[0] = cast(ubyte)(((cast(float)((pngCurComponent[0] << 8) | pngCurComponent[1]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
    pngCurComponent[1] = cast(ubyte)(((cast(float)((pngCurComponent[2] << 8) | pngCurComponent[3]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
    // JUST ADD THE PIXEL
    // PRE MULTIPLY ALPHA WITH R, G, B
    a = cast(float)pngCurComponent[1];
    a /= cast(float)0xFF;
    a *= pngCurComponent[0];
    pngCurComponent[0] = cast(ubyte)a;
    pngCurComponent[0] %= 256;
    ptr[0] = view.rgbaTouint(pngCurComponent[0], pngCurComponent[0], pngCurComponent[0], pngCurComponent[1]);
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    continue;
case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_TRUECOLOUR_ALPHA_8BPP:
    // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
    decoderState = pngFilterState;
    // PRE MULTIPLY ALPHA WITH R, G, B
    a = cast(float)pngCurComponent[3];
    a /= cast(float)0xFF;
    pngCurComponent[0] = cast(ubyte)(a * cast(float)pngCurComponent[0]);
    pngCurComponent[1] = cast(ubyte)(a * cast(float)pngCurComponent[1]);
    pngCurComponent[2] = cast(ubyte)(a * cast(float)pngCurComponent[2]);
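    // The premultiplied R, G and B values and the untouched alpha are packed
    // into a single pixel via view.rgbaTouint below.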
    ptr[0] = view.rgbaTouint(pngCurComponent[0], pngCurComponent[1], pngCurComponent[2], pngCurComponent[3]);
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    continue;
case PNG_STATE_RENDER_INTERLACED_STATE_BASE + PNG_TRUECOLOUR_ALPHA_16BPP:
    // GO BACK TO FILTER ANOTHER PIECE OF THE DATA
    decoderState = pngFilterState;
    // collapse each 16-bit sample (two bytes) into a single 8-bit value,
    // leaving the four components in pngCurComponent[0], [1], [2] and [3]
    pngCurComponent[0] = cast(ubyte)(((cast(float)((pngCurComponent[0] << 8) | pngCurComponent[1]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
    pngCurComponent[1] = cast(ubyte)(((cast(float)((pngCurComponent[2] << 8) | pngCurComponent[3]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
    pngCurComponent[2] = cast(ubyte)(((cast(float)((pngCurComponent[4] << 8) | pngCurComponent[5]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
    pngCurComponent[3] = cast(ubyte)(((cast(float)((pngCurComponent[6] << 8) | pngCurComponent[7]) / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
    //view.unlockBuffer();
    //return StreamData.Complete;
    // PACK THE PIXEL (the alpha premultiply below was left commented out)
    ptr[0] = view.rgbaTouint(pngCurComponent[0], pngCurComponent[1], pngCurComponent[2], pngCurComponent[3]);
    /*a = pngCurComponent[3];
    a /= (float)0xFF;
    pngCurComponent[0] = (ubyte)(a * (float)pngCurComponent[0]);
    pngCurComponent[1] = (ubyte)(a * (float)pngCurComponent[1]);
    pngCurComponent[2] = (ubyte)(a * (float)pngCurComponent[2]);
    ptr[0] = (pngCurComponent[2]) | (pngCurComponent[1] << 8) | (pngCurComponent[0] << 16) | (pngCurComponent[3] << 24);*/
    // INCREMENT WITH RESPECT TO INTERLACING
    ptr += pngInterlaceIncrementsX[pngInterlacePass];
    ptrPos += pngInterlaceIncrementsX[pngInterlacePass];
    continue;
// DECODE //
case PNG_STATE_INTERPRET_IDAT:
    continue;
// FINISH //
case PNG_STATE_DONE_IDAT:
    decoderState = PNG_STATE_READ_CHUNK_CRC;
    continue;
default:
    break;
}
break;
}
return StreamData.Invalid;
}
private:
align (1) struct _djehuty_image_png_chunk_header {
    uint pngChunkLength;
    uint pngChunkType;
}
align (1) struct _djehuty_image_png_ihdr {
    uint pngWidth;
    uint pngHeight;
    ubyte pngBitDepth;
    ubyte pngColorType;
    ubyte pngCompressionMethod;
    ubyte pngFilterMethod;
    ubyte pngInterlaceMethod;
}
align (1) struct _djehuty_image_png_color {
    ubyte red;
    ubyte green;
    ubyte blue;
}
ubyte pngHeader[8];
_djehuty_image_png_chunk_header pngChunkHeader;
uint pngChunkCRC;
ubyte* pngChunkData;
uint pngRunningCRC;
int pngCounter;
//palette (PLTE)
uint pngPaletteCount;
_djehuty_image_png_color pngPalette[256];
uint pngPaletteRealized[256]; // palette with system calculated colors
//chunks
_djehuty_image_png_ihdr pngIHDR; //IHDR
//IDAT
//ZLIB, et al
Stream pngUncompressedData = null;
ZLIBDecoder zlibDecompressor = null;
// FOR UNFILTERING
ubyte pngFilterType;
uint pngImageType;
ubyte pngNumSamples; //samples per pixel (at least one byte) (maximum = 8)
uint pngCurSample; //current sample we are filtering
int pngExpectedBytes;
// arrays for such things
ubyte[] pngBytes[8] = [null, null, null, null, null, null, null, null]; // array for holding the prior scanline's decoded bytes
ubyte pngPriorScannedByte[8];
ubyte pngPriorPixel[8];
uint pngCurComponent[8];
ubyte pngPriorScannedComp;
// Paeth Filtering
ubyte pngPaethPredictor;
// image position
uint ptrLine;
uint ptrPos;
// for interlacing
uint pngInterlaceWidths[7]; //width of subimage
uint pngInterlaceHeights[7]; //height of subimage
uint pngInterlacePass; //the current interlace pass
uint pngInterlaceCurLine; //the current scanline of the current pass
// for the decoder state
uint pngFilterState; //the filter state
uint pngRenderState; //the render state per image type
// States
const auto PNG_STATE_INIT_PROGRESS = 0;
const auto PNG_STATE_INIT = 1;
const auto PNG_STATE_READ_CHUNK_HEADER = 2;
const auto PNG_STATE_READ_CHUNK_CRC = 3;
const auto PNG_STATE_SKIP_CHUNK = 4;
const auto PNG_STATE_READ_IHDR = 5;
const auto PNG_STATE_READ_PLTE = 6;
const auto PNG_STATE_READ_IDAT = 7;
const auto PNG_STATE_READ_IEND = 8;
const auto PNG_STATE_READ_PLTE_ENTRIES = 9;
const auto PNG_STATE_INTERPRET_IDAT = 10;
const auto PNG_STATE_FILL_IDAT = 11;
const auto PNG_STATE_DONE_IDAT = 12;
const auto PNG_STATE_DECODE_READ_FILTER_TYPE = 13;
// FILTER STATES //
const auto PNG_STATE_UNFILTER_NONE = 14;
const auto PNG_STATE_UNFILTER_SUB = 15;
const auto PNG_STATE_UNFILTER_UP = 16;
const auto PNG_STATE_UNFILTER_AVERAGE = 17;
const auto PNG_STATE_UNFILTER_PAETH = 18;
// RENDER STATES //
const auto PNG_STATE_RENDER_STATE_BASE = 32; // defines 32...32 + PNG_TRUECOLOUR_ALPHA_16BPP for the byte renderers
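// A render state is formed by adding one of the image type constants below to
// this base (or to the interlaced base further down), so every colour-type and
// bit-depth combination gets its own case in the state machine.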
// Chunk Type Definitions //
const auto PNG_CHUNK_IHDR = 0x52444849;
const auto PNG_CHUNK_PLTE = 0x45544C50;
const auto PNG_CHUNK_IDAT = 0x54414449;
const auto PNG_CHUNK_IEND = 0x444E4549;
// Image Types //
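// Each image type packs a tag in the upper 16 bits and the bit depth in the
// lower 16; the tag appears to be the PNG colour type plus one (greyscale 0
// becomes 1, truecolour 2 becomes 3, indexed 3 becomes 4, and so on),
// presumably so that no image type is zero.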
const auto PNG_GREYSCALE_1BPP = ((1 << 16) + 1);
const auto PNG_GREYSCALE_2BPP = ((1 << 16) + 2);
const auto PNG_GREYSCALE_4BPP = ((1 << 16) + 4);
const auto PNG_GREYSCALE_8BPP = ((1 << 16) + 8);
const auto PNG_GREYSCALE_16BPP = ((1 << 16) + 16);
const auto PNG_TRUECOLOUR_8BPP = ((3 << 16) + 8);
const auto PNG_TRUECOLOUR_16BPP = ((3 << 16) + 16);
const auto PNG_INDEXED_COLOUR_1BPP = ((4 << 16) + 1);
const auto PNG_INDEXED_COLOUR_2BPP = ((4 << 16) + 2);
const auto PNG_INDEXED_COLOUR_4BPP = ((4 << 16) + 4);
const auto PNG_INDEXED_COLOUR_8BPP = ((4 << 16) + 8);
const auto PNG_GREYSCALE_ALPHA_8BPP = ((5 << 16) + 8);
const auto PNG_GREYSCALE_ALPHA_16BPP = ((5 << 16) + 16);
const auto PNG_TRUECOLOUR_ALPHA_8BPP = ((7 << 16) + 8);
const auto PNG_TRUECOLOUR_ALPHA_16BPP = ((7 << 16) + 16);
// defines the rest of the states for interlaced rendering
const auto PNG_STATE_RENDER_INTERLACED_STATE_BASE = (PNG_STATE_RENDER_STATE_BASE + PNG_TRUECOLOUR_ALPHA_16BPP);
//GLOBAL STATIC CONSTANTS
//------------------------
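// Adam7 interlacing: seven passes over the image, each starting at
// (pngInterlaceStartsX[pass], pngInterlaceStartsY[pass]) and stepping by
// (pngInterlaceIncrementsX[pass], pngInterlaceIncrementsY[pass]) pixels.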
static const uint pngInterlaceIncrementsX[7] = [8, 8, 4, 4, 2, 2, 1];
static const uint pngInterlaceIncrementsY[7] = [8, 8, 8, 4, 4, 2, 2];
static const uint pngInterlaceStartsX[7] = [0, 4, 0, 2, 0, 1, 0];
static const uint pngInterlaceStartsY[7] = [0, 0, 4, 0, 2, 0, 1];
// for low bpp color conversion
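// These tables expand 1-, 2- and 4-bit samples to the full 0..255 range
// (value * 255 / (2^n - 1)); a 2-bit sample of 1 becomes 85, for example.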
static const ubyte png1BPP[2] = [0, 255];
static const ubyte png2BPP[4] = [0, 85, 170, 255];
static const ubyte png4BPP[16] = [0, 17, 34, 51, 68, 85, 102, 119, 136, 153, 170, 187, 204, 221, 238, 255];
}
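// ---------------------------------------------------------------------------
// Illustrative sketches only -- not part of the decoder above. They restate,
// per the PNG specification, two pieces of arithmetic the state machine relies
// on: the 16-bit to 8-bit sample reduction used in the 16bpp render states,
// and the Paeth predictor referenced by PNG_STATE_UNFILTER_PAETH. The helper
// names are hypothetical and nothing in the decoder calls them.
// ---------------------------------------------------------------------------

// round(sample16 * 255 / 65535), matching the float expression used in the
// render states above.
private ubyte _pngScale16to8Sketch(ushort sample16) {
    return cast(ubyte)(((cast(float)sample16 / cast(float)0xFFFF) * cast(float)0xFF) + 0.5);
}

// The Paeth predictor as defined by the PNG specification: predict from the
// left (a), above (b) and upper-left (c) bytes, choosing whichever is closest
// to a + b - c, breaking ties in the order a, b, c.
private ubyte _pngPaethPredictorSketch(ubyte a, ubyte b, ubyte c) {
    int p = cast(int)a + cast(int)b - cast(int)c;
    int pa = p > a ? p - a : a - p;
    int pb = p > b ? p - b : b - p;
    int pc = p > c ? p - c : c - p;
    if (pa <= pb && pa <= pc) {
        return a;
    }
    if (pb <= pc) {
        return b;
    }
    return c;
}

unittest {
    assert(_pngScale16to8Sketch(0x0000) == 0x00);
    assert(_pngScale16to8Sketch(0x8000) == 0x80);
    assert(_pngScale16to8Sketch(0xFFFF) == 0xFF);
    assert(_pngPaethPredictorSketch(10, 20, 30) == 10); // p = 0, closest to a
}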