PageRenderTime 32ms CodeModel.GetById 22ms RepoModel.GetById 0ms app.codeStats 0ms

/astropy/io/fits/tests/test_image.py

https://gitlab.com/Rockyspade/astropy
Python | 1148 lines | 843 code | 132 blank | 173 comment | 44 complexity | 169f28ac0ebc581cb973ca41edddd15f MD5 | raw file
  1. # Licensed under a 3-clause BSD style license - see PYFITS.rst
  2. from __future__ import division, with_statement
  3. import math
  4. import os
  5. import time
  6. import warnings
  7. import numpy as np
  8. from ....io import fits
  9. from ....utils.exceptions import (AstropyDeprecationWarning,
  10. AstropyPendingDeprecationWarning)
  11. from ....tests.helper import pytest, raises, catch_warnings
  12. from ..hdu.compressed import SUBTRACTIVE_DITHER_1, DITHER_SEED_CHECKSUM
  13. from .test_table import comparerecords
  14. from . import FitsTestCase
  15. from .util import ignore_warnings
  16. class TestImageFunctions(FitsTestCase):
  17. def test_constructor_name_arg(self):
  18. """Like the test of the same name in test_table.py"""
  19. hdu = fits.ImageHDU()
  20. assert hdu.name == ''
  21. assert 'EXTNAME' not in hdu.header
  22. hdu.name = 'FOO'
  23. assert hdu.name == 'FOO'
  24. assert hdu.header['EXTNAME'] == 'FOO'
  25. # Passing name to constructor
  26. hdu = fits.ImageHDU(name='FOO')
  27. assert hdu.name == 'FOO'
  28. assert hdu.header['EXTNAME'] == 'FOO'
  29. # And overriding a header with a different extname
  30. hdr = fits.Header()
  31. hdr['EXTNAME'] = 'EVENTS'
  32. hdu = fits.ImageHDU(header=hdr, name='FOO')
  33. assert hdu.name == 'FOO'
  34. assert hdu.header['EXTNAME'] == 'FOO'
  35. def test_constructor_copies_header(self):
  36. """
  37. Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/153
  38. Ensure that a header from one HDU is copied when used to initialize new
  39. HDU.
  40. """
  41. ifd = fits.HDUList(fits.PrimaryHDU())
  42. phdr = ifd[0].header
  43. phdr['FILENAME'] = 'labq01i3q_rawtag.fits'
  44. primary_hdu = fits.PrimaryHDU(header=phdr)
  45. ofd = fits.HDUList(primary_hdu)
  46. ofd[0].header['FILENAME'] = 'labq01i3q_flt.fits'
  47. # Original header should be unchanged
  48. assert phdr['FILENAME'] == 'labq01i3q_rawtag.fits'
  49. @raises(ValueError)
  50. def test_open(self):
  51. # The function "open" reads a FITS file into an HDUList object. There
  52. # are three modes to open: "readonly" (the default), "append", and
  53. # "update".
  54. # Open a file read-only (the default mode), the content of the FITS
  55. # file are read into memory.
  56. r = fits.open(self.data('test0.fits')) # readonly
  57. # data parts are latent instantiation, so if we close the HDUList
  58. # without touching data, data can not be accessed.
  59. r.close()
  60. r[1].data[:2, :2]
  61. def test_open_2(self):
  62. r = fits.open(self.data('test0.fits'))
  63. info = ([(0, 'PRIMARY', 'PrimaryHDU', 138, (), '', '')] +
  64. [(x, 'SCI', 'ImageHDU', 61, (40, 40), 'int16', '')
  65. for x in range(1, 5)])
  66. try:
  67. assert r.info(output=False) == info
  68. finally:
  69. r.close()
  70. def test_primary_with_extname(self):
  71. """Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/151
  72. Tests that the EXTNAME keyword works with Primary HDUs as well, and
  73. interacts properly with the .name attribute. For convenience
  74. hdulist['PRIMARY'] will still refer to the first HDU even if it has an
  75. EXTNAME not equal to 'PRIMARY'.
  76. """
  77. prihdr = fits.Header([('EXTNAME', 'XPRIMARY'), ('EXTVER', 1)])
  78. hdul = fits.HDUList([fits.PrimaryHDU(header=prihdr)])
  79. assert 'EXTNAME' in hdul[0].header
  80. assert hdul[0].name == 'XPRIMARY'
  81. assert hdul[0].name == hdul[0].header['EXTNAME']
  82. info = [(0, 'XPRIMARY', 'PrimaryHDU', 5, (), '', '')]
  83. assert hdul.info(output=False) == info
  84. assert hdul['PRIMARY'] is hdul['XPRIMARY']
  85. assert hdul['PRIMARY'] is hdul[('XPRIMARY', 1)]
  86. hdul[0].name = 'XPRIMARY2'
  87. assert hdul[0].header['EXTNAME'] == 'XPRIMARY2'
  88. hdul.writeto(self.temp('test.fits'))
  89. with fits.open(self.temp('test.fits')) as hdul:
  90. assert hdul[0].name == 'XPRIMARY2'
  91. @ignore_warnings(AstropyDeprecationWarning)
  92. def test_io_manipulation(self):
  93. # This legacy test also tests numerous deprecated interfaces for
  94. # backwards compatibility
  95. # Get a keyword value. An extension can be referred by name or by
  96. # number. Both extension and keyword names are case insensitive.
  97. with fits.open(self.data('test0.fits')) as r:
  98. assert r['primary'].header['naxis'] == 0
  99. assert r[0].header['naxis'] == 0
  100. # If there are more than one extension with the same EXTNAME value,
  101. # the EXTVER can be used (as the second argument) to distinguish
  102. # the extension.
  103. assert r['sci', 1].header['detector'] == 1
  104. # append (using "update()") a new card
  105. r[0].header['xxx'] = 1.234e56
  106. assert (str(r[0].header.ascard[-3:]) ==
  107. "EXPFLAG = 'NORMAL ' / Exposure interruption indicator \n"
  108. "FILENAME= 'vtest3.fits' / File name \n"
  109. "XXX = 1.234E+56 ")
  110. # rename a keyword
  111. r[0].header.rename_key('filename', 'fname')
  112. pytest.raises(ValueError, r[0].header.rename_key, 'fname',
  113. 'history')
  114. pytest.raises(ValueError, r[0].header.rename_key, 'fname',
  115. 'simple')
  116. r[0].header.rename_key('fname', 'filename')
  117. # get a subsection of data
  118. assert np.array_equal(r[2].data[:3, :3],
  119. np.array([[349, 349, 348],
  120. [349, 349, 347],
  121. [347, 350, 349]], dtype=np.int16))
  122. # We can create a new FITS file by opening a new file with "append"
  123. # mode.
  124. with fits.open(self.temp('test_new.fits'), mode='append') as n:
  125. # Append the primary header and the 2nd extension to the new
  126. # file.
  127. n.append(r[0])
  128. n.append(r[2])
  129. # The flush method will write the current HDUList object back
  130. # to the newly created file on disk. The HDUList is still open
  131. # and can be further operated.
  132. n.flush()
  133. assert n[1].data[1, 1] == 349
  134. # modify a data point
  135. n[1].data[1, 1] = 99
  136. # When the file is closed, the most recent additions of
  137. # extension(s) since last flush() will be appended, but any HDU
  138. # already existed at the last flush will not be modified
  139. del n
  140. # If an existing file is opened with "append" mode, like the
  141. # readonly mode, the HDU's will be read into the HDUList which can
  142. # be modified in memory but can not be written back to the original
  143. # file. A file opened with append mode can only add new HDU's.
  144. os.rename(self.temp('test_new.fits'),
  145. self.temp('test_append.fits'))
  146. with fits.open(self.temp('test_append.fits'), mode='append') as a:
  147. # The above change did not take effect since this was made
  148. # after the flush().
  149. assert a[1].data[1, 1] == 349
  150. a.append(r[1])
  151. del a
  152. # When changes are made to an HDUList which was opened with
  153. # "update" mode, they will be written back to the original file
  154. # when a flush/close is called.
  155. os.rename(self.temp('test_append.fits'),
  156. self.temp('test_update.fits'))
  157. with fits.open(self.temp('test_update.fits'), mode='update') as u:
  158. # When the changes do not alter the size structures of the
  159. # original (or since last flush) HDUList, the changes are
  160. # written back "in place".
  161. assert u[0].header['rootname'] == 'U2EQ0201T'
  162. u[0].header['rootname'] = 'abc'
  163. assert u[1].data[1, 1] == 349
  164. u[1].data[1, 1] = 99
  165. u.flush()
  166. # If the changes affect the size structure, e.g. adding or
  167. # deleting HDU(s), header was expanded or reduced beyond
  168. # existing number of blocks (2880 bytes in each block), or
  169. # change the data size, the HDUList is written to a temporary
  170. # file, the original file is deleted, and the temporary file is
  171. # renamed to the original file name and reopened in the update
  172. # mode. To a user, these two kinds of updating writeback seem
  173. # to be the same, unless the optional argument in flush or
  174. # close is set to 1.
  175. del u[2]
  176. u.flush()
  177. # the write method in HDUList class writes the current HDUList,
  178. # with all changes made up to now, to a new file. This method
  179. # works the same disregard the mode the HDUList was opened
  180. # with.
  181. u.append(r[3])
  182. u.writeto(self.temp('test_new.fits'))
  183. del u
  184. # Another useful new HDUList method is readall. It will "touch" the
  185. # data parts in all HDUs, so even if the HDUList is closed, we can
  186. # still operate on the data.
  187. with fits.open(self.data('test0.fits')) as r:
  188. r.readall()
  189. assert r[1].data[1, 1] == 315
  190. # create an HDU with data only
  191. data = np.ones((3, 5), dtype=np.float32)
  192. hdu = fits.ImageHDU(data=data, name='SCI')
  193. assert np.array_equal(hdu.data,
  194. np.array([[1., 1., 1., 1., 1.],
  195. [1., 1., 1., 1., 1.],
  196. [1., 1., 1., 1., 1.]],
  197. dtype=np.float32))
  198. # create an HDU with header and data
  199. # notice that the header has the right NAXIS's since it is constructed
  200. # with ImageHDU
  201. hdu2 = fits.ImageHDU(header=r[1].header, data=np.array([1, 2],
  202. dtype='int32'))
  203. assert (str(hdu2.header.ascard[1:5]) ==
  204. "BITPIX = 32 / array data type \n"
  205. "NAXIS = 1 / number of array dimensions \n"
  206. "NAXIS1 = 2 \n"
  207. "PCOUNT = 0 / number of parameters ")
  208. def test_memory_mapping(self):
  209. # memory mapping
  210. f1 = fits.open(self.data('test0.fits'), memmap=1)
  211. f1.close()
  212. def test_verification_on_output(self):
  213. # verification on output
  214. # make a defect HDUList first
  215. x = fits.ImageHDU()
  216. hdu = fits.HDUList(x) # HDUList can take a list or one single HDU
  217. with catch_warnings() as w:
  218. hdu.verify()
  219. text = "HDUList's 0th element is not a primary HDU."
  220. assert len(w) == 3
  221. assert text in str(w[1].message)
  222. with catch_warnings() as w:
  223. hdu.writeto(self.temp('test_new2.fits'), 'fix')
  224. text = ("HDUList's 0th element is not a primary HDU. "
  225. "Fixed by inserting one as 0th HDU.")
  226. assert len(w) == 3
  227. assert text in str(w[1].message)
  228. def test_section(self):
  229. # section testing
  230. fs = fits.open(self.data('arange.fits'))
  231. assert np.array_equal(fs[0].section[3, 2, 5], 357)
  232. assert np.array_equal(
  233. fs[0].section[3, 2, :],
  234. np.array([352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362]))
  235. assert np.array_equal(fs[0].section[3, 2, 4:],
  236. np.array([356, 357, 358, 359, 360, 361, 362]))
  237. assert np.array_equal(fs[0].section[3, 2, :8],
  238. np.array([352, 353, 354, 355, 356, 357, 358, 359]))
  239. assert np.array_equal(fs[0].section[3, 2, -8:8],
  240. np.array([355, 356, 357, 358, 359]))
  241. assert np.array_equal(
  242. fs[0].section[3, 2:5, :],
  243. np.array([[352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362],
  244. [363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373],
  245. [374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384]]))
  246. assert np.array_equal(fs[0].section[3, :, :][:3, :3],
  247. np.array([[330, 331, 332],
  248. [341, 342, 343],
  249. [352, 353, 354]]))
  250. dat = fs[0].data
  251. assert np.array_equal(fs[0].section[3, 2:5, :8], dat[3, 2:5, :8])
  252. assert np.array_equal(fs[0].section[3, 2:5, 3], dat[3, 2:5, 3])
  253. assert np.array_equal(fs[0].section[3:6, :, :][:3, :3, :3],
  254. np.array([[[330, 331, 332],
  255. [341, 342, 343],
  256. [352, 353, 354]],
  257. [[440, 441, 442],
  258. [451, 452, 453],
  259. [462, 463, 464]],
  260. [[550, 551, 552],
  261. [561, 562, 563],
  262. [572, 573, 574]]]))
  263. assert np.array_equal(fs[0].section[:, :, :][:3, :2, :2],
  264. np.array([[[0, 1],
  265. [11, 12]],
  266. [[110, 111],
  267. [121, 122]],
  268. [[220, 221],
  269. [231, 232]]]))
  270. assert np.array_equal(fs[0].section[:, 2, :], dat[:, 2, :])
  271. assert np.array_equal(fs[0].section[:, 2:5, :], dat[:, 2:5, :])
  272. assert np.array_equal(fs[0].section[3:6, 3, :], dat[3:6, 3, :])
  273. assert np.array_equal(fs[0].section[3:6, 3:7, :], dat[3:6, 3:7, :])
  274. assert np.array_equal(fs[0].section[:, ::2], dat[:, ::2])
  275. assert np.array_equal(fs[0].section[:, [1, 2, 4], 3],
  276. dat[:, [1, 2, 4], 3])
  277. assert np.array_equal(
  278. fs[0].section[:, np.array([True, False, True]), :],
  279. dat[:, np.array([True, False, True]), :])
  280. assert np.array_equal(
  281. fs[0].section[3:6, 3, :, ...], dat[3:6, 3, :, ...])
  282. assert np.array_equal(fs[0].section[..., ::2], dat[..., ::2])
  283. assert np.array_equal(fs[0].section[..., [1, 2, 4], 3],
  284. dat[..., [1, 2, 4], 3])
  285. def test_section_data_single(self):
  286. a = np.array([1])
  287. hdu = fits.PrimaryHDU(a)
  288. hdu.writeto(self.temp('test_new.fits'))
  289. hdul = fits.open(self.temp('test_new.fits'))
  290. sec = hdul[0].section
  291. dat = hdul[0].data
  292. assert np.array_equal(sec[0], dat[0])
  293. assert np.array_equal(sec[...], dat[...])
  294. assert np.array_equal(sec[..., 0], dat[..., 0])
  295. assert np.array_equal(sec[0, ...], dat[0, ...])
  296. def test_section_data_square(self):
  297. a = np.arange(4).reshape((2, 2))
  298. hdu = fits.PrimaryHDU(a)
  299. hdu.writeto(self.temp('test_new.fits'))
  300. hdul = fits.open(self.temp('test_new.fits'))
  301. d = hdul[0]
  302. dat = hdul[0].data
  303. assert (d.section[:, :] == dat[:, :]).all()
  304. assert (d.section[0, :] == dat[0, :]).all()
  305. assert (d.section[1, :] == dat[1, :]).all()
  306. assert (d.section[:, 0] == dat[:, 0]).all()
  307. assert (d.section[:, 1] == dat[:, 1]).all()
  308. assert (d.section[0, 0] == dat[0, 0]).all()
  309. assert (d.section[0, 1] == dat[0, 1]).all()
  310. assert (d.section[1, 0] == dat[1, 0]).all()
  311. assert (d.section[1, 1] == dat[1, 1]).all()
  312. assert (d.section[0:1, 0:1] == dat[0:1, 0:1]).all()
  313. assert (d.section[0:2, 0:1] == dat[0:2, 0:1]).all()
  314. assert (d.section[0:1, 0:2] == dat[0:1, 0:2]).all()
  315. assert (d.section[0:2, 0:2] == dat[0:2, 0:2]).all()
  316. def test_section_data_cube(self):
  317. a = np.arange(18).reshape((2, 3, 3))
  318. hdu = fits.PrimaryHDU(a)
  319. hdu.writeto(self.temp('test_new.fits'))
  320. hdul = fits.open(self.temp('test_new.fits'))
  321. d = hdul[0]
  322. dat = hdul[0].data
  323. # TODO: Generate these perumtions instead of having them all written
  324. # out, yeesh!
  325. assert (d.section[:, :, :] == dat[:, :, :]).all()
  326. assert (d.section[:, :] == dat[:, :]).all()
  327. assert (d.section[:] == dat[:]).all()
  328. assert (d.section[0, :, :] == dat[0, :, :]).all()
  329. assert (d.section[1, :, :] == dat[1, :, :]).all()
  330. assert (d.section[0, 0, :] == dat[0, 0, :]).all()
  331. assert (d.section[0, 1, :] == dat[0, 1, :]).all()
  332. assert (d.section[0, 2, :] == dat[0, 2, :]).all()
  333. assert (d.section[1, 0, :] == dat[1, 0, :]).all()
  334. assert (d.section[1, 1, :] == dat[1, 1, :]).all()
  335. assert (d.section[1, 2, :] == dat[1, 2, :]).all()
  336. assert (d.section[0, 0, 0] == dat[0, 0, 0]).all()
  337. assert (d.section[0, 0, 1] == dat[0, 0, 1]).all()
  338. assert (d.section[0, 0, 2] == dat[0, 0, 2]).all()
  339. assert (d.section[0, 1, 0] == dat[0, 1, 0]).all()
  340. assert (d.section[0, 1, 1] == dat[0, 1, 1]).all()
  341. assert (d.section[0, 1, 2] == dat[0, 1, 2]).all()
  342. assert (d.section[0, 2, 0] == dat[0, 2, 0]).all()
  343. assert (d.section[0, 2, 1] == dat[0, 2, 1]).all()
  344. assert (d.section[0, 2, 2] == dat[0, 2, 2]).all()
  345. assert (d.section[1, 0, 0] == dat[1, 0, 0]).all()
  346. assert (d.section[1, 0, 1] == dat[1, 0, 1]).all()
  347. assert (d.section[1, 0, 2] == dat[1, 0, 2]).all()
  348. assert (d.section[1, 1, 0] == dat[1, 1, 0]).all()
  349. assert (d.section[1, 1, 1] == dat[1, 1, 1]).all()
  350. assert (d.section[1, 1, 2] == dat[1, 1, 2]).all()
  351. assert (d.section[1, 2, 0] == dat[1, 2, 0]).all()
  352. assert (d.section[1, 2, 1] == dat[1, 2, 1]).all()
  353. assert (d.section[1, 2, 2] == dat[1, 2, 2]).all()
  354. assert (d.section[:, 0, 0] == dat[:, 0, 0]).all()
  355. assert (d.section[:, 0, 1] == dat[:, 0, 1]).all()
  356. assert (d.section[:, 0, 2] == dat[:, 0, 2]).all()
  357. assert (d.section[:, 1, 0] == dat[:, 1, 0]).all()
  358. assert (d.section[:, 1, 1] == dat[:, 1, 1]).all()
  359. assert (d.section[:, 1, 2] == dat[:, 1, 2]).all()
  360. assert (d.section[:, 2, 0] == dat[:, 2, 0]).all()
  361. assert (d.section[:, 2, 1] == dat[:, 2, 1]).all()
  362. assert (d.section[:, 2, 2] == dat[:, 2, 2]).all()
  363. assert (d.section[0, :, 0] == dat[0, :, 0]).all()
  364. assert (d.section[0, :, 1] == dat[0, :, 1]).all()
  365. assert (d.section[0, :, 2] == dat[0, :, 2]).all()
  366. assert (d.section[1, :, 0] == dat[1, :, 0]).all()
  367. assert (d.section[1, :, 1] == dat[1, :, 1]).all()
  368. assert (d.section[1, :, 2] == dat[1, :, 2]).all()
  369. assert (d.section[:, :, 0] == dat[:, :, 0]).all()
  370. assert (d.section[:, :, 1] == dat[:, :, 1]).all()
  371. assert (d.section[:, :, 2] == dat[:, :, 2]).all()
  372. assert (d.section[:, 0, :] == dat[:, 0, :]).all()
  373. assert (d.section[:, 1, :] == dat[:, 1, :]).all()
  374. assert (d.section[:, 2, :] == dat[:, 2, :]).all()
  375. assert (d.section[:, :, 0:1] == dat[:, :, 0:1]).all()
  376. assert (d.section[:, :, 0:2] == dat[:, :, 0:2]).all()
  377. assert (d.section[:, :, 0:3] == dat[:, :, 0:3]).all()
  378. assert (d.section[:, :, 1:2] == dat[:, :, 1:2]).all()
  379. assert (d.section[:, :, 1:3] == dat[:, :, 1:3]).all()
  380. assert (d.section[:, :, 2:3] == dat[:, :, 2:3]).all()
  381. assert (d.section[0:1, 0:1, 0:1] == dat[0:1, 0:1, 0:1]).all()
  382. assert (d.section[0:1, 0:1, 0:2] == dat[0:1, 0:1, 0:2]).all()
  383. assert (d.section[0:1, 0:1, 0:3] == dat[0:1, 0:1, 0:3]).all()
  384. assert (d.section[0:1, 0:1, 1:2] == dat[0:1, 0:1, 1:2]).all()
  385. assert (d.section[0:1, 0:1, 1:3] == dat[0:1, 0:1, 1:3]).all()
  386. assert (d.section[0:1, 0:1, 2:3] == dat[0:1, 0:1, 2:3]).all()
  387. assert (d.section[0:1, 0:2, 0:1] == dat[0:1, 0:2, 0:1]).all()
  388. assert (d.section[0:1, 0:2, 0:2] == dat[0:1, 0:2, 0:2]).all()
  389. assert (d.section[0:1, 0:2, 0:3] == dat[0:1, 0:2, 0:3]).all()
  390. assert (d.section[0:1, 0:2, 1:2] == dat[0:1, 0:2, 1:2]).all()
  391. assert (d.section[0:1, 0:2, 1:3] == dat[0:1, 0:2, 1:3]).all()
  392. assert (d.section[0:1, 0:2, 2:3] == dat[0:1, 0:2, 2:3]).all()
  393. assert (d.section[0:1, 0:3, 0:1] == dat[0:1, 0:3, 0:1]).all()
  394. assert (d.section[0:1, 0:3, 0:2] == dat[0:1, 0:3, 0:2]).all()
  395. assert (d.section[0:1, 0:3, 0:3] == dat[0:1, 0:3, 0:3]).all()
  396. assert (d.section[0:1, 0:3, 1:2] == dat[0:1, 0:3, 1:2]).all()
  397. assert (d.section[0:1, 0:3, 1:3] == dat[0:1, 0:3, 1:3]).all()
  398. assert (d.section[0:1, 0:3, 2:3] == dat[0:1, 0:3, 2:3]).all()
  399. assert (d.section[0:1, 1:2, 0:1] == dat[0:1, 1:2, 0:1]).all()
  400. assert (d.section[0:1, 1:2, 0:2] == dat[0:1, 1:2, 0:2]).all()
  401. assert (d.section[0:1, 1:2, 0:3] == dat[0:1, 1:2, 0:3]).all()
  402. assert (d.section[0:1, 1:2, 1:2] == dat[0:1, 1:2, 1:2]).all()
  403. assert (d.section[0:1, 1:2, 1:3] == dat[0:1, 1:2, 1:3]).all()
  404. assert (d.section[0:1, 1:2, 2:3] == dat[0:1, 1:2, 2:3]).all()
  405. assert (d.section[0:1, 1:3, 0:1] == dat[0:1, 1:3, 0:1]).all()
  406. assert (d.section[0:1, 1:3, 0:2] == dat[0:1, 1:3, 0:2]).all()
  407. assert (d.section[0:1, 1:3, 0:3] == dat[0:1, 1:3, 0:3]).all()
  408. assert (d.section[0:1, 1:3, 1:2] == dat[0:1, 1:3, 1:2]).all()
  409. assert (d.section[0:1, 1:3, 1:3] == dat[0:1, 1:3, 1:3]).all()
  410. assert (d.section[0:1, 1:3, 2:3] == dat[0:1, 1:3, 2:3]).all()
  411. assert (d.section[1:2, 0:1, 0:1] == dat[1:2, 0:1, 0:1]).all()
  412. assert (d.section[1:2, 0:1, 0:2] == dat[1:2, 0:1, 0:2]).all()
  413. assert (d.section[1:2, 0:1, 0:3] == dat[1:2, 0:1, 0:3]).all()
  414. assert (d.section[1:2, 0:1, 1:2] == dat[1:2, 0:1, 1:2]).all()
  415. assert (d.section[1:2, 0:1, 1:3] == dat[1:2, 0:1, 1:3]).all()
  416. assert (d.section[1:2, 0:1, 2:3] == dat[1:2, 0:1, 2:3]).all()
  417. assert (d.section[1:2, 0:2, 0:1] == dat[1:2, 0:2, 0:1]).all()
  418. assert (d.section[1:2, 0:2, 0:2] == dat[1:2, 0:2, 0:2]).all()
  419. assert (d.section[1:2, 0:2, 0:3] == dat[1:2, 0:2, 0:3]).all()
  420. assert (d.section[1:2, 0:2, 1:2] == dat[1:2, 0:2, 1:2]).all()
  421. assert (d.section[1:2, 0:2, 1:3] == dat[1:2, 0:2, 1:3]).all()
  422. assert (d.section[1:2, 0:2, 2:3] == dat[1:2, 0:2, 2:3]).all()
  423. assert (d.section[1:2, 0:3, 0:1] == dat[1:2, 0:3, 0:1]).all()
  424. assert (d.section[1:2, 0:3, 0:2] == dat[1:2, 0:3, 0:2]).all()
  425. assert (d.section[1:2, 0:3, 0:3] == dat[1:2, 0:3, 0:3]).all()
  426. assert (d.section[1:2, 0:3, 1:2] == dat[1:2, 0:3, 1:2]).all()
  427. assert (d.section[1:2, 0:3, 1:3] == dat[1:2, 0:3, 1:3]).all()
  428. assert (d.section[1:2, 0:3, 2:3] == dat[1:2, 0:3, 2:3]).all()
  429. assert (d.section[1:2, 1:2, 0:1] == dat[1:2, 1:2, 0:1]).all()
  430. assert (d.section[1:2, 1:2, 0:2] == dat[1:2, 1:2, 0:2]).all()
  431. assert (d.section[1:2, 1:2, 0:3] == dat[1:2, 1:2, 0:3]).all()
  432. assert (d.section[1:2, 1:2, 1:2] == dat[1:2, 1:2, 1:2]).all()
  433. assert (d.section[1:2, 1:2, 1:3] == dat[1:2, 1:2, 1:3]).all()
  434. assert (d.section[1:2, 1:2, 2:3] == dat[1:2, 1:2, 2:3]).all()
  435. assert (d.section[1:2, 1:3, 0:1] == dat[1:2, 1:3, 0:1]).all()
  436. assert (d.section[1:2, 1:3, 0:2] == dat[1:2, 1:3, 0:2]).all()
  437. assert (d.section[1:2, 1:3, 0:3] == dat[1:2, 1:3, 0:3]).all()
  438. assert (d.section[1:2, 1:3, 1:2] == dat[1:2, 1:3, 1:2]).all()
  439. assert (d.section[1:2, 1:3, 1:3] == dat[1:2, 1:3, 1:3]).all()
  440. assert (d.section[1:2, 1:3, 2:3] == dat[1:2, 1:3, 2:3]).all()
  441. def test_section_data_four(self):
  442. a = np.arange(256).reshape((4, 4, 4, 4))
  443. hdu = fits.PrimaryHDU(a)
  444. hdu.writeto(self.temp('test_new.fits'))
  445. hdul = fits.open(self.temp('test_new.fits'))
  446. d = hdul[0]
  447. dat = hdul[0].data
  448. assert (d.section[:, :, :, :] == dat[:, :, :, :]).all()
  449. assert (d.section[:, :, :] == dat[:, :, :]).all()
  450. assert (d.section[:, :] == dat[:, :]).all()
  451. assert (d.section[:] == dat[:]).all()
  452. assert (d.section[0, :, :, :] == dat[0, :, :, :]).all()
  453. assert (d.section[0, :, 0, :] == dat[0, :, 0, :]).all()
  454. assert (d.section[:, :, 0, :] == dat[:, :, 0, :]).all()
  455. assert (d.section[:, 1, 0, :] == dat[:, 1, 0, :]).all()
  456. assert (d.section[:, :, :, 1] == dat[:, :, :, 1]).all()
  457. def test_section_data_scaled(self):
  458. """
  459. Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/143
  460. This is like test_section_data_square but uses a file containing scaled
  461. image data, to test that sections can work correctly with scaled data.
  462. """
  463. hdul = fits.open(self.data('scale.fits'))
  464. d = hdul[0]
  465. dat = hdul[0].data
  466. assert (d.section[:, :] == dat[:, :]).all()
  467. assert (d.section[0, :] == dat[0, :]).all()
  468. assert (d.section[1, :] == dat[1, :]).all()
  469. assert (d.section[:, 0] == dat[:, 0]).all()
  470. assert (d.section[:, 1] == dat[:, 1]).all()
  471. assert (d.section[0, 0] == dat[0, 0]).all()
  472. assert (d.section[0, 1] == dat[0, 1]).all()
  473. assert (d.section[1, 0] == dat[1, 0]).all()
  474. assert (d.section[1, 1] == dat[1, 1]).all()
  475. assert (d.section[0:1, 0:1] == dat[0:1, 0:1]).all()
  476. assert (d.section[0:2, 0:1] == dat[0:2, 0:1]).all()
  477. assert (d.section[0:1, 0:2] == dat[0:1, 0:2]).all()
  478. assert (d.section[0:2, 0:2] == dat[0:2, 0:2]).all()
  479. # Test without having accessed the full data first
  480. hdul = fits.open(self.data('scale.fits'))
  481. d = hdul[0]
  482. assert (d.section[:, :] == dat[:, :]).all()
  483. assert (d.section[0, :] == dat[0, :]).all()
  484. assert (d.section[1, :] == dat[1, :]).all()
  485. assert (d.section[:, 0] == dat[:, 0]).all()
  486. assert (d.section[:, 1] == dat[:, 1]).all()
  487. assert (d.section[0, 0] == dat[0, 0]).all()
  488. assert (d.section[0, 1] == dat[0, 1]).all()
  489. assert (d.section[1, 0] == dat[1, 0]).all()
  490. assert (d.section[1, 1] == dat[1, 1]).all()
  491. assert (d.section[0:1, 0:1] == dat[0:1, 0:1]).all()
  492. assert (d.section[0:2, 0:1] == dat[0:2, 0:1]).all()
  493. assert (d.section[0:1, 0:2] == dat[0:1, 0:2]).all()
  494. assert (d.section[0:2, 0:2] == dat[0:2, 0:2]).all()
  495. assert not d._data_loaded
  496. def test_do_not_scale_image_data(self):
  497. hdul = fits.open(self.data('scale.fits'), do_not_scale_image_data=True)
  498. assert hdul[0].data.dtype == np.dtype('>i2')
  499. hdul = fits.open(self.data('scale.fits'))
  500. assert hdul[0].data.dtype == np.dtype('float32')
  501. def test_append_uint_data(self):
  502. """Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/56
  503. (BZERO and BSCALE added in the wrong location when appending scaled
  504. data)
  505. """
  506. fits.writeto(self.temp('test_new.fits'), data=np.array([],
  507. dtype='uint8'))
  508. d = np.zeros([100, 100]).astype('uint16')
  509. fits.append(self.temp('test_new.fits'), data=d)
  510. f = fits.open(self.temp('test_new.fits'), uint=True)
  511. assert f[1].data.dtype == 'uint16'
  512. def test_uint_header_consistency(self):
  513. """
  514. Regression test for https://github.com/astropy/astropy/issues/2305
  515. This ensures that an HDU containing unsigned integer data always has
  516. the apppriate BZERO value in its header.
  517. """
  518. for int_size in (16, 32, 64):
  519. # Just make an array of some unsigned ints that wouldn't fit in a
  520. # signed int array of the same bit width
  521. max_uint = (2 ** int_size) - 1
  522. if int_size == 64:
  523. # Otherwise may get an overflow error, at least on Python 2
  524. max_uint = np.uint64(int_size)
  525. dtype = 'uint%d' % int_size
  526. arr = np.empty(100, dtype=dtype)
  527. arr.fill(max_uint)
  528. arr -= np.arange(100, dtype=dtype)
  529. uint_hdu = fits.PrimaryHDU(data=arr)
  530. assert np.all(uint_hdu.data == arr)
  531. assert uint_hdu.data.dtype.name == 'uint%d' % int_size
  532. assert 'BZERO' in uint_hdu.header
  533. assert uint_hdu.header['BZERO'] == (2 ** (int_size - 1))
  534. filename = 'uint%d.fits' % int_size
  535. uint_hdu.writeto(self.temp(filename))
  536. with fits.open(self.temp(filename), uint=True) as hdul:
  537. new_uint_hdu = hdul[0]
  538. assert np.all(new_uint_hdu.data == arr)
  539. assert new_uint_hdu.data.dtype.name == 'uint%d' % int_size
  540. assert 'BZERO' in new_uint_hdu.header
  541. assert new_uint_hdu.header['BZERO'] == (2 ** (int_size - 1))
  542. def test_blanks(self):
  543. """Test image data with blank spots in it (which should show up as
  544. NaNs in the data array.
  545. """
  546. arr = np.zeros((10, 10), dtype=np.int32)
  547. # One row will be blanks
  548. arr[1] = 999
  549. hdu = fits.ImageHDU(data=arr)
  550. hdu.header['BLANK'] = 999
  551. hdu.writeto(self.temp('test_new.fits'))
  552. hdul = fits.open(self.temp('test_new.fits'))
  553. assert np.isnan(hdul[1].data[1]).all()
  554. def test_bzero_with_floats(self):
  555. """Test use of the BZERO keyword in an image HDU containing float
  556. data.
  557. """
  558. arr = np.zeros((10, 10)) - 1
  559. hdu = fits.ImageHDU(data=arr)
  560. hdu.header['BZERO'] = 1.0
  561. hdu.writeto(self.temp('test_new.fits'))
  562. hdul = fits.open(self.temp('test_new.fits'))
  563. arr += 1
  564. assert (hdul[1].data == arr).all()
  565. def test_rewriting_large_scaled_image(self):
  566. """Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/84 and
  567. https://aeon.stsci.edu/ssb/trac/pyfits/ticket/101
  568. """
  569. hdul = fits.open(self.data('fixed-1890.fits'))
  570. orig_data = hdul[0].data
  571. with ignore_warnings():
  572. hdul.writeto(self.temp('test_new.fits'), clobber=True)
  573. hdul.close()
  574. hdul = fits.open(self.temp('test_new.fits'))
  575. assert (hdul[0].data == orig_data).all()
  576. hdul.close()
  577. # Just as before, but this time don't touch hdul[0].data before writing
  578. # back out--this is the case that failed in
  579. # https://aeon.stsci.edu/ssb/trac/pyfits/ticket/84
  580. hdul = fits.open(self.data('fixed-1890.fits'))
  581. with ignore_warnings():
  582. hdul.writeto(self.temp('test_new.fits'), clobber=True)
  583. hdul.close()
  584. hdul = fits.open(self.temp('test_new.fits'))
  585. assert (hdul[0].data == orig_data).all()
  586. hdul.close()
  587. # Test opening/closing/reopening a scaled file in update mode
  588. hdul = fits.open(self.data('fixed-1890.fits'),
  589. do_not_scale_image_data=True)
  590. hdul.writeto(self.temp('test_new.fits'), clobber=True,
  591. output_verify='silentfix')
  592. hdul.close()
  593. hdul = fits.open(self.temp('test_new.fits'))
  594. orig_data = hdul[0].data
  595. hdul.close()
  596. hdul = fits.open(self.temp('test_new.fits'), mode='update')
  597. hdul.close()
  598. hdul = fits.open(self.temp('test_new.fits'))
  599. assert (hdul[0].data == orig_data).all()
  600. hdul = fits.open(self.temp('test_new.fits'))
  601. hdul.close()
  602. def test_image_update_header(self):
  603. """
  604. Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/105
  605. Replacing the original header to an image HDU and saving should update
  606. the NAXISn keywords appropriately and save the image data correctly.
  607. """
  608. # Copy the original file before saving to it
  609. self.copy_file('test0.fits')
  610. with fits.open(self.temp('test0.fits'), mode='update') as hdul:
  611. orig_data = hdul[1].data.copy()
  612. hdr_copy = hdul[1].header.copy()
  613. del hdr_copy['NAXIS*']
  614. hdul[1].header = hdr_copy
  615. with fits.open(self.temp('test0.fits')) as hdul:
  616. assert (orig_data == hdul[1].data).all()
  617. def test_open_scaled_in_update_mode(self):
  618. """
  619. Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/119
  620. (Don't update scaled image data if the data is not read)
  621. This ensures that merely opening and closing a file containing scaled
  622. image data does not cause any change to the data (or the header).
  623. Changes should only occur if the data is accessed.
  624. """
  625. # Copy the original file before making any possible changes to it
  626. self.copy_file('scale.fits')
  627. mtime = os.stat(self.temp('scale.fits')).st_mtime
  628. time.sleep(1)
  629. fits.open(self.temp('scale.fits'), mode='update').close()
  630. # Ensure that no changes were made to the file merely by immediately
  631. # opening and closing it.
  632. assert mtime == os.stat(self.temp('scale.fits')).st_mtime
  633. # Insert a slight delay to ensure the mtime does change when the file
  634. # is changed
  635. time.sleep(1)
  636. hdul = fits.open(self.temp('scale.fits'), 'update')
  637. orig_data = hdul[0].data
  638. hdul.close()
  639. # Now the file should be updated with the rescaled data
  640. assert mtime != os.stat(self.temp('scale.fits')).st_mtime
  641. hdul = fits.open(self.temp('scale.fits'), mode='update')
  642. assert hdul[0].data.dtype == np.dtype('>f4')
  643. assert hdul[0].header['BITPIX'] == -32
  644. assert 'BZERO' not in hdul[0].header
  645. assert 'BSCALE' not in hdul[0].header
  646. assert (orig_data == hdul[0].data).all()
  647. # Try reshaping the data, then closing and reopening the file; let's
  648. # see if all the changes are preseved properly
  649. hdul[0].data.shape = (42, 10)
  650. hdul.close()
  651. hdul = fits.open(self.temp('scale.fits'))
  652. assert hdul[0].shape == (42, 10)
  653. assert hdul[0].data.dtype == np.dtype('>f4')
  654. assert hdul[0].header['BITPIX'] == -32
  655. assert 'BZERO' not in hdul[0].header
  656. assert 'BSCALE' not in hdul[0].header
  657. def test_scale_back(self):
  658. """A simple test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/120
  659. The scale_back feature for image HDUs.
  660. """
  661. self.copy_file('scale.fits')
  662. with fits.open(self.temp('scale.fits'), mode='update',
  663. scale_back=True) as hdul:
  664. orig_bitpix = hdul[0].header['BITPIX']
  665. orig_bzero = hdul[0].header['BZERO']
  666. orig_bscale = hdul[0].header['BSCALE']
  667. orig_data = hdul[0].data.copy()
  668. hdul[0].data[0] = 0
  669. with fits.open(self.temp('scale.fits'),
  670. do_not_scale_image_data=True) as hdul:
  671. assert hdul[0].header['BITPIX'] == orig_bitpix
  672. assert hdul[0].header['BZERO'] == orig_bzero
  673. assert hdul[0].header['BSCALE'] == orig_bscale
  674. zero_point = int(math.floor(-orig_bzero / orig_bscale))
  675. assert (hdul[0].data[0] == zero_point).all()
  676. with fits.open(self.temp('scale.fits')) as hdul:
  677. assert (hdul[0].data[1:] == orig_data[1:]).all()
  678. def test_image_none(self):
  679. """
  680. Regression test for https://github.com/spacetelescope/PyFITS/issues/27
  681. """
  682. with fits.open(self.data('test0.fits')) as h:
  683. h[1].data
  684. h[1].data = None
  685. h[1].writeto(self.temp('test.fits'))
  686. with fits.open(self.temp('test.fits')) as h:
  687. assert h[1].data is None
  688. assert h[1].header['NAXIS'] == 0
  689. assert 'NAXIS1' not in h[1].header
  690. assert 'NAXIS2' not in h[1].header
  691. def test_invalid_blank(self):
  692. """
  693. Regression test for https://github.com/astropy/astropy/issues/2711
  694. If the BLANK keyword contains an invalid value it should be ignored for
  695. any calculations (though a warning should be issued).
  696. """
  697. data = np.arange(100, dtype=np.float64)
  698. hdu = fits.PrimaryHDU(data)
  699. hdu.header['BLANK'] = 'nan'
  700. hdu.writeto(self.temp('test.fits'))
  701. with catch_warnings() as w:
  702. with fits.open(self.temp('test.fits')) as hdul:
  703. assert np.all(hdul[0].data == data)
  704. assert len(w) == 2
  705. msg = "Invalid value for 'BLANK' keyword in header"
  706. assert msg in str(w[0].message)
  707. msg = "Invalid 'BLANK' keyword"
  708. assert msg in str(w[1].message)
  709. def test_scaled_image_fromfile(self):
  710. """
  711. Regression test for https://github.com/astropy/astropy/issues/2710
  712. """
  713. # Make some sample data
  714. a = np.arange(100, dtype=np.float32)
  715. hdu = fits.PrimaryHDU(data=a.copy())
  716. hdu.scale(bscale=1.1)
  717. hdu.writeto(self.temp('test.fits'))
  718. with open(self.temp('test.fits'), 'rb') as f:
  719. file_data = f.read()
  720. hdul = fits.HDUList.fromstring(file_data)
  721. assert np.allclose(hdul[0].data, a)
  722. class TestCompressedImage(FitsTestCase):
  723. def test_empty(self):
  724. """
  725. Regression test for https://github.com/astropy/astropy/issues/2595
  726. """
  727. hdu = fits.CompImageHDU()
  728. assert hdu.data is None
  729. hdu.writeto(self.temp('test.fits'))
  730. with fits.open(self.temp('test.fits'), mode='update') as hdul:
  731. assert len(hdul) == 2
  732. assert isinstance(hdul[1], fits.CompImageHDU)
  733. assert hdul[1].data is None
  734. # Now test replacing the empty data with an array and see what
  735. # happens
  736. hdul[1].data = np.arange(100, dtype=np.int32)
  737. with fits.open(self.temp('test.fits')) as hdul:
  738. assert len(hdul) == 2
  739. assert isinstance(hdul[1], fits.CompImageHDU)
  740. assert np.all(hdul[1].data == np.arange(100, dtype=np.int32))
    # Six cases: each of three (data, compression, quantization) combinations
    # run with both little-endian ('<') and big-endian ('>') input data.
    @pytest.mark.parametrize(
        ('data', 'compression_type', 'quantize_level', 'byte_order'),
        sum([[(np.zeros((2, 10, 10), dtype=np.float32), 'RICE_1', 16, bo),
              (np.zeros((2, 10, 10), dtype=np.float32), 'GZIP_1', -0.01, bo),
              (np.zeros((100, 100)) + 1, 'HCOMPRESS_1', 16, bo)]
             for bo in ('<', '>')], []))
    def test_comp_image(self, data, compression_type, quantize_level,
                        byte_order):
        """
        Round-trip image data through a compressed HDU and verify the
        decompressed data and the basic structural keywords are preserved.
        """

        data = data.newbyteorder(byte_order)
        primary_hdu = fits.PrimaryHDU()
        ofd = fits.HDUList(primary_hdu)
        # NOTE(review): the camelCase keyword arguments (compressionType,
        # quantizeLevel) are the older spelling -- presumably exercised here
        # deliberately; confirm before modernizing to snake_case.
        chdu = fits.CompImageHDU(data, name='SCI',
                                 compressionType=compression_type,
                                 quantizeLevel=quantize_level)
        ofd.append(chdu)
        ofd.writeto(self.temp('test_new.fits'), clobber=True)
        ofd.close()

        with fits.open(self.temp('test_new.fits')) as fd:
            # Data must decompress to exactly what was written
            assert (fd[1].data == data).all()
            # The image-view header of the reopened HDU should agree with the
            # header of the HDU that was written
            assert fd[1].header['NAXIS'] == chdu.header['NAXIS']
            assert fd[1].header['NAXIS1'] == chdu.header['NAXIS1']
            assert fd[1].header['NAXIS2'] == chdu.header['NAXIS2']
            assert fd[1].header['BITPIX'] == chdu.header['BITPIX']
  764. @ignore_warnings(AstropyPendingDeprecationWarning)
  765. def test_comp_image_hcompression_1_invalid_data(self):
  766. """
  767. Tests compression with the HCOMPRESS_1 algorithm with data that is
  768. not 2D and has a non-2D tile size.
  769. """
  770. pytest.raises(ValueError, fits.CompImageHDU,
  771. np.zeros((2, 10, 10), dtype=np.float32), name='SCI',
  772. compressionType='HCOMPRESS_1', quantizeLevel=16,
  773. tileSize=[2, 10, 10])
  774. @ignore_warnings(AstropyPendingDeprecationWarning)
  775. def test_comp_image_hcompress_image_stack(self):
  776. """
  777. Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/171
  778. Tests that data containing more than two dimensions can be
  779. compressed with HCOMPRESS_1 so long as the user-supplied tile size can
  780. be flattened to two dimensions.
  781. """
  782. cube = np.arange(300, dtype=np.float32).reshape((3, 10, 10))
  783. hdu = fits.CompImageHDU(data=cube, name='SCI',
  784. compressionType='HCOMPRESS_1',
  785. quantizeLevel=16, tileSize=[5, 5, 1])
  786. hdu.writeto(self.temp('test.fits'))
  787. with fits.open(self.temp('test.fits')) as hdul:
  788. assert (hdul['SCI'].data == cube).all()
  789. def test_subtractive_dither_seed(self):
  790. """
  791. Regression test for https://github.com/spacetelescope/PyFITS/issues/32
  792. Ensure that when floating point data is compressed with the
  793. SUBTRACTIVE_DITHER_1 quantization method that the correct ZDITHER0 seed
  794. is added to the header, and that the data can be correctly
  795. decompressed.
  796. """
  797. array = np.arange(100.0).reshape(10, 10)
  798. csum = (array[0].view('uint8').sum() % 10000) + 1
  799. hdu = fits.CompImageHDU(data=array,
  800. quantize_method=SUBTRACTIVE_DITHER_1,
  801. dither_seed=DITHER_SEED_CHECKSUM)
  802. hdu.writeto(self.temp('test.fits'))
  803. with fits.open(self.temp('test.fits')) as hdul:
  804. assert isinstance(hdul[1], fits.CompImageHDU)
  805. assert 'ZQUANTIZ' in hdul[1]._header
  806. assert hdul[1]._header['ZQUANTIZ'] == 'SUBTRACTIVE_DITHER_1'
  807. assert 'ZDITHER0' in hdul[1]._header
  808. assert hdul[1]._header['ZDITHER0'] == csum
  809. assert np.all(hdul[1].data == array)
  810. def test_disable_image_compression(self):
  811. with catch_warnings():
  812. # No warnings should be displayed in this case
  813. warnings.simplefilter('error')
  814. with fits.open(self.data('comp.fits'),
  815. disable_image_compression=True) as hdul:
  816. # The compressed image HDU should show up as a BinTableHDU, but
  817. # *not* a CompImageHDU
  818. assert isinstance(hdul[1], fits.BinTableHDU)
  819. assert not isinstance(hdul[1], fits.CompImageHDU)
  820. with fits.open(self.data('comp.fits')) as hdul:
  821. assert isinstance(hdul[1], fits.CompImageHDU)
  822. def test_open_comp_image_in_update_mode(self):
  823. """
  824. Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/167
  825. Similar to test_open_scaled_in_update_mode(), but specifically for
  826. compressed images.
  827. """
  828. # Copy the original file before making any possible changes to it
  829. self.copy_file('comp.fits')
  830. mtime = os.stat(self.temp('comp.fits')).st_mtime
  831. time.sleep(1)
  832. fits.open(self.temp('comp.fits'), mode='update').close()
  833. # Ensure that no changes were made to the file merely by immediately
  834. # opening and closing it.
  835. assert mtime == os.stat(self.temp('comp.fits')).st_mtime
  836. def test_open_scaled_in_update_mode_compressed(self):
  837. """
  838. Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/88 2
  839. Identical to test_open_scaled_in_update_mode() but with a compressed
  840. version of the scaled image.
  841. """
  842. # Copy+compress the original file before making any possible changes to
  843. # it
  844. with fits.open(self.data('scale.fits'),
  845. do_not_scale_image_data=True) as hdul:
  846. chdu = fits.CompImageHDU(data=hdul[0].data,
  847. header=hdul[0].header)
  848. chdu.writeto(self.temp('scale.fits'))
  849. mtime = os.stat(self.temp('scale.fits')).st_mtime
  850. time.sleep(1)
  851. fits.open(self.temp('scale.fits'), mode='update').close()
  852. # Ensure that no changes were made to the file merely by immediately
  853. # opening and closing it.
  854. assert mtime == os.stat(self.temp('scale.fits')).st_mtime
  855. # Insert a slight delay to ensure the mtime does change when the file
  856. # is changed
  857. time.sleep(1)
  858. hdul = fits.open(self.temp('scale.fits'), 'update')
  859. hdul[1].data
  860. hdul.close()
  861. # Now the file should be updated with the rescaled data
  862. assert mtime != os.stat(self.temp('scale.fits')).st_mtime
  863. hdul = fits.open(self.temp('scale.fits'), mode='update')
  864. assert hdul[1].data.dtype == np.dtype('float32')
  865. assert hdul[1].header['BITPIX'] == -32
  866. assert 'BZERO' not in hdul[1].header
  867. assert 'BSCALE' not in hdul[1].header
  868. # Try reshaping the data, then closing and reopening the file; let's
  869. # see if all the changes are preseved properly
  870. hdul[1].data.shape = (42, 10)
  871. hdul.close()
  872. hdul = fits.open(self.temp('scale.fits'))
  873. assert hdul[1].shape == (42, 10)
  874. assert hdul[1].data.dtype == np.dtype('float32')
  875. assert hdul[1].header['BITPIX'] == -32
  876. assert 'BZERO' not in hdul[1].header
  877. assert 'BSCALE' not in hdul[1].header
  878. def test_write_comp_hdu_direct_from_existing(self):
  879. with fits.open(self.data('comp.fits')) as hdul:
  880. hdul[1].writeto(self.temp('test.fits'))
  881. with fits.open(self.data('comp.fits')) as hdul1:
  882. with fits.open(self.temp('test.fits')) as hdul2:
  883. assert np.all(hdul1[1].data == hdul2[1].data)
  884. assert comparerecords(hdul1[1].compressed_data,
  885. hdul2[1].compressed_data)
  886. def test_rewriting_large_scaled_image_compressed(self):
  887. """
  888. Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/88 1
  889. Identical to test_rewriting_large_scaled_image() but with a compressed
  890. image.
  891. """
  892. with fits.open(self.data('fixed-1890.fits'),
  893. do_not_scale_image_data=True) as hdul:
  894. chdu = fits.CompImageHDU(data=hdul[0].data,
  895. header=hdul[0].header)
  896. chdu.writeto(self.temp('fixed-1890-z.fits'))
  897. hdul = fits.open(self.temp('fixed-1890-z.fits'))
  898. orig_data = hdul[1].data
  899. with ignore_warnings():
  900. hdul.writeto(self.temp('test_new.fits'), clobber=True)
  901. hdul.close()
  902. hdul = fits.open(self.temp('test_new.fits'))
  903. assert (hdul[1].data == orig_data).all()
  904. hdul.close()
  905. # Just as before, but this time don't touch hdul[0].data before writing
  906. # back out--this is the case that failed in
  907. # https://aeon.stsci.edu/ssb/trac/pyfits/ticket/84
  908. hdul = fits.open(self.temp('fixed-1890-z.fits'))
  909. with ignore_warnings():
  910. hdul.writeto(self.temp('test_new.fits'), clobber=True)
  911. hdul.close()
  912. hdul = fits.open(self.temp('test_new.fits'))
  913. assert (hdul[1].data == orig_data).all()
  914. hdul.close()
  915. # Test opening/closing/reopening a scaled file in update mode
  916. hdul = fits.open(self.temp('fixed-1890-z.fits'),
  917. do_not_scale_image_data=True)
  918. hdul.writeto(self.temp('test_new.fits'), clobber=True,
  919. output_verify='silentfix')
  920. hdul.close()
  921. hdul = fits.open(self.temp('test_new.fits'))
  922. orig_data = hdul[1].data
  923. hdul.close()
  924. hdul = fits.open(self.temp('test_new.fits'), mode='update')
  925. hdul.close()
  926. hdul = fits.open(self.temp('test_new.fits'))
  927. assert (hdul[1].data == orig_data).all()
  928. hdul = fits.open(self.temp('test_new.fits'))
  929. hdul.close()
    def test_scale_back_compressed(self):
        """
        Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/88 3

        Identical to test_scale_back() but uses a compressed image.
        """

        # Create a compressed version of the scaled image
        with fits.open(self.data('scale.fits'),
                       do_not_scale_image_data=True) as hdul:
            chdu = fits.CompImageHDU(data=hdul[0].data,
                                     header=hdul[0].header)
            chdu.writeto(self.temp('scale.fits'))

        # Record the original scaling keywords and data, then zero one row so
        # that scale_back has a modification to write out
        with fits.open(self.temp('scale.fits'), mode='update',
                       scale_back=True) as hdul:
            orig_bitpix = hdul[1].header['BITPIX']
            orig_bzero = hdul[1].header['BZERO']
            orig_bscale = hdul[1].header['BSCALE']
            orig_data = hdul[1].data.copy()
            hdul[1].data[0] = 0

        # The original BITPIX/BZERO/BSCALE should have been scaled back into
        # the file, with the zeroed row stored as the scaled zero point
        with fits.open(self.temp('scale.fits'),
                       do_not_scale_image_data=True) as hdul:
            assert hdul[1].header['BITPIX'] == orig_bitpix
            assert hdul[1].header['BZERO'] == orig_bzero
            assert hdul[1].header['BSCALE'] == orig_bscale

            zero_point = int(math.floor(-orig_bzero / orig_bscale))
            assert (hdul[1].data[0] == zero_point).all()

        # Untouched rows must round-trip unchanged
        with fits.open(self.temp('scale.fits')) as hdul:
            assert (hdul[1].data[1:] == orig_data[1:]).all()

            # Extra test to ensure that after everything the data is still the
            # same as in the original uncompressed version of the image
            with fits.open(self.data('scale.fits')) as hdul2:
                # Recall we made the same modification to the data in hdul
                # above
                hdul2[0].data[0] = 0
                assert (hdul[1].data == hdul2[0].data).all()
  964. def test_lossless_gzip_compression(self):