How to use the astropy.io.fits.ImageHDU function in astropy

To help you get started, we’ve selected a few astropy examples, based on popular ways the astropy.io.fits.ImageHDU function is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github gammapy / gammapy / gammapy / image / _old_utils.py View on Github external
def cube_to_image(cube, slicepos=None):
    """Make an image out of a cube.

    Both in- and output should be FITS HDUs.

    Parameters
    ----------
    cube : HDU
        Input HDU with 3-dimensional data (spectral axis first).
    slicepos : int or None, optional
        If None (default), sum over the spectral (first) axis;
        otherwise take the slice at this index.

    Returns
    -------
    fits.ImageHDU
        2-dimensional image HDU.
    """
    header = cube.header.copy()
    header['NAXIS'] = 2
    # Remove the keywords describing the third (spectral) axis,
    # since the output is a 2D image.
    for key in ['NAXIS3', 'CRVAL3', 'CDELT3', 'CTYPE3', 'CRPIX3', 'CUNIT3']:
        del header[key]
    if slicepos is None:  # PEP 8: compare to None with `is`, not `==`
        # Sum over the spectral axis only. The original `cube.data.sum()`
        # collapsed *all* axes to a scalar, contradicting the NAXIS = 2
        # header written above.
        data = cube.data.sum(axis=0)
    else:
        data = cube.data[slicepos]
    return fits.ImageHDU(data, header)
github gammapy / gammapy / gammapy / image / models / model.py View on Github external
def save(self, filename, **kwargs):
        """Save model image to file.

        The model image is written as the primary HDU of ``filename``.
        If the object has a ``measurements`` attribute, those images are
        additionally written to a second file named ``'counts_' + filename``
        (first measurement as primary HDU, the rest as image extensions).

        Parameters
        ----------
        filename : str
            Output FITS file name.
        **kwargs :
            Extra keyword arguments passed through to
            ``fits.HDUList.writeto`` (e.g. ``overwrite``).
        """
        # Write the model image as the primary HDU of the main file.
        hdu_list = []
        prim_hdu = fits.PrimaryHDU(self.model_image, header=self.header)
        hdu_list.append(prim_hdu)
        fits_hdu_list = fits.HDUList(hdu_list)
        fits_hdu_list.writeto(filename, **kwargs)

        if hasattr(self, 'measurements'):
            # The first measurement becomes the primary HDU of the
            # 'counts_' file; it reuses the model header.
            hdu_list = []
            prim_hdu = fits.PrimaryHDU(self.measurements[0], header=self.header)
            hdu_list.append(prim_hdu)

            # Remaining measurements go into plain image extensions.
            for image in self.measurements[1:]:
                hdu = fits.ImageHDU(image)
                hdu_list.append(hdu)
            fits_hdu_list = fits.HDUList(hdu_list)
            fits_hdu_list.writeto('counts_' + filename, **kwargs)
github gbrammer / grizli / grizli / multifit.py View on Github external
# NOTE(review): fragment — the enclosing function's signature and the
# bindings of `p`, `fcontam`, `pixfrac`, `kernel`, `beams`, `out_header`,
# `outsci` and `outwht` are above this excerpt.
p.header['FCONTAM'] = (fcontam, 'Contamination weight')
    p.header['PIXFRAC'] = (pixfrac, 'Drizzle PIXFRAC')
    p.header['DRIZKRNL'] = (kernel, 'Drizzle kernel')
    
    # Record the provenance of every input beam in the primary header
    # (one FILEnnnn/GRISnnnn keyword pair per beam, 1-based).
    p.header['NINPUT'] = (len(beams), 'Number of drizzled beams')
    for i, beam in enumerate(beams):
        p.header['FILE{0:04d}'.format(i+1)] = (beam.grism.parent_file, 
                                             'Parent filename')
        p.header['GRIS{0:04d}'.format(i+1)] = (beam.grism.filter, 
                                             'Beam grism element')
        
    h = out_header.copy()
        
    # Science and weight images share the same output header copy.
    grism_sci = pyfits.ImageHDU(data=outsci, header=h, name='SCI')
    grism_wht = pyfits.ImageHDU(data=outwht, header=h, name='WHT')
    
    # Assemble primary + SCI + WHT into a single HDU list for the caller.
    hdul = pyfits.HDUList([p, grism_sci, grism_wht])
    
    return hdul
github CIRADA-Tools / RM / RMtools_3D / do_RMclean_3D.py View on Github external
verbose (bool): Verbosity.
        log (function): Which logging function to use.
    """
    # Default data types
    # Build numpy-style dtype names from the requested bit depth,
    # e.g. nBits=32 -> "float32" and "complex64".
    dtFloat = "float" + str(nBits)
    dtComplex = "complex" + str(2*nBits)


    if outDir=='':  #To prevent code breaking if file is in current directory
        outDir='.'
    # Save the clean FDF
    if not write_separate_FDF:
        # Single output file: real part as the primary HDU, imaginary
        # part and absolute value as two image extensions.
        fitsFileOut = outDir + "/" + prefixOut + "FDF_clean.fits"
        if(verbose): log("> %s" % fitsFileOut)
        hdu0 = pf.PrimaryHDU(cleanFDF.real.astype(dtFloat), headtemp)
        hdu1 = pf.ImageHDU(cleanFDF.imag.astype(dtFloat), headtemp)
        hdu2 = pf.ImageHDU(np.abs(cleanFDF).astype(dtFloat), headtemp)
        hduLst = pf.HDUList([hdu0, hdu1, hdu2])
        hduLst.writeto(fitsFileOut, output_verify="fix", overwrite=True)
        hduLst.close()
    else:
        # Separate output files: real, imaginary and absolute value each
        # written to their own single-HDU FITS file (_real/_im/_tot).
        hdu0 = pf.PrimaryHDU(cleanFDF.real.astype(dtFloat), headtemp)
        fitsFileOut = outDir + "/" + prefixOut + "FDF_clean_real.fits"
        hdu0.writeto(fitsFileOut, output_verify="fix", overwrite=True)
        if (verbose): log("> %s" % fitsFileOut)
        hdu1 = pf.PrimaryHDU(cleanFDF.imag.astype(dtFloat), headtemp)
        fitsFileOut = outDir + "/" + prefixOut + "FDF_clean_im.fits"
        hdu1.writeto(fitsFileOut, output_verify="fix", overwrite=True)
        if (verbose): log("> %s" % fitsFileOut)
        hdu2 = pf.PrimaryHDU(np.abs(cleanFDF).astype(dtFloat), headtemp)
        fitsFileOut = outDir + "/" + prefixOut + "FDF_clean_tot.fits"
        hdu2.writeto(fitsFileOut, output_verify="fix", overwrite=True)
github guaix-ucm / megaradrp / megaradrp / processing / wavecalibration.py View on Github external
# NOTE(review): fragment — the loop over `fibid` that begins this excerpt,
# plus `solutionwl`, `fibers_ext_headers`, `map_data`, `upper` and `rss`,
# are defined above this excerpt.
key = "FIB{:03d}W2".format(fibid)
        fibers_ext_headers[key] =  (upper + 1, "End of spectral coverage")

    # Update KEYWORDS
    # "FIB%03d_V"
    # Flag fibers listed in `error_fitting` as invalid (False).
    for fibid in solutionwl.error_fitting:
        # Update Fibers
        key = "FIB%03d_V" % fibid
        fibers_ext_headers[key] =  False

    # Flag fibers listed in `missing_fibers` as invalid as well.
    for fibid in solutionwl.missing_fibers:
        # Update Fibers
        key = "FIB%03d_V" % fibid
        fibers_ext_headers[key] =  False

    # Append the coverage map as a 'WLMAP' image extension and return
    # the updated HDU list.
    rss_map = fits.ImageHDU(data=map_data, name='WLMAP')

    rss.append(rss_map)
    return rss
github ACCarnall / bagpipes / bagpipes / models / making / make_cloudy_models.py View on Github external
+ ".neb_cont", contgrid)

    # Nebular grids
    # One HDU list for the emission-line grids and one for the continuum
    # grids; each starts with an empty primary HDU.
    list_of_hdus_lines = [fits.PrimaryHDU()]
    list_of_hdus_cont = [fits.PrimaryHDU()]

    # Loop over the (logU, zmet) grid, loading the cloudy output files
    # for each grid point and wrapping them in image extensions.
    for logU in utils.logU_grid:
        for zmet in utils.zmet_vals[utils.model_type]:

            line_data = np.loadtxt(utils.install_dir
                                   + "/pipes_models/nebular/cloudy_temp_files/"
                                   + utils.model_type + "/grids/zmet_"
                                   + str(zmet)
                                   + "_logU_" + str(logU) + ".neb_lines")

            # Extension name encodes the grid point, e.g. "zmet_0.010_logU_-2.0".
            hdu_line = fits.ImageHDU(name="zmet_" + "%.3f" % zmet + "_logU_"
                                     + "%.1f" % logU, data=line_data)

            cont_data = np.loadtxt(utils.install_dir
                                   + "/pipes_models/nebular/cloudy_temp_files/"
                                   + utils.model_type + "/grids/zmet_"
                                   + str(zmet)
                                   + "_logU_" + str(logU) + ".neb_cont")

            # Same naming scheme as the line extension above.
            hdu_cont = fits.ImageHDU(name="zmet_" + "%.3f" % zmet + "_logU_"
                                     + "%.1f" % logU, data=cont_data)

            list_of_hdus_lines.append(hdu_line)
            list_of_hdus_cont.append(hdu_cont)

    hdulist_lines = fits.HDUList(hdus=list_of_hdus_lines)
    hdulist_cont = fits.HDUList(hdus=list_of_hdus_cont)
github spacetelescope / asdf / asdf / fits_embed.py View on Github external
# NOTE(review): fragment — body of an AsdfInFits method; `self`,
# `all_array_storage`, `all_array_compression`, `auto_inline` and
# `pad_blocks` are bound above this excerpt.
if self.blocks.streamed_block is not None:
            # Streamed data is rejected outright — presumably because a
            # stream has no fixed size to embed; TODO confirm upstream.
            raise ValueError(
                "Can not save streamed data to ASDF-in-FITS file.")

        # Serialize the ASDF content into an in-memory buffer ...
        buff = io.BytesIO()
        super(AsdfInFits, self).write_to(
            buff, all_array_storage=all_array_storage,
            all_array_compression=all_array_compression,
            auto_inline=auto_inline, pad_blocks=pad_blocks,
            include_block_index=False)
        # ... and expose it as a uint8 array suitable for an image HDU.
        array = np.frombuffer(buff.getvalue(), np.uint8)

        # EAFP: reuse an existing ASDF extension if one is present,
        # otherwise append a fresh one.
        try:
            asdf_extension = self._hdulist[ASDF_EXTENSION_NAME]
        except (KeyError, IndexError, AttributeError):
            self._hdulist.append(fits.ImageHDU(array, name=ASDF_EXTENSION_NAME))
        else:
            asdf_extension.data = array
github gammapy / gammapy / gammapy / utils / root / convert.py View on Github external
Examples
    --------
    >>> import ROOT
    >>> from gammapy.utils.root import TH2_to_FITS
    >>> root_hist = ROOT.TH2F()
    >>> fits_hdu = TH2_to_FITS(root_hist)
    >>> fits_hdu.writeto('my_image.fits')
    """
    # Build the FITS header from the TH2 axes, then warn when CDELT1 is
    # positive (an astro image is expected to have a reversed x-axis).
    header = TH2_to_FITS_header(hist, flipx)
    if header['CDELT1'] > 0:
        warnings.warn('CDELT1 > 0 might not be handled properly.'
                      'A TH2 representing an astro image should have '
                      'a reversed x-axis, i.e. xlow > xhi')
    # Convert the histogram bin contents and wrap them in an image HDU.
    data = TH2_to_FITS_data(hist, flipx)
    hdu = fits.ImageHDU(data=data, header=header)
    return hdu
github astropy / astropy / astropy / wcs / wcs.py View on Github external
# NOTE(review): fragment — body of a distortion-writing helper in
# astropy.wcs; `cpdis`, `num`, `dist`, `d_kw` and `hdulist` are bound
# above this excerpt.
if cpdis is None:
                return

            # Record the lookup-table distortion in the primary header.
            hdulist[0].header[f'{dist}{num:d}'] = (
                'LOOKUP', 'Prior distortion function type')
            hdulist[0].header[f'{d_kw}{num:d}.EXTVER'] = (
                num, 'Version number of WCSDVARR extension')
            hdulist[0].header[f'{d_kw}{num:d}.NAXES'] = (
                len(cpdis.data.shape), 'Number of independent variables in distortion function')

            # One AXIS keyword per dimension of the lookup table (1-based).
            for i in range(cpdis.data.ndim):
                hdulist[0].header['{}{:d}.AXIS.{:d}'.format(d_kw, num, i + 1)] = (
                    i + 1,
                    'Axis number of the jth independent variable in a distortion function')

            # The lookup table itself is stored as a WCSDVARR image
            # extension; its WCS keywords are copied from `cpdis`.
            image = fits.ImageHDU(cpdis.data, name='WCSDVARR')
            header = image.header

            header['CRPIX1'] = (cpdis.crpix[0], 'Coordinate system reference pixel')
            header['CRPIX2'] = (cpdis.crpix[1], 'Coordinate system reference pixel')
            header['CRVAL1'] = (cpdis.crval[0], 'Coordinate system value at reference pixel')
            header['CRVAL2'] = (cpdis.crval[1], 'Coordinate system value at reference pixel')
            header['CDELT1'] = (cpdis.cdelt[0], 'Coordinate increment along axis')
            header['CDELT2'] = (cpdis.cdelt[1], 'Coordinate increment along axis')
            # Extension version must match the EXTVER pointer written
            # into the primary header above.
            image.ver = int(hdulist[0].header[f'{d_kw}{num:d}.EXTVER'])
            hdulist.append(image)
github KeplerGO / lightkurve / lightkurve / factory.py View on Github external
def _make_aperture_extension(self):
        """Create the aperture mask extension (i.e. extension #2)."""
        # Every pixel gets value 3 — presumably the bitmask meaning
        # "collected and in the optimal aperture"; TODO confirm against
        # the mission TPF documentation.
        mask = 3 * np.ones((self.n_rows, self.n_cols), dtype='int32')
        hdu = fits.ImageHDU(mask)

        # Set the header from the template TPF again
        template = self._header_template(2)
        for kw in template:
            # Skip structural keywords that astropy manages itself.
            if kw not in ['XTENSION', 'NAXIS1', 'NAXIS2', 'CHECKSUM', 'BITPIX']:
                try:
                    # Prefer the user-supplied keyword value ...
                    hdu.header[kw] = (self.keywords[kw],
                                      self.keywords.comments[kw])
                except KeyError:
                    # ... falling back to the template's value.
                    hdu.header[kw] = (template[kw],
                                      template.comments[kw])

        # Override the defaults where necessary
        for keyword in ['CTYPE1', 'CTYPE2', 'CRPIX1', 'CRPIX2', 'CRVAL1', 'CRVAL2', 'CUNIT1',
                        'CUNIT2', 'CDELT1', 'CDELT2', 'PC1_1', 'PC1_2', 'PC2_1', 'PC2_2']:
                hdu.header[keyword] = ""  # override wcs keywords