From 217654b2c4c72f4094bc75feb70365fdf4140fec Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 10 Dec 2024 11:38:18 -0500 Subject: [PATCH 01/35] add pyklip dataset Data class --- corgidrp/data.py | 445 ++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 439 insertions(+), 6 deletions(-) diff --git a/corgidrp/data.py b/corgidrp/data.py index 32eff9ef..4bdeb323 100644 --- a/corgidrp/data.py +++ b/corgidrp/data.py @@ -4,6 +4,9 @@ import astropy.io.fits as fits import astropy.time as time import pandas as pd +import pyklip +from pyklip.instruments.Instrument import Data as pyKLIP_Data +from astropy import wcs import corgidrp @@ -234,9 +237,6 @@ def split_dataset(self, prihdr_keywords=None, exthdr_keywords=None): return split_datasets, unique_vals - - - class Image(): """ Base class for 2-D image data. Data can be created by passing in the data/header explicitly, or @@ -612,7 +612,6 @@ def add_extension_hdu(self, name, data = None, header=None): self.hdu_names.append(name) self.hdu_list.append(new_hdu) - class Dark(Image): """ Dark calibration frame for a given exposure time and EM gain. @@ -734,7 +733,6 @@ def __init__(self, data_or_filepath, pri_hdr=None, ext_hdr=None, input_dataset=N if self.ext_hdr['DATATYPE'] != 'FlatField': raise ValueError("File that was loaded was not a FlatField file.") - class NonLinearityCalibration(Image): """ Class for non-linearity calibration files. Although it's not strictly an image that you might look at, it is a 2D array of data @@ -978,7 +976,6 @@ def save(self, filedir=None, filename=None): hdulist.writeto(self.filepath, overwrite=True) hdulist.close() - class BadPixelMap(Image): """ Class for bad pixel map. The bad pixel map indicates which pixels are hot @@ -1400,6 +1397,442 @@ def __init__(self,data_or_filepath, pri_hdr=None,ext_hdr=None, input_dataset=Non if 'DATATYPE' not in self.ext_hdr or self.ext_hdr['DATATYPE'] != 'TrapCalibration': raise ValueError("File that was loaded was not a TrapCalibration file.") +class PyKLIPDataset(pyKLIP_Data): + """ + A pyKLIP instrument class for Roman Coronagraph Instrument data. + + """ + + #################### + ### Constructors ### + #################### + + def __init__(self, + dataset, + psflib_dataset=None, + highpass=False, + center_include_offset=True): + """ + Initialize the pyKLIP instrument class for space telescope data. + + Parameters + ---------- + filepaths : 1D-array + Paths of the input science observations. + psflib_filepaths : 1D-array, optional + Paths of the input reference observations. The default is None. + center_include_offset : bool + Toggle as to whether the relative header offset values of each + image is applied during image centering. + + Returns + ------- + None. + + """ + + # Initialize pyKLIP Data class. + super(PyKLIPDataset, self).__init__() + + # Set filter wavelengths + # TODO: Add more bandpasses, modes + self.wave_hlc = {1: 0.575} # micron + + # Optional variables + self.center_include_offset = center_include_offset + + # Read science and reference files. 
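+        # readdata() populates the pyKLIP-required attributes (input, centers,
+        # filenames, PAs, wvs, wcs, IWA/OWA) and, if a reference dataset is
+        # given, builds and prepares the RDI PSF library.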
+ self.readdata(dataset, psflib_dataset, highpass) + + pass + + ################################ + ### Instance Required Fields ### + ################################ + + @property + def input(self): + return self._input + @input.setter + def input(self, newval): + self._input = newval + + @property + def centers(self): + return self._centers + @centers.setter + def centers(self, newval): + self._centers = newval + + @property + def filenums(self): + return self._filenums + @filenums.setter + def filenums(self, newval): + self._filenums = newval + + @property + def filenames(self): + return self._filenames + @filenames.setter + def filenames(self, newval): + self._filenames = newval + + @property + def PAs(self): + return self._PAs + @PAs.setter + def PAs(self, newval): + self._PAs = newval + + @property + def wvs(self): + return self._wvs + @wvs.setter + def wvs(self, newval): + self._wvs = newval + + @property + def wcs(self): + return self._wcs + @wcs.setter + def wcs(self, newval): + self._wcs = newval + + @property + def IWA(self): + return self._IWA + @IWA.setter + def IWA(self, newval): + self._IWA = newval + + @property + def OWA(self): + return self._OWA + @OWA.setter + def OWA(self, newval): + self._OWA = newval + + @property + def psflib(self): + return self._psflib + @psflib.setter + def psflib(self, newval): + self._psflib = newval + + @property + def output(self): + return self._output + @output.setter + def output(self, newval): + self._output = newval + + ############### + ### Methods ### + ############### + + def readdata(self, + dataset, + psflib_dataset, + highpass=False): + """ + Read the input science observations. + + Parameters + ---------- + filepaths : 1D-array + Paths of the input science observations. + + Returns + ------- + None. + + """ + + # Check input. + if not isinstance(dataset, corgidrp.data.Dataset): + raise UserWarning('Input dataset is not a corgidrp Dataset object.') + if len(dataset) == 0: + raise UserWarning('No science frames in the input dataset.') + + if not psflib_dataset is None: + if not isinstance(psflib_dataset, corgidrp.data.Dataset): + raise UserWarning('Input psflib_dataset is not a corgidrp Dataset object.') + + # Loop through frames. + input_all = [] + centers_all = [] # pix + filenames_all = [] + PAs_all = [] # deg + wvs_all = [] # m + wcs_all = [] + PIXSCALE = [] # arcsec + + psflib_data_all = [] + psflib_centers_all = [] # pix + psflib_filenames_all = [] + + # Iterate over frames in dataset + + for i, frame in enumerate(dataset): + + phead = frame.pri_hdr + shead = frame.ext_hdr + + TELESCOP = phead['TELESCOP'] + INSTRUME = phead['INSTRUME'] + MODE = phead['MODE'] + data = frame.data + try: + pxdq = frame.dq + except: + pxdq = np.zeros_like(data).astype('int') + if data.ndim == 2: + data = data[np.newaxis, :] + pxdq = pxdq[np.newaxis, :] + if data.ndim != 3: + raise UserWarning('Requires 2D/3D data cube') + NINTS = data.shape[0] + pix_scale = shead['PIXSCALE'] # arcsec + PIXSCALE += [pix_scale] + + # TODO: Do we need to do this? + # Nan out non-science pixels. + # data[pxdq & 512 == 512] = np.nan + + # TODO: are centers xy or yx? + # Get centers. 
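+            # As constructed here, each center is stored in (x, y) order from
+            # STARLOCX/Y; XOFFSET/YOFFSET are in arcsec and are converted to
+            # pixels by dividing by the pixel scale.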
+ if self.center_include_offset == True: + # Use the offset values from the header to adjust the center + centers = np.array([shead['STARLOCX'] - 1 + phead['XOFFSET'] / pix_scale, + shead['STARLOCY'] - 1 + phead['YOFFSET'] / pix_scale] * NINTS) + else: + # Assume the STARLOC define the correct center for each image + centers = np.array([shead['STARLOCX'] - 1, shead['STARLOCY'] - 1] * NINTS) + + # Get metadata. + input_all += [data] + centers_all += [centers] + filenames_all += [os.path.split(phead['FILENAME'])[1] + '_INT%.0f' % (j + 1) for j in range(NINTS)] + PAs_all += [shead['ROLL']] * NINTS + + if INSTRUME == 'CGI': + if MODE == 'HLC': + CWAVEL = self.wave_hlc[phead['BAND']] + else: + raise UserWarning('Unknown Roman CGI instrument mode.') + else: + raise UserWarning('Data originates from unknown Roman instrument') + wvs_all += [1e-6 * CWAVEL] * NINTS + + # TODO: Figure out actual WCS info + wcs_hdr = wcs.WCS(header=shead, naxis=shead['WCSAXES']) + for j in range(NINTS): + wcs_all += [wcs_hdr.deepcopy()] + + input_all = np.concatenate(input_all) + if input_all.ndim != 3: + raise UserWarning('Some science files do not have matching image shapes') + centers_all = np.concatenate(centers_all).reshape(-1, 2) + filenames_all = np.array(filenames_all) + filenums_all = np.array(range(len(filenames_all))) + PAs_all = np.array(PAs_all) + wvs_all = np.array(wvs_all) + wcs_all = np.array(wcs_all) + PIXSCALE = np.unique(np.array(PIXSCALE)) + if len(PIXSCALE) != 1: + raise UserWarning('Some science files do not have matching pixel scales') + if TELESCOP == 'ROMAN' and INSTRUME == 'CGI' and MODE == 'HLC': + iwa_all = np.min(wvs_all) / 6.5 * 180. / np.pi * 3600. / PIXSCALE[0] # pix + else: + iwa_all = 1. # pix + owa_all = np.sum(np.array(input_all.shape[1:]) / 2.) # pix + + # Recenter science images so that the star is at the center of the array. + new_center = (np.array(data.shape[1:])-1)/ 2. + new_center = new_center[::-1] + for i, image in enumerate(input_all): + recentered_image = pyklip.klip.align_and_scale(image, new_center=new_center, old_center=centers_all[i]) + input_all[i] = recentered_image + centers_all[i] = new_center + + # Assign pyKLIP variables. + self._input = input_all + self._centers = centers_all + self._filenames = filenames_all + self._filenums = filenums_all + self._PAs = PAs_all + self._wvs = wvs_all + self._wcs = wcs_all + self._IWA = iwa_all + self._OWA = owa_all + + # Prepare reference library + if not psflib_dataset is None: + psflib_data_all = [] + psflib_centers_all = [] # pix + psflib_filenames_all = [] + + for i, frame in enumerate(psflib_dataset): + + phead = frame.pri_hdr + shead = frame.ext_hdr + + data = frame.data + try: + pxdq = frame.dq + except: + pxdq = np.zeros_like(data).astype('int') + if data.ndim == 2: + data = data[np.newaxis, :] + pxdq = pxdq[np.newaxis, :] + if data.ndim != 3: + raise UserWarning('Requires 2D/3D data cube') + NINTS = data.shape[0] + pix_scale = shead['PIXSCALE'] # arcsec + PIXSCALE += [pix_scale] + + # TODO: Do we need to do this? + # Nan out non-science pixels. + # data[pxdq & 512 == 512] = np.nan + + # TODO: are centers xy or yx? + # Get centers. 
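+                # Reference frames are centered with the same convention as the
+                # science frames so that both can be stacked into a single PSF
+                # library below.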
+ if self.center_include_offset == True: + # Use the offset values from the header to adjust the center + centers = np.array([shead['STARLOCX'] - 1 + phead['XOFFSET'] / pix_scale, + shead['STARLOCY'] - 1 + phead['YOFFSET'] / pix_scale] * NINTS) + else: + # Assume the STARLOC define the correct center for each image + centers = np.array([shead['STARLOCX'] - 1, shead['STARLOCY'] - 1] * NINTS) + + psflib_data_all += [data] + psflib_centers_all += [centers] + psflib_filenames_all += [os.path.split(phead['FILENAME'])[1] + '_INT%.0f' % (j + 1) for j in range(NINTS)] + + psflib_data_all = np.concatenate(psflib_data_all) + if psflib_data_all.ndim != 3: + raise UserWarning('Some reference files do not have matching image shapes') + psflib_centers_all = np.concatenate(psflib_centers_all).reshape(-1, 2) + psflib_filenames_all = np.array(psflib_filenames_all) + + # Recenter reference images. + new_center = (np.array(data.shape[1:])-1)/ 2. + new_center = new_center[::-1] + for i, image in enumerate(psflib_data_all): + recentered_image = pyklip.klip.align_and_scale(image, new_center=new_center, old_center=psflib_centers_all[i]) + psflib_data_all[i] = recentered_image + psflib_centers_all[i] = new_center + + # Append science data. + psflib_data_all = np.append(psflib_data_all, self._input, axis=0) + psflib_centers_all = np.append(psflib_centers_all, self._centers, axis=0) + psflib_filenames_all = np.append(psflib_filenames_all, self._filenames, axis=0) + + # Initialize PSF library. + psflib = pyklip.rdi.PSFLibrary(psflib_data_all, new_center, psflib_filenames_all, compute_correlation=True, highpass=highpass) + + # Prepare PSF library. + psflib.prepare_library(self) + + # Assign pyKLIP variables. + self._psflib = psflib + + else: + self._psflib = None + + pass + + def savedata(self, + filepath, + data, + klipparams=None, + filetype='', + zaxis=None, + more_keywords=None): + """ + Function to save the data products that will be called internally by + pyKLIP. + + Parameters + ---------- + filepath : path + Path of the output FITS file. + data : 3D-array + KLIP-subtracted data of shape (nkl, ny, nx). + klipparams : str, optional + PyKLIP keyword arguments used for the KLIP subtraction. The default + is None. + filetype : str, optional + Data type of the pyKLIP product. The default is ''. + zaxis : list, optional + List of KL modes used for the KLIP subtraction. The default is + None. + more_keywords : dict, optional + Dictionary of additional header keywords to be written to the + output FITS file. The default is None. + + Returns + ------- + None. + + """ + + # Make FITS file. + hdul = fits.HDUList() + hdul.append(fits.PrimaryHDU(data)) + + # Write all used files to header. Ignore duplicates. + filenames = np.unique(self.filenames) + Nfiles = np.size(filenames) + hdul[0].header['DRPNFILE'] = (Nfiles, 'Num raw files used in pyKLIP') + for i, filename in enumerate(filenames): + if i < 1000: + hdul[0].header['FILE_{0}'.format(i)] = filename + '.fits' + else: + print('WARNING: Too many files to be written to header, skipping') + break + + # Write PSF subtraction parameters and pyKLIP version to header. 
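+        # Record which pyKLIP version was used; fall back to 'unknown' if the
+        # installed pyklip package does not expose __version__.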
+ try: + pyklipver = pyklip.__version__ + except: + pyklipver = 'unknown' + hdul[0].header['PSFSUB'] = ('pyKLIP', 'PSF Subtraction Algo') + hdul[0].header.add_history('Reduced with pyKLIP using commit {0}'.format(pyklipver)) + hdul[0].header['CREATOR'] = 'pyKLIP-{0}'.format(pyklipver) + hdul[0].header['pyklipv'] = (pyklipver, 'pyKLIP version that was used') + if klipparams is not None: + hdul[0].header['PSFPARAM'] = (klipparams, 'KLIP parameters') + hdul[0].header.add_history('pyKLIP reduction with parameters {0}'.format(klipparams)) + + # Write z-axis units to header if necessary. + if zaxis is not None: + if 'KL Mode' in filetype: + hdul[0].header['CTYPE3'] = 'KLMODES' + for i, klmode in enumerate(zaxis): + hdul[0].header['KLMODE{0}'.format(i)] = (klmode, 'KL Mode of slice {0}'.format(i)) + + # Write extra keywords to header if necessary. + if more_keywords is not None: + for hdr_key in more_keywords: + hdul[0].header[hdr_key] = more_keywords[hdr_key] + + # Update image center. + center = self.output_centers[0] + hdul[0].header.update({'PSFCENTX': center[0], 'PSFCENTY': center[1]}) + hdul[0].header.update({'CRPIX1': center[0] + 1, 'CRPIX2': center[1] + 1}) + hdul[0].header.add_history('Image recentered to {0}'.format(str(center))) + + # Write FITS file. + try: + hdul.writeto(filepath, overwrite=True) + except TypeError: + hdul.writeto(filepath, clobber=True) + hdul.close() + + pass + datatypes = { "Image" : Image, "Dark" : Dark, "NonLinearityCalibration" : NonLinearityCalibration, From 7694fe7b13f214d96740416a38ee6c4288979b26 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Thu, 12 Dec 2024 12:04:03 -0500 Subject: [PATCH 02/35] create mock psf subraction datasets --- corgidrp/mocks.py | 152 +++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 151 insertions(+), 1 deletion(-) diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index cf278aae..5f10add7 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -271,7 +271,6 @@ def create_simflat_dataset(filedir=None, numfiles=10): dataset = data.Dataset(frames) return dataset - def create_raster(mask,data,dither_sizex=None,dither_sizey=None,row_cent = None,col_cent = None,n_dith=None,mask_size=420,snr=250,planet=None, band=None, radius=None, snr_constant=None): """Performs raster scan of Neptune or Uranus images @@ -1857,3 +1856,154 @@ def add_defect(sch_imgs, prob, ori, temp): hdul.writeto(str(filename)[:-4]+'_'+str(mult_counter)+'.fits', overwrite = True) else: hdul.writeto(filename, overwrite = True) + +default_wcs_string = """WCSAXES = 2 / Number of coordinate axes +CRPIX1 = 0.0 / Pixel coordinate of reference point +CRPIX2 = 0.0 / Pixel coordinate of reference point +CDELT1 = 1.0 / Coordinate increment at reference point +CDELT2 = 1.0 / Coordinate increment at reference point +CRVAL1 = 0.0 / Coordinate value at reference point +CRVAL2 = 0.0 / Coordinate value at reference point +LATPOLE = 90.0 / [deg] Native latitude of celestial pole +MJDREF = 0.0 / [d] MJD of fiducial time END""" + +def gaussian_array(array_size=50,sigma=5.,amp=1.): + """Generate a 2D square array with a centered gaussian surface (for mock PSF data). + + Args: + array_size (int, optional): Width of desired array in pixels. Defaults to 50. + sigma (float, optional): Standard deviation of the gaussian curve, in pixels. Defaults to 5. + amp (float,optional): Amplitude of gaussian curve. Defaults to 1. + + Returns: + np.array: 2D array of a gaussian surface. 
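+
+    Example (illustrative):
+        >>> psf = gaussian_array()  # 50x50 array with the Gaussian peak near the center
+        >>> psf.shape
+        (50, 50)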
+ """ + x, y = np.meshgrid(np.linspace(-array_size, array_size, array_size), + np.linspace(-array_size, array_size, array_size)) + dst = np.sqrt(x**2+y**2) + + # lower normal part of gaussian + normal = 1/(2.0 * np.pi * sigma**2) + + # Calculating Gaussian filter + gauss = np.exp(-((dst)**2 / (2.0 * sigma**2))) * normal * amp + + return gauss + +def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhole_reffiles=None, + wcs_header = None, + data_shape = [1024,1024], + outdir = None): + """Generate a mock science and reference dataset ready for the PSF subtraction step. + TODO: reference a central pixscale number, rather than hard code. + TODO: save default WCS header in a smarter way (dict?) + + Args: + n_sci (int): number of science frames, must be >= 1. + n_ref (int): nummber of reference frames, must be >= 0. + roll_angles (list-like): list of the roll angles of each science and reference + frame, with the science frames listed first. + darkhole_scifiles (list of str, optional): Filepaths to the darkhole science frames. If not provided, + a noisy 2D gaussian will be used instead. Defaults to None. + darkhole_reffiles (list of str, optional): Filepaths to the darkhole reference frames. If not provided, + a noisy 2D gaussian will be used instead. Defaults to None. + wcs_header (astropy.fits.Header, optional): Fits header object containing WCS information. If not provided, + a mock header will be created. Defaults to None. + data_shape (list-like): desired shape of data array. Must have length 2. Defaults to [1024,1024]. + outdir (str, optional): Desired output directory. If not provided, data will not be saved. Defaults to None. + + Returns: + tuple: corgiDRP science Dataset object and reference Dataset object. + """ + + assert len(data_shape) == 2 + + if roll_angles is None: + roll_angles = [0.] 
* (n_sci+n_ref) + + mask_center = np.array(data_shape)/2 + star_pos = mask_center + pixscale = 0.0218 # arcsec + + # Build each science/reference frame + sci_frames = [] + ref_frames = [] + for i in range(n_sci+n_ref): + + # Create default headers + prihdr, exthdr = create_default_headers() + + # Read in darkhole data, if provided + if i=n_sci and not darkhole_reffiles is None: + fpath = darkhole_reffiles[i-n_sci] + _,fname = os.path.split(fpath) + darkhole = fits.getdata(fpath) + + # Otherwise generate a 2D gaussian for a fake PSF + else: + fname = None + darkhole = gaussian_array() + + # Darkhole image shape must be even + assert(np.all(np.array(darkhole.shape) % 2 == 0)) + + fill_value = np.nanmin(darkhole) + img_data = np.full(data_shape,fill_value) + + # Overwrite center of array with the darkhole data + y_start,x_start = np.array(mask_center) - (np.array(darkhole.shape) / 2) + y_end,x_end = y_start+darkhole.shape[0], x_start+darkhole.shape[1] + + img_data[y_start:y_end,x_start:x_end] = darkhole + + # Add necessary header keys + prihdr['TELESCOP'] = 'ROMAN' + prihdr['INSTRUME'] = 'CGI' + prihdr['XOFFSET'] = 0.0 + prihdr['YOFFSET'] = 0.0 + prihdr['FILENAME'] = fname + prihdr["MODE"] = 'HLC' + prihdr["BAND"] = 1 + + exthdr['MASKLOCX'] = mask_center[1] + exthdr['MASKLOCY'] = mask_center[0] + exthdr['STARLOCX'] = star_pos[1] + exthdr['STARLOCY'] = star_pos[0] + exthdr['PIXSCALE'] = pixscale + exthdr["ROLL"] = roll_angles[i] + + # Add WCS header info, if provided + if wcs_header is None: + wcs_header = fits.header.Header.fromstring(default_wcs_string,sep='\n') + exthdr.extend(wcs_header) + + # Make a corgiDRP Image frame + frame = data.Image(img_data, pri_hdr=prihdr, ext_hdr=exthdr) + + # Add it to the correct dataset + if i < n_sci: + sci_frames.append(frame) + else: + ref_frames.append(frame) + + sci_dataset = data.Dataset(sci_frames) + + if len(ref_frames) > 0: + ref_dataset = data.Dataset(ref_frames) + else: + ref_dataset = None + + # Save datasets if outdir was provided + if not outdir is None: + if not os.path.exists(outdir): + os.makedirs(outdir) + + sci_dataset.save(filedir=outdir, filenames=['mock_psfsub_L2b_sci_input_dataset.fits']) + if len(ref_frames) > 0: + ref_dataset.save(filedir=outdir, filenames=['mock_psfsub_L2b_ref_input_dataset.fits']) + + return sci_dataset,ref_dataset \ No newline at end of file From 63ed53423951b1ff0352c425cab1bbf3b5f010f9 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Mon, 30 Dec 2024 12:22:15 -0500 Subject: [PATCH 03/35] add psf subtraction step --- corgidrp/l3_to_l4.py | 46 ++++++++++++++++++++++++++++++++++++++++++-- corgidrp/mocks.py | 32 ++++++++++++++++-------------- 2 files changed, 62 insertions(+), 16 deletions(-) diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index 3593d4b2..946df0ee 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -6,6 +6,8 @@ from scipy.ndimage import shift import numpy as np import glob +import pyklip.rdi +import os def distortion_correction(input_dataset, distortion_calibration): """ @@ -36,19 +38,59 @@ def find_star(input_dataset): return input_dataset.copy() -def do_psf_subtraction(input_dataset, reference_star_dataset=None): +def do_psf_subtraction(input_dataset, reference_star_dataset=None, + mode=None, annuli=1,subsections=1,movement=1, + numbasis = [1,4,8,16],outdir='KLIP_SUB',fileprefix="" + ): """ Perform PSF subtraction on the dataset. Optionally using a reference star dataset. 
- + TODO: Handle nans & propagate DQ array + Crop data to darkhole size ~(60x60) centered on nearest pixel + format output dataset as corgiDRP dataset + overwrite header kws related to psf/star centers + overwite KLIP mode headers + which header kws are still relevant? + which are new? Args: input_dataset (corgidrp.data.Dataset): a dataset of Images (L3-level) reference_star_dataset (corgidrp.data.Dataset): a dataset of Images of the reference star [optional] Returns: corgidrp.data.Dataset: a version of the input dataset with the PSF subtraction applied + """ + sci_dataset = input_dataset.copy() + + assert len(sci_dataset) > 0, "Science dataset has no data." + + pyklip_dataset = data.PyKLIPDataset(sci_dataset,psflib_dataset=reference_star_dataset) + + # Choose PSF subtraction mode if unspecified + if mode is None: + + if not reference_star_dataset is None and len(sci_dataset)==1: + mode = 'RDI' + elif not reference_star_dataset is None: + mode = 'ADI+RDI' + else: + mode = 'ADI' + + else: assert mode in ['RDI','ADI+RDI','ADI'], f"Mode {mode} is not configured." + + outdir = os.path.join(outdir,mode) + + if not os.path.exists(outdir): + os.makedirs(outdir) + + pyklip.parallelized.klip_dataset(pyklip_dataset, outputdir=outdir, + annuli=annuli, subsections=subsections, movement=movement, numbasis=numbasis, + calibrate_flux=False, mode=mode,psf_library=pyklip_dataset._psflib, + fileprefix=fileprefix) + + + return input_dataset.copy() def northup(input_dataset,correct_wcs=False): diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index 5f10add7..34f43d27 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -1865,7 +1865,8 @@ def add_defect(sch_imgs, prob, ori, temp): CRVAL1 = 0.0 / Coordinate value at reference point CRVAL2 = 0.0 / Coordinate value at reference point LATPOLE = 90.0 / [deg] Native latitude of celestial pole -MJDREF = 0.0 / [d] MJD of fiducial time END""" +MJDREF = 0.0 / [d] MJD of fiducial time +""" def gaussian_array(array_size=50,sigma=5.,amp=1.): """Generate a 2D square array with a centered gaussian surface (for mock PSF data). @@ -1921,8 +1922,8 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol if roll_angles is None: roll_angles = [0.] 
* (n_sci+n_ref) - mask_center = np.array(data_shape)/2 - star_pos = mask_center + # mask_center = np.array(data_shape)/2 + # star_pos = mask_center pixscale = 0.0218 # arcsec # Build each science/reference frame @@ -1946,19 +1947,21 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol # Otherwise generate a 2D gaussian for a fake PSF else: fname = None - darkhole = gaussian_array() - - # Darkhole image shape must be even - assert(np.all(np.array(darkhole.shape) % 2 == 0)) + darkhole = gaussian_array(array_size=55) fill_value = np.nanmin(darkhole) img_data = np.full(data_shape,fill_value) # Overwrite center of array with the darkhole data - y_start,x_start = np.array(mask_center) - (np.array(darkhole.shape) / 2) - y_end,x_end = y_start+darkhole.shape[0], x_start+darkhole.shape[1] + cr_psf_pix = np.array(darkhole.shape) / 2 - 0.5 + + full_arr_center = np.array(img_data.shape) // 2 + + start_psf_ind = full_arr_center - np.array(darkhole.shape) // 2 + + img_data[start_psf_ind[0]:start_psf_ind[0]+darkhole.shape[0],start_psf_ind[1]:start_psf_ind[1]+darkhole.shape[1]] = darkhole - img_data[y_start:y_end,x_start:x_end] = darkhole + psfcenty, psfcentx = cr_psf_pix + start_psf_ind # Add necessary header keys prihdr['TELESCOP'] = 'ROMAN' @@ -1969,16 +1972,17 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol prihdr["MODE"] = 'HLC' prihdr["BAND"] = 1 - exthdr['MASKLOCX'] = mask_center[1] - exthdr['MASKLOCY'] = mask_center[0] - exthdr['STARLOCX'] = star_pos[1] - exthdr['STARLOCY'] = star_pos[0] + exthdr['MASKLOCX'] = psfcentx + exthdr['MASKLOCY'] = psfcenty + exthdr['STARLOCX'] = psfcentx + exthdr['STARLOCY'] = psfcenty exthdr['PIXSCALE'] = pixscale exthdr["ROLL"] = roll_angles[i] # Add WCS header info, if provided if wcs_header is None: wcs_header = fits.header.Header.fromstring(default_wcs_string,sep='\n') + # wcs_header._cards = wcs_header._cards[-1] exthdr.extend(wcs_header) # Make a corgiDRP Image frame From 94239565ea20e153dd138641d491376b50e81980 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Wed, 1 Jan 2025 12:56:52 -0500 Subject: [PATCH 04/35] format L4 headers --- corgidrp/l3_to_l4.py | 72 ++++++++++++++++++++++++++++++++++++++------ corgidrp/mocks.py | 19 +++++++++++- 2 files changed, 80 insertions(+), 11 deletions(-) diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index 946df0ee..f9f54373 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -8,6 +8,7 @@ import glob import pyklip.rdi import os +from astropy.io import fits def distortion_correction(input_dataset, distortion_calibration): """ @@ -45,19 +46,20 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, """ Perform PSF subtraction on the dataset. Optionally using a reference star dataset. - TODO: Handle nans & propagate DQ array - Crop data to darkhole size ~(60x60) centered on nearest pixel - format output dataset as corgiDRP dataset - overwrite header kws related to psf/star centers - overwite KLIP mode headers - which header kws are still relevant? - which are new? + TODO: + Handle nans & propagate DQ array + Crop data to darkhole size ~(60x60) centered on nearest pixel (waiting on crop step function) + Rotate north at the end + Do frame combine before PSF subtraction? + What info is missing from output dataset headers? 
+ Add comments to new ext header cards + Args: input_dataset (corgidrp.data.Dataset): a dataset of Images (L3-level) reference_star_dataset (corgidrp.data.Dataset): a dataset of Images of the reference star [optional] Returns: - corgidrp.data.Dataset: a version of the input dataset with the PSF subtraction applied + corgidrp.data.Dataset: a version of the input dataset with the PSF subtraction applied (L4-level) """ @@ -84,14 +86,64 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, if not os.path.exists(outdir): os.makedirs(outdir) + # TODO: Crop data (make sure psf center is updated) + + # TODO: Mask data where DQ > 0, let pyklip deal with the nans + pyklip.parallelized.klip_dataset(pyklip_dataset, outputdir=outdir, annuli=annuli, subsections=subsections, movement=movement, numbasis=numbasis, calibrate_flux=False, mode=mode,psf_library=pyklip_dataset._psflib, fileprefix=fileprefix) - + # Construct corgiDRP dataset from pyKLIP result + result_fpath = os.path.join(outdir,f'{fileprefix}-KLmodes-all.fits') + pyklip_data = fits.getdata(result_fpath) + pyklip_hdr = fits.getheader(result_fpath) + + frames = [] + for i,frame_data in enumerate(pyklip_data): + + # TODO: Handle DQ & errors correctly + err = np.zeros_like(frame_data) + dq = np.zeros_like(frame_data) + + # Clean up primary header + pri_hdr = pyklip_hdr.copy() + naxis1 = pri_hdr['NAXIS1'] + naxis2 = pri_hdr['NAXIS2'] + del pri_hdr['NAXIS1'] + del pri_hdr['NAXIS2'] + del pri_hdr['NAXIS3'] + pri_hdr['NAXIS'] = 0 + + # Add observation info from input dataset + pri_hdr['TELESCOP'] = input_dataset[0].pri_hdr['TELESCOP'] + pri_hdr['INSTRUME'] = input_dataset[0].pri_hdr['INSTRUME'] + pri_hdr['MODE'] = input_dataset[0].pri_hdr['MODE'] + pri_hdr['BAND'] = input_dataset[0].pri_hdr['BAND'] + pri_hdr['TELESCOP'] = input_dataset[0].pri_hdr['TELESCOP'] + + # Make extension header + ext_hdr = fits.Header() + ext_hdr['NAXIS'] = 2 + ext_hdr['NAXIS1'] = naxis1 + ext_hdr['NAXIS2'] = naxis2 + ext_hdr['KLMODES'] = pyklip_hdr[f'KLMODE{i}'] + ext_hdr['PIXSCALE'] = input_dataset[0].ext_hdr['PIXSCALE'] + ext_hdr['STARLOCX'] = pyklip_hdr['PSFCENTX'] + ext_hdr['STARLOCY'] = pyklip_hdr['PSFCENTY'] + + frame = data.Image(frame_data, + pri_hdr=pri_hdr, ext_hdr=ext_hdr, + err=err, dq=dq) + + frames.append(frame) - return input_dataset.copy() + dataset_out = data.Dataset(frames) + + # TODO: Update DQ to 1 where there are nans + + return dataset_out def northup(input_dataset,correct_wcs=False): """ diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index 34f43d27..805e5f38 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -641,11 +641,26 @@ def create_default_headers(arrtype="SCI", vistype="TDEMO"): NAXIS2 = 2200 # fill in prihdr + prihdr['AUXFILE'] = 'mock_auxfile.fits' prihdr['OBSID'] = 0 prihdr['BUILD'] = 0 # prihdr['OBSTYPE'] = arrtype prihdr['VISTYPE'] = vistype prihdr['MOCK'] = True + prihdr['INSTRUME'] = 'CGI' + prihdr['OBSNAME'] = 'MOCK' + prihdr['TARGET'] = 'MOCK' + prihdr['OBSNUM'] = '000' + prihdr['CAMPAIGN'] = '000' + prihdr['PROGNUM'] = '00000' + prihdr['SEGMENT'] = '000' + prihdr['VISNUM'] = '000' + prihdr['EXECNUM'] = '00' + prihdr['VISITID'] = prihdr['PROGNUM'] + prihdr['EXECNUM'] + prihdr['CAMPAIGN'] + prihdr['SEGMENT'] + prihdr['OBSNUM'] + prihdr['VISNUM'] + prihdr['PSFREF'] = False + prihdr['SIMPLE'] = True + prihdr['NAXIS'] = 0 + # fill in exthdr exthdr['NAXIS'] = 2 @@ -693,6 +708,7 @@ def create_default_headers(arrtype="SCI", vistype="TDEMO"): exthdr['MISSING'] = False return prihdr, exthdr + def 
create_badpixelmap_files(filedir=None, col_bp=None, row_bp=None): """ Create simulated bad pixel map data. Code value is 4. @@ -1897,7 +1913,6 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol outdir = None): """Generate a mock science and reference dataset ready for the PSF subtraction step. TODO: reference a central pixscale number, rather than hard code. - TODO: save default WCS header in a smarter way (dict?) Args: n_sci (int): number of science frames, must be >= 1. @@ -1972,12 +1987,14 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol prihdr["MODE"] = 'HLC' prihdr["BAND"] = 1 + exthdr['BUNIT'] = 'MJy/sr' exthdr['MASKLOCX'] = psfcentx exthdr['MASKLOCY'] = psfcenty exthdr['STARLOCX'] = psfcentx exthdr['STARLOCY'] = psfcenty exthdr['PIXSCALE'] = pixscale exthdr["ROLL"] = roll_angles[i] + exthdr["HIERARCH DATA_LEVEL"] = 'L3' # Add WCS header info, if provided if wcs_header is None: From d2fc1f17b1f63dcb6628f9b3541e8a8d01a19f40 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Fri, 3 Jan 2025 10:26:20 -0500 Subject: [PATCH 05/35] minor edits to psf subtraction --- corgidrp/l3_to_l4.py | 44 +++++++++++++++++++++++++++++++++----------- corgidrp/mocks.py | 1 + 2 files changed, 34 insertions(+), 11 deletions(-) diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index f9f54373..3c97f686 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -9,6 +9,7 @@ import pyklip.rdi import os from astropy.io import fits +import warnings def distortion_correction(input_dataset, distortion_calibration): """ @@ -35,6 +36,7 @@ def find_star(input_dataset): Returns: corgidrp.data.Dataset: a version of the input dataset with the stars identified + in ext_hdr["STARLOCX/Y"] """ return input_dataset.copy() @@ -48,15 +50,23 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, Perform PSF subtraction on the dataset. Optionally using a reference star dataset. TODO: Handle nans & propagate DQ array - Crop data to darkhole size ~(60x60) centered on nearest pixel (waiting on crop step function) + Crop data to darkhole size ~(60x60) centered on nearest pixel (waiting on crop step function PR) Rotate north at the end Do frame combine before PSF subtraction? What info is missing from output dataset headers? Add comments to new ext header cards + Figure out output roll angle. Args: input_dataset (corgidrp.data.Dataset): a dataset of Images (L3-level) reference_star_dataset (corgidrp.data.Dataset): a dataset of Images of the reference star [optional] + mode (str): pyKLIP PSF subraction mode, e.g. ADI/RDI/ADI+RDI. Mode will be chosen autonomously if not specified. + annuli (int): number of concentric annuli to run separate subtractions on. Defaults to 1. + subsections (int): number of angular subsections to run separate subtractions on. Defaults to 1. + movement (int): KLIP movement parameter. Defaults to 1. + numbasis (int or list of int): number of KLIP modes to retain. Defaults to [1,4,8,16]. + outdir (str or path): path to output directory. Defaults to "KLIP_SUB". + fileprefix (str): prefix of saved output files. Defaults to "". Returns: corgidrp.data.Dataset: a version of the input dataset with the PSF subtraction applied (L4-level) @@ -64,25 +74,28 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, """ sci_dataset = input_dataset.copy() + ref_dataset = reference_star_dataset.copy() assert len(sci_dataset) > 0, "Science dataset has no data." 
- pyklip_dataset = data.PyKLIPDataset(sci_dataset,psflib_dataset=reference_star_dataset) - # Choose PSF subtraction mode if unspecified if mode is None: - if not reference_star_dataset is None and len(sci_dataset)==1: + if not ref_dataset is None and len(sci_dataset)==1: mode = 'RDI' - elif not reference_star_dataset is None: + elif not ref_dataset is None: mode = 'ADI+RDI' else: mode = 'ADI' else: assert mode in ['RDI','ADI+RDI','ADI'], f"Mode {mode} is not configured." - outdir = os.path.join(outdir,mode) + # Format numbases + if isinstance(numbasis,int): + numbasis = [numbasis] + # Set up outdir + outdir = os.path.join(outdir,mode) if not os.path.exists(outdir): os.makedirs(outdir) @@ -90,6 +103,8 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, # TODO: Mask data where DQ > 0, let pyklip deal with the nans + # Run pyklip + pyklip_dataset = data.PyKLIPDataset(sci_dataset,psflib_dataset=ref_dataset) pyklip.parallelized.klip_dataset(pyklip_dataset, outputdir=outdir, annuli=annuli, subsections=subsections, movement=movement, numbasis=numbasis, calibrate_flux=False, mode=mode,psf_library=pyklip_dataset._psflib, @@ -121,18 +136,20 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, pri_hdr['INSTRUME'] = input_dataset[0].pri_hdr['INSTRUME'] pri_hdr['MODE'] = input_dataset[0].pri_hdr['MODE'] pri_hdr['BAND'] = input_dataset[0].pri_hdr['BAND'] - pri_hdr['TELESCOP'] = input_dataset[0].pri_hdr['TELESCOP'] - + # Make extension header ext_hdr = fits.Header() ext_hdr['NAXIS'] = 2 ext_hdr['NAXIS1'] = naxis1 ext_hdr['NAXIS2'] = naxis2 - ext_hdr['KLMODES'] = pyklip_hdr[f'KLMODE{i}'] + ext_hdr['BUNIT'] = input_dataset[0].ext_hdr['BUNIT'] ext_hdr['PIXSCALE'] = input_dataset[0].ext_hdr['PIXSCALE'] + ext_hdr['KLIP_ALG'] = mode + ext_hdr['KLMODES'] = pyklip_hdr[f'KLMODE{i}'] ext_hdr['STARLOCX'] = pyklip_hdr['PSFCENTX'] ext_hdr['STARLOCY'] = pyklip_hdr['PSFCENTY'] - + ext_hdr['HISTORY'] = input_dataset[0].ext_hdr['HISTORY'] + frame = data.Image(frame_data, pri_hdr=pri_hdr, ext_hdr=ext_hdr, err=err, dq=dq) @@ -141,6 +158,10 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, dataset_out = data.Dataset(frames) + history_msg = f'PSF subtracted via pyKLIP {mode}.' + + dataset_out.update_after_processing_step(history_msg) + # TODO: Update DQ to 1 where there are nans return dataset_out @@ -175,8 +196,9 @@ def northup(input_dataset,correct_wcs=False): # define the center for rotation try: - xcen, ycen = im_hd['PSFCENTX'], im_hd['PSFCENTY'] # TBU, after concluding the header keyword + xcen, ycen = im_hd['STARLOCX'], im_hd['STARLOCY'] except KeyError: + warnings.warn('"STARLOCX/Y" missing from ext_hdr. 
Rotating about center of array.') xcen, ycen = xlen/2, ylen/2 # look for WCS solutions diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index 805e5f38..2456c1ca 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -647,6 +647,7 @@ def create_default_headers(arrtype="SCI", vistype="TDEMO"): # prihdr['OBSTYPE'] = arrtype prihdr['VISTYPE'] = vistype prihdr['MOCK'] = True + prihdr['TELESCOP'] = 'ROMAN' prihdr['INSTRUME'] = 'CGI' prihdr['OBSNAME'] = 'MOCK' prihdr['TARGET'] = 'MOCK' From 2e48241f6bc36f87938644109423662633c46d6f Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Fri, 3 Jan 2025 10:26:42 -0500 Subject: [PATCH 06/35] stub functions for nan_flags and flag_nans --- corgidrp/detector.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/corgidrp/detector.py b/corgidrp/detector.py index d8b8dbfc..922582e6 100644 --- a/corgidrp/detector.py +++ b/corgidrp/detector.py @@ -817,3 +817,27 @@ def create_onsky_flatfield(dataset, planet=None,band=None,up_radius=55,im_size=N return(onsky_flatfield) +def nan_flags(dataset,threshold=1): + """Replaces each DQ-flagged pixel (above the given threshold) in the dataset with np.nan. + + Args: + dataset (corgidrp.data.Dataset): input dataset. + threshold (int, optional): DQ threshold to replace with nans. Defaults to 1. + + Returns: + corgidrp.data.Dataset: dataset with flagged pixels replaced. + """ + + return dataset + +def flag_nans(dataset,flag_val=1): + """Assigns a DQ flag to each nan pixel in the dataset. + + Args: + dataset (corgidrp.data.Dataset): input dataset. + flag_val (int, optional): DQ value to assign. Defaults to 1. + + Returns: + corgidrp.data.Dataset: dataset with nan values flagged. + """ + return dataset \ No newline at end of file From 3abcb5611d373fc43f953e5728566f2031086266 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Mon, 6 Jan 2025 09:30:50 -0500 Subject: [PATCH 07/35] handle dq array in psf subtraction and create tests --- corgidrp/data.py | 31 +++---------- corgidrp/detector.py | 20 ++++++-- corgidrp/l3_to_l4.py | 36 ++++++++++----- corgidrp/mocks.py | 70 +++++++++++++++++++--------- tests/test_psfsub.py | 108 +++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 205 insertions(+), 60 deletions(-) create mode 100644 tests/test_psfsub.py diff --git a/corgidrp/data.py b/corgidrp/data.py index c4983623..ba421c75 100644 --- a/corgidrp/data.py +++ b/corgidrp/data.py @@ -1445,32 +1445,22 @@ def readdata(self, INSTRUME = phead['INSTRUME'] MODE = phead['MODE'] data = frame.data - try: - pxdq = frame.dq - except: - pxdq = np.zeros_like(data).astype('int') if data.ndim == 2: data = data[np.newaxis, :] - pxdq = pxdq[np.newaxis, :] if data.ndim != 3: raise UserWarning('Requires 2D/3D data cube') NINTS = data.shape[0] pix_scale = shead['PIXSCALE'] # arcsec PIXSCALE += [pix_scale] - # TODO: Do we need to do this? - # Nan out non-science pixels. - # data[pxdq & 512 == 512] = np.nan - - # TODO: are centers xy or yx? # Get centers. 
if self.center_include_offset == True: # Use the offset values from the header to adjust the center - centers = np.array([shead['STARLOCX'] - 1 + phead['XOFFSET'] / pix_scale, - shead['STARLOCY'] - 1 + phead['YOFFSET'] / pix_scale] * NINTS) + centers = np.array([shead['STARLOCX'] + phead['XOFFSET'] / pix_scale, + shead['STARLOCY'] + phead['YOFFSET'] / pix_scale] * NINTS) else: # Assume the STARLOC define the correct center for each image - centers = np.array([shead['STARLOCX'] - 1, shead['STARLOCY'] - 1] * NINTS) + centers = np.array([shead['STARLOCX'], shead['STARLOCY']] * NINTS) # Get metadata. input_all += [data] @@ -1541,32 +1531,23 @@ def readdata(self, shead = frame.ext_hdr data = frame.data - try: - pxdq = frame.dq - except: - pxdq = np.zeros_like(data).astype('int') if data.ndim == 2: data = data[np.newaxis, :] - pxdq = pxdq[np.newaxis, :] if data.ndim != 3: raise UserWarning('Requires 2D/3D data cube') NINTS = data.shape[0] pix_scale = shead['PIXSCALE'] # arcsec PIXSCALE += [pix_scale] - # TODO: Do we need to do this? - # Nan out non-science pixels. - # data[pxdq & 512 == 512] = np.nan - # TODO: are centers xy or yx? # Get centers. if self.center_include_offset == True: # Use the offset values from the header to adjust the center - centers = np.array([shead['STARLOCX'] - 1 + phead['XOFFSET'] / pix_scale, - shead['STARLOCY'] - 1 + phead['YOFFSET'] / pix_scale] * NINTS) + centers = np.array([shead['STARLOCX'] + phead['XOFFSET'] / pix_scale, + shead['STARLOCY'] + phead['YOFFSET'] / pix_scale] * NINTS) else: # Assume the STARLOC define the correct center for each image - centers = np.array([shead['STARLOCX'] - 1, shead['STARLOCY'] - 1] * NINTS) + centers = np.array([shead['STARLOCX'], shead['STARLOCY']] * NINTS) psflib_data_all += [data] psflib_centers_all += [centers] diff --git a/corgidrp/detector.py b/corgidrp/detector.py index 922582e6..649c82a3 100644 --- a/corgidrp/detector.py +++ b/corgidrp/detector.py @@ -818,7 +818,7 @@ def create_onsky_flatfield(dataset, planet=None,band=None,up_radius=55,im_size=N return(onsky_flatfield) def nan_flags(dataset,threshold=1): - """Replaces each DQ-flagged pixel (above the given threshold) in the dataset with np.nan. + """Replaces each DQ-flagged pixel (>= the given threshold) in the dataset with np.nan. Args: dataset (corgidrp.data.Dataset): input dataset. @@ -828,7 +828,14 @@ def nan_flags(dataset,threshold=1): corgidrp.data.Dataset: dataset with flagged pixels replaced. """ - return dataset + dataset_out = dataset.copy() + + # mask bad pixels + bad = np.where(dataset_out.all_dq >= threshold) + dataset_out.all_data[bad] = np.nan + dataset_out.all_err[bad[0],:,bad[1],bad[2]] = np.nan + + return dataset_out def flag_nans(dataset,flag_val=1): """Assigns a DQ flag to each nan pixel in the dataset. @@ -840,4 +847,11 @@ def flag_nans(dataset,flag_val=1): Returns: corgidrp.data.Dataset: dataset with nan values flagged. 
""" - return dataset \ No newline at end of file + + dataset_out = dataset.copy() + + # mask bad pixels + bad = np.isnan(dataset_out.all_data) + dataset_out.all_dq[bad] = flag_val + + return dataset_out diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index 3c97f686..a6a253a9 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -2,6 +2,7 @@ from pyklip.klip import rotate from corgidrp import data +from corgidrp.detector import flag_nans,nan_flags from scipy.ndimage import rotate as rotate_scipy # to avoid duplicated name from scipy.ndimage import shift import numpy as np @@ -56,6 +57,7 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, What info is missing from output dataset headers? Add comments to new ext header cards Figure out output roll angle. + How to populate HISTORY header kw? Args: input_dataset (corgidrp.data.Dataset): a dataset of Images (L3-level) @@ -74,7 +76,10 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, """ sci_dataset = input_dataset.copy() - ref_dataset = reference_star_dataset.copy() + if not reference_star_dataset is None: + ref_dataset = reference_star_dataset.copy() + else: + ref_dataset = None assert len(sci_dataset) > 0, "Science dataset has no data." @@ -101,10 +106,12 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, # TODO: Crop data (make sure psf center is updated) - # TODO: Mask data where DQ > 0, let pyklip deal with the nans + # Mask data where DQ > 0, let pyklip deal with the nans + sci_dataset_masked = nan_flags(sci_dataset) + ref_dataset_masked = None if ref_dataset is None else nan_flags(ref_dataset) # Run pyklip - pyklip_dataset = data.PyKLIPDataset(sci_dataset,psflib_dataset=ref_dataset) + pyklip_dataset = data.PyKLIPDataset(sci_dataset_masked,psflib_dataset=ref_dataset_masked) pyklip.parallelized.klip_dataset(pyklip_dataset, outputdir=outdir, annuli=annuli, subsections=subsections, movement=movement, numbasis=numbasis, calibrate_flux=False, mode=mode,psf_library=pyklip_dataset._psflib, @@ -120,7 +127,7 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, # TODO: Handle DQ & errors correctly err = np.zeros_like(frame_data) - dq = np.zeros_like(frame_data) + dq = np.zeros_like(frame_data) # This will get filled out later # Clean up primary header pri_hdr = pyklip_hdr.copy() @@ -132,23 +139,26 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, pri_hdr['NAXIS'] = 0 # Add observation info from input dataset - pri_hdr['TELESCOP'] = input_dataset[0].pri_hdr['TELESCOP'] - pri_hdr['INSTRUME'] = input_dataset[0].pri_hdr['INSTRUME'] - pri_hdr['MODE'] = input_dataset[0].pri_hdr['MODE'] - pri_hdr['BAND'] = input_dataset[0].pri_hdr['BAND'] + pri_hdr['TELESCOP'] = sci_dataset[0].pri_hdr['TELESCOP'] + pri_hdr['INSTRUME'] = sci_dataset[0].pri_hdr['INSTRUME'] + pri_hdr['MODE'] = sci_dataset[0].pri_hdr['MODE'] + pri_hdr['BAND'] = sci_dataset[0].pri_hdr['BAND'] # Make extension header ext_hdr = fits.Header() ext_hdr['NAXIS'] = 2 ext_hdr['NAXIS1'] = naxis1 ext_hdr['NAXIS2'] = naxis2 - ext_hdr['BUNIT'] = input_dataset[0].ext_hdr['BUNIT'] - ext_hdr['PIXSCALE'] = input_dataset[0].ext_hdr['PIXSCALE'] + ext_hdr['BUNIT'] = sci_dataset[0].ext_hdr['BUNIT'] + ext_hdr['PIXSCALE'] = sci_dataset[0].ext_hdr['PIXSCALE'] ext_hdr['KLIP_ALG'] = mode ext_hdr['KLMODES'] = pyklip_hdr[f'KLMODE{i}'] ext_hdr['STARLOCX'] = pyklip_hdr['PSFCENTX'] ext_hdr['STARLOCY'] = pyklip_hdr['PSFCENTY'] - ext_hdr['HISTORY'] = input_dataset[0].ext_hdr['HISTORY'] + ext_hdr['PSFCENTX'] = 
pyklip_hdr['PSFCENTX'] + ext_hdr['PSFCENTY'] = pyklip_hdr['PSFCENTY'] + if "HISTORY" in sci_dataset[0].ext_hdr.keys(): + ext_hdr['HISTORY'] = sci_dataset[0].ext_hdr['HISTORY'] frame = data.Image(frame_data, pri_hdr=pri_hdr, ext_hdr=ext_hdr, @@ -158,6 +168,10 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, dataset_out = data.Dataset(frames) + # Flag nans in the dq array and then add nans to the error array + dataset_out = flag_nans(dataset_out,flag_val=1) + dataset_out = nan_flags(dataset_out,threshold=1) + history_msg = f'PSF subtracted via pyKLIP {mode}.' dataset_out.update_after_processing_step(history_msg) diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index 2456c1ca..bd6264d3 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -1885,20 +1885,22 @@ def add_defect(sch_imgs, prob, ori, temp): MJDREF = 0.0 / [d] MJD of fiducial time """ -def gaussian_array(array_size=50,sigma=5.,amp=1.): +def gaussian_array(array_shape=[50,50],sigma=2.5,amp=1.,xoffset=0.,yoffset=0.): """Generate a 2D square array with a centered gaussian surface (for mock PSF data). Args: - array_size (int, optional): Width of desired array in pixels. Defaults to 50. + array_shape (int, optional): Shape of desired array in pixels. Defaults to [50,50]. sigma (float, optional): Standard deviation of the gaussian curve, in pixels. Defaults to 5. amp (float,optional): Amplitude of gaussian curve. Defaults to 1. + xoffset (float,optional): x offset of gaussian from array center. Defaults to 0. + yoffset (float,optional): y offset of gaussian from array center. Defaults to 0. Returns: np.array: 2D array of a gaussian surface. """ - x, y = np.meshgrid(np.linspace(-array_size, array_size, array_size), - np.linspace(-array_size, array_size, array_size)) - dst = np.sqrt(x**2+y**2) + x, y = np.meshgrid(np.linspace(-array_shape[0]/2, array_shape[0]/2, array_shape[0]), + np.linspace(-array_shape[1]/2, array_shape[1]/2, array_shape[1])) + dst = np.sqrt((x-xoffset)**2+(y-yoffset)**2) # lower normal part of gaussian normal = 1/(2.0 * np.pi * sigma**2) @@ -1910,7 +1912,7 @@ def gaussian_array(array_size=50,sigma=5.,amp=1.): def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhole_reffiles=None, wcs_header = None, - data_shape = [1024,1024], + data_shape = [60,60], outdir = None): """Generate a mock science and reference dataset ready for the PSF subtraction step. TODO: reference a central pixscale number, rather than hard code. 
@@ -1955,29 +1957,54 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol fpath = darkhole_scifiles[i] _,fname = os.path.split(fpath) darkhole = fits.getdata(fpath) + + fill_value = np.nanmin(darkhole) + img_data = np.full(data_shape,fill_value) + + # Overwrite center of array with the darkhole data + cr_psf_pix = np.array(darkhole.shape) / 2 - 0.5 + full_arr_center = np.array(img_data.shape) // 2 + start_psf_ind = full_arr_center - np.array(darkhole.shape) // 2 + img_data[start_psf_ind[0]:start_psf_ind[0]+darkhole.shape[0],start_psf_ind[1]:start_psf_ind[1]+darkhole.shape[1]] = darkhole + psfcenty, psfcentx = cr_psf_pix + start_psf_ind + elif i>=n_sci and not darkhole_reffiles is None: fpath = darkhole_reffiles[i-n_sci] _,fname = os.path.split(fpath) darkhole = fits.getdata(fpath) + fill_value = np.nanmin(darkhole) + img_data = np.full(data_shape,fill_value) + + # Overwrite center of array with the darkhole data + cr_psf_pix = np.array(darkhole.shape) / 2 - 0.5 + full_arr_center = np.array(img_data.shape) // 2 + start_psf_ind = full_arr_center - np.array(darkhole.shape) // 2 + img_data[start_psf_ind[0]:start_psf_ind[0]+darkhole.shape[0],start_psf_ind[1]:start_psf_ind[1]+darkhole.shape[1]] = darkhole + psfcenty, psfcentx = cr_psf_pix + start_psf_ind # Otherwise generate a 2D gaussian for a fake PSF else: - fname = None - darkhole = gaussian_array(array_size=55) - - fill_value = np.nanmin(darkhole) - img_data = np.full(data_shape,fill_value) - - # Overwrite center of array with the darkhole data - cr_psf_pix = np.array(darkhole.shape) / 2 - 0.5 - - full_arr_center = np.array(img_data.shape) // 2 + label = 'ref' if i>= n_sci else 'sci' + fname = f'MOCK_{label}_roll{roll_angles[i]}.fits' + img_data = gaussian_array(array_shape=data_shape) + psfcentx,psfcenty = np.array(img_data.shape) / 2 - 0.5 + + # Add some noise + rng = np.random.default_rng(seed=None) + noise = rng.normal(0,1e-11,img_data.shape) + img_data += noise + + # Add fake planet to sci files + if i np.nansum(frame.data): + print(f'sum input: {np.sum(mock_sci[0].data)}') + print(f'sum output: {np.sum(frame.data)}') + + raise Exception(f"ADI subtraction resulted in increased counts for frame {i}.") + + if not frame.ext_hdr['KLIP_ALG'] == 'ADI': + raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'ADI' mode when provided 2 science images and no references.") + + # Check expected data shape + expected_data_shape = (len(numbasis),*mock_sci[0].data.shape) + if not result.all_data.shape == expected_data_shape: + raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after ADI subtraction.") + +def test_psf_sub_RDI(): + + numbasis=[1,2,4] + rolls = [13,0,90] + mock_sci,mock_ref = create_psfsub_dataset(1,1,rolls) + + result = do_psf_subtraction(mock_sci,mock_ref, + numbasis=numbasis, + fileprefix='test_RDI') + + for i,frame in enumerate(result): + + # import matplotlib.pyplot as plt + # plt.imshow(frame.data,origin='lower') + # plt.colorbar() + # plt.title(f'{frame.ext_hdr["KLIP_ALG"]}, {frame.ext_hdr["KLMODES"]} KL MODE(S)') + # plt.scatter(frame.ext_hdr['PSFCENTX'],frame.ext_hdr['PSFCENTY']) + # plt.show() + # plt.close() + + # Overall counts should decrease + if not np.nansum(mock_sci[0].data) > np.nansum(frame.data): + raise Exception(f"RDI subtraction resulted in increased counts for frame {i}.") + + if not frame.ext_hdr['KLIP_ALG'] == 'RDI': + raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'RDI' mode when provided 1 science 
image and 1 reference.") + + # Check expected data shape + expected_data_shape = (len(numbasis),*mock_sci[0].data.shape) + if not result.all_data.shape == expected_data_shape: + raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after RDI subtraction.") + +def test_psf_sub_ADIRDI(): + + numbasis = [1,2,4] + rolls = [13,-13,0,0] + mock_sci,mock_ref = create_psfsub_dataset(2,1,rolls) + + result = do_psf_subtraction(mock_sci,mock_ref, + numbasis=numbasis, + fileprefix='test_ADI+RDI') + + for i,frame in enumerate(result): + + # import matplotlib.pyplot as plt + # plt.imshow(frame.data,origin='lower') + # plt.colorbar() + # plt.title(f'{frame.ext_hdr["KLIP_ALG"]}, {frame.ext_hdr["KLMODES"]} KL MODE(S)') + # plt.scatter(frame.ext_hdr['PSFCENTX'],frame.ext_hdr['PSFCENTY']) + # plt.show() + # plt.close() + + # Overall counts should decrease + if not np.nansum(mock_sci[0].data) > np.nansum(frame.data): + raise Exception(f"ADI+RDI subtraction resulted in increased counts for frame {i}.") + + if not frame.ext_hdr['KLIP_ALG'] == 'ADI+RDI': + raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'ADI+RDI' mode when provided 2 science images and 1 reference.") + + # Check expected data shape + expected_data_shape = (len(numbasis),*mock_sci[0].data.shape) + if not result.all_data.shape == expected_data_shape: + raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after ADI+RDI subtraction.") + +if __name__ == '__main__': + test_psf_sub_ADI() + test_psf_sub_RDI() + test_psf_sub_ADIRDI() From 03d080061ae0e0c7a01d82a13c5203a1a88657f1 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Mon, 6 Jan 2025 11:31:54 -0500 Subject: [PATCH 08/35] new psfsub tests --- corgidrp/data.py | 39 +++++++------- corgidrp/mocks.py | 29 +++++++--- tests/test_psfsub.py | 123 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 163 insertions(+), 28 deletions(-) diff --git a/corgidrp/data.py b/corgidrp/data.py index ba421c75..92fd3307 100644 --- a/corgidrp/data.py +++ b/corgidrp/data.py @@ -1262,7 +1262,7 @@ def __init__(self,data_or_filepath, pri_hdr=None,ext_hdr=None, input_dataset=Non class PyKLIPDataset(pyKLIP_Data): """ A pyKLIP instrument class for Roman Coronagraph Instrument data. - + # TODO: Add more bandpasses, modes to self.wave_hlc """ #################### @@ -1277,27 +1277,22 @@ def __init__(self, """ Initialize the pyKLIP instrument class for space telescope data. - Parameters - ---------- - filepaths : 1D-array - Paths of the input science observations. - psflib_filepaths : 1D-array, optional - Paths of the input reference observations. The default is None. - center_include_offset : bool - Toggle as to whether the relative header offset values of each - image is applied during image centering. - - Returns - ------- - None. - + Args: + dataset (corgidrp.data.Dataset): + Dataset containing input science observations. + psflib_dataset (corgidrp.data.Dataset, optional): + Dataset containing input reference observations. The default is None. + highpass (bool, optional): + Toggle to do highpass filtering. Defaults fo False. + center_include_offset (bool, optional): + Toggle as to whether the relative header offset values of each + image is applied during image centering. Defaults to True. """ # Initialize pyKLIP Data class. 
super(PyKLIPDataset, self).__init__() # Set filter wavelengths - # TODO: Add more bandpasses, modes self.wave_hlc = {1: 0.575} # micron # Optional variables @@ -1468,13 +1463,15 @@ def readdata(self, filenames_all += [os.path.split(phead['FILENAME'])[1] + '_INT%.0f' % (j + 1) for j in range(NINTS)] PAs_all += [shead['ROLL']] * NINTS + if TELESCOP != "ROMAN": + raise UserWarning('Data is not from Roman Space Telescope.') if INSTRUME == 'CGI': if MODE == 'HLC': CWAVEL = self.wave_hlc[phead['BAND']] else: raise UserWarning('Unknown Roman CGI instrument mode.') else: - raise UserWarning('Data originates from unknown Roman instrument') + raise UserWarning('Data originates from unknown Roman instrument.') wvs_all += [1e-6 * CWAVEL] * NINTS # TODO: Figure out actual WCS info @@ -1482,9 +1479,10 @@ def readdata(self, for j in range(NINTS): wcs_all += [wcs_hdr.deepcopy()] - input_all = np.concatenate(input_all) - if input_all.ndim != 3: - raise UserWarning('Some science files do not have matching image shapes') + try: + input_all = np.concatenate(input_all) + except ValueError: + raise UserWarning('Unable to concatenate images. Some science files do not have matching image shapes') centers_all = np.concatenate(centers_all).reshape(-1, 2) filenames_all = np.array(filenames_all) filenums_all = np.array(range(len(filenames_all))) @@ -1539,7 +1537,6 @@ def readdata(self, pix_scale = shead['PIXSCALE'] # arcsec PIXSCALE += [pix_scale] - # TODO: are centers xy or yx? # Get centers. if self.center_include_offset == True: # Use the offset values from the header to adjust the center diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index bd6264d3..a3863bac 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -1913,6 +1913,7 @@ def gaussian_array(array_shape=[50,50],sigma=2.5,amp=1.,xoffset=0.,yoffset=0.): def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhole_reffiles=None, wcs_header = None, data_shape = [60,60], + centerxy = None, outdir = None): """Generate a mock science and reference dataset ready for the PSF subtraction step. TODO: reference a central pixscale number, rather than hard code. 
@@ -1963,7 +1964,10 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol # Overwrite center of array with the darkhole data cr_psf_pix = np.array(darkhole.shape) / 2 - 0.5 - full_arr_center = np.array(img_data.shape) // 2 + if centerxy is None: + full_arr_center = np.array(img_data.shape) // 2 + else: + full_arr_center = (centerxy[1],centerxy[0]) start_psf_ind = full_arr_center - np.array(darkhole.shape) // 2 img_data[start_psf_ind[0]:start_psf_ind[0]+darkhole.shape[0],start_psf_ind[1]:start_psf_ind[1]+darkhole.shape[1]] = darkhole psfcenty, psfcentx = cr_psf_pix + start_psf_ind @@ -1977,7 +1981,10 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol # Overwrite center of array with the darkhole data cr_psf_pix = np.array(darkhole.shape) / 2 - 0.5 - full_arr_center = np.array(img_data.shape) // 2 + if centerxy is None: + full_arr_center = np.array(img_data.shape) // 2 + else: + full_arr_center = (centerxy[1],centerxy[0]) start_psf_ind = full_arr_center - np.array(darkhole.shape) // 2 img_data[start_psf_ind[0]:start_psf_ind[0]+darkhole.shape[0],start_psf_ind[1]:start_psf_ind[1]+darkhole.shape[1]] = darkhole psfcenty, psfcentx = cr_psf_pix + start_psf_ind @@ -1986,9 +1993,17 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol else: label = 'ref' if i>= n_sci else 'sci' fname = f'MOCK_{label}_roll{roll_angles[i]}.fits' - img_data = gaussian_array(array_shape=data_shape) - psfcentx,psfcenty = np.array(img_data.shape) / 2 - 0.5 - + arr_center = np.array(data_shape) / 2 - 0.5 + if centerxy is None: + psfcenty,psfcentx = arr_center + else: + psfcentx,psfcenty = centerxy + + psf_off_xy = (psfcentx-arr_center[1],psfcenty-arr_center[0]) + img_data = gaussian_array(array_shape=data_shape, + xoffset=psf_off_xy[0], + yoffset=psf_off_xy[1]) + # Add some noise rng = np.random.default_rng(seed=None) noise = rng.normal(0,1e-11,img_data.shape) @@ -2001,8 +2016,8 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol xoff,yoff = sep_pix * np.array([-np.sin(np.radians(pa_deg)),np.cos(np.radians(pa_deg))]) planet_psf = gaussian_array(array_shape=data_shape, amp=1e-6, - xoffset=xoff, - yoffset=yoff) + xoffset=xoff+psf_off_xy[0], + yoffset=yoff+psf_off_xy[1]) img_data += planet_psf diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index 5166a1c0..999c6d68 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -1,7 +1,121 @@ from corgidrp.mocks import create_psfsub_dataset from corgidrp.l3_to_l4 import do_psf_subtraction +from corgidrp.data import PyKLIPDataset +import pytest import numpy as np +def test_pyklipdata_ADI(): + + rolls = [0,90] + # Init with center shifted by 1 pixel + mock_sci,mock_ref = create_psfsub_dataset(2,0,rolls,centerxy=(30.5,30.5)) + + pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) + + # Check image is centered properly + for i,image in enumerate(pyklip_dataset._input): + + assert mock_sci.all_data[i,1:,1:] == pytest.approx(image[:-1,:-1]), f"Frame {i} centered improperly." + + # Check roll assignments and filenames match up for sci dataset + for r,roll in enumerate(pyklip_dataset._PAs): + assert pyklip_dataset._filenames[r] == f'MOCK_sci_roll{roll}.fits_INT1', f"Incorrect roll assignment for frame {r}." + + + # Check ref library is None + assert pyklip_dataset.psflib is None, "pyklip_dataset.psflib is not None, even though no reference dataset was provided." 
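+
+# Note on the centering checks in these pyklipdata tests: the mocks are built
+# with centerxy shifted one pixel from the nominal array center (see the
+# "Init with center shifted by 1 pixel" comments), and PyKLIPDataset recenters
+# each frame so the star lands back on the array center. Every recentered
+# frame should therefore equal its input displaced by one pixel in x and y,
+# i.e. recentered[j, k] == input[j + 1, k + 1], which is what the offset
+# slices all_data[i, 1:, 1:] vs image[:-1, :-1] compare.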
+ +def test_pyklipdata_RDI(): + # TODO: + # checks for reference library + # what is pyklip_dataset.psflib.isgoodpsf for? + + rolls = [45,180] + # Init with center shifted by 1 pixel + mock_sci,mock_ref = create_psfsub_dataset(1,1,rolls,centerxy=(30.5,30.5)) + + pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) + + # Check image is centered properly + for i,image in enumerate(pyklip_dataset._input): + + assert mock_sci.all_data[i,1:,1:] == pytest.approx(image[:-1,:-1]), f"Frame {i} centered improperly." + + # Check roll assignments and filenames match up for sci dataset + for r,roll in enumerate(pyklip_dataset._PAs): + assert pyklip_dataset._filenames[r] == f'MOCK_sci_roll{roll}.fits_INT1', f"Incorrect roll assignment for frame {r}." + + + # Check ref library + +def test_pyklipdata_ADIRDI(): + # TODO: checks for reference library + + rolls = [45,-45,180] + # Init with center shifted by 1 pixel + mock_sci,mock_ref = create_psfsub_dataset(2,1,rolls,centerxy=(30.5,30.5)) + + pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) + + # Check image is centered properly + for i,image in enumerate(pyklip_dataset._input): + + assert mock_sci.all_data[i,1:,1:] == pytest.approx(image[:-1,:-1]), f"Frame {i} centered improperly." + + # Check roll assignments and filenames match up for sci dataset + for r,roll in enumerate(pyklip_dataset._PAs): + assert pyklip_dataset._filenames[r] == f'MOCK_sci_roll{roll}.fits_INT1', f"Incorrect roll assignment for frame {r}." + + + # Check ref library + +def test_pyklipdata_badtelescope(): + mock_sci,mock_ref = create_psfsub_dataset(1,1,[0,0]) + mock_sci[0].pri_hdr['TELESCOP'] = "HUBBLE" + + with pytest.raises(UserWarning): + _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) + +def test_pyklipdata_badinstrument(): + mock_sci,mock_ref = create_psfsub_dataset(1,1,[0,0]) + mock_sci[0].pri_hdr['INSTRUME'] = "WFI" + + with pytest.raises(UserWarning): + _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) + + +def test_pyklipdata_badcgimode(): + mock_sci,mock_ref = create_psfsub_dataset(1,1,[0,0]) + mock_sci[0].pri_hdr['MODE'] = "SPC" + + with pytest.raises(UserWarning): + _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) + +def test_pyklipdata_notdataset(): + mock_sci,mock_ref = create_psfsub_dataset(1,0,[0]) + mock_ref = [] + with pytest.raises(UserWarning): + _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) + + mock_sci = [] + mock_ref = None + with pytest.raises(UserWarning): + _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) + + +def test_pyklipdata_badimgshapes(): + mock_sci,mock_ref = create_psfsub_dataset(2,0,[0,0]) + + mock_sci[0].data = np.zeros((5,5)) + with pytest.raises(UserWarning): + _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) + +def test_pyklipdata_multiplepixscales(): + mock_sci,mock_ref = create_psfsub_dataset(2,0,[0,0]) + + mock_sci[0].ext_hdr["PIXSCALE"] = 10 + with pytest.raises(UserWarning): + _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) def test_psf_sub_ADI(): @@ -106,3 +220,12 @@ def test_psf_sub_ADIRDI(): test_psf_sub_ADI() test_psf_sub_RDI() test_psf_sub_ADIRDI() + test_pyklipdata_ADI() + test_pyklipdata_RDI() + test_pyklipdata_ADIRDI() + test_pyklipdata_badtelescope() + test_pyklipdata_badinstrument() + test_pyklipdata_badcgimode() + test_pyklipdata_notdataset() + test_pyklipdata_badimgshapes() + test_pyklipdata_multiplepixscales() From eee53a6b50a462a9fc22a49fd75e418222676da2 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Mon, 27 Jan 2025 14:19:41 -0500 Subject: [PATCH 09/35] make psfsub mock 
dataset more flexible --- corgidrp/mocks.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index a3863bac..31ceceff 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -1885,7 +1885,7 @@ def add_defect(sch_imgs, prob, ori, temp): MJDREF = 0.0 / [d] MJD of fiducial time """ -def gaussian_array(array_shape=[50,50],sigma=2.5,amp=1.,xoffset=0.,yoffset=0.): +def gaussian_array(array_shape=[50,50],sigma=2.5,amp=100.,xoffset=0.,yoffset=0.): """Generate a 2D square array with a centered gaussian surface (for mock PSF data). Args: @@ -1914,7 +1914,11 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol wcs_header = None, data_shape = [60,60], centerxy = None, - outdir = None): + outdir = None, + noise_amp = 1e-11, + ref_psf_spread=1. , + pl_contrast=1e-5 + ): """Generate a mock science and reference dataset ready for the PSF subtraction step. TODO: reference a central pixscale number, rather than hard code. @@ -1991,7 +1995,13 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol # Otherwise generate a 2D gaussian for a fake PSF else: + sci_sigma = 2.5 + ref_sigma = sci_sigma * ref_psf_spread + amp = 100 + pl_amp = amp * pl_contrast + label = 'ref' if i>= n_sci else 'sci' + sigma = ref_sigma if i>= n_sci else sci_sigma fname = f'MOCK_{label}_roll{roll_angles[i]}.fits' arr_center = np.array(data_shape) / 2 - 0.5 if centerxy is None: @@ -2002,11 +2012,13 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol psf_off_xy = (psfcentx-arr_center[1],psfcenty-arr_center[0]) img_data = gaussian_array(array_shape=data_shape, xoffset=psf_off_xy[0], - yoffset=psf_off_xy[1]) + yoffset=psf_off_xy[1], + sigma=sigma, + amp=amp) # Add some noise - rng = np.random.default_rng(seed=None) - noise = rng.normal(0,1e-11,img_data.shape) + rng = np.random.default_rng(seed=123+2*i) + noise = rng.normal(0,noise_amp,img_data.shape) img_data += noise # Add fake planet to sci files @@ -2015,7 +2027,7 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol sep_pix = 10 xoff,yoff = sep_pix * np.array([-np.sin(np.radians(pa_deg)),np.cos(np.radians(pa_deg))]) planet_psf = gaussian_array(array_shape=data_shape, - amp=1e-6, + amp=pl_amp, xoffset=xoff+psf_off_xy[0], yoffset=yoff+psf_off_xy[1]) img_data += planet_psf From d55795b09a7cd49d843a7a9b6504757cb8d87cff Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Thu, 30 Jan 2025 10:39:39 -0500 Subject: [PATCH 10/35] Update default headers with PAM position names --- corgidrp/mocks.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index 1fbeb373..ffd39e67 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -705,6 +705,15 @@ def create_default_headers(arrtype="SCI", vistype="TDEMO"): exthdr['CFAM_V'] = 1.0 exthdr['DPAM_H'] = 1.0 exthdr['DPAM_V'] = 1.0 + exthdr['CFAMNAME'] = '1F' # Color filter for band 1 + exthdr['DPAMNAME'] = 'IMAGING' + exthdr['FPAMNAME'] = 'HLC12_C2R1' # Focal plane mask for NFOV + exthdr['FSAMNAME'] = 'R1C1' # Circular field stop for NFOV + exthdr['LSAMNAME'] = 'NFOV' # Lyot stop for NFOV observations + exthdr['SPAMNAME'] = 'OPEN' # Used for NFOV observations + + + exthdr['DATETIME'] = '2024-01-01T11:00:00.000Z' exthdr['HIERARCH DATA_LEVEL'] = "L1" exthdr['MISSING'] = False @@ -2144,9 +2153,7 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol 
prihdr['XOFFSET'] = 0.0 prihdr['YOFFSET'] = 0.0 prihdr['FILENAME'] = fname - prihdr["MODE"] = 'HLC' - prihdr["BAND"] = 1 - + exthdr['BUNIT'] = 'MJy/sr' exthdr['MASKLOCX'] = psfcentx exthdr['MASKLOCY'] = psfcenty From 7e2d92212afe9eee122c5a023ba38511d60788cd Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Thu, 30 Jan 2025 11:03:40 -0500 Subject: [PATCH 11/35] docstring edits --- corgidrp/data.py | 73 +++++++++++++++++++++++++---------------------- corgidrp/mocks.py | 30 ++++++++++++------- 2 files changed, 59 insertions(+), 44 deletions(-) diff --git a/corgidrp/data.py b/corgidrp/data.py index b32bb0da..f1fcb224 100644 --- a/corgidrp/data.py +++ b/corgidrp/data.py @@ -648,7 +648,6 @@ def __init__(self, data_or_filepath, pri_hdr=None, ext_hdr=None, input_dataset=N if self.ext_hdr['DATATYPE'] != 'Dark': raise ValueError("File that was loaded was not a Dark file.") - class FlatField(Image): """ Master flat generated from raster scan of uranus or Neptune. @@ -942,7 +941,6 @@ def __init__(self, data_or_filepath, pri_hdr=None, ext_hdr=None, input_dataset=N if self.ext_hdr['DATATYPE'] != 'BadPixelMap': raise ValueError("File that was loaded was not a BadPixelMap file.") - class DetectorNoiseMaps(Image): """ Class for DetectorNoiseMaps calibration file. The data is a 3-D stack of 3 frames, each of which is a full SCI frame of fitted @@ -1345,11 +1343,26 @@ def __init__(self, data_or_filepath, err = None, pri_hdr=None, ext_hdr=None, err self.filedir = "." self.filename = "{0}_FluxcalFactor_{1}_{2}.fits".format(orig_input_filename, self.filter, self.nd_filter) - class PyKLIPDataset(pyKLIP_Data): """ A pyKLIP instrument class for Roman Coronagraph Instrument data. + # TODO: Add more bandpasses, modes to self.wave_hlc + # Clarify attribute descriptions + + Attrs: + input: Input corgiDRP dataset. + centers: PSF center locations (double check this). + filenums: file numbers. + filenames: file names. + PAs: position angles. + wvs: wavelengths. + wcs: WCS header information. + IWA: inner working angle. + OWA: outer working angle. + psflib: corgiDRP dataset containing reference PSF observations. + output: PSF subtracted pyKLIP dataset + """ #################### @@ -1482,15 +1495,13 @@ def readdata(self, """ Read the input science observations. - Parameters - ---------- - filepaths : 1D-array - Paths of the input science observations. - - Returns - ------- - None. - + Args: + dataset (corgidrp.data.Dataset): + Dataset containing input science observations. + psflib_dataset (corgidrp.data.Dataset, optional): + Dataset containing input reference observations. The default is None. + highpass (bool, optional): + Toggle to do highpass filtering. Defaults fo False. """ # Check input. @@ -1681,28 +1692,22 @@ def savedata(self, Function to save the data products that will be called internally by pyKLIP. - Parameters - ---------- - filepath : path - Path of the output FITS file. - data : 3D-array - KLIP-subtracted data of shape (nkl, ny, nx). - klipparams : str, optional - PyKLIP keyword arguments used for the KLIP subtraction. The default - is None. - filetype : str, optional - Data type of the pyKLIP product. The default is ''. - zaxis : list, optional - List of KL modes used for the KLIP subtraction. The default is - None. - more_keywords : dict, optional - Dictionary of additional header keywords to be written to the - output FITS file. The default is None. - - Returns - ------- - None. - + Args: + filepath (path): + Path of the output FITS file. + data (3D-array): + KLIP-subtracted data of shape (nkl, ny, nx). 
+ klipparams (str, optional): + PyKLIP keyword arguments used for the KLIP subtraction. The default + is None. + filetype (str, optional): + Data type of the pyKLIP product. The default is ''. + zaxis (list, optional): + List of KL modes used for the KLIP subtraction. The default is + None. + more_keywords (dict, optional): + Dictionary of additional header keywords to be written to the + output FITS file. The default is None. """ # Make FITS file. diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index ffd39e67..e62648b3 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -2026,12 +2026,12 @@ def gaussian_array(array_shape=[50,50],sigma=2.5,amp=100.,xoffset=0.,yoffset=0.) def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhole_reffiles=None, wcs_header = None, - data_shape = [60,60], + data_shape = [100,100], centerxy = None, outdir = None, noise_amp = 1e-11, ref_psf_spread=1. , - pl_contrast=1e-5 + pl_contrast=1e-3 ): """Generate a mock science and reference dataset ready for the PSF subtraction step. TODO: reference a central pixscale number, rather than hard code. @@ -2041,15 +2041,25 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol n_ref (int): nummber of reference frames, must be >= 0. roll_angles (list-like): list of the roll angles of each science and reference frame, with the science frames listed first. - darkhole_scifiles (list of str, optional): Filepaths to the darkhole science frames. If not provided, - a noisy 2D gaussian will be used instead. Defaults to None. - darkhole_reffiles (list of str, optional): Filepaths to the darkhole reference frames. If not provided, - a noisy 2D gaussian will be used instead. Defaults to None. - wcs_header (astropy.fits.Header, optional): Fits header object containing WCS information. If not provided, - a mock header will be created. Defaults to None. - data_shape (list-like): desired shape of data array. Must have length 2. Defaults to [1024,1024]. - outdir (str, optional): Desired output directory. If not provided, data will not be saved. Defaults to None. + darkhole_scifiles (list of str, optional): Filepaths to the darkhole science frames. + If not provided, a noisy 2D gaussian will be used instead. Defaults to None. + darkhole_reffiles (list of str, optional): Filepaths to the darkhole reference frames. + If not provided, a noisy 2D gaussian will be used instead. Defaults to None. + wcs_header (astropy.fits.Header, optional): Fits header object containing WCS + information. If not provided, a mock header will be created. Defaults to None. + data_shape (list of int): desired shape of data array. Must have length 2. Defaults to + [1024,1024]. + centerxy (list of float): Desired PSF center in xy order. Must have length 2. Defaults + to image center. + outdir (str, optional): Desired output directory. If not provided, data will not be + saved. Defaults to None. + noise_amp (float): Amplitude of gaussian noise added to fake data. Defaults to 1e-11. + ref_psf_spread (float): Fractional increase in gaussian PSF width between science and + reference PSFs. Defaults to 1. + pl_contrast (float): Flux ratio between planet and starlight incident on the detector. + Defaults to 1e-3. + Returns: tuple: corgiDRP science Dataset object and reference Dataset object. 
""" From 29c20b5d72a5f8a9676838f9ff476ec39d85ad13 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Thu, 30 Jan 2025 11:09:09 -0500 Subject: [PATCH 12/35] add tqdm to requirements.txt --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 71d4d114..9b69cd8a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,3 +8,4 @@ photutils >= 2.0.0 statsmodels pyklip emccd-detect +tqdm From 4858ec383152c8aac88d9490387d90d048b6ae00 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Thu, 30 Jan 2025 11:50:01 -0500 Subject: [PATCH 13/35] use correct header kws to determine filter/mode --- corgidrp/data.py | 31 +++++++++++++++---------------- corgidrp/l3_to_l4.py | 23 +++++++++++++++-------- tests/test_psfsub.py | 12 ++++++------ 3 files changed, 36 insertions(+), 30 deletions(-) diff --git a/corgidrp/data.py b/corgidrp/data.py index f1fcb224..89294cd4 100644 --- a/corgidrp/data.py +++ b/corgidrp/data.py @@ -1376,6 +1376,10 @@ def __init__(self, center_include_offset=True): """ Initialize the pyKLIP instrument class for space telescope data. + # TODO: Figure out how to input and test WCS data + Determine inner working angle based on PAM positions + - Inner working angle based on Focal plane mask (starts with HLC) + color filter ('1F') for primary mode + - Outer working angle based on field stop? (should be R1C1 or R1C3 for primary mode) Args: dataset (corgidrp.data.Dataset): @@ -1393,7 +1397,7 @@ def __init__(self, super(PyKLIPDataset, self).__init__() # Set filter wavelengths - self.wave_hlc = {1: 0.575} # micron + self.wave_hlc = {'1F': 0.575} # micron # Optional variables self.center_include_offset = center_include_offset @@ -1536,7 +1540,7 @@ def readdata(self, TELESCOP = phead['TELESCOP'] INSTRUME = phead['INSTRUME'] - MODE = phead['MODE'] + CFAMNAME = shead['CFAMNAME'] data = frame.data if data.ndim == 2: data = data[np.newaxis, :] @@ -1561,18 +1565,16 @@ def readdata(self, filenames_all += [os.path.split(phead['FILENAME'])[1] + '_INT%.0f' % (j + 1) for j in range(NINTS)] PAs_all += [shead['ROLL']] * NINTS - if TELESCOP != "ROMAN": - raise UserWarning('Data is not from Roman Space Telescope.') - if INSTRUME == 'CGI': - if MODE == 'HLC': - CWAVEL = self.wave_hlc[phead['BAND']] - else: - raise UserWarning('Unknown Roman CGI instrument mode.') - else: - raise UserWarning('Data originates from unknown Roman instrument.') + if TELESCOP != "ROMAN" or INSTRUME != "CGI": + raise UserWarning('Data is not from Roman Space Telescope Coronagraph Instrument.') + + # Get center wavelengths + try: + CWAVEL = self.wave_hlc[CFAMNAME] + except: + raise UserWarning(f'CFAM position {CFAMNAME} is not configured in corgidrp.data.PyKLIPDataset .') wvs_all += [1e-6 * CWAVEL] * NINTS - # TODO: Figure out actual WCS info wcs_hdr = wcs.WCS(header=shead, naxis=shead['WCSAXES']) for j in range(NINTS): wcs_all += [wcs_hdr.deepcopy()] @@ -1590,10 +1592,7 @@ def readdata(self, PIXSCALE = np.unique(np.array(PIXSCALE)) if len(PIXSCALE) != 1: raise UserWarning('Some science files do not have matching pixel scales') - if TELESCOP == 'ROMAN' and INSTRUME == 'CGI' and MODE == 'HLC': - iwa_all = np.min(wvs_all) / 6.5 * 180. / np.pi * 3600. / PIXSCALE[0] # pix - else: - iwa_all = 1. # pix + iwa_all = np.min(wvs_all) / 6.5 * 180. / np.pi * 3600. / PIXSCALE[0] # pix owa_all = np.sum(np.array(input_all.shape[1:]) / 2.) # pix # Recenter science images so that the star is at the center of the array. 
diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index 79c8cf4c..7186f2a4 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -270,18 +270,24 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, pri_hdr['NAXIS'] = 0 # Add observation info from input dataset - pri_hdr['TELESCOP'] = sci_dataset[0].pri_hdr['TELESCOP'] - pri_hdr['INSTRUME'] = sci_dataset[0].pri_hdr['INSTRUME'] - pri_hdr['MODE'] = sci_dataset[0].pri_hdr['MODE'] - pri_hdr['BAND'] = sci_dataset[0].pri_hdr['BAND'] + pri_hdr_keys = ['TELESCOP','INSTRUME'] + for kw in pri_hdr_keys: + pri_hdr[kw] = sci_dataset[0].pri_hdr[kw] # Make extension header ext_hdr = fits.Header() ext_hdr['NAXIS'] = 2 ext_hdr['NAXIS1'] = naxis1 ext_hdr['NAXIS2'] = naxis2 - ext_hdr['BUNIT'] = sci_dataset[0].ext_hdr['BUNIT'] - ext_hdr['PIXSCALE'] = sci_dataset[0].ext_hdr['PIXSCALE'] + + # Add info from input dataset + ext_hdr_keys = ['BUNIT','PIXSCALE','CFAMNAME', + 'DPAMNAME','FPAMNAME','FSAMNAME', + 'LSAMNAME','SPAMNAME'] + for kw in ext_hdr_keys: + ext_hdr[kw] = sci_dataset[0].ext_hdr[kw] + + # Add info from pyklip ext_hdr['KLIP_ALG'] = mode ext_hdr['KLMODES'] = pyklip_hdr[f'KLMODE{i}'] ext_hdr['STARLOCX'] = pyklip_hdr['PSFCENTX'] @@ -291,9 +297,10 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, if "HISTORY" in sci_dataset[0].ext_hdr.keys(): ext_hdr['HISTORY'] = sci_dataset[0].ext_hdr['HISTORY'] + # Construct Image object and add to list frame = data.Image(frame_data, - pri_hdr=pri_hdr, ext_hdr=ext_hdr, - err=err, dq=dq) + pri_hdr=pri_hdr, ext_hdr=ext_hdr, + err=err, dq=dq) frames.append(frame) diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index 999c6d68..90204d4b 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -8,7 +8,7 @@ def test_pyklipdata_ADI(): rolls = [0,90] # Init with center shifted by 1 pixel - mock_sci,mock_ref = create_psfsub_dataset(2,0,rolls,centerxy=(30.5,30.5)) + mock_sci,mock_ref = create_psfsub_dataset(2,0,rolls,centerxy=(50.5,50.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) @@ -32,7 +32,7 @@ def test_pyklipdata_RDI(): rolls = [45,180] # Init with center shifted by 1 pixel - mock_sci,mock_ref = create_psfsub_dataset(1,1,rolls,centerxy=(30.5,30.5)) + mock_sci,mock_ref = create_psfsub_dataset(1,1,rolls,centerxy=(50.5,50.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) @@ -53,7 +53,7 @@ def test_pyklipdata_ADIRDI(): rolls = [45,-45,180] # Init with center shifted by 1 pixel - mock_sci,mock_ref = create_psfsub_dataset(2,1,rolls,centerxy=(30.5,30.5)) + mock_sci,mock_ref = create_psfsub_dataset(2,1,rolls,centerxy=(50.5,50.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) @@ -84,9 +84,9 @@ def test_pyklipdata_badinstrument(): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) -def test_pyklipdata_badcgimode(): +def test_pyklipdata_badcfamname(): mock_sci,mock_ref = create_psfsub_dataset(1,1,[0,0]) - mock_sci[0].pri_hdr['MODE'] = "SPC" + mock_sci[0].ext_hdr['CFAMNAME'] = "BAD" with pytest.raises(UserWarning): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) @@ -225,7 +225,7 @@ def test_psf_sub_ADIRDI(): test_pyklipdata_ADIRDI() test_pyklipdata_badtelescope() test_pyklipdata_badinstrument() - test_pyklipdata_badcgimode() + test_pyklipdata_badcfamname() test_pyklipdata_notdataset() test_pyklipdata_badimgshapes() test_pyklipdata_multiplepixscales() From a95ab915067596e77e04cd2a6179ecc26c726992 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Thu, 30 Jan 2025 16:24:34 -0500 Subject: [PATCH 14/35] PSF 
subtraction step now includes cropping before subtraction by default --- corgidrp/l3_to_l4.py | 46 +++++++++++++++++++++++++------------------- tests/test_crop.py | 2 +- 2 files changed, 27 insertions(+), 21 deletions(-) diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index 7186f2a4..a6771499 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -84,9 +84,9 @@ def crop(input_dataset,sizexy=None,centerxy=None): dqhdr = frame.dq_hdr errhdr = frame.err_hdr - # Pick default crop size based on the size of the effective field of view (determined by the Lyot stop) + # Pick default crop size based on the size of the effective field of view if sizexy is None: - if prihdr['LSAMNAME'] == 'NFOV': + if exthdr['LSAMNAME'] == 'NFOV': sizexy = 60 else: raise UserWarning('Crop function is currently only configured for NFOV (narrow field-of-view) observations if sizexy is not provided.') @@ -166,40 +166,42 @@ def crop(input_dataset,sizexy=None,centerxy=None): output_dataset = data.Dataset(frames_out) - history_msg = f"""Frames cropped to new shape {output_dataset[0].data.shape} on center {centerxy}.\ - Updated header kws: {", ".join(updated_hdrs)}.""" - - output_dataset.update_after_processing_step(history_msg) + history_msg1 = f"""Frames cropped to new shape {list(output_dataset[0].data.shape)} on center {list(centerxy)}. Updated header kws: {", ".join(updated_hdrs)}.""" + output_dataset.update_after_processing_step(history_msg1) return output_dataset def do_psf_subtraction(input_dataset, reference_star_dataset=None, mode=None, annuli=1,subsections=1,movement=1, - numbasis = [1,4,8,16],outdir='KLIP_SUB',fileprefix="" + numbasis=[1,4,8,16],outdir='KLIP_SUB',fileprefix="", + do_crop=True, + crop_sizexy=None ): """ Perform PSF subtraction on the dataset. Optionally using a reference star dataset. TODO: Handle nans & propagate DQ array - Crop data to darkhole size ~(60x60) centered on nearest pixel (waiting on crop step function PR) Rotate north at the end - Do frame combine before PSF subtraction? What info is missing from output dataset headers? Add comments to new ext header cards - Figure out output roll angle. How to populate HISTORY header kw? Args: input_dataset (corgidrp.data.Dataset): a dataset of Images (L3-level) - reference_star_dataset (corgidrp.data.Dataset): a dataset of Images of the reference star [optional] - mode (str): pyKLIP PSF subraction mode, e.g. ADI/RDI/ADI+RDI. Mode will be chosen autonomously if not specified. - annuli (int): number of concentric annuli to run separate subtractions on. Defaults to 1. - subsections (int): number of angular subsections to run separate subtractions on. Defaults to 1. - movement (int): KLIP movement parameter. Defaults to 1. - numbasis (int or list of int): number of KLIP modes to retain. Defaults to [1,4,8,16]. - outdir (str or path): path to output directory. Defaults to "KLIP_SUB". - fileprefix (str): prefix of saved output files. Defaults to "". + reference_star_dataset (corgidrp.data.Dataset, optional): a dataset of Images of the reference + star [optional] + mode (str, optional): pyKLIP PSF subraction mode, e.g. ADI/RDI/ADI+RDI. Mode will be chosen autonomously + if not specified. + annuli (int, optional): number of concentric annuli to run separate subtractions on. Defaults to 1. + subsections (int, optional): number of angular subsections to run separate subtractions on. Defaults to 1. + movement (int, optional): KLIP movement parameter. Defaults to 1. + numbasis (int or list of int, optional): number of KLIP modes to retain. 
Defaults to [1,4,8,16]. + outdir (str or path, optional): path to output directory. Defaults to "KLIP_SUB". + fileprefix (str, optional): prefix of saved output files. Defaults to "". + do_crop (bool): whether to crop data before PSF subtraction. Defaults to True. + crop_sizexy (list of int, optional): Desired size to crop the images to before PSF subtraction. Defaults to + None, which results in the step choosing a crop size based on the imaging mode. Returns: corgidrp.data.Dataset: a version of the input dataset with the PSF subtraction applied (L4-level) @@ -235,7 +237,10 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, if not os.path.exists(outdir): os.makedirs(outdir) - # TODO: Crop data (make sure psf center is updated) + # Crop data + if do_crop: + sci_dataset = crop(sci_dataset,sizexy=crop_sizexy) + ref_dataset = None if ref_dataset is None else crop(ref_dataset,sizexy=crop_sizexy) # Mask data where DQ > 0, let pyklip deal with the nans sci_dataset_masked = nan_flags(sci_dataset) @@ -295,7 +300,8 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, ext_hdr['PSFCENTX'] = pyklip_hdr['PSFCENTX'] ext_hdr['PSFCENTY'] = pyklip_hdr['PSFCENTY'] if "HISTORY" in sci_dataset[0].ext_hdr.keys(): - ext_hdr['HISTORY'] = sci_dataset[0].ext_hdr['HISTORY'] + history_str = str(sci_dataset[0].ext_hdr['HISTORY']) + ext_hdr['HISTORY'] = ''.join(history_str.split('\n')) # Construct Image object and add to list frame = data.Image(frame_data, diff --git a/tests/test_crop.py b/tests/test_crop.py index 67fd8ce2..867d8856 100644 --- a/tests/test_crop.py +++ b/tests/test_crop.py @@ -218,7 +218,7 @@ def test_non_nfov_input(): test_dataset = make_test_dataset(shape=[100,100],centxy=[50.5,50.5]) for frame in test_dataset: - frame.pri_hdr['LSAMNAME'] = 'WFOV' + frame.ext_hdr['LSAMNAME'] = 'WFOV' try: _ = crop(test_dataset,sizexy=20,centerxy=None) From 067ae5cf04bd29450e4cb8ea502638c6463464ec Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Thu, 30 Jan 2025 16:25:14 -0500 Subject: [PATCH 15/35] add tests to compare psf subtraction with analytical expectation --- tests/test_psfsub.py | 217 ++++++++++++++++++++++++++++++++++++------- 1 file changed, 181 insertions(+), 36 deletions(-) diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index 90204d4b..126ec600 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -1,9 +1,49 @@ from corgidrp.mocks import create_psfsub_dataset from corgidrp.l3_to_l4 import do_psf_subtraction from corgidrp.data import PyKLIPDataset +from scipy.ndimage import shift, rotate import pytest import numpy as np +## Helper functions/quantities + +def create_circular_mask(h, w, center=None, r=None): + """Creates a circular mask + + Args: + h (int): array height + w (int): array width + center (list of float, optional): Center of mask. Defaults to the + center of the array. + r (float, optional): radius of mask. Defaults to the minimum distance + from the center to the edge of the array. + + Returns: + np.array: boolean array with True inside the circle, False outside. + """ + + if center is None: # use the middle of the image + center = (w/2, h/2) + if r is None: # use the smallest distance between the center and image walls + r = min(center[0], center[1], w-center[0], h-center[1]) + + Y, X = np.ogrid[:h, :w] + dist_from_center = np.sqrt((X - center[0])**2 + (Y-center[1])**2) + + mask = dist_from_center <= r + return mask + +iwa_lod = 3. 
+owa_lod = 9.7 +d = 2.36 #m +lam = 573.8e-9 #m +pixscale_arcsec = 0.0218 + +iwa_pix = iwa_lod * lam / d * 206265 / pixscale_arcsec +owa_pix = owa_lod * lam / d * 206265 / pixscale_arcsec + +## pyKLIP data class tests + def test_pyklipdata_ADI(): rolls = [0,90] @@ -83,7 +123,6 @@ def test_pyklipdata_badinstrument(): with pytest.raises(UserWarning): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) - def test_pyklipdata_badcfamname(): mock_sci,mock_ref = create_psfsub_dataset(1,1,[0,0]) mock_sci[0].ext_hdr['CFAMNAME'] = "BAD" @@ -102,7 +141,6 @@ def test_pyklipdata_notdataset(): with pytest.raises(UserWarning): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) - def test_pyklipdata_badimgshapes(): mock_sci,mock_ref = create_psfsub_dataset(2,0,[0,0]) @@ -117,33 +155,56 @@ def test_pyklipdata_multiplepixscales(): with pytest.raises(UserWarning): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) -def test_psf_sub_ADI(): +## PSF subtraction step tests + +def test_psf_sub_ADI_nocrop(): - numbasis = [1,2,4] + numbasis = [1] rolls = [270+13,270-13] - mock_sci,mock_ref = create_psfsub_dataset(2,0,rolls) + mock_sci,mock_ref = create_psfsub_dataset(2,0,rolls,pl_contrast=1e-3) result = do_psf_subtraction(mock_sci,mock_ref, numbasis=numbasis, - fileprefix='test_ADI') + fileprefix='test_ADI', + do_crop=False) + + analytical_result = shift((rotate(mock_sci[0].data - mock_sci[1].data,-rolls[0],reshape=False,cval=0) + rotate(mock_sci[1].data - mock_sci[0].data,-rolls[1],reshape=False,cval=0)) / 2, + [0.5,0.5], + cval=np.nan) for i,frame in enumerate(result): # import matplotlib.pyplot as plt - # plt.imshow(frame.data,origin='lower') - # plt.colorbar() - # plt.title(f'{frame.ext_hdr["KLIP_ALG"]}, {frame.ext_hdr["KLMODES"]} KL MODE(S)') - # plt.scatter(frame.ext_hdr['PSFCENTX'],frame.ext_hdr['PSFCENTY']) + + # fig,axes = plt.subplots(1,3,sharey=True,layout='constrained',figsize=(12,3)) + # im0 = axes[0].imshow(frame.data,origin='lower') + # plt.colorbar(im0,ax=axes[0],shrink=0.8) + # axes[0].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) + # axes[0].set_title(f'PSF Sub Result ({numbasis[i]} KL Modes)') + + # im1 = axes[1].imshow(analytical_result,origin='lower') + # plt.colorbar(im1,ax=axes[1],shrink=0.8) + # axes[1].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) + # axes[1].set_title('Analytical result') + + # im2 = axes[2].imshow(frame.data - analytical_result,origin='lower') + # plt.colorbar(im2,ax=axes[2],shrink=0.8) + # axes[2].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) + # axes[2].set_title('Difference') + + # fig.suptitle('ADI') + # plt.show() # plt.close() # Overall counts should decrease if not np.nansum(mock_sci[0].data) > np.nansum(frame.data): - print(f'sum input: {np.sum(mock_sci[0].data)}') - print(f'sum output: {np.sum(frame.data)}') - raise Exception(f"ADI subtraction resulted in increased counts for frame {i}.") + # Result should match analytical result + if np.nanmax(np.abs(frame.data - analytical_result)) > 1e-5: + raise Exception(f"Absolute difference between ADI result and analytical result is greater then 1e-5.") + if not frame.ext_hdr['KLIP_ALG'] == 'ADI': raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'ADI' mode when provided 2 science images and no references.") @@ -151,56 +212,112 @@ def test_psf_sub_ADI(): expected_data_shape = (len(numbasis),*mock_sci[0].data.shape) if not result.all_data.shape == expected_data_shape: raise Exception(f"Result data shape was {result.all_data.shape} instead of expected 
{expected_data_shape} after ADI subtraction.") - -def test_psf_sub_RDI(): - numbasis=[1,2,4] - rolls = [13,0,90] - mock_sci,mock_ref = create_psfsub_dataset(1,1,rolls) +def test_psf_sub_RDI_nocrop(): + + numbasis = [1] + rolls = [13,0] + noise_amp=1e-8 + pl_contrast=1e-3 + mock_sci,mock_ref = create_psfsub_dataset(1,1,rolls,ref_psf_spread=1., + pl_contrast=pl_contrast, + noise_amp=noise_amp) result = do_psf_subtraction(mock_sci,mock_ref, numbasis=numbasis, - fileprefix='test_RDI') + fileprefix='test_RDI', + do_crop=False + ) + analytical_result = rotate(mock_sci[0].data - mock_ref[0].data,-rolls[0],reshape=False,cval=np.nan) for i,frame in enumerate(result): + mask = create_circular_mask(*frame.data.shape,r=iwa_pix,center=(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY'])) + masked_frame = np.where(mask,np.nan,frame.data) + # import matplotlib.pyplot as plt - # plt.imshow(frame.data,origin='lower') - # plt.colorbar() - # plt.title(f'{frame.ext_hdr["KLIP_ALG"]}, {frame.ext_hdr["KLMODES"]} KL MODE(S)') - # plt.scatter(frame.ext_hdr['PSFCENTX'],frame.ext_hdr['PSFCENTY']) + + # fig,axes = plt.subplots(1,3,sharey=True,layout='constrained',figsize=(12,3)) + # im0 = axes[0].imshow(frame.data - np.nanmedian(frame.data),origin='lower') + # plt.colorbar(im0,ax=axes[0],shrink=0.8) + # axes[0].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) + # axes[0].set_title(f'PSF Sub Result ({numbasis[i]} KL Modes, Median Subtracted)') + + # im1 = axes[1].imshow(analytical_result,origin='lower') + # plt.colorbar(im1,ax=axes[1],shrink=0.8) + # axes[1].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) + # axes[1].set_title('Analytical result') + + # im2 = axes[2].imshow(masked_frame - np.nanmedian(frame.data) - analytical_result,origin='lower') + # plt.colorbar(im2,ax=axes[2],shrink=0.8) + # axes[2].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) + # axes[2].set_title('Difference') + + # fig.suptitle('RDI') + # plt.show() # plt.close() # Overall counts should decrease if not np.nansum(mock_sci[0].data) > np.nansum(frame.data): raise Exception(f"RDI subtraction resulted in increased counts for frame {i}.") - + + # The step should choose mode RDI based on having 1 roll and 1 reference. 
if not frame.ext_hdr['KLIP_ALG'] == 'RDI': raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'RDI' mode when provided 1 science image and 1 reference.") + + # Frame should match analytical result outside of the IWA (after correcting for the median offset) + if not np.nanmax(np.abs((masked_frame - np.nanmedian(frame.data)) - analytical_result)) < 1e-5: + raise Exception("RDI subtraction did not produce expected analytical result.") # Check expected data shape expected_data_shape = (len(numbasis),*mock_sci[0].data.shape) if not result.all_data.shape == expected_data_shape: raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after RDI subtraction.") -def test_psf_sub_ADIRDI(): +def test_psf_sub_ADIRDI_nocrop(): - numbasis = [1,2,4] - rolls = [13,-13,0,0] - mock_sci,mock_ref = create_psfsub_dataset(2,1,rolls) + numbasis = [1,2] + rolls = [13,-13,0] + mock_sci,mock_ref = create_psfsub_dataset(2,1,rolls, + pl_contrast=1e-3) + + analytical_result1 = (rotate(mock_sci[0].data - (mock_sci[1].data/2+mock_ref[0].data/2),-rolls[0],reshape=False,cval=0) + rotate(mock_sci[1].data - (mock_sci[0].data/2+mock_ref[0].data/2),-rolls[1],reshape=False,cval=0)) / 2 + analytical_result2 = (rotate(mock_sci[0].data - mock_sci[1].data,-rolls[0],reshape=False,cval=0) + rotate(mock_sci[1].data - mock_sci[0].data,-rolls[1],reshape=False,cval=0)) / 2 + analytical_results = [analytical_result1,analytical_result2] + result = do_psf_subtraction(mock_sci,mock_ref, numbasis=numbasis, - fileprefix='test_ADI+RDI') + fileprefix='test_ADI+RDI', + do_crop=False) for i,frame in enumerate(result): + + mask = create_circular_mask(*frame.data.shape,r=iwa_pix,center=(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY'])) + masked_frame = np.where(mask,np.nan,frame.data) + # import matplotlib.pyplot as plt - # plt.imshow(frame.data,origin='lower') - # plt.colorbar() - # plt.title(f'{frame.ext_hdr["KLIP_ALG"]}, {frame.ext_hdr["KLMODES"]} KL MODE(S)') - # plt.scatter(frame.ext_hdr['PSFCENTX'],frame.ext_hdr['PSFCENTY']) + + # fig,axes = plt.subplots(1,3,sharey=True,layout='constrained',figsize=(12,3)) + # im0 = axes[0].imshow(frame.data - np.nanmedian(frame.data),origin='lower') + # plt.colorbar(im0,ax=axes[0],shrink=0.8) + # axes[0].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) + # axes[0].set_title(f'PSF Sub Result ({numbasis[i]} KL Modes, Median Subtracted)') + + # im1 = axes[1].imshow(analytical_results[i],origin='lower') + # plt.colorbar(im1,ax=axes[1],shrink=0.8) + # axes[1].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) + # axes[1].set_title('Analytical result') + + # im2 = axes[2].imshow(masked_frame - np.nanmedian(frame.data) - analytical_results[i],origin='lower') + # plt.colorbar(im2,ax=axes[2],shrink=0.8) + # axes[2].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) + # axes[2].set_title('Difference') + + # fig.suptitle('ADI+RDI') + # plt.show() # plt.close() @@ -208,18 +325,46 @@ def test_psf_sub_ADIRDI(): if not np.nansum(mock_sci[0].data) > np.nansum(frame.data): raise Exception(f"ADI+RDI subtraction resulted in increased counts for frame {i}.") + # Corgidrp should know to choose ADI+RDI mode if not frame.ext_hdr['KLIP_ALG'] == 'ADI+RDI': raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'ADI+RDI' mode when provided 2 science images and 1 reference.") + + # Frame should match analytical result outside of the IWA (after correcting for the median offset) for KL mode 1 + if i==0: + if not 
np.nanmax(np.abs((masked_frame - np.nanmedian(frame.data)) - analytical_results[i])) < 1e-5: + raise Exception("ADI+RDI subtraction did not produce expected analytical result.") # Check expected data shape expected_data_shape = (len(numbasis),*mock_sci[0].data.shape) if not result.all_data.shape == expected_data_shape: raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after ADI+RDI subtraction.") +def test_psf_sub_withcrop(): + + numbasis = [1,2] + rolls = [270+13,270-13] + mock_sci,mock_ref = create_psfsub_dataset(2,0,rolls,pl_contrast=1e-3) + + result = do_psf_subtraction(mock_sci,mock_ref, + numbasis=numbasis, + fileprefix='test_withcrop') + + for i,frame in enumerate(result): + + # Overall counts should decrease + if not np.nansum(mock_sci[0].data) > np.nansum(frame.data): + raise Exception(f"ADI subtraction resulted in increased counts for frame {i}.") + + # Check expected data shape + expected_data_shape = (len(numbasis),60,60) + if not result.all_data.shape == expected_data_shape: + raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after ADI subtraction.") + if __name__ == '__main__': - test_psf_sub_ADI() - test_psf_sub_RDI() - test_psf_sub_ADIRDI() + test_psf_sub_ADI_nocrop() + test_psf_sub_RDI_nocrop() + test_psf_sub_ADIRDI_nocrop() + test_psf_sub_withcrop() test_pyklipdata_ADI() test_pyklipdata_RDI() test_pyklipdata_ADIRDI() From 2813b03751b5542d97f54356e677dc35ef29d1eb Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Thu, 30 Jan 2025 17:18:58 -0500 Subject: [PATCH 16/35] post psf-subtraction header edits --- corgidrp/l3_to_l4.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index a6771499..c9e9a17f 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -269,11 +269,13 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, pri_hdr = pyklip_hdr.copy() naxis1 = pri_hdr['NAXIS1'] naxis2 = pri_hdr['NAXIS2'] - del pri_hdr['NAXIS1'] - del pri_hdr['NAXIS2'] - del pri_hdr['NAXIS3'] pri_hdr['NAXIS'] = 0 + remove_kws = ['NAXIS1','NAXIS2','NAXIS3', + 'CRPIX1','CRPIX2'] + for kw in remove_kws: + del pri_hdr[kw] + # Add observation info from input dataset pri_hdr_keys = ['TELESCOP','INSTRUME'] for kw in pri_hdr_keys: @@ -297,8 +299,6 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, ext_hdr['KLMODES'] = pyklip_hdr[f'KLMODE{i}'] ext_hdr['STARLOCX'] = pyklip_hdr['PSFCENTX'] ext_hdr['STARLOCY'] = pyklip_hdr['PSFCENTY'] - ext_hdr['PSFCENTX'] = pyklip_hdr['PSFCENTX'] - ext_hdr['PSFCENTY'] = pyklip_hdr['PSFCENTY'] if "HISTORY" in sci_dataset[0].ext_hdr.keys(): history_str = str(sci_dataset[0].ext_hdr['HISTORY']) ext_hdr['HISTORY'] = ''.join(history_str.split('\n')) From d219b909559877f1dcb5a1299a7d7513ada678e1 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Sat, 8 Feb 2025 14:44:06 -0500 Subject: [PATCH 17/35] wcs header cd fix --- corgidrp/data.py | 19 +++++++++++++------ corgidrp/l3_to_l4.py | 4 +--- corgidrp/mocks.py | 6 +++++- 3 files changed, 19 insertions(+), 10 deletions(-) diff --git a/corgidrp/data.py b/corgidrp/data.py index 89294cd4..65999904 100644 --- a/corgidrp/data.py +++ b/corgidrp/data.py @@ -1,4 +1,5 @@ import os +import warnings import numpy as np import numpy.ma as ma import astropy.io.fits as fits @@ -6,6 +7,7 @@ import pandas as pd import pyklip from pyklip.instruments.Instrument import Data as pyKLIP_Data +from pyklip.instruments.utils.wcsgen import 
generate_wcs from astropy import wcs import copy import corgidrp @@ -1349,6 +1351,7 @@ class PyKLIPDataset(pyKLIP_Data): # TODO: Add more bandpasses, modes to self.wave_hlc # Clarify attribute descriptions + # Add wcs header info! Attrs: input: Input corgiDRP dataset. @@ -1357,7 +1360,7 @@ class PyKLIPDataset(pyKLIP_Data): filenames: file names. PAs: position angles. wvs: wavelengths. - wcs: WCS header information. + wcs: WCS header information. Currently None. IWA: inner working angle. OWA: outer working angle. psflib: corgiDRP dataset containing reference PSF observations. @@ -1376,8 +1379,7 @@ def __init__(self, center_include_offset=True): """ Initialize the pyKLIP instrument class for space telescope data. - # TODO: Figure out how to input and test WCS data - Determine inner working angle based on PAM positions + # TODO: Determine inner working angle based on PAM positions - Inner working angle based on Focal plane mask (starts with HLC) + color filter ('1F') for primary mode - Outer working angle based on field stop? (should be R1C1 or R1C3 for primary mode) @@ -1575,10 +1577,15 @@ def readdata(self, raise UserWarning(f'CFAM position {CFAMNAME} is not configured in corgidrp.data.PyKLIPDataset .') wvs_all += [1e-6 * CWAVEL] * NINTS - wcs_hdr = wcs.WCS(header=shead, naxis=shead['WCSAXES']) - for j in range(NINTS): - wcs_all += [wcs_hdr.deepcopy()] + # pyklip will look for wcs.cd, so make sure that attribute exists + wcs_obj = wcs.WCS(header=shead, naxis=shead['WCSAXES']) + if not hasattr(wcs_obj.wcs,'cd'): + wcs_obj.wcs.cd = wcs_obj.wcs.pc * wcs_obj.wcs.cdelt + + for j in range(NINTS): + wcs_all += [wcs_obj.deepcopy()] + try: input_all = np.concatenate(input_all) except ValueError: diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index c9e9a17f..150b99cd 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -182,11 +182,9 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, Perform PSF subtraction on the dataset. Optionally using a reference star dataset. TODO: Handle nans & propagate DQ array - Rotate north at the end What info is missing from output dataset headers? Add comments to new ext header cards - How to populate HISTORY header kw? 
- + Args: input_dataset (corgidrp.data.Dataset): a dataset of Images (L3-level) reference_star_dataset (corgidrp.data.Dataset, optional): a dataset of Images of the reference diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index e62648b3..dcec3d1a 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -17,6 +17,7 @@ import corgidrp.detector as detector from corgidrp.detector import imaging_area_geom, unpack_geom from corgidrp.pump_trap_calibration import (P1, P1_P1, P1_P2, P2, P2_P2, P3, P2_P3, P3_P3, tau_temp) +from pyklip.instruments.utils.wcsgen import generate_wcs from emccd_detect.emccd_detect import EMCCDDetect from emccd_detect.util.read_metadata_wrapper import MetadataWrapper @@ -2176,7 +2177,10 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol # Add WCS header info, if provided if wcs_header is None: - wcs_header = fits.header.Header.fromstring(default_wcs_string,sep='\n') + wcs_header = generate_wcs(roll_angles[i], + [psfcentx,psfcenty], + platescale=0.0218).to_header() + # wcs_header._cards = wcs_header._cards[-1] exthdr.extend(wcs_header) From bf82287807c91a832282776f4b0ca02ee475f158 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Sun, 9 Feb 2025 11:42:56 -0500 Subject: [PATCH 18/35] rounding error bug fix --- corgidrp/data.py | 8 ++-- corgidrp/mocks.py | 9 ++-- tests/test_psfsub.py | 111 ++++++++++++++++++++++++++++++------------- 3 files changed, 87 insertions(+), 41 deletions(-) diff --git a/corgidrp/data.py b/corgidrp/data.py index 65999904..d5364387 100644 --- a/corgidrp/data.py +++ b/corgidrp/data.py @@ -1399,7 +1399,7 @@ def __init__(self, super(PyKLIPDataset, self).__init__() # Set filter wavelengths - self.wave_hlc = {'1F': 0.575} # micron + self.wave_hlc = {'1F': 575e-9} # meters # Optional variables self.center_include_offset = center_include_offset @@ -1575,7 +1575,9 @@ def readdata(self, CWAVEL = self.wave_hlc[CFAMNAME] except: raise UserWarning(f'CFAM position {CFAMNAME} is not configured in corgidrp.data.PyKLIPDataset .') - wvs_all += [1e-6 * CWAVEL] * NINTS + + # Rounding error introduced here? + wvs_all += [CWAVEL] * NINTS # pyklip will look for wcs.cd, so make sure that attribute exists wcs_obj = wcs.WCS(header=shead, naxis=shead['WCSAXES']) @@ -1594,7 +1596,7 @@ def readdata(self, filenames_all = np.array(filenames_all) filenums_all = np.array(range(len(filenames_all))) PAs_all = np.array(PAs_all) - wvs_all = np.array(wvs_all) + wvs_all = np.array(wvs_all).astype(np.float32) wcs_all = np.array(wcs_all) PIXSCALE = np.unique(np.array(PIXSCALE)) if len(PIXSCALE) != 1: diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index dcec3d1a..806023bd 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -2030,7 +2030,8 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol data_shape = [100,100], centerxy = None, outdir = None, - noise_amp = 1e-11, + st_amp = 100., + noise_amp = 1., ref_psf_spread=1. , pl_contrast=1e-3 ): @@ -2054,6 +2055,7 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol to image center. outdir (str, optional): Desired output directory. If not provided, data will not be saved. Defaults to None. + st_amp (float): Amplitude of stellar psf added to fake data. Defaults to 10000. noise_amp (float): Amplitude of gaussian noise added to fake data. Defaults to 1e-11. ref_psf_spread (float): Fractional increase in gaussian PSF width between science and reference PSFs. Defaults to 1. 
@@ -2122,8 +2124,7 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol else: sci_sigma = 2.5 ref_sigma = sci_sigma * ref_psf_spread - amp = 100 - pl_amp = amp * pl_contrast + pl_amp = st_amp * pl_contrast label = 'ref' if i>= n_sci else 'sci' sigma = ref_sigma if i>= n_sci else sci_sigma @@ -2139,7 +2140,7 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol xoffset=psf_off_xy[0], yoffset=psf_off_xy[1], sigma=sigma, - amp=amp) + amp=st_amp) # Add some noise rng = np.random.default_rng(seed=123+2*i) diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index 126ec600..d276963b 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -4,6 +4,12 @@ from scipy.ndimage import shift, rotate import pytest import numpy as np +import glob +import os +from astropy.io import fits +from pyklip.instruments import Instrument +from pyklip import parallelized +from matplotlib.colors import LogNorm ## Helper functions/quantities @@ -42,16 +48,21 @@ def create_circular_mask(h, w, center=None, r=None): iwa_pix = iwa_lod * lam / d * 206265 / pixscale_arcsec owa_pix = owa_lod * lam / d * 206265 / pixscale_arcsec +st_amp = 100. +noise_amp=1e-11 +pl_contrast=1e-4 + ## pyKLIP data class tests def test_pyklipdata_ADI(): rolls = [0,90] # Init with center shifted by 1 pixel - mock_sci,mock_ref = create_psfsub_dataset(2,0,rolls,centerxy=(50.5,50.5)) + mock_sci,mock_ref = create_psfsub_dataset(2,0,rolls, + centerxy=(50.5,50.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) - + pass # Check image is centered properly for i,image in enumerate(pyklip_dataset._input): @@ -59,23 +70,23 @@ def test_pyklipdata_ADI(): # Check roll assignments and filenames match up for sci dataset for r,roll in enumerate(pyklip_dataset._PAs): + assert roll == rolls[r] assert pyklip_dataset._filenames[r] == f'MOCK_sci_roll{roll}.fits_INT1', f"Incorrect roll assignment for frame {r}." - # Check ref library is None assert pyklip_dataset.psflib is None, "pyklip_dataset.psflib is not None, even though no reference dataset was provided." def test_pyklipdata_RDI(): - # TODO: - # checks for reference library - # what is pyklip_dataset.psflib.isgoodpsf for? rolls = [45,180] + n_sci = 1 + n_ref = 1 # Init with center shifted by 1 pixel - mock_sci,mock_ref = create_psfsub_dataset(1,1,rolls,centerxy=(50.5,50.5)) + mock_sci,mock_ref = create_psfsub_dataset(n_sci,n_ref,rolls,centerxy=(50.5,50.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) - + pass + # Check image is centered properly for i,image in enumerate(pyklip_dataset._input): @@ -83,31 +94,35 @@ def test_pyklipdata_RDI(): # Check roll assignments and filenames match up for sci dataset for r,roll in enumerate(pyklip_dataset._PAs): + assert roll == rolls[r] assert pyklip_dataset._filenames[r] == f'MOCK_sci_roll{roll}.fits_INT1', f"Incorrect roll assignment for frame {r}." 
- - # Check ref library - + # Check ref library shape + assert pyklip_dataset._psflib.master_library.shape[0] == n_sci+n_ref + def test_pyklipdata_ADIRDI(): - # TODO: checks for reference library - + rolls = [45,-45,180] + n_sci = 2 + n_ref = 1 # Init with center shifted by 1 pixel - mock_sci,mock_ref = create_psfsub_dataset(2,1,rolls,centerxy=(50.5,50.5)) + mock_sci,mock_ref = create_psfsub_dataset(n_sci,n_ref,rolls, + centerxy=(50.5,50.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) - - # Check image is centered properly + pass + # Check image is recentered properly for i,image in enumerate(pyklip_dataset._input): assert mock_sci.all_data[i,1:,1:] == pytest.approx(image[:-1,:-1]), f"Frame {i} centered improperly." # Check roll assignments and filenames match up for sci dataset for r,roll in enumerate(pyklip_dataset._PAs): + assert roll == rolls[r] assert pyklip_dataset._filenames[r] == f'MOCK_sci_roll{roll}.fits_INT1', f"Incorrect roll assignment for frame {r}." - - # Check ref library + # Check ref library shape + assert pyklip_dataset._psflib.master_library.shape[0] == n_sci+n_ref def test_pyklipdata_badtelescope(): mock_sci,mock_ref = create_psfsub_dataset(1,1,[0,0]) @@ -161,7 +176,10 @@ def test_psf_sub_ADI_nocrop(): numbasis = [1] rolls = [270+13,270-13] - mock_sci,mock_ref = create_psfsub_dataset(2,0,rolls,pl_contrast=1e-3) + mock_sci,mock_ref = create_psfsub_dataset(2,0,rolls, + st_amp=st_amp, + noise_amp=noise_amp, + pl_contrast=pl_contrast) result = do_psf_subtraction(mock_sci,mock_ref, numbasis=numbasis, @@ -217,11 +235,13 @@ def test_psf_sub_RDI_nocrop(): numbasis = [1] rolls = [13,0] - noise_amp=1e-8 - pl_contrast=1e-3 + mock_sci,mock_ref = create_psfsub_dataset(1,1,rolls,ref_psf_spread=1., + centerxy=(49.5,49.5), pl_contrast=pl_contrast, - noise_amp=noise_amp) + noise_amp=noise_amp, + st_amp=st_amp + ) result = do_psf_subtraction(mock_sci,mock_ref, numbasis=numbasis, @@ -237,6 +257,24 @@ def test_psf_sub_RDI_nocrop(): # import matplotlib.pyplot as plt + # fig,axes = plt.subplots(1,3,sharey=True,layout='constrained',figsize=(12,3)) + # im0 = axes[0].imshow(mock_sci[0].data,origin='lower') + # plt.colorbar(im0,ax=axes[0],shrink=0.8) + # axes[0].scatter(mock_sci[0].ext_hdr['STARLOCX'],mock_sci[0].ext_hdr['STARLOCY']) + # axes[0].set_title(f'Sci Input') + + # im1 = axes[1].imshow(mock_ref[0].data,origin='lower') + # plt.colorbar(im1,ax=axes[1],shrink=0.8) + # axes[1].scatter(mock_ref[0].ext_hdr['STARLOCX'],mock_ref[0].ext_hdr['STARLOCY']) + # axes[1].set_title('Ref Input') + + # im2 = axes[2].imshow(mock_sci[0].data - mock_ref[0].data,origin='lower') + # plt.colorbar(im2,ax=axes[2],shrink=0.8) + # axes[2].scatter(mock_sci[0].ext_hdr['STARLOCX'],mock_sci[0].ext_hdr['STARLOCY']) + # axes[2].set_title('Difference') + + # fig.suptitle('Inputs') + # fig,axes = plt.subplots(1,3,sharey=True,layout='constrained',figsize=(12,3)) # im0 = axes[0].imshow(frame.data - np.nanmedian(frame.data),origin='lower') # plt.colorbar(im0,ax=axes[0],shrink=0.8) @@ -248,12 +286,14 @@ def test_psf_sub_RDI_nocrop(): # axes[1].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) # axes[1].set_title('Analytical result') - # im2 = axes[2].imshow(masked_frame - np.nanmedian(frame.data) - analytical_result,origin='lower') + # norm = LogNorm(vmin=1e-8, vmax=1, clip=False) + # im2 = axes[2].imshow(frame.data - np.nanmedian(frame.data) - analytical_result, + # origin='lower',norm=None) # plt.colorbar(im2,ax=axes[2],shrink=0.8) # 
axes[2].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) # axes[2].set_title('Difference') - # fig.suptitle('RDI') + # fig.suptitle('RDI Result') # plt.show() # plt.close() @@ -277,10 +317,12 @@ def test_psf_sub_RDI_nocrop(): def test_psf_sub_ADIRDI_nocrop(): - numbasis = [1,2] + numbasis = [1,2,3,4] rolls = [13,-13,0] mock_sci,mock_ref = create_psfsub_dataset(2,1,rolls, - pl_contrast=1e-3) + st_amp=st_amp, + noise_amp=noise_amp, + pl_contrast=pl_contrast) analytical_result1 = (rotate(mock_sci[0].data - (mock_sci[1].data/2+mock_ref[0].data/2),-rolls[0],reshape=False,cval=0) + rotate(mock_sci[1].data - (mock_sci[0].data/2+mock_ref[0].data/2),-rolls[1],reshape=False,cval=0)) / 2 @@ -306,12 +348,12 @@ def test_psf_sub_ADIRDI_nocrop(): # axes[0].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) # axes[0].set_title(f'PSF Sub Result ({numbasis[i]} KL Modes, Median Subtracted)') - # im1 = axes[1].imshow(analytical_results[i],origin='lower') + # im1 = axes[1].imshow(analytical_results[0],origin='lower') # plt.colorbar(im1,ax=axes[1],shrink=0.8) # axes[1].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) # axes[1].set_title('Analytical result') - # im2 = axes[2].imshow(masked_frame - np.nanmedian(frame.data) - analytical_results[i],origin='lower') + # im2 = axes[2].imshow(masked_frame - np.nanmedian(frame.data) - analytical_results[0],origin='lower') # plt.colorbar(im2,ax=axes[2],shrink=0.8) # axes[2].scatter(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY']) # axes[2].set_title('Difference') @@ -353,18 +395,14 @@ def test_psf_sub_withcrop(): # Overall counts should decrease if not np.nansum(mock_sci[0].data) > np.nansum(frame.data): - raise Exception(f"ADI subtraction resulted in increased counts for frame {i}.") + raise Exception(f"PSF subtraction resulted in increased counts for frame {i}.") # Check expected data shape expected_data_shape = (len(numbasis),60,60) if not result.all_data.shape == expected_data_shape: raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after ADI subtraction.") -if __name__ == '__main__': - test_psf_sub_ADI_nocrop() - test_psf_sub_RDI_nocrop() - test_psf_sub_ADIRDI_nocrop() - test_psf_sub_withcrop() +if __name__ == '__main__': test_pyklipdata_ADI() test_pyklipdata_RDI() test_pyklipdata_ADIRDI() @@ -374,3 +412,8 @@ def test_psf_sub_withcrop(): test_pyklipdata_notdataset() test_pyklipdata_badimgshapes() test_pyklipdata_multiplepixscales() + test_psf_sub_ADI_nocrop() + test_psf_sub_RDI_nocrop() + test_psf_sub_ADIRDI_nocrop() + test_psf_sub_withcrop() + From 7d1710d2992fd0a5808b508dc331f2ad15bcb94c Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Sun, 9 Feb 2025 13:01:59 -0500 Subject: [PATCH 19/35] flatfield mocks bug fix due to default header update --- corgidrp/mocks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index c9fd624b..dd64522a 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -421,7 +421,7 @@ def create_onsky_rasterscans(dataset,filedir=None,planet=None,band=None, im_size frame = data.Image(sim_data, pri_hdr=prihdr, ext_hdr=exthdr) pl=planet band=band - frame.pri_hdr.append(('TARGET', pl), end=True) + frame.pri_hdr.set('TARGET', pl) frame.pri_hdr.append(('FILTER', band), end=True) if filedir is not None: frame.save(filedir=filedir, filename=filepattern.format(i)) From 631be47aaf8157eee52b38d2e57ec5345f0b8dc1 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Mon, 10 Feb 2025 12:34:17 -0500 Subject: [PATCH 
20/35] docstring updates --- tests/test_psfsub.py | 36 +++++++++++++++++++++++++++++++++--- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index d276963b..f99905b9 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -55,6 +55,8 @@ def create_circular_mask(h, w, center=None, r=None): ## pyKLIP data class tests def test_pyklipdata_ADI(): + """Tests that pyklip dataset centers frames, assigns rolls, and initializes PSF library properly for ADI data. + """ rolls = [0,90] # Init with center shifted by 1 pixel @@ -77,7 +79,8 @@ def test_pyklipdata_ADI(): assert pyklip_dataset.psflib is None, "pyklip_dataset.psflib is not None, even though no reference dataset was provided." def test_pyklipdata_RDI(): - + """Tests that pyklip dataset centers frames, assigns rolls, and initializes PSF library properly for RDI data. + """ rolls = [45,180] n_sci = 1 n_ref = 1 @@ -101,7 +104,8 @@ def test_pyklipdata_RDI(): assert pyklip_dataset._psflib.master_library.shape[0] == n_sci+n_ref def test_pyklipdata_ADIRDI(): - + """Tests that pyklip dataset centers frames, assigns rolls, and initializes PSF library properly for ADI+RDI data. + """ rolls = [45,-45,180] n_sci = 2 n_ref = 1 @@ -125,6 +129,8 @@ def test_pyklipdata_ADIRDI(): assert pyklip_dataset._psflib.master_library.shape[0] == n_sci+n_ref def test_pyklipdata_badtelescope(): + """Tests that pyklip data class initialization fails if data does not come from Roman. + """ mock_sci,mock_ref = create_psfsub_dataset(1,1,[0,0]) mock_sci[0].pri_hdr['TELESCOP'] = "HUBBLE" @@ -132,6 +138,8 @@ def test_pyklipdata_badtelescope(): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) def test_pyklipdata_badinstrument(): + """Tests that pyklip data class initialization fails if data does not come from the Coronagraph Instrument. + """ mock_sci,mock_ref = create_psfsub_dataset(1,1,[0,0]) mock_sci[0].pri_hdr['INSTRUME'] = "WFI" @@ -139,6 +147,8 @@ def test_pyklipdata_badinstrument(): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) def test_pyklipdata_badcfamname(): + """Tests that pyklip data class raises an error if the CFAM position is not a valid position name. + """ mock_sci,mock_ref = create_psfsub_dataset(1,1,[0,0]) mock_sci[0].ext_hdr['CFAMNAME'] = "BAD" @@ -146,6 +156,8 @@ def test_pyklipdata_badcfamname(): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) def test_pyklipdata_notdataset(): + """Tests that pyklip data class raises an error if the input is not a corgidrp dataset object. + """ mock_sci,mock_ref = create_psfsub_dataset(1,0,[0]) mock_ref = [] with pytest.raises(UserWarning): @@ -157,6 +169,8 @@ def test_pyklipdata_notdataset(): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) def test_pyklipdata_badimgshapes(): + """Tests that pyklip data class enforces that all data frames have the same shape. + """ mock_sci,mock_ref = create_psfsub_dataset(2,0,[0,0]) mock_sci[0].data = np.zeros((5,5)) @@ -164,6 +178,8 @@ def test_pyklipdata_badimgshapes(): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) def test_pyklipdata_multiplepixscales(): + """Tests that pyklip data class enforces that each frame has the same pixel scale.
+ """ mock_sci,mock_ref = create_psfsub_dataset(2,0,[0,0]) mock_sci[0].ext_hdr["PIXSCALE"] = 10 @@ -173,6 +189,10 @@ def test_pyklipdata_multiplepixscales(): ## PSF subtraction step tests def test_psf_sub_ADI_nocrop(): + """Tests that psf subtraction step correctly identifies an ADI dataset (multiple rolls, no references), + that overall counts decrease, that the KLIP result matches the analytical expectation, and that the + output data shape is correct. + """ numbasis = [1] rolls = [270+13,270-13] @@ -232,7 +252,10 @@ def test_psf_sub_ADI_nocrop(): raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after ADI subtraction.") def test_psf_sub_RDI_nocrop(): - + """Tests that psf subtraction step correctly identifies an RDI dataset (single roll, 1 or more references), + that overall counts decrease, that the KLIP result matches the analytical expectation, and that the + output data shape is correct. + """ numbasis = [1] rolls = [13,0] @@ -316,6 +339,10 @@ def test_psf_sub_RDI_nocrop(): raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after RDI subtraction.") def test_psf_sub_ADIRDI_nocrop(): + """Tests that psf subtraction step correctly identifies an ADI+RDI dataset (multiple rolls, 1 or more references), + that overall counts decrease, that the KLIP result matches the analytical expectation for 1 KL mode, and that the + output data shape is correct. + """ numbasis = [1,2,3,4] rolls = [13,-13,0] @@ -382,6 +409,9 @@ def test_psf_sub_ADIRDI_nocrop(): raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after ADI+RDI subtraction.") def test_psf_sub_withcrop(): + """Tests that psf subtraction step results in the correct data shape when + cropping by default, and that overall counts decrease. + """ numbasis = [1,2] rolls = [270+13,270-13] From f6d35282d3fd5b9c1f4cdfa88e73b3efa024335e Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 08:42:52 -0800 Subject: [PATCH 21/35] remove star location offset keywords since we track STARLOC and MASKLOC separately --- corgidrp/data.py | 27 ++++----------------------- 1 file changed, 4 insertions(+), 23 deletions(-) diff --git a/corgidrp/data.py b/corgidrp/data.py index fc9826c2..b72b3583 100644 --- a/corgidrp/data.py +++ b/corgidrp/data.py @@ -1397,8 +1397,7 @@ class PyKLIPDataset(pyKLIP_Data): def __init__(self, dataset, psflib_dataset=None, - highpass=False, - center_include_offset=True): + highpass=False): """ Initialize the pyKLIP instrument class for space telescope data. # TODO: Determine inner working angle based on PAM positions @@ -1412,9 +1411,6 @@ def __init__(self, Dataset containing input reference observations. The default is None. highpass (bool, optional): Toggle to do highpass filtering. Defaults fo False. - center_include_offset (bool, optional): - Toggle as to whether the relative header offset values of each - image is applied during image centering. Defaults to True. """ # Initialize pyKLIP Data class. @@ -1422,10 +1418,7 @@ def __init__(self, # Set filter wavelengths self.wave_hlc = {'1F': 575e-9} # meters - - # Optional variables - self.center_include_offset = center_include_offset - + # Read science and reference files. self.readdata(dataset, psflib_dataset, highpass) @@ -1575,13 +1568,7 @@ def readdata(self, PIXSCALE += [pix_scale] # Get centers. 
- if self.center_include_offset == True: - # Use the offset values from the header to adjust the center - centers = np.array([shead['STARLOCX'] + phead['XOFFSET'] / pix_scale, - shead['STARLOCY'] + phead['YOFFSET'] / pix_scale] * NINTS) - else: - # Assume the STARLOC define the correct center for each image - centers = np.array([shead['STARLOCX'], shead['STARLOCY']] * NINTS) + centers = np.array([shead['STARLOCX'], shead['STARLOCY']] * NINTS) # Get metadata. input_all += [data] @@ -1666,13 +1653,7 @@ def readdata(self, PIXSCALE += [pix_scale] # Get centers. - if self.center_include_offset == True: - # Use the offset values from the header to adjust the center - centers = np.array([shead['STARLOCX'] + phead['XOFFSET'] / pix_scale, - shead['STARLOCY'] + phead['YOFFSET'] / pix_scale] * NINTS) - else: - # Assume the STARLOC define the correct center for each image - centers = np.array([shead['STARLOCX'], shead['STARLOCY']] * NINTS) + centers = np.array([shead['STARLOCX'], shead['STARLOCY']] * NINTS) psflib_data_all += [data] psflib_centers_all += [centers] From 09399cb1bb3ab60413cef662701051104d2af3d6 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 09:05:40 -0800 Subject: [PATCH 22/35] Update pixelscale header kw --- corgidrp/data.py | 7 +++---- corgidrp/mocks.py | 5 ++--- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/corgidrp/data.py b/corgidrp/data.py index b72b3583..4dbcacc6 100644 --- a/corgidrp/data.py +++ b/corgidrp/data.py @@ -1372,12 +1372,11 @@ class PyKLIPDataset(pyKLIP_Data): A pyKLIP instrument class for Roman Coronagraph Instrument data. # TODO: Add more bandpasses, modes to self.wave_hlc - # Clarify attribute descriptions # Add wcs header info! Attrs: input: Input corgiDRP dataset. - centers: PSF center locations (double check this). + centers: Star center locations. filenums: file numbers. filenames: file names. PAs: position angles. @@ -1564,7 +1563,7 @@ def readdata(self, if data.ndim != 3: raise UserWarning('Requires 2D/3D data cube') NINTS = data.shape[0] - pix_scale = shead['PIXSCALE'] # arcsec + pix_scale = shead['PLTSCALE'] * 1000. # arcsec PIXSCALE += [pix_scale] # Get centers. @@ -1649,7 +1648,7 @@ def readdata(self, if data.ndim != 3: raise UserWarning('Requires 2D/3D data cube') NINTS = data.shape[0] - pix_scale = shead['PIXSCALE'] # arcsec + pix_scale = shead['PLTSCALE'] * 1000. # arcsec PIXSCALE += [pix_scale] # Get centers. diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index b537a63a..d6292396 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -2323,11 +2323,10 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol exthdr['MASKLOCY'] = psfcenty exthdr['STARLOCX'] = psfcentx exthdr['STARLOCY'] = psfcenty - exthdr['PIXSCALE'] = pixscale + exthdr['PLTSCALE'] = pixscale # This is in milliarcseconds! 
exthdr["ROLL"] = roll_angles[i] exthdr["HIERARCH DATA_LEVEL"] = 'L3' - #exthdr["HISTORY"] = "" # This line keeps triggering an "illegal value" error - + # Add WCS header info, if provided if wcs_header is None: wcs_header = generate_wcs(roll_angles[i], From 720a98251d5db546d3c2df2741fca255c481a30b Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 09:07:45 -0800 Subject: [PATCH 23/35] docstring updates --- corgidrp/mocks.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index d6292396..123b14fe 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -2202,13 +2202,13 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol wcs_header (astropy.fits.Header, optional): Fits header object containing WCS information. If not provided, a mock header will be created. Defaults to None. data_shape (list of int): desired shape of data array. Must have length 2. Defaults to - [1024,1024]. + [100,100]. centerxy (list of float): Desired PSF center in xy order. Must have length 2. Defaults to image center. outdir (str, optional): Desired output directory. If not provided, data will not be saved. Defaults to None. - st_amp (float): Amplitude of stellar psf added to fake data. Defaults to 10000. - noise_amp (float): Amplitude of gaussian noise added to fake data. Defaults to 1e-11. + st_amp (float): Amplitude of stellar psf added to fake data. Defaults to 100. + noise_amp (float): Amplitude of gaussian noise added to fake data. Defaults to 1. ref_psf_spread (float): Fractional increase in gaussian PSF width between science and reference PSFs. Defaults to 1. pl_contrast (float): Flux ratio between planet and starlight incident on the detector. From 09ec8a51bedeb26747aceea6c3ea7612c0f0bed8 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 09:31:40 -0800 Subject: [PATCH 24/35] replace more "PIXSCALE"s with "PLTSCALE"s --- corgidrp/l3_to_l4.py | 4 +--- tests/test_psfsub.py | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index 150b99cd..7a74d7b1 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -286,7 +286,7 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, ext_hdr['NAXIS2'] = naxis2 # Add info from input dataset - ext_hdr_keys = ['BUNIT','PIXSCALE','CFAMNAME', + ext_hdr_keys = ['BUNIT','PLTSCALE','CFAMNAME', 'DPAMNAME','FPAMNAME','FSAMNAME', 'LSAMNAME','SPAMNAME'] for kw in ext_hdr_keys: @@ -318,8 +318,6 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, dataset_out.update_after_processing_step(history_msg) - # TODO: Update DQ to 1 where there are nans - return dataset_out def northup(input_dataset,correct_wcs=False): diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index f99905b9..350a3d0c 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -182,7 +182,7 @@ def test_pyklipdata_multiplepixscales(): """ mock_sci,mock_ref = create_psfsub_dataset(2,0,[0,0]) - mock_sci[0].ext_hdr["PIXSCALE"] = 10 + mock_sci[0].ext_hdr["PLTSCALE"] = 10 with pytest.raises(UserWarning): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) From af895d23e87d23f11839f330188e84c075c61a71 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 09:34:45 -0800 Subject: [PATCH 25/35] mocks.create_flux_image now uses mocks.gaussian_array --- corgidrp/mocks.py | 50 +++++++++++++++++++++++------------------------ 1 file changed, 24 insertions(+), 26 deletions(-) diff --git 
a/corgidrp/mocks.py b/corgidrp/mocks.py index 123b14fe..b5be9108 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -2034,6 +2034,28 @@ def create_photon_countable_frames(Nbrights=30, Ndarks=40, EMgain=5000, kgain=7, return ill_dataset, dark_dataset, ill_mean, dark_mean +def gaussian_array(array_shape=[50,50],sigma=2.5,amp=100.,xoffset=0.,yoffset=0.): + """Generate a 2D square array with a centered gaussian surface (for mock PSF data). + + Args: + array_shape (list of int, optional): Shape of desired array in pixels. Defaults to [50,50]. + sigma (float, optional): Standard deviation of the gaussian curve, in pixels. Defaults to 2.5. + amp (float,optional): Amplitude of gaussian curve. Defaults to 100. + xoffset (float,optional): x offset of gaussian from array center. Defaults to 0. + yoffset (float,optional): y offset of gaussian from array center. Defaults to 0. + + Returns: + np.array: 2D array of a gaussian surface. + """ + x, y = np.meshgrid(np.linspace(-array_shape[0]/2, array_shape[0]/2, array_shape[0]), + np.linspace(-array_shape[1]/2, array_shape[1]/2, array_shape[1])) + dst = np.sqrt((x-xoffset)**2+(y-yoffset)**2) + + # Calculate Gaussian + gauss = np.exp(-((dst)**2 / (2.0 * sigma**2))) * amp / (2.0 * np.pi * sigma**2) + + return gauss + def create_flux_image(star_flux, fwhm, cal_factor, filedir=None, color_cor = 1., platescale=21.8, add_gauss_noise=True, noise_scale=1., background = 0., file_save=False): """ Create simulated data for absolute flux calibration. This is a point source in the image center with a 2D-Gaussian PSF @@ -2095,7 +2117,6 @@ def create_flux_image(star_flux, fwhm, cal_factor, filedir=None, color_cor = 1., # inject gaussian psf star stampsize = int(np.ceil(3 * fwhm)) sigma = fwhm/ (2.*np.sqrt(2*np.log(2))) - amplitude = flux/(2. * np.pi * sigma**2) # coordinate system y, x = np.indices([stampsize, stampsize]) @@ -2113,7 +2134,8 @@ def create_flux_image(star_flux, fwhm, cal_factor, filedir=None, color_cor = 1., ymin = y[0][0] ymax = y[-1][-1] - psf = amplitude * np.exp(-((x - xpos)**2. + (y - ypos)**2.) / (2. * sigma**2)) + # psf = amplitude * np.exp(-((x - xpos)**2. + (y - ypos)**2.) / (2. * sigma**2)) + psf = gaussian_array(stampsize,sigma,flux,xoffset=0.,yoffset=0.) # inject the star into the image sim_data[ymin:ymax + 1, xmin:xmax + 1] += psf @@ -2152,30 +2174,6 @@ def create_flux_image(star_flux, fwhm, cal_factor, filedir=None, color_cor = 1., MJDREF = 0.0 / [d] MJD of fiducial time """ -def gaussian_array(array_shape=[50,50],sigma=2.5,amp=100.,xoffset=0.,yoffset=0.): - """Generate a 2D square array with a centered gaussian surface (for mock PSF data). - - Args: - array_shape (int, optional): Shape of desired array in pixels. Defaults to [50,50]. - sigma (float, optional): Standard deviation of the gaussian curve, in pixels. Defaults to 5. - amp (float,optional): Amplitude of gaussian curve. Defaults to 1. - xoffset (float,optional): x offset of gaussian from array center. Defaults to 0. - yoffset (float,optional): y offset of gaussian from array center. Defaults to 0. - - Returns: - np.array: 2D array of a gaussian surface.
- """ - x, y = np.meshgrid(np.linspace(-array_shape[0]/2, array_shape[0]/2, array_shape[0]), - np.linspace(-array_shape[1]/2, array_shape[1]/2, array_shape[1])) - dst = np.sqrt((x-xoffset)**2+(y-yoffset)**2) - - # lower normal part of gaussian - normal = 1/(2.0 * np.pi * sigma**2) - - # Calculating Gaussian filter - gauss = np.exp(-((dst)**2 / (2.0 * sigma**2))) * normal * amp - - return gauss def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhole_reffiles=None, wcs_header = None, From ef35b0e4e0a39106ded361ee87155920b0e7f31e Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 09:42:28 -0800 Subject: [PATCH 26/35] add test to catch wrong psf subtraction mode --- tests/test_psfsub.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index 350a3d0c..2ce4a31d 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -432,6 +432,25 @@ def test_psf_sub_withcrop(): if not result.all_data.shape == expected_data_shape: raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after ADI subtraction.") +def test_psf_sub_badmode(): + """Tests that psf subtraction step fails correctly if an unconfigured mode is supplied (e.g. SDI). + """ + + numbasis = [1,2,3,4] + rolls = [13,-13,0] + mock_sci,mock_ref = create_psfsub_dataset(2,1,rolls, + st_amp=st_amp, + noise_amp=noise_amp, + pl_contrast=pl_contrast) + + + with pytest.raises(Exception): + _ = do_psf_subtraction(mock_sci,mock_ref, + numbasis=numbasis, + mode='SDI', + fileprefix='test_SDI', + do_crop=False) + if __name__ == '__main__': test_pyklipdata_ADI() test_pyklipdata_RDI() @@ -446,4 +465,4 @@ def test_psf_sub_withcrop(): test_psf_sub_RDI_nocrop() test_psf_sub_ADIRDI_nocrop() test_psf_sub_withcrop() - + test_psf_sub_badmode() From c89fb71c2801e4b9cef6f4f457b83933a437edb3 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 10:26:02 -0800 Subject: [PATCH 27/35] bug fix mocks.gaussian_array() --- corgidrp/mocks.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index b5be9108..c106c107 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -2047,12 +2047,15 @@ def gaussian_array(array_shape=[50,50],sigma=2.5,amp=100.,xoffset=0.,yoffset=0.) Returns: np.array: 2D array of a gaussian surface. """ - x, y = np.meshgrid(np.linspace(-array_shape[0]/2, array_shape[0]/2, array_shape[0]), - np.linspace(-array_shape[1]/2, array_shape[1]/2, array_shape[1])) + x, y = np.meshgrid(np.linspace(-array_shape[0]/2+0.5, array_shape[0]/2-0.5, array_shape[0]), + np.linspace(-array_shape[1]/2+0.5, array_shape[1]/2-0.5, array_shape[1])) dst = np.sqrt((x-xoffset)**2+(y-yoffset)**2) # Calculate Gaussian gauss = np.exp(-((dst)**2 / (2.0 * sigma**2))) * amp / (2.0 * np.pi * sigma**2) + + #np.exp(-((x - xpos)**2. + (y - ypos)**2.) / (2. * sigma**2)) * flux/(2. * np.pi * sigma**2) + return gauss @@ -2117,7 +2120,7 @@ def create_flux_image(star_flux, fwhm, cal_factor, filedir=None, color_cor = 1., # inject gaussian psf star stampsize = int(np.ceil(3 * fwhm)) sigma = fwhm/ (2.*np.sqrt(2*np.log(2))) - + # coordinate system y, x = np.indices([stampsize, stampsize]) y -= stampsize // 2 @@ -2134,8 +2137,7 @@ def create_flux_image(star_flux, fwhm, cal_factor, filedir=None, color_cor = 1., ymin = y[0][0] ymax = y[-1][-1] - # psf = amplitude * np.exp(-((x - xpos)**2. + (y - ypos)**2.) / (2. 
* sigma**2)) - psf = gaussian_array(stampsize,sigma,flux,xoffset=0.,yoffset=0.) + psf = gaussian_array((stampsize,stampsize),sigma,flux) # inject the star into the image sim_data[ymin:ymax + 1, xmin:xmax + 1] += psf From abcecd5f0ab45c87c48e30fa0f2465c89c57a6bd Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 10:40:46 -0800 Subject: [PATCH 28/35] debugging cleanup --- corgidrp/mocks.py | 3 --- tests/test_psfsub.py | 5 ++--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index c106c107..51f0eff8 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -2053,9 +2053,6 @@ def gaussian_array(array_shape=[50,50],sigma=2.5,amp=100.,xoffset=0.,yoffset=0.) # Calculate Gaussian gauss = np.exp(-((dst)**2 / (2.0 * sigma**2))) * amp / (2.0 * np.pi * sigma**2) - - #np.exp(-((x - xpos)**2. + (y - ypos)**2.) / (2. * sigma**2)) * flux/(2. * np.pi * sigma**2) - return gauss diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index 2ce4a31d..38cc474a 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -64,7 +64,7 @@ def test_pyklipdata_ADI(): centerxy=(50.5,50.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) - pass + # Check image is centered properly for i,image in enumerate(pyklip_dataset._input): @@ -88,7 +88,6 @@ def test_pyklipdata_RDI(): mock_sci,mock_ref = create_psfsub_dataset(n_sci,n_ref,rolls,centerxy=(50.5,50.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) - pass # Check image is centered properly for i,image in enumerate(pyklip_dataset._input): @@ -114,7 +113,7 @@ def test_pyklipdata_ADIRDI(): centerxy=(50.5,50.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) - pass + # Check image is recentered properly for i,image in enumerate(pyklip_dataset._input): From e3d2991180b96c5ac2929fc1ba9dbdc77fa0cbd3 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 10:47:16 -0800 Subject: [PATCH 29/35] add different shifts for x and y in pyklip dataset tests --- tests/test_psfsub.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index 38cc474a..7c6cec8b 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -59,16 +59,16 @@ def test_pyklipdata_ADI(): """ rolls = [0,90] - # Init with center shifted by 1 pixel + # Init with center shifted by 1 pixel in x, 2 pixels in y mock_sci,mock_ref = create_psfsub_dataset(2,0,rolls, - centerxy=(50.5,50.5)) + centerxy=(50.5,51.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) # Check image is centered properly for i,image in enumerate(pyklip_dataset._input): - assert mock_sci.all_data[i,1:,1:] == pytest.approx(image[:-1,:-1]), f"Frame {i} centered improperly." + assert mock_sci.all_data[i,2:,1:] == pytest.approx(image[:-2,:-1]), f"Frame {i} centered improperly." 
# Check roll assignments and filenames match up for sci dataset for r,roll in enumerate(pyklip_dataset._PAs): @@ -84,15 +84,15 @@ def test_pyklipdata_RDI(): rolls = [45,180] n_sci = 1 n_ref = 1 - # Init with center shifted by 1 pixel - mock_sci,mock_ref = create_psfsub_dataset(n_sci,n_ref,rolls,centerxy=(50.5,50.5)) + # Init with center shifted + mock_sci,mock_ref = create_psfsub_dataset(n_sci,n_ref,rolls,centerxy=(50.5,51.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) # Check image is centered properly for i,image in enumerate(pyklip_dataset._input): - assert mock_sci.all_data[i,1:,1:] == pytest.approx(image[:-1,:-1]), f"Frame {i} centered improperly." + assert mock_sci.all_data[i,2:,1:] == pytest.approx(image[:-2,:-1]), f"Frame {i} centered improperly." # Check roll assignments and filenames match up for sci dataset for r,roll in enumerate(pyklip_dataset._PAs): @@ -110,14 +110,14 @@ def test_pyklipdata_ADIRDI(): n_ref = 1 # Init with center shifted by 1 pixel mock_sci,mock_ref = create_psfsub_dataset(n_sci,n_ref,rolls, - centerxy=(50.5,50.5)) + centerxy=(50.5,51.5)) pyklip_dataset = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) # Check image is recentered properly for i,image in enumerate(pyklip_dataset._input): - assert mock_sci.all_data[i,1:,1:] == pytest.approx(image[:-1,:-1]), f"Frame {i} centered improperly." + assert mock_sci.all_data[i,2:,1:] == pytest.approx(image[:-2,:-1]), f"Frame {i} centered improperly." # Check roll assignments and filenames match up for sci dataset for r,roll in enumerate(pyklip_dataset._PAs): From 841f32fd06686d5996dfe837ac8fe480a4639cd2 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 16:09:43 -0800 Subject: [PATCH 30/35] add error propagation for 3D data, tests for nan_flags and flag_nans in detector.py --- corgidrp/data.py | 32 ++++---- corgidrp/detector.py | 5 +- tests/test_err_dq.py | 37 +++++++-- tests/test_psfsub.py | 179 +++++++++++++++++++++++++++++++++++++++++-- 4 files changed, 223 insertions(+), 30 deletions(-) diff --git a/corgidrp/data.py b/corgidrp/data.py index 4dbcacc6..7217c690 100644 --- a/corgidrp/data.py +++ b/corgidrp/data.py @@ -149,19 +149,19 @@ def add_error_term(self, input_error, err_name): Updates Dataset.all_err. 
Args: - input_error (np.array): 2-d or 3-d error layer + input_error (np.array): per-frame or per-dataset error layer err_name (str): name of the uncertainty layer """ - if input_error.ndim == 3: + if input_error.ndim == self.all_data.ndim: for i,frame in enumerate(self.frames): frame.add_error_term(input_error[i], err_name) - elif input_error.ndim ==2: + elif input_error.ndim == self.all_data.ndim - 1: for frame in self.frames: frame.add_error_term(input_error, err_name) else: - raise ValueError("input_error is not either a 2D or 3D array.") + raise ValueError("input_error is not either a 2D or 3D array for 2D data, or a 3D or 4D array for 3D data.") # Preserve pointer links between Dataset.all_err and Image.err self.all_err = np.array([frame.err for frame in self.frames]) @@ -288,8 +288,8 @@ def __init__(self, data_or_filepath, pri_hdr=None, ext_hdr=None, err = None, dq if err is not None: if np.shape(self.data) != np.shape(err)[-self.data.ndim:]: raise ValueError("The shape of err is {0} while we are expecting shape {1}".format(err.shape[-self.data.ndim:], self.data.shape)) - #we want to have a 3 dim error array - if err.ndim > 2: + #we want to have an extra dimension in the error array + if err.ndim == self.data.ndim+1: self.err = err else: self.err = err.reshape((1,)+err.shape) @@ -297,7 +297,7 @@ def __init__(self, data_or_filepath, pri_hdr=None, ext_hdr=None, err = None, dq err_hdu = hdulist.pop("ERR") self.err = err_hdu.data self.err_hdr = err_hdu.header - if self.err.ndim == 2: + if self.err.ndim == self.data.ndim: self.err = self.err.reshape((1,)+self.err.shape) else: self.err = np.zeros((1,)+self.data.shape) @@ -350,7 +350,7 @@ def __init__(self, data_or_filepath, pri_hdr=None, ext_hdr=None, err = None, dq if np.shape(self.data) != np.shape(err)[-self.data.ndim:]: raise ValueError("The shape of err is {0} while we are expecting shape {1}".format(err.shape[-self.data.ndim:], self.data.shape)) #we want to have a 3 dim error array - if err.ndim > 2: + if err.ndim == self.data.ndim + 1: self.err = err else: self.err = err.reshape((1,)+err.shape) @@ -518,7 +518,7 @@ def get_masked_data(self): def add_error_term(self, input_error, err_name): """ - Add a layer of a specific additive uncertainty on the 3-dim error array extension + Add a layer of a specific additive uncertainty on the 3- or 4-dim error array extension and update the combined uncertainty in the first layer. Update the error header and assign the error name. 
@@ -526,18 +526,22 @@ def add_error_term(self, input_error, err_name): in the configuration file Args: - input_error (np.array): 2-d error layer + input_error (np.array): error layer with same shape as data err_name (str): name of the uncertainty layer """ - if input_error.ndim != 2 or input_error.shape != self.data.shape: - raise ValueError("we expect a 2-dimensional error layer with dimensions {0}".format(self.data.shape)) + ndim = self.data.ndim + if not (input_error.ndim==2 or input_error.ndim==3) or input_error.shape != self.data.shape: + raise ValueError("we expect a 2-dimensional or 3-dimensional error layer with dimensions {0}".format(self.data.shape)) #first layer is always the updated combined error - self.err[0,:,:] = np.sqrt(self.err[0,:,:]**2 + input_error**2) + if ndim == 2: + self.err[0,:,:] = np.sqrt(self.err[0,:,:]**2 + input_error**2) + elif ndim == 3: + self.err[0,:,:,:] = np.sqrt(self.err[0,:,:,:]**2 + input_error**2) self.err_hdr["Layer_1"] = "combined_error" if corgidrp.track_individual_errors: - #append new error as layer on 3D cube + #append new error as layer on 3D or 4D cube self.err=np.append(self.err, [input_error], axis=0) layer = str(self.err.shape[0]) diff --git a/corgidrp/detector.py b/corgidrp/detector.py index 649c82a3..6883a526 100644 --- a/corgidrp/detector.py +++ b/corgidrp/detector.py @@ -833,8 +833,11 @@ def nan_flags(dataset,threshold=1): # mask bad pixels bad = np.where(dataset_out.all_dq >= threshold) dataset_out.all_data[bad] = np.nan - dataset_out.all_err[bad[0],:,bad[1],bad[2]] = np.nan + new_error = np.zeros_like(dataset_out.all_data) + new_error[bad] = np.nan + dataset_out.add_error_term(new_error, 'DQ flagged') + return dataset_out def flag_nans(dataset,flag_val=1): diff --git a/tests/test_err_dq.py b/tests/test_err_dq.py index 7d2dd4a4..966d5f59 100644 --- a/tests/test_err_dq.py +++ b/tests/test_err_dq.py @@ -26,6 +26,15 @@ dqhd = fits.Header() dqhd["CASE"] = "test" +data_3d = np.ones([2,1024,1024]) * 2 +err_3d = np.zeros([2,1024,1024]) +err1_3d = np.ones([2,1024,1024]) +err2_3d = err1_3d.copy() +err3_3d = np.ones([2,1,1024,1024]) * 0.5 +dq_3d = np.zeros([2,1024,1024], dtype = int) +dq1_3d = dq_3d.copy() +dq1_3d[0,0,0] = 1 + old_err_tracking = corgidrp.track_individual_errors # use default parameters detector_params = DetectorParams({}) @@ -118,6 +127,22 @@ def test_add_error_term(): assert image_test.err_hdr["Layer_2"] == "error_noid" assert image_test.err_hdr["Layer_3"] == "error_nuts" + image_3d = Image(data_3d,prhd,exthd,err_3d,dq_3d,errhd,dqhd) + image_3d.add_error_term(err1_3d, "error_noid") + assert image_3d.err[0,0,0,0] == err1_3d[0,0,0] + image_3d.add_error_term(err2_3d, "error_nuts") + assert image_3d.err.shape == (3,2,1024,1024) + assert image_3d.err[0,0,0,0] == np.sqrt(err1_3d[0,0,0]**2 + err2_3d[0,0,0]**2) + image_3d.save(filename="test_image3d.fits") + + image_test_3d = Image('test_image3d.fits') + assert np.array_equal(image_test_3d.dq, dq_3d) + assert np.array_equal(image_test_3d.err, image_3d.err) + assert image_test_3d.err.shape == (3,2,1024,1024) + assert image_test_3d.err_hdr["Layer_1"] == "combined_error" + assert image_test_3d.err_hdr["Layer_2"] == "error_noid" + assert image_test_3d.err_hdr["Layer_3"] == "error_nuts" + def test_err_dq_dataset(): """ test the behavior of the err and data arrays in the dataset @@ -150,7 +175,6 @@ def test_get_masked_data(): assert masked_data.mean()==2 assert masked_data.sum()==image2.data.sum()-2 - def test_err_adderr_notrack(): """ test the initialization of error and adding errors when
we are not tracking @@ -172,7 +196,6 @@ def test_err_adderr_notrack(): assert image1.err.shape == (1,1024,1024) assert image1.err[0,0,0] == np.sqrt(err1[0,0]**2 + err2[0,0]**2) - def test_read_many_errors_notrack(): """ Check that we can successfully discard errors when reading in a frame with multiple errors @@ -188,7 +211,6 @@ def test_read_many_errors_notrack(): with pytest.raises(KeyError): assert image_test.err_hdr["Layer_3"] == "error_nuts" - def test_err_array_sizes(): ''' Check that we're robust to 2D error arrays @@ -222,8 +244,6 @@ def test_err_array_sizes(): dark_frame.save(filedir=calibdir, filename=dark_filename) testcaldb.scan_dir_for_new_entries(calibdir) - - def teardown_module(): """ Runs automatically at the end. ONLY IN PYTEST @@ -235,7 +255,6 @@ def teardown_module(): corgidrp.track_individual_errors = old_err_tracking - # for debugging. does not run with pytest!! if __name__ == '__main__': test_err_array_sizes() @@ -244,6 +263,10 @@ def teardown_module(): test_add_error_term() test_err_dq_dataset() test_get_masked_data() + test_err_adderr_notrack() + test_read_many_errors_notrack() + test_err_array_sizes() for i in range(3): - os.remove('test_image{0}.fits'.format(i)) \ No newline at end of file + os.remove('test_image{0}.fits'.format(i)) + os.remove('test_image3d.fits') \ No newline at end of file diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index 7c6cec8b..a1fbaf0d 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -1,15 +1,10 @@ -from corgidrp.mocks import create_psfsub_dataset +from corgidrp.mocks import create_psfsub_dataset,create_default_headers from corgidrp.l3_to_l4 import do_psf_subtraction -from corgidrp.data import PyKLIPDataset +from corgidrp.data import PyKLIPDataset, Image, Dataset +from corgidrp.detector import nan_flags, flag_nans from scipy.ndimage import shift, rotate import pytest import numpy as np -import glob -import os -from astropy.io import fits -from pyklip.instruments import Instrument -from pyklip import parallelized -from matplotlib.colors import LogNorm ## Helper functions/quantities @@ -185,6 +180,166 @@ def test_pyklipdata_multiplepixscales(): with pytest.raises(UserWarning): _ = PyKLIPDataset(mock_sci,psflib_dataset=mock_ref) +# DQ flagging tests + +def make_test_data(frame_shape,n_frames=1,): + """Makes a test corgidrp Dataset of all zeros with the desired + frame shape and number of frames. + + Args: + frame_shape (listlike of int): 2D or 3D image shape desired. + n_frames (int, optional): Number of frames. Defaults to 1. + + Returns: + corgidrp.Dataset: mock dataset of all zeros. + """ + + frames = [] + for i in range(n_frames): + prihdr, exthdr = create_default_headers() + im_data = np.zeros(frame_shape).astype(np.float64) + frame = Image(im_data, pri_hdr=prihdr, ext_hdr=exthdr) + + frames.append(frame) + + dataset = Dataset(frames) + return dataset + +def test_nanflags_2D(): + """Test detector.nan_flags() on 2D data. 
+ """ + + # 2D: + mock_dataset = make_test_data([10,10],n_frames=2,) + mock_dataset.all_dq[0,4,6] = 1 + mock_dataset.all_dq[1,5,7] = 1 + + expected_data = np.zeros([2,10,10]) + expected_data[0,4,6] = np.nan + expected_data[1,5,7] = np.nan + + expected_err = np.zeros([2,1,10,10]) + expected_err[0,0,4,6] = np.nan + expected_err[1,0,5,7] = np.nan + + nanned_dataset = nan_flags(mock_dataset,threshold=1) + + if not np.array_equal(nanned_dataset.all_data, expected_data,equal_nan=True): + # import matplotlib.pyplot as plt + # fig,axes = plt.subplots(1,2) + # axes[0].imshow(nanned_dataset.all_data[0,:,:]) + # axes[1].imshow(expected_data[0,:,:]) + # plt.show() + # plt.close() + raise Exception('2D nan_flags test produced unexpected result') + + if not np.array_equal(nanned_dataset.all_err,expected_err,equal_nan=True): + raise Exception('2D nan_flags test produced unexpected result for ERR array') + + +def test_nanflags_3D(): + """Test detector.nan_flags() on 3D data. + """ + + # 3D: + mock_dataset = make_test_data([3,10,10],n_frames=2,) + mock_dataset.all_dq[0,0,4,6] = 1 + mock_dataset.all_dq[1,:,5,7] = 1 + + expected_data = np.zeros([2,3,10,10]) + expected_data[0,0,4,6] = np.nan + expected_data[1,:,5,7] = np.nan + + expected_err = np.zeros([2,1,3,10,10]) + expected_err[0,0,0,4,6] = np.nan + expected_err[1,0,:,5,7] = np.nan + + nanned_dataset = nan_flags(mock_dataset,threshold=1) + + if not np.array_equal(nanned_dataset.all_data, expected_data,equal_nan=True): + raise Exception('3D nan_flags test produced unexpected result') + + if not np.array_equal(nanned_dataset.all_err, expected_err,equal_nan=True): + raise Exception('3D nan_flags test produced unexpected result for ERR array') + +def test_nanflags_mixed_dqvals(): + """Test detector.nan_flags() on 3D data with some DQ values below the threshold. + """ + + # 3D: + mock_dataset = make_test_data([3,10,10],n_frames=2,) + mock_dataset.all_dq[0,0,4,6] = 1 + mock_dataset.all_dq[1,:,5,7] = 2 + + expected_data = np.zeros([2,3,10,10]) + expected_data[1,:,5,7] = np.nan + + expected_err = np.zeros([2,1,3,10,10]) + expected_err[1,0,:,5,7] = np.nan + + nanned_dataset = nan_flags(mock_dataset,threshold=2) + + if not np.array_equal(nanned_dataset.all_data, expected_data,equal_nan=True): + raise Exception('nan_flags with mixed dq values produced unexpected result') + + if not np.array_equal(nanned_dataset.all_err, expected_err,equal_nan=True): + raise Exception('3D nan_flags with mixed dq values produced unexpected result for ERR array') + +def test_flagnans_2D(): + """Test detector.flag_nans() on 2D data. + """ + + # 2D: + mock_dataset = make_test_data([10,10],n_frames=2,) + mock_dataset.all_data[0,4,6] = np.nan + mock_dataset.all_data[1,5,7] = np.nan + + expected_dq = np.zeros([2,10,10]) + expected_dq[0,4,6] = 1 + expected_dq[1,5,7] = 1 + + flagged_dataset = flag_nans(mock_dataset) + + if not np.array_equal(flagged_dataset.all_dq, expected_dq,equal_nan=True): + raise Exception('2D flag_nans test produced unexpected result for DQ array') + +def test_flagnans_3D(): + """Test detector.flag_nans() on 3D data.
+ """ + + # 3D: + mock_dataset = make_test_data([3,10,10],n_frames=2,) + mock_dataset.all_data[0,0,4,6] = np.nan + mock_dataset.all_data[1,:,5,7] = np.nan + + expected_dq = np.zeros([2,3,10,10]) + expected_dq[0,0,4,6] = 1 + expected_dq[1,:,5,7] = 1 + + flagged_dataset = flag_nans(mock_dataset) + + if not np.array_equal(flagged_dataset.all_dq, expected_dq,equal_nan=True): + raise Exception('3D flag_nans test produced unexpected result for DQ array') + + +def test_flagnans_flagval2(): + """Test detector.flag_nans() on 3D data with a non-default DQ value. + """ + + # 3D: + mock_dataset = make_test_data([3,10,10],n_frames=2,) + mock_dataset.all_data[0,0,4,6] = np.nan + mock_dataset.all_data[1,:,5,7] = np.nan + + expected_dq = np.zeros([2,3,10,10]) + expected_dq[0,0,4,6] = 2 + expected_dq[1,:,5,7] = 2 + + flagged_dataset = flag_nans(mock_dataset,flag_val=2) + + if not np.array_equal(flagged_dataset.all_dq, expected_dq,equal_nan=True): + raise Exception('3D nan_flags test produced unexpected result for DQ array') + ## PSF subtraction step tests def test_psf_sub_ADI_nocrop(): @@ -460,6 +615,14 @@ def test_psf_sub_badmode(): test_pyklipdata_notdataset() test_pyklipdata_badimgshapes() test_pyklipdata_multiplepixscales() + + test_nanflags_2D() + test_nanflags_3D() + test_nanflags_mixed_dqvals() + test_flagnans_2D() + test_flagnans_3D() + test_flagnans_flagval2() + test_psf_sub_ADI_nocrop() test_psf_sub_RDI_nocrop() test_psf_sub_ADIRDI_nocrop() From 718f5cc7a2d72131b0d64c2a91b791c86d5bfd48 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 17:03:21 -0800 Subject: [PATCH 31/35] enabled single dataset input for psf subtraction --- corgidrp/l3_to_l4.py | 17 +++++++- corgidrp/mocks.py | 4 ++ tests/test_psfsub.py | 98 +++++++++++++++++++++++++++++++++----------- 3 files changed, 94 insertions(+), 25 deletions(-) diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index 7a74d7b1..7cdb7b73 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -207,10 +207,25 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, """ sci_dataset = input_dataset.copy() + + # Use input reference dataset if provided if not reference_star_dataset is None: ref_dataset = reference_star_dataset.copy() + + # Try getting PSF references via the "PSFREF" header kw else: - ref_dataset = None + split_datasets, unique_vals = sci_dataset.split_dataset(prihdr_keywords=["PSFREF"]) + unique_vals = np.array(unique_vals) + + if 0. in unique_vals: + sci_dataset = split_datasets[int(np.nonzero(np.array(unique_vals) == 0)[0])] + else: + raise UserWarning('No science files found in input dataset.') + + if 1. in unique_vals: + ref_dataset = split_datasets[int(np.nonzero(np.array(unique_vals) == 1)[0])] + else: + ref_dataset = None assert len(sci_dataset) > 0, "Science dataset has no data." 
diff --git a/corgidrp/mocks.py b/corgidrp/mocks.py index 51f0eff8..d7338bd9 100644 --- a/corgidrp/mocks.py +++ b/corgidrp/mocks.py @@ -2307,6 +2307,10 @@ def create_psfsub_dataset(n_sci,n_ref,roll_angles,darkhole_scifiles=None,darkhol yoffset=yoff+psf_off_xy[1]) img_data += planet_psf + # Assign PSFREF flag + prihdr['PSFREF'] = 0 + else: + prihdr['PSFREF'] = 1 # Add necessary header keys prihdr['TELESCOP'] = 'ROMAN' diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index a1fbaf0d..0c568134 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -236,7 +236,6 @@ def test_nanflags_2D(): if not np.array_equal(nanned_dataset.all_err,expected_err,equal_nan=True): raise Exception('2D nan_flags test produced unexpected result for ERR array') - def test_nanflags_3D(): """Test detector.nan_flags() on 3D data. """ @@ -321,7 +320,6 @@ def test_flagnans_3D(): if not np.array_equal(flagged_dataset.all_dq, expected_dq,equal_nan=True): raise Exception('3D flag_nans test produced unexpected result for DQ array') - def test_flagnans_flagval2(): """Test detector.flag_nans() on 3D data with a non-default DQ value. """ @@ -342,6 +340,56 @@ def test_flagnans_flagval2(): ## PSF subtraction step tests +def test_psf_sub_split_dataset(): + """Tests that psf subtraction step correctly identifies an ADI dataset (multiple rolls, no references), + that overall counts decrease, that the KLIP result matches the analytical expectation, and that the + output data shape is correct. + """ + + # Sci & Ref + CASE='SCI+REF' + numbasis = [1] + rolls = [270+13,270-13,0,0] + mock_sci,mock_ref = create_psfsub_dataset(2,2,rolls, + st_amp=st_amp, + noise_amp=noise_amp, + pl_contrast=pl_contrast) + + # combine mock_sci and mock_ref into 1 dataset + frames = [*mock_sci,*mock_ref] + mock_sci_and_ref = Dataset(frames) + + result = do_psf_subtraction(mock_sci_and_ref, + numbasis=numbasis, + fileprefix='test_single_dataset', + do_crop=False) + + # Sci only + CASE='SCI_ONLY' + mock_sci,mock_ref = create_psfsub_dataset(2,2,rolls, + st_amp=st_amp, + noise_amp=noise_amp, + pl_contrast=pl_contrast) + + # pass only science frames + result = do_psf_subtraction(mock_sci, + numbasis=numbasis, + fileprefix='test_sci_only_dataset', + do_crop=False) + + # Should choose ADI + for frame in result: + if not frame.ext_hdr['KLIP_ALG'] == 'ADI': + raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'ADI' mode when provided 2 science images and no references.") + + # pass only reference frames (should fail) + CASE='REF_ONLY' + with pytest.raises(UserWarning): + _ = do_psf_subtraction(mock_ref, + numbasis=numbasis, + fileprefix='test_ref_only_dataset', + do_crop=False) + def test_psf_sub_ADI_nocrop(): """Tests that psf subtraction step correctly identifies an ADI dataset (multiple rolls, no references), that overall counts decrease, that the KLIP result matches the analytical expectation, and that the @@ -606,25 +654,27 @@ def test_psf_sub_badmode(): do_crop=False) if __name__ == '__main__': - test_pyklipdata_ADI() - test_pyklipdata_RDI() - test_pyklipdata_ADIRDI() - test_pyklipdata_badtelescope() - test_pyklipdata_badinstrument() - test_pyklipdata_badcfamname() - test_pyklipdata_notdataset() - test_pyklipdata_badimgshapes() - test_pyklipdata_multiplepixscales() - - test_nanflags_2D() - test_nanflags_3D() - test_nanflags_mixed_dqvals() - test_flagnans_2D() - test_flagnans_3D() - test_flagnans_flagval2() - - test_psf_sub_ADI_nocrop() - test_psf_sub_RDI_nocrop() - test_psf_sub_ADIRDI_nocrop() - test_psf_sub_withcrop() - 
test_psf_sub_badmode() + # test_pyklipdata_ADI() + # test_pyklipdata_RDI() + # test_pyklipdata_ADIRDI() + # test_pyklipdata_badtelescope() + # test_pyklipdata_badinstrument() + # test_pyklipdata_badcfamname() + # test_pyklipdata_notdataset() + # test_pyklipdata_badimgshapes() + # test_pyklipdata_multiplepixscales() + + # test_nanflags_2D() + # test_nanflags_3D() + # test_nanflags_mixed_dqvals() + # test_flagnans_2D() + # test_flagnans_3D() + # test_flagnans_flagval2() + + test_psf_sub_split_dataset() + + # test_psf_sub_ADI_nocrop() + # test_psf_sub_RDI_nocrop() + # test_psf_sub_ADIRDI_nocrop() + # test_psf_sub_withcrop() + # test_psf_sub_badmode() From 7a79d632b8775ec006fec395578f0a6e7f6080b4 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Tue, 4 Mar 2025 17:24:49 -0800 Subject: [PATCH 32/35] make psf_sub output a single 3d frame --- corgidrp/l3_to_l4.py | 101 +++++++++++++++++++++---------------------- tests/test_psfsub.py | 22 +++++----- 2 files changed, 60 insertions(+), 63 deletions(-) diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index 7cdb7b73..6cf6211f 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -271,59 +271,56 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, pyklip_data = fits.getdata(result_fpath) pyklip_hdr = fits.getheader(result_fpath) - frames = [] - for i,frame_data in enumerate(pyklip_data): - - # TODO: Handle DQ & errors correctly - err = np.zeros_like(frame_data) - dq = np.zeros_like(frame_data) # This will get filled out later - - # Clean up primary header - pri_hdr = pyklip_hdr.copy() - naxis1 = pri_hdr['NAXIS1'] - naxis2 = pri_hdr['NAXIS2'] - pri_hdr['NAXIS'] = 0 - - remove_kws = ['NAXIS1','NAXIS2','NAXIS3', - 'CRPIX1','CRPIX2'] - for kw in remove_kws: - del pri_hdr[kw] - - # Add observation info from input dataset - pri_hdr_keys = ['TELESCOP','INSTRUME'] - for kw in pri_hdr_keys: - pri_hdr[kw] = sci_dataset[0].pri_hdr[kw] - - # Make extension header - ext_hdr = fits.Header() - ext_hdr['NAXIS'] = 2 - ext_hdr['NAXIS1'] = naxis1 - ext_hdr['NAXIS2'] = naxis2 - - # Add info from input dataset - ext_hdr_keys = ['BUNIT','PLTSCALE','CFAMNAME', - 'DPAMNAME','FPAMNAME','FSAMNAME', - 'LSAMNAME','SPAMNAME'] - for kw in ext_hdr_keys: - ext_hdr[kw] = sci_dataset[0].ext_hdr[kw] - - # Add info from pyklip - ext_hdr['KLIP_ALG'] = mode - ext_hdr['KLMODES'] = pyklip_hdr[f'KLMODE{i}'] - ext_hdr['STARLOCX'] = pyklip_hdr['PSFCENTX'] - ext_hdr['STARLOCY'] = pyklip_hdr['PSFCENTY'] - if "HISTORY" in sci_dataset[0].ext_hdr.keys(): - history_str = str(sci_dataset[0].ext_hdr['HISTORY']) - ext_hdr['HISTORY'] = ''.join(history_str.split('\n')) - - # Construct Image object and add to list - frame = data.Image(frame_data, - pri_hdr=pri_hdr, ext_hdr=ext_hdr, - err=err, dq=dq) - - frames.append(frame) + # TODO: Handle DQ & errors correctly + err = np.zeros([1,*pyklip_data.shape]) + dq = np.zeros_like(pyklip_data) # This will get filled out later + + # Clean up primary header + pri_hdr = pyklip_hdr.copy() + naxis1 = pri_hdr['NAXIS1'] + naxis2 = pri_hdr['NAXIS2'] + naxis3 = pri_hdr['NAXIS3'] + pri_hdr['NAXIS'] = 0 + + remove_kws = ['NAXIS1','NAXIS2','NAXIS3', + 'CRPIX1','CRPIX2'] + for kw in remove_kws: + del pri_hdr[kw] + + # Add observation info from input dataset + pri_hdr_keys = ['TELESCOP','INSTRUME'] + for kw in pri_hdr_keys: + pri_hdr[kw] = sci_dataset[0].pri_hdr[kw] + + # Make extension header + ext_hdr = fits.Header() + ext_hdr['NAXIS'] = 2 + ext_hdr['NAXIS1'] = naxis1 + ext_hdr['NAXIS2'] = naxis2 + ext_hdr['NAXIS3'] = naxis2 + + # Add info from 
input dataset + ext_hdr_keys = ['BUNIT','PLTSCALE','CFAMNAME', + 'DPAMNAME','FPAMNAME','FSAMNAME', + 'LSAMNAME','SPAMNAME'] + for kw in ext_hdr_keys: + ext_hdr[kw] = sci_dataset[0].ext_hdr[kw] + + # Add info from pyklip + ext_hdr['KLIP_ALG'] = mode + #ext_hdr['KLMODES'] = pyklip_hdr[f'KLMODE{i}'] + ext_hdr['STARLOCX'] = pyklip_hdr['PSFCENTX'] + ext_hdr['STARLOCY'] = pyklip_hdr['PSFCENTY'] + if "HISTORY" in sci_dataset[0].ext_hdr.keys(): + history_str = str(sci_dataset[0].ext_hdr['HISTORY']) + ext_hdr['HISTORY'] = ''.join(history_str.split('\n')) + + # Construct Image object and add to list + frame = data.Image(pyklip_data, + pri_hdr=pri_hdr, ext_hdr=ext_hdr, + err=err, dq=dq) - dataset_out = data.Dataset(frames) + dataset_out = data.Dataset([frame]) # Flag nans in the dq array and then add nans to the error array dataset_out = flag_nans(dataset_out,flag_val=1) diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index 0c568134..fc078520 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -449,7 +449,7 @@ def test_psf_sub_ADI_nocrop(): raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'ADI' mode when provided 2 science images and no references.") # Check expected data shape - expected_data_shape = (len(numbasis),*mock_sci[0].data.shape) + expected_data_shape = (1,len(numbasis),*mock_sci[0].data.shape) if not result.all_data.shape == expected_data_shape: raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after ADI subtraction.") @@ -477,7 +477,7 @@ def test_psf_sub_RDI_nocrop(): for i,frame in enumerate(result): - mask = create_circular_mask(*frame.data.shape,r=iwa_pix,center=(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY'])) + mask = create_circular_mask(*frame.data.shape[-2:],r=iwa_pix,center=(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY'])) masked_frame = np.where(mask,np.nan,frame.data) # import matplotlib.pyplot as plt @@ -536,7 +536,7 @@ def test_psf_sub_RDI_nocrop(): raise Exception("RDI subtraction did not produce expected analytical result.") # Check expected data shape - expected_data_shape = (len(numbasis),*mock_sci[0].data.shape) + expected_data_shape = (1,len(numbasis),*mock_sci[0].data.shape) if not result.all_data.shape == expected_data_shape: raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after RDI subtraction.") @@ -566,7 +566,7 @@ def test_psf_sub_ADIRDI_nocrop(): for i,frame in enumerate(result): - mask = create_circular_mask(*frame.data.shape,r=iwa_pix,center=(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY'])) + mask = create_circular_mask(*frame.data.shape[-2:],r=iwa_pix,center=(frame.ext_hdr['STARLOCX'],frame.ext_hdr['STARLOCY'])) masked_frame = np.where(mask,np.nan,frame.data) # import matplotlib.pyplot as plt @@ -606,7 +606,7 @@ def test_psf_sub_ADIRDI_nocrop(): raise Exception("ADI+RDI subtraction did not produce expected analytical result.") # Check expected data shape - expected_data_shape = (len(numbasis),*mock_sci[0].data.shape) + expected_data_shape = (1,len(numbasis),*mock_sci[0].data.shape) if not result.all_data.shape == expected_data_shape: raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after ADI+RDI subtraction.") @@ -630,7 +630,7 @@ def test_psf_sub_withcrop(): raise Exception(f"PSF subtraction resulted in increased counts for frame {i}.") # Check expected data shape - expected_data_shape = (len(numbasis),60,60) + expected_data_shape = 
(1,len(numbasis),60,60) if not result.all_data.shape == expected_data_shape: raise Exception(f"Result data shape was {result.all_data.shape} instead of expected {expected_data_shape} after ADI subtraction.") @@ -671,10 +671,10 @@ def test_psf_sub_badmode(): # test_flagnans_3D() # test_flagnans_flagval2() - test_psf_sub_split_dataset() + # test_psf_sub_split_dataset() # test_psf_sub_ADI_nocrop() - # test_psf_sub_RDI_nocrop() - # test_psf_sub_ADIRDI_nocrop() - # test_psf_sub_withcrop() - # test_psf_sub_badmode() + test_psf_sub_RDI_nocrop() + test_psf_sub_ADIRDI_nocrop() + test_psf_sub_withcrop() + test_psf_sub_badmode() From 40d867a78d238f33197eeb1b064c52cddd48a1a1 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Wed, 5 Mar 2025 09:08:20 -0800 Subject: [PATCH 33/35] default to keeping all sci dataset header keywords and adding pyklip ones --- corgidrp/l3_to_l4.py | 56 +++++++++++++++++--------------------------- tests/test_psfsub.py | 28 +++++++++++----------- 2 files changed, 35 insertions(+), 49 deletions(-) diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index 6cf6211f..8c1a9fc4 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -271,57 +271,43 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, pyklip_data = fits.getdata(result_fpath) pyklip_hdr = fits.getheader(result_fpath) - # TODO: Handle DQ & errors correctly + # TODO: Handle errors correctly err = np.zeros([1,*pyklip_data.shape]) dq = np.zeros_like(pyklip_data) # This will get filled out later - # Clean up primary header - pri_hdr = pyklip_hdr.copy() - naxis1 = pri_hdr['NAXIS1'] - naxis2 = pri_hdr['NAXIS2'] - naxis3 = pri_hdr['NAXIS3'] - pri_hdr['NAXIS'] = 0 - - remove_kws = ['NAXIS1','NAXIS2','NAXIS3', - 'CRPIX1','CRPIX2'] - for kw in remove_kws: - del pri_hdr[kw] - - # Add observation info from input dataset - pri_hdr_keys = ['TELESCOP','INSTRUME'] - for kw in pri_hdr_keys: - pri_hdr[kw] = sci_dataset[0].pri_hdr[kw] + # Collapse sci_dataset headers + pri_hdr = sci_dataset[0].pri_hdr.copy() + ext_hdr = sci_dataset[0].ext_hdr.copy() - # Make extension header - ext_hdr = fits.Header() + # Add relevant info from the pyklip headers: + for kw, val, comment in pyklip_hdr._cards: + if not 'NAXIS' in kw: + pri_hdr.set(kw,val,comment) + + # Record KLIP algorithm explicitly + pri_hdr.set('KLIP_ALG',mode) + + # Update NAXIS keywords + pri_hdr.set('NAXIS',0) ext_hdr['NAXIS'] = 2 - ext_hdr['NAXIS1'] = naxis1 - ext_hdr['NAXIS2'] = naxis2 - ext_hdr['NAXIS3'] = naxis2 - - # Add info from input dataset - ext_hdr_keys = ['BUNIT','PLTSCALE','CFAMNAME', - 'DPAMNAME','FPAMNAME','FSAMNAME', - 'LSAMNAME','SPAMNAME'] - for kw in ext_hdr_keys: - ext_hdr[kw] = sci_dataset[0].ext_hdr[kw] - - # Add info from pyklip - ext_hdr['KLIP_ALG'] = mode - #ext_hdr['KLMODES'] = pyklip_hdr[f'KLMODE{i}'] + ext_hdr['NAXIS1'] = pyklip_hdr['NAXIS1'] + ext_hdr['NAXIS2'] = pyklip_hdr['NAXIS2'] + ext_hdr['NAXIS3'] = pyklip_hdr['NAXIS3'] + + # Add info from pyklip to ext_hdr ext_hdr['STARLOCX'] = pyklip_hdr['PSFCENTX'] ext_hdr['STARLOCY'] = pyklip_hdr['PSFCENTY'] if "HISTORY" in sci_dataset[0].ext_hdr.keys(): history_str = str(sci_dataset[0].ext_hdr['HISTORY']) ext_hdr['HISTORY'] = ''.join(history_str.split('\n')) - # Construct Image object and add to list + # Construct Image and Dataset object frame = data.Image(pyklip_data, pri_hdr=pri_hdr, ext_hdr=ext_hdr, err=err, dq=dq) dataset_out = data.Dataset([frame]) - + # Flag nans in the dq array and then add nans to the error array dataset_out = flag_nans(dataset_out,flag_val=1) dataset_out = 
nan_flags(dataset_out,threshold=1) diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py index fc078520..d63b2c21 100644 --- a/tests/test_psfsub.py +++ b/tests/test_psfsub.py @@ -348,7 +348,7 @@ def test_psf_sub_split_dataset(): # Sci & Ref CASE='SCI+REF' - numbasis = [1] + numbasis = [1,4,8] rolls = [270+13,270-13,0,0] mock_sci,mock_ref = create_psfsub_dataset(2,2,rolls, st_amp=st_amp, @@ -379,8 +379,8 @@ def test_psf_sub_split_dataset(): # Should choose ADI for frame in result: - if not frame.ext_hdr['KLIP_ALG'] == 'ADI': - raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'ADI' mode when provided 2 science images and no references.") + if not frame.pri_hdr['KLIP_ALG'] == 'ADI': + raise Exception(f"Chose {frame.pri_hdr['KLIP_ALG']} instead of 'ADI' mode when provided 2 science images and no references.") # pass only reference frames (should fail) CASE='REF_ONLY' @@ -445,8 +445,8 @@ def test_psf_sub_ADI_nocrop(): if np.nanmax(np.abs(frame.data - analytical_result)) > 1e-5: raise Exception(f"Absolute difference between ADI result and analytical result is greater then 1e-5.") - if not frame.ext_hdr['KLIP_ALG'] == 'ADI': - raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'ADI' mode when provided 2 science images and no references.") + if not frame.pri_hdr['KLIP_ALG'] == 'ADI': + raise Exception(f"Chose {frame.pri_hdr['KLIP_ALG']} instead of 'ADI' mode when provided 2 science images and no references.") # Check expected data shape expected_data_shape = (1,len(numbasis),*mock_sci[0].data.shape) @@ -528,8 +528,8 @@ def test_psf_sub_RDI_nocrop(): raise Exception(f"RDI subtraction resulted in increased counts for frame {i}.") # The step should choose mode RDI based on having 1 roll and 1 reference. - if not frame.ext_hdr['KLIP_ALG'] == 'RDI': - raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'RDI' mode when provided 1 science image and 1 reference.") + if not frame.pri_hdr['KLIP_ALG'] == 'RDI': + raise Exception(f"Chose {frame.pri_hdr['KLIP_ALG']} instead of 'RDI' mode when provided 1 science image and 1 reference.") # Frame should match analytical result outside of the IWA (after correcting for the median offset) if not np.nanmax(np.abs((masked_frame - np.nanmedian(frame.data)) - analytical_result)) < 1e-5: @@ -597,8 +597,8 @@ def test_psf_sub_ADIRDI_nocrop(): raise Exception(f"ADI+RDI subtraction resulted in increased counts for frame {i}.") # Corgidrp should know to choose ADI+RDI mode - if not frame.ext_hdr['KLIP_ALG'] == 'ADI+RDI': - raise Exception(f"Chose {frame.ext_hdr['KLIP_ALG']} instead of 'ADI+RDI' mode when provided 2 science images and 1 reference.") + if not frame.pri_hdr['KLIP_ALG'] == 'ADI+RDI': + raise Exception(f"Chose {frame.pri_hdr['KLIP_ALG']} instead of 'ADI+RDI' mode when provided 2 science images and 1 reference.") # Frame should match analytical result outside of the IWA (after correcting for the median offset) for KL mode 1 if i==0: @@ -671,10 +671,10 @@ def test_psf_sub_badmode(): # test_flagnans_3D() # test_flagnans_flagval2() - # test_psf_sub_split_dataset() + test_psf_sub_split_dataset() # test_psf_sub_ADI_nocrop() - test_psf_sub_RDI_nocrop() - test_psf_sub_ADIRDI_nocrop() - test_psf_sub_withcrop() - test_psf_sub_badmode() + # test_psf_sub_RDI_nocrop() + # test_psf_sub_ADIRDI_nocrop() + # test_psf_sub_withcrop() + # test_psf_sub_badmode() From 590878b9dfbf51fff76385931152e3ca69ab25be Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Wed, 5 Mar 2025 09:23:08 -0800 Subject: [PATCH 34/35] more psfsub header updates --- 
 corgidrp/l3_to_l4.py | 15 ++++-------
 tests/test_psfsub.py | 64 +++++++++++++++++++++-----------------------
 2 files changed, 35 insertions(+), 44 deletions(-)

diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py
index 8c1a9fc4..617a87d7 100644
--- a/corgidrp/l3_to_l4.py
+++ b/corgidrp/l3_to_l4.py
@@ -280,23 +280,18 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None,
     ext_hdr = sci_dataset[0].ext_hdr.copy()

     # Add relevant info from the pyklip headers:
+    skip_kws = ['PSFCENTX','PSFCENTY']
     for kw, val, comment in pyklip_hdr._cards:
-        if not 'NAXIS' in kw:
-            pri_hdr.set(kw,val,comment)
+        if not kw in skip_kws:
+            ext_hdr.set(kw,val,comment)

     # Record KLIP algorithm explicitly
     pri_hdr.set('KLIP_ALG',mode)

-    # Update NAXIS keywords
-    pri_hdr.set('NAXIS',0)
-    ext_hdr['NAXIS'] = 2
-    ext_hdr['NAXIS1'] = pyklip_hdr['NAXIS1']
-    ext_hdr['NAXIS2'] = pyklip_hdr['NAXIS2']
-    ext_hdr['NAXIS3'] = pyklip_hdr['NAXIS3']
-
     # Add info from pyklip to ext_hdr
     ext_hdr['STARLOCX'] = pyklip_hdr['PSFCENTX']
     ext_hdr['STARLOCY'] = pyklip_hdr['PSFCENTY']
+
     if "HISTORY" in sci_dataset[0].ext_hdr.keys():
         history_str = str(sci_dataset[0].ext_hdr['HISTORY'])
         ext_hdr['HISTORY'] = ''.join(history_str.split('\n'))
@@ -307,7 +302,7 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None,
                         err=err, dq=dq)

     dataset_out = data.Dataset([frame])
-    
+
     # Flag nans in the dq array and then add nans to the error array
     dataset_out = flag_nans(dataset_out,flag_val=1)
     dataset_out = nan_flags(dataset_out,threshold=1)
diff --git a/tests/test_psfsub.py b/tests/test_psfsub.py
index d63b2c21..b484aca1 100644
--- a/tests/test_psfsub.py
+++ b/tests/test_psfsub.py
@@ -341,13 +341,11 @@ def test_flagnans_flagval2():
 ## PSF subtraction step tests

 def test_psf_sub_split_dataset():
-    """Tests that psf subtraction step correctly identifies an ADI dataset (multiple rolls, no references),
-    that overall counts decrease, that the KLIP result matches the analytical expectation, and that the
-    output data shape is correct.
+    """Tests that psf subtraction step can correctly split an input dataset into
+    science and reference dataset, if they are not passed in separately.
""" # Sci & Ref - CASE='SCI+REF' numbasis = [1,4,8] rolls = [270+13,270-13,0,0] mock_sci,mock_ref = create_psfsub_dataset(2,2,rolls, @@ -359,19 +357,18 @@ def test_psf_sub_split_dataset(): frames = [*mock_sci,*mock_ref] mock_sci_and_ref = Dataset(frames) + # Pass combined dataset to do_psf_subtraction result = do_psf_subtraction(mock_sci_and_ref, numbasis=numbasis, fileprefix='test_single_dataset', do_crop=False) - # Sci only - CASE='SCI_ONLY' - mock_sci,mock_ref = create_psfsub_dataset(2,2,rolls, - st_amp=st_amp, - noise_amp=noise_amp, - pl_contrast=pl_contrast) - - # pass only science frames + # Should choose ADI+RDI + for frame in result: + if not frame.pri_hdr['KLIP_ALG'] == 'ADI+RDI': + raise Exception(f"Chose {frame.pri_hdr['KLIP_ALG']} instead of 'ADI+RDI' mode when provided 2 science images and 2 references.") + + # Try passing only science frames result = do_psf_subtraction(mock_sci, numbasis=numbasis, fileprefix='test_sci_only_dataset', @@ -383,7 +380,6 @@ def test_psf_sub_split_dataset(): raise Exception(f"Chose {frame.pri_hdr['KLIP_ALG']} instead of 'ADI' mode when provided 2 science images and no references.") # pass only reference frames (should fail) - CASE='REF_ONLY' with pytest.raises(UserWarning): _ = do_psf_subtraction(mock_ref, numbasis=numbasis, @@ -654,27 +650,27 @@ def test_psf_sub_badmode(): do_crop=False) if __name__ == '__main__': - # test_pyklipdata_ADI() - # test_pyklipdata_RDI() - # test_pyklipdata_ADIRDI() - # test_pyklipdata_badtelescope() - # test_pyklipdata_badinstrument() - # test_pyklipdata_badcfamname() - # test_pyklipdata_notdataset() - # test_pyklipdata_badimgshapes() - # test_pyklipdata_multiplepixscales() - - # test_nanflags_2D() - # test_nanflags_3D() - # test_nanflags_mixed_dqvals() - # test_flagnans_2D() - # test_flagnans_3D() - # test_flagnans_flagval2() + test_pyklipdata_ADI() + test_pyklipdata_RDI() + test_pyklipdata_ADIRDI() + test_pyklipdata_badtelescope() + test_pyklipdata_badinstrument() + test_pyklipdata_badcfamname() + test_pyklipdata_notdataset() + test_pyklipdata_badimgshapes() + test_pyklipdata_multiplepixscales() + + test_nanflags_2D() + test_nanflags_3D() + test_nanflags_mixed_dqvals() + test_flagnans_2D() + test_flagnans_3D() + test_flagnans_flagval2() test_psf_sub_split_dataset() - # test_psf_sub_ADI_nocrop() - # test_psf_sub_RDI_nocrop() - # test_psf_sub_ADIRDI_nocrop() - # test_psf_sub_withcrop() - # test_psf_sub_badmode() + test_psf_sub_ADI_nocrop() + test_psf_sub_RDI_nocrop() + test_psf_sub_ADIRDI_nocrop() + test_psf_sub_withcrop() + test_psf_sub_badmode() From 01963754f622ea1a8e4845916c643e303ceee235 Mon Sep 17 00:00:00 2001 From: Ell Bogat Date: Wed, 5 Mar 2025 14:02:05 -0800 Subject: [PATCH 35/35] minor header update --- corgidrp/l3_to_l4.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/corgidrp/l3_to_l4.py b/corgidrp/l3_to_l4.py index 617a87d7..7641df4d 100644 --- a/corgidrp/l3_to_l4.py +++ b/corgidrp/l3_to_l4.py @@ -280,7 +280,7 @@ def do_psf_subtraction(input_dataset, reference_star_dataset=None, ext_hdr = sci_dataset[0].ext_hdr.copy() # Add relevant info from the pyklip headers: - skip_kws = ['PSFCENTX','PSFCENTY'] + skip_kws = ['PSFCENTX','PSFCENTY','CREATOR','CTYPE3'] for kw, val, comment in pyklip_hdr._cards: if not kw in skip_kws: ext_hdr.set(kw,val,comment)