Skip to content

Currently available interfaces to spectrographs

ESPRESSO

Bases: ESO_PIPELINE

Interface to handle ESPRESSO observations (S2D and S1D).

With ESPRESSO data we are considering 3 sub-Instruments:

  • ESPRESSO18 - Before 2019-06-27
  • ESPRESSO19 - Before 2020-12-18
  • ESPRESSO21 - Until the ends of time (hopefully)

User parameters:

(Parameter table columns: Parameter name, Mandatory, Default Value, Valid Values, Comment — the table is empty because there are currently no ESPRESSO-specific parameters.)

.. note:: Also check the User parameters of the parent classes for further customization options of SBART

Source code in src/ASTRA/Instruments/ESPRESSO.py
class ESPRESSO(ESO_PIPELINE):
    """Interface to handle ESPRESSO observations (S2D and S1D).

    With ESPRESSO data we are considering 3 sub-Instruments:

    * ESPRESSO18 - Before  2019-06-27
    * ESPRESSO19 - Before  2020-12-18
    * ESPRESSO21 - Until the ends of time (hopefully)


    **User parameters:**

    ================================ ================ ================ ================ ================
    Parameter name                      Mandatory      Default Value    Valid Values    Comment
    ================================ ================ ================ ================ ================
    ================================ ================ ================ ================ ================

    .. note::
        Also check the **User parameters** of the parent classes for further customization options of SBART

    """

    _default_params = ESO_PIPELINE._default_params

    # NOTE(review): the docstring lists three sub-instruments, but ESPRESSO21 is
    # currently merged into ESPRESSO19 (its boundary date is commented out below).
    sub_instruments = {
        "ESPRESSO18": datetime.datetime.strptime("2019-06-27", r"%Y-%m-%d"),
        "ESPRESSO19": datetime.datetime.max,
        # "ESPRESSO19": datetime.datetime.strptime("2020-12-18", r"%Y-%m-%d"),
    }
    _name = "ESPRESSO"

    # Spectral orders that fall on each detector; values are index lists (not slices).
    order_intervals: dict[DETECTOR_DEFINITION, list[int]] = {
        DETECTOR_DEFINITION.WHITE_LIGHT: list(range(170)),
        DETECTOR_DEFINITION.RED_DET: list(range(90, 170)),
        DETECTOR_DEFINITION.BLUE_DET: list(range(0, 90)),
    }

    def __init__(
        self,
        file_path,
        user_configs: Optional[Dict[str, Any]] = None,
        reject_subInstruments: Optional[Iterable[str]] = None,
        frameID: Optional[int] = None,
        quiet_user_params: bool = True,
    ):
        """ESPRESSO interface.

        Parameters
        ----------
        file_path
            Path to the S2D (or S1D) file.
        user_configs
            Dictionary whose keys are the configurable options of ESPRESSO (check above)
        reject_subInstruments
            Iterable of subInstruments to fully reject
        frameID
            ID for this observation. Only used for organization purposes by :class:`~SBART.data_objects.DataClass`
        quiet_user_params
            If True, do not log the parsing of the user parameters

        """
        # Wavelength coverage
        coverage = (350, 900)

        super().__init__(
            inst_name="ESPRESSO",
            array_size={"S2D": (170, 9111), "S1D": (1, 443262)},
            file_path=file_path,
            frameID=frameID,
            KW_identifier="ESO",
            user_configs=user_configs,
            reject_subInstruments=reject_subInstruments,
            quiet_user_params=quiet_user_params,
        )

        self.instrument_properties["wavelength_coverage"] = coverage
        self.instrument_properties["resolution"] = 140_000
        self.instrument_properties["EarthLocation"] = EarthLocation.of_site("Cerro Paranal")
        self.instrument_properties["is_drift_corrected"] = True

        # https://www.eso.org/sci/facilities/paranal/astroclimate/site.html
        self.instrument_properties["site_pressure"] = 750

    def load_telemetry_info(self, header):
        """Load airmass, environment keywords and detector binning from the header.

        The observation may come from any of the four UTs, so TEL1..TEL4 are
        probed for the airmass keyword; the matching UT number is kept for the
        remaining keyword lookups.

        Raises
        ------
        KeyError
            If no ``ESO TELx AIRM START`` keyword exists for any UT.

        """
        # Find the UT number and load the airmass
        for i in range(1, 5):
            try:
                self.observation_info["airmass"] = header[f"HIERARCH ESO TEL{i} AIRM START"]
                self.UT_number = i
                break
            except KeyError as e:
                if i == 4:
                    msg = "\tCannot find ESO TELx AIRM START key"
                    raise KeyError(msg) from e

        # Environmental KWs for telfit (also needs airmass, previously loaded)
        ambi_KWs = {
            "relative_humidity": "AMBI RHUM",
            "ambient_temperature": "AMBI TEMP",
            "seeing": "AMBI FWHM START",
        }

        for name, endKW in ambi_KWs.items():
            self.observation_info[name] = float(header[f"HIERARCH ESO TEL{self.UT_number} {endKW}"])
            if "temperature" in name:  # store temperature in KELVIN for TELFIT
                self.observation_info[name] = convert_temperature(
                    self.observation_info[name],
                    old_scale="Celsius",
                    new_scale="Kelvin",
                )

        self.observation_info["DET_BINX"] = header["HIERARCH ESO DET BINX"]
        self.observation_info["DET_BINY"] = header["HIERARCH ESO DET BINY"]

    def check_header_QC_ESO_DRS(self, header):
        """Check the ADC-related QC keywords; problems are stored as non-fatal warnings."""
        nonfatal_QC_flags = {
            "HIERARCH ESO INS{} ADC{} RA": 0,  # related with ADC2 problem
            "HIERARCH ESO INS{} ADC{} dec": 0,  # related with ADC2 problem
            "HIERARCH ESO INS{} ADC{} SENS1": 0,  # related with ADC2 problem
            "HIERARCH ESO INS{} ADC{} TEMP": 0,  # related with ADC2 problem
        }
        found_ADC_issue = False
        for flag, bad_value in nonfatal_QC_flags.items():
            found_UT = False
            for UT_KW in ["", "2", "3", "4"]:
                try:
                    for ADC in [1, 2]:
                        ADC_KW = flag.format(UT_KW, ADC)
                        if header[ADC_KW] == bad_value:
                            msg = f"QC flag {ADC_KW} has a value of {bad_value}"
                            logger.warning(msg)
                            self._status.store_warning(KW_WARNING(msg))
                            found_ADC_issue = True
                        found_UT = True
                # Narrowed from a bare ``except``: a missing keyword is the only
                # expected failure here; anything else should surface.
                except KeyError:
                    continue
            if not found_UT:
                logger.critical(f"Did not find the entry for the following UT related metric: {flag}")

        if found_ADC_issue:
            self._status.store_warning(KW_WARNING("ADC2 issues found"))

        super().check_header_QC_ESO_DRS(header)

    def build_mask(self, bypass_QualCheck: bool = False) -> None:
        """Build the pixel mask; also rejects the noisy start of RED-CCD orders 90/91."""
        super().build_mask(bypass_QualCheck=bypass_QualCheck, assess_bad_orders=False)

        if self.spectral_format == "S2D":
            # the first two orders of the RED CCD have a large amount of noise in the beginning so we remove a
            # portion from the start of those two orders Now, what is going on: we want to find the indexes,
            # from order 90 and 91 that are below the 5230 \AA
            inds = np.where(self.wavelengths[90:92, :] <= 5230)
            # numpy where returns the indexes assuming the zero to be 90 and the one to be 91. Remember that we
            # sliced the array to only remove from those two orders
            inds_1 = np.where(inds[0], 91, 90)
            # rebuild the 'numpy where' output, to pass to the mask as the proper output
            inds = (inds_1, inds[1])
            self.spectral_mask.add_indexes_to_mask(inds, ERROR_THRESHOLD)

        self.assess_bad_orders()

    def trigger_data_storage(self, *args, **kwargs):
        """Trigger data storage of the parent class (no ESPRESSO-specific outputs)."""
        super().trigger_data_storage(*args, **kwargs)

HARPS

Bases: ESO_PIPELINE

Interface to handle HARPS data; S1D not supported.

This class also defines 2 sub-Instruments:

  • HARPSpre - Before 2015-05-29
  • HARPSpost - Until the ends of time (hopefully)

The steps to load the S2D data are described in the [HARPS DRS manual](https://www.eso.org/sci/facilities/lasilla/instruments/harps/doc/DRS.pdf). The summary is:

- Construct the wavelength solution & correct from BERV
- Load instrumental drift
- Construct flux noises:

    - Table 10 of `the user manual <https://www.eso.org/sci/facilities/lasilla/instruments/harps/doc/manual/HARPS-UserManual2.4.pdf>`_ gives max RON of 7.07 for red detector
    - Noise = sqrt(obj + sky + n*dark*expTime + nBinY*ron^2)

User parameters:

Currently there are no HARPS-specific parameters

Note: Check the User parameters of the parent classes for further customization options of SBART

Source code in src/ASTRA/Instruments/HARPS.py
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
class HARPS(ESO_PIPELINE):
    """Interface to handle HARPS data; S1D **not** supported.

    This class also defines 2 sub-Instruments:

    * HARPSpre - Before  2015-05-29
    * HARPSpost - Until the ends of time (hopefully)

    The steps to load the S2D data are described in the HARPS `DRS manual <https://www.eso.org/sci/facilities/lasilla/instruments/harps/doc/DRS.pdf>`_. The summary is:

        - Construct the wavelength solution & correct from BERV
        - Load instrumental drift
        - Construct flux noises:

            - Table 10 of `the user manual <https://www.eso.org/sci/facilities/lasilla/instruments/harps/doc/manual/HARPS-UserManual2.4.pdf>`_ gives max RON of 7.07 for red detector
            - Noise = sqrt(obj + sky + n*dark*expTime + nBinY*ron^2)

    **User parameters:**

     Currently there are no HARPS-specific parameters

    *Note:* Check the **User parameters** of the parent classes for further customization options of SBART

    """

    # NOTE(review): the docstring calls these HARPSpre/HARPSpost while the keys
    # below are HARPS03/HARPS15 -- confirm which naming is canonical downstream.
    sub_instruments = {
        "HARPS03": datetime.datetime.strptime("2015-05-29", r"%Y-%m-%d"),
        "HARPS15": datetime.datetime.max,
    }

    _name = "HARPS"
    _default_params = ESO_PIPELINE._default_params

    # Spectral orders that fall on each detector; values are index lists (not slices).
    order_intervals: dict[DETECTOR_DEFINITION, list[int]] = {
        DETECTOR_DEFINITION.WHITE_LIGHT: list(range(0, 71)),
        DETECTOR_DEFINITION.RED_DET: list(range(47, 71)),
        DETECTOR_DEFINITION.BLUE_DET: list(range(0, 47)),
    }

    def __init__(
        self,
        file_path,
        user_configs: Optional[Dict[str, Any]] = None,
        reject_subInstruments=None,
        frameID=None,
        quiet_user_params: bool = True,
    ):
        """HARPS constructor.

        Parameters
        ----------
        file_path
            Path to the S2D (or S1D) file.
        user_configs
            Dictionary whose keys are the configurable options of HARPS (check above)
        reject_subInstruments
            Iterable of subInstruments to fully reject
        frameID
            ID for this observation. Only used for organization purposes by :class:`~SBART.data_objects.DataClass`
        quiet_user_params
            If True, do not log the parsing of the user parameters

        """
        logger.info(f"Creating frame from: {file_path}")

        # Work on a local copy: the previous code crashed with a TypeError when
        # user_configs was left as None, and mutated the caller's dictionary.
        user_configs = {} if user_configs is None else dict(user_configs)

        if "use_old_pipeline" not in user_configs:
            # We need to do this, since we need to override the array size and header keywords
            user_configs["use_old_pipeline"] = self._default_params["use_old_pipeline"].default_value

        coverage = [350, 700]
        # Note: 46 blue orders and 26 red orders. From Table 2.2 of:
        # https://www.eso.org/sci/facilities/lasilla/instruments/harps/doc/manual/HARPS-UserManual2.4.pdf
        if user_configs["use_old_pipeline"]:
            mat_size = (72, 4096)
            KW_map = {
                "OBJECT": "OBJECT",
                "BJD": "HIERARCH ESO DRS BJD",
                "MJD": "MJD-OBS",
                "ISO-DATE": "DATE-OBS",
                "DRS-VERSION": "HIERARCH ESO DRS VERSION",
                "RA": "RA",
                "DEC": "DEC",
                "MD5-CHECK": "DATASUM",
                "SPEC_TYPE": None,
                "DRS_CCF_MASK": None,
                "DRS_FLUX_CORRECTION_TEMPLATE": None,
            }
        else:
            mat_size = (71, 4096)
            KW_map = {}

        if user_configs["use_old_pipeline"]:
            # The old pipeline needs the companion CCF/BIS products as well
            file_path, self.ccf_path, self.BIS_file, search_status = self.find_files(file_path)

        super().__init__(
            inst_name="HARPS",
            array_size={"S2D": mat_size},
            file_path=file_path,
            KW_identifier="ESO",
            frameID=frameID,
            user_configs=user_configs,
            reject_subInstruments=reject_subInstruments,
            quiet_user_params=quiet_user_params,
            override_KW_map=KW_map,
            override_indicators=("CONTRAST", "FWHM"),
        )

        if user_configs["use_old_pipeline"] and not search_status.is_good_flag:
            self.add_to_status(search_status)

        self.instrument_properties["wavelength_coverage"] = coverage

        if user_configs["use_old_pipeline"]:
            # Old-pipeline products are neither drift- nor BERV-corrected on disk
            self.instrument_properties["is_drift_corrected"] = False
            self.is_BERV_corrected = False

        self.instrument_properties["resolution"] = 115_000
        self.instrument_properties["EarthLocation"] = EarthLocation.of_site("La Silla Observatory")
        # ? same as for Paranal?
        # https://www.eso.org/sci/facilities/paranal/astroclimate/site.html
        self.instrument_properties["site_pressure"] = 750
        self.is_blaze_corrected = False

    def load_telemetry_info(self, header):
        """Loads (at least) the following keywords:

        - relative humidity
        - ambient temperature, in Celsius (stored in Kelvin for TELFIT)
        - airmass

        Parameters
        ----------
        header
            FITS header of the observation.

        """
        ambi_KWs = {
            "relative_humidity": "RHUM",
            "ambient_temperature": "TEMP",
        }

        for name, endKW in ambi_KWs.items():
            self.observation_info[name] = header[f"HIERARCH {self.KW_identifier} TEL AMBI {endKW}"]
            if "temperature" in name:  # store temperature in KELVIN for TELFIT
                self.observation_info[name] = convert_temperature(
                    self.observation_info[name],
                    old_scale="Celsius",
                    new_scale="Kelvin",
                )

        # 255 marks an invalid humidity reading; replace it with NaN
        if self.observation_info["relative_humidity"] == 255:
            logger.warning(f"{self.name} has an invalid value in the humidity sensor...")
            self.observation_info["relative_humidity"] = np.nan

        self.observation_info["airmass"] = header["HIERARCH ESO TEL AIRM START"]

    def find_files(self, file_name):
        """Find the CCF and S2D files and BIS files, which should be stored inside the same folder"""
        logger.debug("Searching for the ccf and e2ds files")

        search_status = MISSING_DATA("Missing the ccf file")
        ccf_path = None
        bis_path = None

        if os.path.isdir(file_name):
            logger.debug("Received a folder, searching inside for necessary files")
            # search for e2ds file
            folder_name = file_name

            e2ds_files = glob.glob(os.path.join(folder_name, "*e2ds_A.fits"), recursive=True)
            ccf_files = glob.glob(os.path.join(folder_name, "*ccf*A.fits"), recursive=True)
            bis_files = glob.glob(os.path.join(folder_name, "*bis*A.fits"), recursive=True)

            for name, elems in [
                ("e2ds_A", e2ds_files),
                ("ccf", ccf_files),
                ("bis", bis_files),
            ]:
                if len(elems) > 1:
                    msg = f"HARPS data only received folder name and it has more than 1 {name} file in it"
                    raise custom_exceptions.InvalidConfiguration(msg)

                if len(elems) < 1:
                    msg = f"HARPS data only received folder name and it has no {name} file in it"
                    if name != "bis":
                        # The BIS file is not critical for the run
                        raise custom_exceptions.InvalidConfiguration(msg)
                    logger.critical(msg)

            e2ds_path = e2ds_files[0]
            ccf_path = ccf_files[0]
            # BIS is optional (see above); previously this raised IndexError when absent
            bis_path = bis_files[0] if bis_files else None
            search_status = SUCCESS("Found all input files")
        else:
            logger.debug("Received path of E2DS file; searching for CCF with matching name")
            folder_name = os.path.dirname(file_name)
            e2ds_path = file_name
            file_start, *_ = os.path.basename(file_name).split("_")

            found_CCF = False
            found_BIS = False
            bis_files = glob.glob(os.path.join(folder_name, "*bis*A.fits"), recursive=True)
            ccf_files = Path(folder_name).glob("*ccf*A.fits")

            for file in ccf_files:
                if file_start in file.as_posix():
                    ccf_path = file
                    found_CCF = True
            for file in bis_files:
                if file_start in file:
                    bis_path = file
                    found_BIS = True
            if found_CCF:
                logger.info(f"Found CCF file: {ccf_path}")
                search_status = SUCCESS("Found CCF file")
            else:
                logger.critical("Was not able to find CCF file. Marking frame as invalid")
                ccf_path = ""

            if not found_BIS:
                bis_path = None

        return e2ds_path, ccf_path, bis_path, search_status

    def build_HARPS_wavelengths(self, hdr):
        """Compute the wavelength solution to this given spectra (EQ 4.1 of DRS manual)
        Convert from air wavelengths to vacuum
        """
        # degree of the polynomial
        d = hdr["HIERARCH ESO DRS CAL TH DEG LL"]
        # number of orders
        omax = hdr.get("HIERARCH ESO DRS CAL LOC NBO", self.array_size[0])
        xmax = self.array_size[1]

        # matrix X:
        #
        # axis 0: the entry corresponding to each coefficient
        # axis 1: each pixel number

        x = np.empty((d + 1, xmax), "int64")
        x[0].fill(1)  # x[0,*] = x^0 = 1,1,1,1,1,...
        x[1] = np.arange(xmax)

        for i in range(1, d):
            x[i + 1] = x[i] * x[1]

        # matrix A:
        #
        # axis 0: the different orders
        # axis 1: all coefficients for the given order

        A = np.reshape(
            [hdr["HIERARCH ESO DRS CAL TH COEFF LL" + str(i)] for i in range(omax * (d + 1))],
            (omax, d + 1),
        )  # slow 30 ms

        # the wavelengths for each order are a simple dot product between the coefficients and pixel-wise data (X)
        wavelengths = np.dot(A, x)

        vacuum_wavelengths = airtovac(wavelengths)
        return vacuum_wavelengths

    def _load_old_DRS_KWs(self, header):
        """Load the header keywords produced by the old (DRS) pipeline."""
        if not self._internal_configs["use_old_pipeline"]:
            raise custom_exceptions.InvalidConfiguration("Can't load data from old pipeline without the config")

        self.observation_info["MAX_BERV"] = header["HIERARCH ESO DRS BERVMX"] * kilometer_second
        self.observation_info["BERV"] = header["HIERARCH ESO DRS BERV"] * kilometer_second

        # Environmental KWs for telfit (also needs airmass, previously loaded)
        ambi_KWs = {
            "relative_humidity": "AMBI RHUM",
            "ambient_temperature": "AMBI TEMP",
        }

        for name, endKW in ambi_KWs.items():
            self.observation_info[name] = header[f"HIERARCH ESO TEL {endKW}"]
            if "temperature" in name:  # store temperature in KELVIN for TELFIT
                self.observation_info[name] = convert_temperature(
                    self.observation_info[name],
                    old_scale="Celsius",
                    new_scale="Kelvin",
                )

        for order in range(self.N_orders):
            self.observation_info["orderwise_SNRs"].append(header[f"HIERARCH ESO DRS SPE EXT SN{order}"])

        self.observation_info["airmass"] = header["HIERARCH ESO TEL AIRM START"]

        # Drift handling: any QC failure or missing keyword falls back to zero drift
        bad_drift = False
        try:
            flag = "HIERARCH ESO DRS DRIFT QC"
            if header[flag].strip() != "PASSED":
                bad_drift = True
                msg = f"QC flag {flag} meets the bad value"
                logger.warning(msg)
                self._status.store_warning(KW_WARNING(msg))
            else:
                # self.logger.info("DRIFT QC has passed")
                drift = header["HIERARCH ESO DRS DRIFT RV USED"] * meter_second
                drift_err = header["HIERARCH ESO DRS DRIFT NOISE"] * meter_second
        except Exception:
            bad_drift = True
            logger.warning("DRIFT KW does not exist")

        if bad_drift:
            logger.warning("Due to previous drift-related problems, setting it to zero [m/s]")
            drift = 0 * meter_second
            drift_err = 0 * meter_second

        self.observation_info["drift"] = drift
        self.observation_info["drift_ERR"] = drift_err
        self.load_ccf_data()

    def load_ccf_data(self) -> None:
        """Load the necessary CCF data from the file!"""
        logger.debug("Loading data from the ccf file")
        header = fits.getheader(self.ccf_path)

        self.observation_info["DRS_RV"] = header["HIERARCH ESO DRS CCF RV"] * kilometer_second
        self.observation_info["SPEC_TYPE"] = header["HIERARCH ESO DRS CCF MASK"]

        # Quadrature sum of wavelength-calibration error and CCF noise
        # NOTE(review): the factor of 1000 presumably converts CCF NOISE to m/s -- confirm
        RV_err = np.sqrt(
            header["HIERARCH ESO DRS CAL TH ERROR"] ** 2
            +
            # hdulist[0].header['HIERARCH ESO DRS DRIFT NOISE']**2   +
            (1000 * header["HIERARCH ESO DRS CCF NOISE"]) ** 2,
        )
        self.observation_info["DRS_RV_ERR"] = RV_err * meter_second

        for key in self.available_indicators:
            full_key = "HIERARCH ESO DRS CCF " + key
            self.observation_info[key] = header[full_key]

        # We are missing error in CONTRAST!
        self.observation_info["FWHM_ERR"] = convert_data(
            2.35 * RV_err * meter_second,
            new_units=kilometer_second,
            as_value=True,
        )
        self.observation_info["BIS SPAN_ERR"] = convert_data(
            np.sqrt(2) * RV_err * meter_second,
            new_units=kilometer_second,
            as_value=True,
        )

        if self.BIS_file is not None:
            head = fits.getheader(self.BIS_file)
            self.observation_info["BIS SPAN"] = head["HIERARCH ESO DRS BIS SPAN"]

    def load_S1D_data(self) -> Mask:
        """S1D data is not supported for HARPS."""
        raise NotImplementedError

    def check_header_QC_old_DRS(self, header):
        """Header QC for the old DRS (not implemented yet)."""
        logger.info("Currently missing QC checks for the old DRS")

    def load_old_DRS_S2D(self):
        """Load the S2D spectra produced by the old (DRS) pipeline.

        Returns
        -------
        int
            1, unconditionally.

        """
        super().load_S2D_data()

        with fits.open(self.file_path) as hdulist:
            # Compute the wavelength solution + BERV correction
            wave_from_file = self.build_HARPS_wavelengths(hdulist[0].header)

            sci_data = hdulist[0].data  # spectra from all orders

            # photon noise + estimate of max value for the rest
            # from ETC calculator the readout noise should be the largest contribution
            # assuming that it is of ~7e- (equal to manual) it should have a maximum contribution
            # of 200
            flux_errors = np.sqrt(250 + np.abs(sci_data, dtype=float))

            # Validate for overflows and missing data
            quality_data = np.zeros(sci_data.shape)
            quality_data[np.where(np.isnan(sci_data))] = NAN_DATA.code
            quality_data[np.where(sci_data > 300000)] = SATURATION.code
            quality_data[np.where(sci_data < -3 * flux_errors)] = ERROR_THRESHOLD.code

        self.spectra = sci_data.astype(np.float64)
        self.wavelengths = wave_from_file
        self.qual_data = quality_data
        self.uncertainties = flux_errors.astype(np.float64)

        self.build_mask(bypass_QualCheck=False)
        return 1

    def close_arrays(self):
        """Close the data arrays; old-pipeline frames are stored BERV-uncorrected."""
        super().close_arrays()
        if self._internal_configs["use_old_pipeline"]:
            self.is_BERV_corrected = False
build_HARPS_wavelengths(hdr)

Compute the wavelength solution for this given spectrum (EQ 4.1 of the DRS manual) and convert from air wavelengths to vacuum.

Source code in src/ASTRA/Instruments/HARPS.py
def build_HARPS_wavelengths(self, hdr):
    """Compute the wavelength solution for this spectrum (EQ 4.1 of DRS manual)
    and convert the result from air to vacuum wavelengths.
    """
    # Polynomial degree, number of orders and pixels per order
    degree = hdr["HIERARCH ESO DRS CAL TH DEG LL"]
    n_orders = hdr.get("HIERARCH ESO DRS CAL LOC NBO", self.array_size[0])
    n_pixels = self.array_size[1]

    # Pixel-power matrix: row p holds pixel_index**p for every pixel,
    # built iteratively (row p = row p-1 times the pixel indices).
    pixel_powers = np.empty((degree + 1, n_pixels), "int64")
    pixel_powers[0].fill(1)
    pixel_powers[1] = np.arange(n_pixels)
    for power in range(2, degree + 1):
        pixel_powers[power] = pixel_powers[power - 1] * pixel_powers[1]

    # Coefficient matrix: one row of (degree + 1) coefficients per order
    coeffs = np.reshape(
        [hdr["HIERARCH ESO DRS CAL TH COEFF LL" + str(i)] for i in range(n_orders * (degree + 1))],
        (n_orders, degree + 1),
    )  # slow 30 ms

    # Evaluating every order's polynomial at every pixel is a single dot product
    air_wavelengths = np.dot(coeffs, pixel_powers)

    return airtovac(air_wavelengths)

find_files(file_name)

Find the CCF and S2D files and BIS files, which should be stored inside the same folder

Source code in src/ASTRA/Instruments/HARPS.py
def find_files(self, file_name):
    """Find the e2ds, CCF and BIS files, which should be stored inside the same folder.

    Parameters
    ----------
    file_name
        Either a folder containing the products, or the path of the E2DS file
        (in which case the CCF/BIS files are matched by filename prefix).

    Returns
    -------
    tuple
        ``(e2ds_path, ccf_path, bis_path, search_status)``; ``bis_path`` is
        ``None`` when no BIS file is found (BIS is not critical for the run).

    """
    logger.debug("Searching for the ccf and e2ds files")

    search_status = MISSING_DATA("Missing the ccf file")
    ccf_path = None
    bis_path = None

    if os.path.isdir(file_name):
        logger.debug("Received a folder, searching inside for necessary files")
        # search for e2ds file
        folder_name = file_name

        e2ds_files = glob.glob(os.path.join(folder_name, "*e2ds_A.fits"), recursive=True)
        ccf_files = glob.glob(os.path.join(folder_name, "*ccf*A.fits"), recursive=True)
        bis_files = glob.glob(os.path.join(folder_name, "*bis*A.fits"), recursive=True)

        for name, elems in [
            ("e2ds_A", e2ds_files),
            ("ccf", ccf_files),
            ("bis", bis_files),
        ]:
            if len(elems) > 1:
                msg = f"HARPS data only received folder name and it has more than 1 {name} file in it"
                raise custom_exceptions.InvalidConfiguration(msg)

            if len(elems) < 1:
                msg = f"HARPS data only received folder name and it has no {name} file in it"
                if name != "bis":
                    # The BIS file is not critical for the run
                    raise custom_exceptions.InvalidConfiguration(msg)
                logger.critical(msg)

        e2ds_path = e2ds_files[0]
        ccf_path = ccf_files[0]
        # BIS is optional (see above); previously this raised IndexError when absent
        bis_path = bis_files[0] if bis_files else None
        search_status = SUCCESS("Found all input files")
    else:
        logger.debug("Received path of E2DS file; searching for CCF with matching name")
        folder_name = os.path.dirname(file_name)
        e2ds_path = file_name
        file_start, *_ = os.path.basename(file_name).split("_")

        found_CCF = False
        found_BIS = False
        bis_files = glob.glob(os.path.join(folder_name, "*bis*A.fits"), recursive=True)
        ccf_files = Path(folder_name).glob("*ccf*A.fits")

        for file in ccf_files:
            if file_start in file.as_posix():
                ccf_path = file
                found_CCF = True
        for file in bis_files:
            if file_start in file:
                bis_path = file
                found_BIS = True
        if found_CCF:
            logger.info(f"Found CCF file: {ccf_path}")
            search_status = SUCCESS("Found CCF file")
        else:
            logger.critical("Was not able to find CCF file. Marking frame as invalid")
            ccf_path = ""

        if not found_BIS:
            bis_path = None

    return e2ds_path, ccf_path, bis_path, search_status

load_ccf_data()

Load the necessary CCF data from the file!

Source code in src/ASTRA/Instruments/HARPS.py
def load_ccf_data(self) -> None:
    """Load the necessary CCF data from the file!

    Reads the DRS radial velocity, the spectral-type mask and the activity
    indicators from the CCF file header, builds an RV uncertainty, and (when
    a BIS file exists) loads the BIS SPAN from it.
    """
    logger.debug("Loading data from the ccf file")
    header = fits.getheader(self.ccf_path)

    self.observation_info["DRS_RV"] = header["HIERARCH ESO DRS CCF RV"] * kilometer_second
    self.observation_info["SPEC_TYPE"] = header["HIERARCH ESO DRS CCF MASK"]

    # Quadrature sum of the wavelength-calibration error and the CCF noise.
    # NOTE(review): the factor of 1000 presumably converts CCF NOISE from km/s
    # to m/s before summing -- confirm against the DRS manual
    RV_err = np.sqrt(
        header["HIERARCH ESO DRS CAL TH ERROR"] ** 2
        +
        # hdulist[0].header['HIERARCH ESO DRS DRIFT NOISE']**2   +
        (1000 * header["HIERARCH ESO DRS CCF NOISE"]) ** 2,
    )
    self.observation_info["DRS_RV_ERR"] = RV_err * meter_second

    # Copy each available indicator (e.g. CONTRAST, FWHM) straight from the header
    for key in self.available_indicators:
        full_key = "HIERARCH ESO DRS CCF " + key
        self.observation_info[key] = header[full_key]

    # We are missing error in CONTRAST!
    # NOTE(review): 2.35 looks like the sigma-to-FWHM factor (~2.355) -- confirm
    self.observation_info["FWHM_ERR"] = convert_data(
        2.35 * RV_err * meter_second,
        new_units=kilometer_second,
        as_value=True,
    )
    self.observation_info["BIS SPAN_ERR"] = convert_data(
        np.sqrt(2) * RV_err * meter_second,
        new_units=kilometer_second,
        as_value=True,
    )

    # BIS SPAN itself lives in a separate (optional) file
    if self.BIS_file is not None:
        head = fits.getheader(self.BIS_file)
        self.observation_info["BIS SPAN"] = head["HIERARCH ESO DRS BIS SPAN"]

load_old_DRS_S2D()

Loads the spectra

Returns
Source code in src/ASTRA/Instruments/HARPS.py
def load_old_DRS_S2D(self):
    """Load the S2D spectra produced by the old (DRS) pipeline.

    Builds the wavelength solution from the header, estimates flux
    uncertainties, and flags NaN, saturated and strongly negative pixels.

    Returns
    -------
    int
        1, unconditionally.

    """
    super().load_S2D_data()

    with fits.open(self.file_path) as hdulist:
        # Compute the wavelength solution + BERV correction
        wave_from_file = self.build_HARPS_wavelengths(hdulist[0].header)

        sci_data = hdulist[0].data  # spectra from all orders

        # photon noise + estimate of max value for the rest
        # from ETC calculator the readout noise should be the largest contribution
        # assuming that it is of ~7e- (equal to manual) it should have a maximum contribution
        # of 200
        flux_errors = np.sqrt(250 + np.abs(sci_data, dtype=float))

        # Validate for overflows and missing data
        quality_data = np.zeros(sci_data.shape)
        quality_data[np.where(np.isnan(sci_data))] = NAN_DATA.code
        quality_data[np.where(sci_data > 300000)] = SATURATION.code
        quality_data[np.where(sci_data < -3 * flux_errors)] = ERROR_THRESHOLD.code

    self.spectra = sci_data.astype(np.float64)
    self.wavelengths = wave_from_file
    self.qual_data = quality_data
    self.uncertainties = flux_errors.astype(np.float64)

    self.build_mask(bypass_QualCheck=False)
    return 1

load_telemetry_info(header)

Loads (at least) the following keywords:

  • relative humidity
  • ambient temperature, in Celsius
  • airmass
  • Detector
Parameters

header

Returns
Source code in src/ASTRA/Instruments/HARPS.py
def load_telemetry_info(self, header):
    """Load telemetry-related keywords from the FITS header.

    Fills (at least) the relative humidity, the ambient temperature
    (stored in Kelvin, as TELFIT expects) and the airmass.

    Parameters
    ----------
    header
        FITS header of the observation.

    Returns
    -------

    """
    ambi_prefix = f"HIERARCH {self.KW_identifier} TEL AMBI"
    for store_name, kw_suffix in (
        ("relative_humidity", "RHUM"),
        ("ambient_temperature", "TEMP"),
    ):
        value = header[f"{ambi_prefix} {kw_suffix}"]
        if "temperature" in store_name:
            # TELFIT needs the temperature in Kelvin
            value = convert_temperature(
                value,
                old_scale="Celsius",
                new_scale="Kelvin",
            )
        self.observation_info[store_name] = value

    # 255 is the sensor's sentinel for an invalid reading
    if self.observation_info["relative_humidity"] == 255:
        logger.warning(f"{self.name} has an invalid value in the humidity sensor...")
        self.observation_info["relative_humidity"] = np.nan

    self.observation_info["airmass"] = header["HIERARCH ESO TEL AIRM START"]

CARMENES

Bases: Frame

Interface to handle KOBE-CARMENES data (optical arm only).

Using this class implies passing an input file (shaq_output_folder), from where we will load the following information:

  • Column 0 -> BJD
  • Column 5 -> CCF RV in km/s
  • Column 3 -> CCF RV Error in km/s
  • Column 10 -> BERV in km/s (used if we configure the override_BERV option)
  • Column 7 -> Instrumental drift in m/s
  • Column 8 -> Error in Instrumental drift in m/s
  • Column 9 -> QC flag on instrumental drift

Any observation that can't be cross-matched to the BJD will be marked as invalid

Source code in src/ASTRA/Instruments/CARMENES.py
class CARMENES(Frame):
    """Interface to handle KOBE-CARMENES data (optical arm only).

    Using this class implies passing an input file (shaq_output_folder), from where we will load
    the following information:

    - Column 0 -> BJD
    - Column 5 -> CCF RV in km/s
    - Column 3 -> CCF RV Error in km/s
    - Column 10 -> BERV in km/s (used if we configure the override_BERV option)
    - Column 7 -> Instrumental drift in m/s
    - Column 8 -> Error in Instrumental drift in m/s
    - Column 9 -> QC flag on instrumental drift

    Any observation that can't be cross-matched to the BJD will be marked as invalid
    """

    _default_params = Frame._default_params + DefaultValues(
        shaq_output_folder=UserParam(None, constraint=PathValue, mandatory=True),
        override_BERV=UserParam(True, constraint=BooleanValue, mandatory=False),
        is_KOBE_data=UserParam(True, constraint=BooleanValue, mandatory=False),
        max_hours_to_calibration=UserParam(
            100,
            constraint=NumericValue,
            mandatory=False,
            description="Maximum number of hours between observation and calibration. If exceeded, frame is invalid",
        ),
        sigma_clip_flux=UserParam(
            -1,
            constraint=NumericValue,  # -1 means that it is disabled,
            mandatory=False,
            description="If positive, then sigma clip the flux",
        ),
    )
    _default_params.update(
        "IS_SA_CORRECTED", UserParam(True, constraint=BooleanValue, mandatory=False)
    )

    sub_instruments = {
        "CARMENES": datetime.datetime.max,
    }
    _name = "CARMENES"

    # Mapping between SBART-internal keywords and FITS header keywords
    KW_map = {
        "OBJECT": "OBJECT",
        "BJD": "HIERARCH CARACAL BJD",
        "MJD": "MJD-OBS",
        "ISO-DATE": "DATE-OBS",  # TODO: to check this KW name
        "DRS-VERSION": "HIERARCH CARACAL FOX VERSION",
        "RA": "RA",
        "DEC": "DEC",
    }

    def __init__(
        self,
        file_path,
        user_configs: Optional[Dict[str, Any]] = None,
        reject_subInstruments=None,
        frameID=None,
        quiet_user_params: bool = True,
    ):
        """Construct a CARMENES frame.

        Parameters
        ----------
        file_path
            Path to the S2D (or S1D) file.
        user_configs
            Dictionary whose keys are the configurable options of CARMENES (check above)
        reject_subInstruments
            Iterable of subInstruments to fully reject
        frameID
            ID for this observation. Only used for organization purposes by :class:`~SBART.data_objects.DataClass`
        quiet_user_params
            If True, do not log the resolved user parameters
        """
        self._blaze_corrected = True

        super().__init__(
            inst_name=self._name,
            array_size={"S2D": [61, 4096]},
            file_path=file_path,
            frameID=frameID,
            KW_map=self.KW_map,
            available_indicators=("FWHM", "BIS SPAN"),
            user_configs=user_configs,
            reject_subInstruments=reject_subInstruments,
            need_external_data_load=True,
            quiet_user_params=quiet_user_params,
        )
        coverage = [500, 1750]
        self.instrument_properties["wavelength_coverage"] = coverage
        self.instrument_properties["is_drift_corrected"] = False

        # TODO: ensure that there are no problem when using the NIR arm of CARMENES!!!!!
        self.instrument_properties["resolution"] = 94600

        # lat/lon from: https://geohack.toolforge.org/geohack.php?pagename=Calar_Alto_Observatory&params=37_13_25_N_2_32_46_W_type:landmark_region:ES
        # height from: https://en.wikipedia.org/wiki/Calar_Alto_Observatory
        lat, lon = 37.223611, -2.546111
        self.instrument_properties["EarthLocation"] = EarthLocation.from_geodetic(
            lat=lat, lon=lon, height=2168
        )

        # from https://www.mide.com/air-pressure-at-altitude-calculator
        # and convert to Pa to mbar
        self.instrument_properties["site_pressure"] = 778.5095

        self.is_BERV_corrected = False

    def get_spectral_type(self):
        """Recognize the file type from its name; only the VIS arm S2D is supported."""
        file_stem = self.file_path.stem
        if "vis_A" in file_stem:
            return "S2D"
        else:
            raise custom_exceptions.InternalError(
                f"{self.name} can't recognize the file that it received ( - {self.file_path.stem})!"
            )

    def load_instrument_specific_KWs(self, header):
        """Load CARMENES-specific keywords (BERV, telemetry, SNRs, Moon info) from *header*."""
        self.observation_info["airmass"] = header["AIRMASS"]

        # Load BERV info + previous RV
        self.observation_info["MAX_BERV"] = 30 * kilometer_second
        # NOTE(review): keyword carries a trailing space; astropy normalizes
        # (strips) keywords on lookup so this works — confirm before "fixing".
        self.observation_info["BERV"] = (
            header["HIERARCH CARACAL BERV "] * kilometer_second
        )

        # TODO: check ambient temperature on CARMENES data TO SEE IF IT IS THE "REAL ONE"
        # Environmental KWs for telfit (also needs airmass, previously loaded)
        ambi_KWs = {
            "relative_humidity": "AMBI RHUM",
            "ambient_temperature": "AMBI TEMPERATURE",
        }

        for name, endKW in ambi_KWs.items():
            self.observation_info[name] = header[f"HIERARCH CAHA GEN {endKW}"]
            if "temperature" in name:  # store temperature in KELVIN for TELFIT
                self.observation_info[name] = convert_temperature(
                    self.observation_info[name], old_scale="Celsius", new_scale="Kelvin"
                )
        for order in range(self.N_orders):
            self.observation_info["orderwise_SNRs"].append(
                header[f"HIERARCH CARACAL FOX SNR {order}"]
            )

        try:
            self.observation_info["MOON PHASE"] = header[
                "HIERARCH CAHA INS SCHEDULER MOON PHASE"
            ]
            self.observation_info["MOON DISTANCE"] = header[
                "HIERARCH CAHA INS SCHEDULER MOON DISTANCE"
            ]
        except KeyError:
            # Older files miss the scheduler keywords; fall back to neutral values
            self.observation_info["MOON PHASE"] = 0
            self.observation_info["MOON DISTANCE"] = 0

    def check_header_QC(self, header: fits.Header):
        """Header QC checks for CARMENES.

        1) Drift calibration was done (CARACAL DRIFT FP REF exists)
        2) Time between observation and calibration is smaller than "max_hours_to_calibration"

        Can add the following status:

        - KW_WARNING("Drift flag of KOBE is greater than 1")
            If the drift value is greater than one. This is actually set in the DataClass.load_CARMENES_extra_information()

        Args:
            header (fits.Header): header of the observation being validated
        """
        kill_messages = []

        try:
            drift_file = header["CARACAL DRIFT FP REF"]

            # The calibration timestamp is encoded in the FP file name
            night_drift = drift_file.split("s-")[0].split("car-")[-1]
            night_drift = datetime.datetime.strptime(night_drift, r"%Y%m%dT%Hh%Mm%S")
            night_data = datetime.datetime.strptime(
                header["CARACAL DATE-OBS"], r"%Y-%m-%dT%H:%M:%S"
            )
            msg = f"\tDistance between calibration (FP) and OB has surpassed the limit of {self._internal_configs['max_hours_to_calibration']}"
            if (
                abs(night_drift - night_data).total_seconds()
                > datetime.timedelta(
                    hours=self._internal_configs["max_hours_to_calibration"]
                ).total_seconds()
            ):
                kill_messages.append(msg)

        except KeyError:
            kill_messages.append("Observation missing the CARACAL DRIFT FP REF keyword")

        if self._internal_configs["is_KOBE_data"]:
            # we can have skysubs outside of KOBE
            for msg in kill_messages:
                logger.critical(msg)
                self.add_to_status(FATAL_KW(msg.replace("\t", "")))

    def load_S2D_data(self):
        """Load the S2D flux, uncertainties and wavelength solution from the FITS file."""
        if self.is_open:
            return
        super().load_S2D_data()

        with fits.open(self.file_path) as hdulist:
            s2d_data = hdulist["SPEC"].data * 100000  # spectra from all orders
            err_data = hdulist["SIG"].data * 100000
            wavelengths = hdulist["WAVE"].data  # vacuum wavelengths; no BERV correction

        self.wavelengths = wavelengths
        self.spectra = s2d_data
        self.uncertainties = err_data
        self.build_mask(bypass_QualCheck=True)
        return 1

    def finalize_data_load(self, bad_flag: Optional[Flag] = None):
        """Finish the external data load, flagging frames close to the Moon.

        The *bad_flag* argument is deliberately overridden: a CARMENES frame
        that never received shaq RVs is always marked with MISSING_SHAQ_RVS.
        """
        bad_flag = MISSING_SHAQ_RVS
        super().finalize_data_load(bad_flag)

        moon_sep = self.observation_info["MOON DISTANCE"]
        moon_illum = self.observation_info["MOON PHASE"]
        rv = self.observation_info["DRS_RV"]
        berv = self.observation_info["BERV"]
        fwhm = self.observation_info["FWHM"]

        curr_rv_diff = (rv - berv).to(kilometer_second).value
        # Any of these conditions means the Moon cannot contaminate the spectrum
        conditions = (
            moon_sep > 80,
            (30 < moon_sep < 80) and moon_illum < 0.6,
            abs(curr_rv_diff) > 5 * fwhm,
        )
        if not any(conditions):
            logger.warning("Target is close to the moon, setting warning flag")
            self.add_to_status(KW_WARNING("Target is close to moon"))

    def load_S1D_data(self) -> Mask:
        """S1D loading is not available for CARMENES."""
        raise NotImplementedError

    def build_mask(self, bypass_QualCheck: bool = False):
        """Build the pixel rejection mask: known bad pixels, sigma clipping, negative fluxes."""
        # We evaluate the bad orders all at once
        super().build_mask(bypass_QualCheck, assess_bad_orders=False)

        # Bad pixel map as defined by SERVAL:
        # https://github.com/mzechmeister/serval/blob/c2f47b26f1102333dfe76f93c2a686807cda02ce/src/inst_CARM_VIS.py#L95
        bpmap0 = np.zeros((61, 4096), dtype=np.uint64)
        bpmap0[
            14:38, [2453 - 3, 2453 - 2, 2453 - 1, 2453, 2453 + 1, 2453 + 2, 2453 + 3]
        ] |= 1
        bpmap0[14:38, 1643] |= 1  # ghost of hotspot tail
        bpmap0[14:38, 2459] |= (
            1  # spikes of hotspot satellite (bug not correct due to bug in v2.00)
        )
        bpmap0[15:41, 3374] |= 1  # displaced column; ignore by marking as nan
        bpmap0[28, 3395:3400] |= 1  # car-20160701T00h49m36s-sci-gtoc-vis.fits
        bpmap0[34, 838:850] |= 1  # car-20160803T22h46m41s-sci-gtoc-vis.fits
        bpmap0[34, 2035:2044] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[34, 3150:3161] |= 1  # car-20160803T22h46m41s-sci-gtoc-vis.fits
        bpmap0[35, 403:410] |= 1  # car-20160803T22h46m41s-sci-gtoc-vis
        bpmap0[35, 754:759] |= 1  # car-20170419T03h27m48s-sci-gtoc-vis
        bpmap0[35, 1083:1093] |= 1  # car-20160803T22h46m41s-sci-gtoc-vis
        bpmap0[35, 1944:1956] |= 1  # car-20160803T22h46m41s-sci-gtoc-vis
        bpmap0[35, 2710:2715] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[35, 3050:3070] |= 1  # car-20160803T22h46m41s-sci-gtoc-vis
        bpmap0[35, 3706:3717] |= 1  # car-20160803T22h46m41s-sci-gtoc-vis
        bpmap0[36, 303:308] |= 1  # car-20170419T03h27m48s-sci-gtoc-vis
        bpmap0[36, 312:317] |= 1  # car-20170419T03h27m48s-sci-gtoc-vis
        bpmap0[36, 1311:1315] |= 1  # car-20170419T03h27m48s-sci-gtoc-vis
        bpmap0[36, 1325:1329] |= 1  # car-20170419T03h27m48s-sci-gtoc-vis
        bpmap0[37, 1326:1343] |= 1  # car-20170419T03h27m48s-sci-gtoc-vis
        bpmap0[39, 1076:1082] |= 1  # car-20170626T02h00m17s-sci-gtoc-vis
        bpmap0[39, 1204:1212] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[39, 1236:1243] |= 1  # car-20170419T03h27m48s-sci-gtoc-vis
        bpmap0[39, 1463:1468] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[39, 2196:2203] |= 1  # car-20160520T03h10m13s-sci-gtoc-vis.fits
        bpmap0[39, 2493:2504] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[39, 3705:3717] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[40, 2765:2773] |= 1  # car-20170419T03h27m48s-sci-gtoc-vis
        bpmap0[40, 3146:3153] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[40, 3556:3564] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[41, 486:491] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[41, 495:501] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[41, 1305:1315] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[42, 480:490] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[42, 1316:1330] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[42, 2363:2368] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[42, 2375:2382] |= 1  # car-20170509T03h05m21s-sci-gtoc-vis
        bpmap0[44, 3355:3361] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[46, 311:321] |= 1  # car-20160701T00h49m36s-sci-gtoc-vis.fits
        bpmap0[46, 835:845] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[46, 1156:1171] |= 1  # car-20160701T00h49m36s-sci-gtoc-vis.fits
        bpmap0[46, 1895:1905] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[46, 2212:2232] |= 1  # car-20160701T00h49m36s-sci-gtoc-vis.fits
        bpmap0[47, 2127:2133] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[47, 2218:2223] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[47, 2260:2266] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[47, 2313:2319] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[47, 3111:3116] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[47, 3267:3272] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[47, 3316:3321] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[47, 3432:3438] |= 1  # car-20170509T03h05m21s-sci-gtoc-vis
        bpmap0[47, 3480:3488] |= 1  # car-20160714T00h18m29s-sci-gtoc-vis
        bpmap0[47, 3658:3665] |= 1  # car-20170509T03h05m21s-sci-gtoc-vis
        bpmap0[49, 1008:1017] |= 1  # car-20160701T00h49m36s-sci-gtoc-vis.fits
        bpmap0[49, 2532:2544] |= 1  # car-20160701T00h49m36s-sci-gtoc-vis.fits
        bpmap0[49, 3046:3056] |= 1  # car-20160701T00h49m36s-sci-gtoc-vis.fits
        bpmap0[49, 3574:3588] |= 1  # car-20160701T00h49m36s-sci-gtoc-vis.fits

        # FIX: the slice was written reversed (3606:3588), which selects
        # nothing and silently turned this rejection into a no-op.
        bpmap0[25:41, 3588:3606] |= 1  # Our rejection

        if self._internal_configs["sigma_clip_flux"] > 0:
            sigma = self._internal_configs["sigma_clip_flux"]
            for order_number in range(self.N_orders):
                cont = median_filter(self.spectra[order_number], size=500)
                inds = np.where(
                    self.spectra[order_number]
                    >= cont + sigma * self.uncertainties[order_number]
                )
                bpmap0[order_number, inds] |= 1
        self.spectral_mask.add_indexes_to_mask(np.where(bpmap0 != 0), QUAL_DATA)

        # remove extremely negative points!
        self.spectral_mask.add_indexes_to_mask(
            np.where(self.spectra < -3 * self.uncertainties), MISSING_DATA
        )
        self.spectral_mask.add_indexes_to_mask(
            np.where(self.uncertainties == 0), MISSING_DATA
        )

        self.assess_bad_orders()

    def close_arrays(self):
        """Release the in-memory arrays and reset the BERV correction flag."""
        super().close_arrays()
        self.is_BERV_corrected = False

check_header_QC(header)

Header QC checks for CARMENES.

1) Drift calibration was done (CARACAL DRIFT FP REF exists) 2) Time between observation and calibration is smaller than "max_hours_to_calibration"

Can add the following status:

  • KW_WARNING("Drift flag of KOBE is greater than 1") If the drift value is greater than one. This is actually set in the DataClass.load_CARMENES_extra_information()

Parameters:

Name Type Description Default
header Header

description

required
Source code in src/ASTRA/Instruments/CARMENES.py
def check_header_QC(self, header: fits.Header):
    """Header QC checks for CARMENES.

    1) Drift calibration was done (CARACAL DRIFT FP REF exists)
    2) Time between observation and calibration is smaller than "max_hours_to_calibration"

    Can add the following status:

    - KW_WARNING("Drift flag of KOBE is greater than 1")
        If the drift value is greater than one. This is actually set in the DataClass.load_CARMENES_extra_information()

    Args:
        header (fits.Header): header of the observation being validated
    """
    fatal_messages = []

    try:
        calibration_file = header["CARACAL DRIFT FP REF"]

        # The calibration timestamp is encoded in the FP file name
        calib_stamp = calibration_file.split("s-")[0].split("car-")[-1]
        calib_time = datetime.datetime.strptime(calib_stamp, r"%Y%m%dT%Hh%Mm%S")
        obs_time = datetime.datetime.strptime(
            header["CARACAL DATE-OBS"], r"%Y-%m-%dT%H:%M:%S"
        )
        over_limit_msg = f"\tDistance between calibration (FP) and OB has surpassed the limit of {self._internal_configs['max_hours_to_calibration']}"
        allowed_gap = datetime.timedelta(
            hours=self._internal_configs["max_hours_to_calibration"]
        )
        if abs(calib_time - obs_time).total_seconds() > allowed_gap.total_seconds():
            fatal_messages.append(over_limit_msg)

    except KeyError:
        fatal_messages.append("Observation missing the CARACAL DRIFT FP REF keyword")

    if self._internal_configs["is_KOBE_data"]:
        # we can have skysubs outside of KOBE
        for message in fatal_messages:
            logger.critical(message)
            self.add_to_status(FATAL_KW(message.replace("\t", "")))

load_CARMENES_extra_information(self)

The CARMENES pipeline does not provide RVs, so we have to load this information from an external source

Parameters

shaq_folder : str Path to the main folder of shaq-outputs, where all the KOBE-*** targets live

Source code in src/ASTRA/Instruments/CARMENES.py
def load_CARMENES_extra_information(self: DataClass) -> None:
    """Load the RV-related information that the CARMENES pipeline does not provide.

    The values are cross-matched against the loaded frames through the BJD;
    any frame without a matching entry keeps its "missing external data" state.

    Parameters
    ----------
    shaq_folder : str
        Path to the main folder of shaq-outputs, where all the KOBE-*** targets live
    """
    target_name = self.Target.true_name
    first_frame = self.observations[0]

    if first_frame._internal_configs["is_KOBE_data"]:
        if "KOBE-" not in target_name:
            # temporary fix for naming problem!
            target_name = "KOBE-" + target_name
    else:
        logger.info(
            f"Not loading KOBE data, searching for {target_name} dat file with Rvs"
        )

    shaq_folder = Path(first_frame._internal_configs["shaq_output_folder"])
    override_BERV = first_frame._internal_configs["override_BERV"]

    if shaq_folder.name.endswith("dat"):
        logger.info("Received the previous RV file, not searching for outputs")
        rv_file = shaq_folder
    else:
        logger.info("Searching for outputs of previous RV extraction")
        rv_file = shaq_folder / target_name / f"{target_name}_RVs.dat"

    if not rv_file.exists():
        logger.critical(f"RV file does not exist on {rv_file}")
        raise custom_exceptions.InvalidConfiguration("Missing RV file for data")
    logger.info("Loading extra CARMENES data from {}", rv_file)

    matched_count = 0
    matched_indices = []
    known_BJDs = [frame.get_KW_value("BJD") for frame in self.observations]
    with open(rv_file) as rv_data:
        for raw_line in rv_data:
            if "#" in raw_line:  # header or other "BAD" files
                continue
            # TODO: implement a more thorough check in here, to mark the "bad" frames as invalid!
            fields = raw_line.strip().split()
            if not fields:
                logger.warning(f"shaq RV from {target_name} has empty line")
                continue
            # we have the full bjd date
            obs_bjd = round(float(fields[1]) - 2400000.0, 7)

            try:
                # to make sure that everything is loaded in the same order
                frame_index = known_BJDs.index(obs_bjd)
                matched_indices.append(frame_index)
            except ValueError:
                logger.warning("RV shaq has entry that does not exist in the S2D files")
                continue

            frame = self.observations[frame_index]
            frame.import_KW_from_outside(
                "DRS_RV", float(fields[5]) * kilometer_second, optional=False
            )
            frame.import_KW_from_outside(
                "DRS_RV_ERR", float(fields[3]) * kilometer_second, optional=False
            )
            if override_BERV:
                frame.import_KW_from_outside(
                    "BERV", float(fields[10]) * kilometer_second, optional=False
                )
                berv_factor = 1 + float(fields[10]) / SPEED_OF_LIGHT
                frame.import_KW_from_outside("BERV_FACTOR", berv_factor, optional=False)

            frame.import_KW_from_outside("FWHM", float(fields[11]), optional=True)
            frame.import_KW_from_outside("BIS SPAN", float(fields[13]), optional=True)

            drift_value = np.nan_to_num(float(fields[7])) * meter_second
            drift_uncertainty = np.nan_to_num(float(fields[8])) * meter_second
            if float(fields[9]) > 1:
                frame.add_to_status(
                    KW_WARNING("Drift flag of KOBE is greater than 1")
                )

            frame.import_KW_from_outside("drift", drift_value, optional=False)
            frame.import_KW_from_outside("drift_ERR", drift_uncertainty, optional=False)

            matched_count += 1
            frame.finalized_external_data_load()

    if matched_count < len(self.observations):
        msg = f"RV shaq outputs does not have value for all S2D files of {target_name} ({matched_count}/{len(self.observations)})"

        logger.critical(msg)
Interface to MAROON-X data.

MAROONX

Bases: Frame

Interface to handle MAROONX data.

Source code in src/ASTRA/Instruments/MAROONX.py
class MAROONX(Frame):
    """Interface to handle MAROONX data (HD5 stores with a blue and a red arm)."""

    _default_params = Frame._default_params

    # Adding one more day than the official time, so that we ensure to include any observations
    # from the last day

    sub_instruments = {
        "MAROON1": datetime.datetime.strptime("09-15-2020", r"%m-%d-%Y"),
        "MAROON2": datetime.datetime.strptime("12-02-2020", r"%m-%d-%Y"),
        "MAROON3": datetime.datetime.strptime("03-04-2021", r"%m-%d-%Y"),
        "MAROON4": datetime.datetime.strptime("04-30-2021", r"%m-%d-%Y"),
        "MAROON5": datetime.datetime.strptime("06-04-2021", r"%m-%d-%Y"),
        "MAROON6": datetime.datetime.strptime("08-23-2021", r"%m-%d-%Y"),
        "MAROON7": datetime.datetime.strptime("11-23-2021", r"%m-%d-%Y"),
        "MAROON8": datetime.datetime.strptime("04-27-2022", r"%m-%d-%Y"),
        "MAROON9": datetime.datetime.strptime("06-03-2022", r"%m-%d-%Y"),
        "MAROON10": datetime.datetime.strptime("08-15-2022", r"%m-%d-%Y"),
        "MAROON11": datetime.datetime.strptime("07-11-2023", r"%m-%d-%Y"),
        "MAROON12": datetime.datetime.strptime("10-28-2023", r"%m-%d-%Y"),
        "MAROON13": datetime.datetime.strptime("11-29-2023", r"%m-%d-%Y"),
        "MAROON14": datetime.datetime.strptime("01-03-2024", r"%m-%d-%Y"),
        "MAROON15": datetime.datetime.strptime("04-24-2024", r"%m-%d-%Y"),
        "MAROON16": datetime.datetime.strptime("06-13-2024", r"%m-%d-%Y"),
        "MAROON17": datetime.datetime.strptime("07-18-2024", r"%m-%d-%Y"),
        "MAROON18": datetime.datetime.strptime("08-20-2024", r"%m-%d-%Y"),
        "MAROON19": datetime.datetime.strptime("10-15-2024", r"%m-%d-%Y"),
        "MAROON20": datetime.datetime.strptime("01-09-2025", r"%m-%d-%Y"),
        "MAROON21": datetime.datetime.strptime("02-04-2025", r"%m-%d-%Y"),
        "MAROON22": datetime.datetime.max,
    }
    _name = "MAROONX"

    # Orders covered by each detector; WHITE_LIGHT spans both arms.
    # FIX: the annotation said `slice`, but the values are lists of ints.
    order_intervals: dict[DETECTOR_DEFINITION, list[int]] = {
        DETECTOR_DEFINITION.WHITE_LIGHT: list(range(62)),
        DETECTOR_DEFINITION.RED_DET: list(range(34, 62)),
        DETECTOR_DEFINITION.BLUE_DET: list(range(0, 34)),
    }
    KW_map = {}

    def __init__(
        self,
        file_path: Path,
        user_configs: Optional[Dict[str, Any]] = None,
        reject_subInstruments=None,
        frameID=None,
        quiet_user_params: bool = True,
    ):
        """Construct MAROON-X object.

        Parameters
        ----------
        file_path
            Path to the S2D (or S1D) file.
        user_configs
            Dictionary whose keys are the configurable options of ESPRESSO (check above)
        reject_subInstruments
            Iterable of subInstruments to fully reject
        frameID
            ID for this observation. Only used for organization purposes by :class:`~SBART.data_objects.DataClass`
        quiet_user_params
            If True, do not log the resolved user parameters

        """
        self._blaze_corrected = True

        super().__init__(
            inst_name=self._name,
            array_size={"S2D": [62, 4036]},
            file_path=file_path,
            frameID=frameID,
            KW_map=self.KW_map,
            available_indicators=("FWHM", "BIS SPAN"),
            user_configs=user_configs,
            reject_subInstruments=reject_subInstruments,
            need_external_data_load=False,
            quiet_user_params=quiet_user_params,
        )
        coverage = [500, 920]
        self.instrument_properties["wavelength_coverage"] = coverage
        self.instrument_properties["is_drift_corrected"] = False

        self.instrument_properties["resolution"] = 86_000

        # lat/lon from: https://geohack.toolforge.org/geohack.php?params=19_49_25_N_155_28_9_W
        lat, lon = 19.820667, -155.468056
        self.instrument_properties["EarthLocation"] = EarthLocation.from_geodetic(lat=lat, lon=lon, height=4214)

        # from https://www.mide.com/air-pressure-at-altitude-calculator
        # and convert from Pa to mbar
        self.instrument_properties["site_pressure"] = 599.4049

        self.is_BERV_corrected = False

    def get_spectral_type(self) -> str:
        """Get the spectral type from the filename."""
        if not self.file_path.name.endswith("hd5"):
            raise custom_exceptions.InternalError("MAROON-X interface only recognizes hd5 files")
        return "S2D"

    def load_instrument_specific_KWs(self, header) -> None:
        """Override parent class, does nothing (all keywords come from the HD5 store)."""
        ...

    def load_header_info(self) -> None:
        """Load information from the header."""
        # FIX: open the store read-only ("r" instead of "r+") — we only read
        # here — and use a context manager so it is closed even on error.
        with pd.HDFStore(self.file_path, "r") as store:
            header_blue = store["header_blue"]
            header_red = store["header_red"]

            orders_blue = store["spec_blue"].index.levels[1]
            orders_red = store["spec_red"].index.levels[1]

        for order_set, header_det in [
            (orders_blue, header_blue),
            (orders_red, header_red),
        ]:
            for order in order_set:
                self.observation_info["orderwise_SNRs"].append(float(header_det[f"SNR_{order}"]))
        for name, kw in [
            ("ISO-DATE", "MAROONX TELESCOPE TIME"),
            ("OBJECT", "MAROONX TELESCOPE TARGETNAME"),
        ]:
            self.observation_info[name] = header_blue[kw]

        for name, kw in [
            ("airmass", "MAROONX TELESCOPE AIRMASS"),
            ("relative_humidity", "MAROONX TELESCOPE HUMIDITY"),
            ("ambient_temperature", "MAROONX WEATHER TEMPERATURE"),  # TODO: check units
            ("BERV", "BERV_FLUXWEIGHTED_FRD"),
            ("JD", "JD_UTC_FLUXWEIGHTED_FRD"),
            ("EXPTIME", "EXPTIME"),
        ]:
            self.observation_info[name] = float(header_blue[kw])

        self.observation_info["BERV"] = self.observation_info["BERV"] * meter_second
        self.observation_info["BERV_FACTOR"] = (
            1 + self.observation_info["BERV"].to(kilometer_second).value / SPEED_OF_LIGHT
        )
        # Convert ambient temperature to Kelvin
        self.observation_info["ambient_temperature"] = convert_temperature(
            self.observation_info["ambient_temperature"],
            old_scale="Celsius",
            new_scale="Kelvin",
        )

        # Note: we don't have DRS values for MAROON-X
        self.observation_info["DRS_RV"] = 0 * meter_second
        self.observation_info["DRS_RV_ERR"] = 0 * meter_second

        self.find_instrument_type()
        self.assess_bad_orders()

    def load_S2D_data(self) -> None:
        """Load the S2D data from the HD5 files."""
        if self.is_open:
            return
        super().load_S2D_data()
        # FIX: read-only mode + context manager (see load_header_info)
        with pd.HDFStore(self.file_path, "r") as store:
            spec_red = store["spec_red"]
            spec_blue = store["spec_blue"]

        # The blue arm has fewer pixels per order; pad it with zeros so that
        # both arms can be stacked into a single rectangular array
        red_pix = spec_red["wavelengths"][6].values[0].shape[0]
        blue_pix = spec_blue["wavelengths"][6].values[0].shape[0]
        blue_pad = red_pix - blue_pix

        blue_det_flux = np.vstack(spec_blue["optimal_extraction"][6])
        p_blue_det_flux = np.pad(blue_det_flux, ((0, 0), (0, blue_pad)), mode="constant")
        blue_det_wave = np.vstack(spec_blue["wavelengths"][6])
        p_blue_det_wave = np.pad(blue_det_wave, ((0, 0), (0, blue_pad)), mode="constant")
        blue_det_err = np.vstack(spec_blue["optimal_var"][6])
        p_blue_det_err = np.pad(blue_det_err, ((0, 0), (0, blue_pad)), mode="constant")

        red_det_flux = np.vstack(spec_red["optimal_extraction"][6])
        red_det_wave = np.vstack(spec_red["wavelengths"][6])
        red_det_err = np.vstack(spec_red["optimal_var"][6])

        self.wavelengths = np.vstack((p_blue_det_wave, red_det_wave))
        self.spectra = np.vstack((p_blue_det_flux, red_det_flux))
        self.uncertainties = np.vstack((p_blue_det_err, red_det_err))
        self.build_mask(bypass_QualCheck=True)

    def load_S1D_data(self) -> Mask:
        """Load S1D data, currently not implemented."""
        raise NotImplementedError

    def build_mask(self, bypass_QualCheck: bool = False) -> None:
        """Construct the pixel-wise mask."""
        # We evaluate the bad orders all at once
        super().build_mask(bypass_QualCheck, assess_bad_orders=False)

        bpmap0 = np.zeros((62, 4036), dtype=np.uint64)
        # Remove the first blue order
        bpmap0[0, :] = 1

        # NOTE(review): "SIGMA_CLIP_FLUX_VALUES" is not declared in this class's
        # _default_params (CARMENES uses lowercase "sigma_clip_flux") — confirm
        # the key exists in Frame._default_params, otherwise this raises.
        if self._internal_configs["SIGMA_CLIP_FLUX_VALUES"] > 0:
            sigma = self._internal_configs["SIGMA_CLIP_FLUX_VALUES"]
            for order_number in range(self.N_orders):
                cont = median_filter(self.spectra[order_number], size=500)
                inds = np.where(self.spectra[order_number] >= cont + sigma * self.uncertainties[order_number])
                bpmap0[order_number, inds] |= 1
        self.spectral_mask.add_indexes_to_mask(np.where(bpmap0 != 0), QUAL_DATA)

        # remove extremely negative points!
        self.spectral_mask.add_indexes_to_mask(np.where(self.spectra < -3 * self.uncertainties), MISSING_DATA)

        self.assess_bad_orders()

    def close_arrays(self) -> None:
        """Close arrays in memory."""
        super().close_arrays()
        self.is_BERV_corrected = False

build_mask(bypass_QualCheck=False)

Construct the pixel-wise mask.

Source code in src/ASTRA/Instruments/MAROONX.py
def build_mask(self, bypass_QualCheck: bool = False) -> None:
    """Construct the pixel-wise mask.

    Extends the parent-class mask with MAROON-X specific rejections:
    the entire first blue order, sigma-clipped flux outliers above a
    running-median continuum (when ``SIGMA_CLIP_FLUX_VALUES`` > 0), and
    strongly negative flux values (below -3 uncertainties).

    Args:
        bypass_QualCheck: Forwarded to the parent mask builder to skip
            the per-pixel quality-flag check.

    """
    # We evaluate the bad orders all at once
    super().build_mask(bypass_QualCheck, assess_bad_orders=False)

    # NOTE(review): hard-coded (orders, pixels) = (62, 4036) layout — confirm it
    # matches the stacked blue+red arrays built in load_S2D_data.
    bpmap0 = np.zeros((62, 4036), dtype=np.uint64)
    # Remove the first blue order
    bpmap0[0, :] = 1

    if self._internal_configs["SIGMA_CLIP_FLUX_VALUES"] > 0:
        sigma = self._internal_configs["SIGMA_CLIP_FLUX_VALUES"]
        for order_number in range(self.N_orders):
            # Running-median estimate of the continuum for this order.
            cont = median_filter(self.spectra[order_number], size=500)
            # Flag points more than `sigma` uncertainties above the continuum.
            inds = np.where(self.spectra[order_number] >= cont + sigma * self.uncertainties[order_number])
            bpmap0[order_number, inds] |= 1
    self.spectral_mask.add_indexes_to_mask(np.where(bpmap0 != 0), QUAL_DATA)

    # remove extremely negative points!
    self.spectral_mask.add_indexes_to_mask(np.where(self.spectra < -3 * self.uncertainties), MISSING_DATA)

    self.assess_bad_orders()

close_arrays()

Close arrays in memory.

Source code in src/ASTRA/Instruments/MAROONX.py
def close_arrays(self) -> None:
    """Close arrays in memory."""
    super().close_arrays()
    # Reset the BERV-correction flag once the arrays are released.
    self.is_BERV_corrected = False

get_spectral_type()

Get the spectral type from the filename.

Source code in src/ASTRA/Instruments/MAROONX.py
def get_spectral_type(self) -> str:
    """Get the spectral type from the filename.

    Returns:
        Always ``"S2D"``; MAROON-X hd5 files carry order-wise data.

    Raises:
        custom_exceptions.InternalError: if the file does not end in ``hd5``.

    """
    # NOTE(review): endswith("hd5") also matches names like "foohd5";
    # presumably the usual ".hd5" extension is intended — confirm.
    if not self.file_path.name.endswith("hd5"):
        raise custom_exceptions.InternalError("MAROON-X interface only recognizes hd5 files")
    return "S2D"

load_S1D_data()

Load S1D data, currently not implemented.

Source code in src/ASTRA/Instruments/MAROONX.py
def load_S1D_data(self) -> Mask:
    """Load S1D data, currently not implemented."""
    # Only the order-wise (S2D-like) products are handled for MAROON-X.
    raise NotImplementedError

load_S2D_data()

Load the S2D data from the HD5 files.

Source code in src/ASTRA/Instruments/MAROONX.py
def load_S2D_data(self) -> None:
    """Load the S2D data from the HD5 files.

    Reads the blue and red detector tables from the HDF5 store, pads the
    blue orders on the right to the red-detector pixel count, stacks both
    detectors into the wavelength/flux/uncertainty arrays, and builds the
    pixel mask.
    """
    # Nothing to do if the arrays are already loaded.
    if self.is_open:
        return
    super().load_S2D_data()
    # NOTE(review): "r+" opens the store read-write; "r" looks sufficient here — confirm.
    store = pd.HDFStore(self.file_path, "r+")
    spec_red = store["spec_red"]
    spec_blue = store["spec_blue"]
    store.close()

    # Per-order pixel counts of each detector; the blue orders are shorter
    # and get zero-padded on the right to match the red pixel count.
    # NOTE(review): column index 6 selects a specific fiber/reduction column — confirm its meaning.
    red_pix = spec_red["wavelengths"][6].values[0].shape[0]
    blue_pix = spec_blue["wavelengths"][6].values[0].shape[0]
    blue_pad = red_pix - blue_pix

    blue_det_flux = np.vstack(spec_blue["optimal_extraction"][6])
    p_blue_det_flux = np.pad(blue_det_flux, ((0, 0), (0, blue_pad)), mode="constant")
    blue_det_wave = np.vstack(spec_blue["wavelengths"][6])
    p_blue_det_wave = np.pad(blue_det_wave, ((0, 0), (0, blue_pad)), mode="constant")
    blue_det_err = np.vstack(spec_blue["optimal_var"][6])
    p_blue_det_err = np.pad(blue_det_err, ((0, 0), (0, blue_pad)), mode="constant")

    red_det_flux = np.vstack(spec_red["optimal_extraction"][6])
    red_det_wave = np.vstack(spec_red["wavelengths"][6])
    red_det_err = np.vstack(spec_red["optimal_var"][6])

    # Stack blue orders first, then red, for all three arrays.
    self.wavelengths = np.vstack((p_blue_det_wave, red_det_wave))
    self.spectra = np.vstack((p_blue_det_flux, red_det_flux))
    self.uncertainties = np.vstack((p_blue_det_err, red_det_err))
    self.build_mask(bypass_QualCheck=True)

load_header_info()

Load information from the header.

Source code in src/ASTRA/Instruments/MAROONX.py
def load_header_info(self) -> None:
    """Load information from the header.

    Populates ``observation_info`` with order-wise SNRs, target/timing
    keywords, weather data and the BERV taken from the blue and red
    header tables of the HDF5 store.
    """
    # NOTE(review): "r+" opens the store read-write; "r" looks sufficient here — confirm.
    store = pd.HDFStore(self.file_path, "r+")
    header_blue = store["header_blue"]
    header_red = store["header_red"]

    # Order numbers live in the second level of the spec tables' MultiIndex.
    orders_blue = store["spec_blue"].index.levels[1]
    orders_red = store["spec_red"].index.levels[1]
    store.close()

    # Collect per-order SNRs: blue detector first, then red.
    for order_set, header_det in [
        (orders_blue, header_blue),
        (orders_red, header_red),
    ]:
        for order in order_set:
            self.observation_info["orderwise_SNRs"].append(float(header_det[f"SNR_{order}"]))
    # String-valued keywords (taken from the blue header).
    for name, kw in [
        ("ISO-DATE", "MAROONX TELESCOPE TIME"),
        ("OBJECT", "MAROONX TELESCOPE TARGETNAME"),
    ]:
        self.observation_info[name] = header_blue[kw]

    # Float-valued keywords (taken from the blue header).
    for name, kw in [
        ("airmass", "MAROONX TELESCOPE AIRMASS"),
        ("relative_humidity", "MAROONX TELESCOPE HUMIDITY"),
        ("ambient_temperature", "MAROONX WEATHER TEMPERATURE"),  # TODO: check units
        ("BERV", "BERV_FLUXWEIGHTED_FRD"),
        ("JD", "JD_UTC_FLUXWEIGHTED_FRD"),
        ("EXPTIME", "EXPTIME"),
    ]:
        self.observation_info[name] = float(header_blue[kw])

    # Attach units to the BERV and precompute 1 + BERV/c.
    self.observation_info["BERV"] = self.observation_info["BERV"] * meter_second
    self.observation_info["BERV_FACTOR"] = (
        1 + self.observation_info["BERV"].to(kilometer_second).value / SPEED_OF_LIGHT
    )
    # Convert ambient temperature to Kelvin
    self.observation_info["ambient_temperature"] = convert_temperature(
        self.observation_info["ambient_temperature"],
        old_scale="Celsius",
        new_scale="Kelvin",
    )

    # Note: we don't have DRS values for MAROON-X
    self.observation_info["DRS_RV"] = 0 * meter_second
    self.observation_info["DRS_RV_ERR"] = 0 * meter_second

    self.find_instrument_type()
    self.assess_bad_orders()

load_instrument_specific_KWs(header)

Override parent class, does nothing.

Source code in src/ASTRA/Instruments/MAROONX.py
def load_instrument_specific_KWs(self, header) -> None:
    """Override parent class, does nothing."""
    # Header keywords are instead loaded in load_header_info for MAROON-X.
    ...