Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 9 additions & 10 deletions stixcore/io/product_processors/fits/processors.py
Original file line number Diff line number Diff line change
Expand Up @@ -723,18 +723,19 @@ def generate_primary_header(cls, filename, product, *, version=0):
# if not isinstance(product.obt_beg, SCETime):
# raise ValueError("Expected SCETime as time format")

scet_timerange = product.scet_timerange
headers = FitsProcessor.generate_common_header(filename, product, version=version) + (
# Name, Value, Comment
# ('MJDREF', product.obs_beg.mjd),
# ('DATEREF', product.obs_beg.fits),
("OBT_BEG", product.scet_timerange.start.as_float().value, "Start acquisition time in OBT"),
("OBT_END", product.scet_timerange.end.as_float().value, "End acquisition time in OBT"),
("OBT_BEG", scet_timerange.start.as_float().value, "Start acquisition time in OBT"),
("OBT_END", scet_timerange.end.as_float().value, "End acquisition time in OBT"),
("TIMESYS", "OBT", "System used for time keywords"),
("LEVEL", "L0", "Processing level of the data"),
("DATE-OBS", product.scet_timerange.start.to_string(), "Depreciated, same as DATE-BEG"),
("DATE-BEG", product.scet_timerange.start.to_string(), "Start time of observation"),
("DATE-AVG", product.scet_timerange.avg.to_string(), "Average time of observation"),
("DATE-END", product.scet_timerange.end.to_string(), "End time of observation"),
("DATE-OBS", scet_timerange.start.to_string(), "Depreciated, same as DATE-BEG"),
("DATE-BEG", scet_timerange.start.to_string(), "Start time of observation"),
("DATE-AVG", scet_timerange.avg.to_string(), "Average time of observation"),
("DATE-END", scet_timerange.end.to_string(), "End time of observation"),
("DATAMIN", product.dmin, "Minimum valid physical value"),
("DATAMAX", product.dmax, "Maximum valid physical value"),
("BUNIT", product.bunit, "Units of physical value, after application of BSCALE, BZERO"),
Expand Down Expand Up @@ -897,10 +898,8 @@ def write_fits(self, product, *, version=0):

# In TM sent as uint in units of 0.1 so convert to cs as the time center
# can be on 0.5ds points
data["time"] = np.atleast_1d(
np.around((data["time"] - prod.scet_timerange.start).as_float().to(u.cs)).astype("uint32")
)
data["timedel"] = np.atleast_1d(np.uint32(np.around(data["timedel"].as_float().to(u.cs))))
data["time"] = np.atleast_1d(np.around((data["time"] - prod.utc_timerange.start).to(u.cs)).astype("uint32"))
data["timedel"] = np.atleast_1d(np.uint32(np.around(data["timedel"].to(u.cs))))

try:
control["time_stamp"] = control["time_stamp"].as_float()
Expand Down
11 changes: 9 additions & 2 deletions stixcore/io/product_processors/tests/test_processors.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from stixcore.products.product import Product
from stixcore.soop.manager import SOOPManager
from stixcore.time import SCETime, SCETimeRange
from stixcore.time.datetime import SCETimeDelta


@pytest.fixture
Expand Down Expand Up @@ -189,10 +190,16 @@ def test_level0_processor_generate_primary_header(datetime, product):
def test_count_data_mixin(p_file):
processor = FitsL0Processor("some/path")
p = Product(p_file)

if isinstance(p.data["timedel"], SCETimeDelta):
assert p.exposure == p.data["timedel"].as_float().min().to_value("s")
assert p.max_exposure == p.data["timedel"].as_float().max().to_value("s")
else:
assert p.exposure == p.data["timedel"].min().to_value("s")
assert p.max_exposure == p.data["timedel"].max().to_value("s")
Comment on lines +195 to +199
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'd avoid asserts like this — use an if and raise a specific exception


assert p.dmin == p.data["counts"].min().value
assert p.dmax == p.data["counts"].max().value
assert p.exposure == p.data["timedel"].min().as_float().to_value()
assert p.max_exposure == p.data["timedel"].max().as_float().to_value()

test_data = {
"DATAMAX": p.dmax,
Expand Down
39 changes: 26 additions & 13 deletions stixcore/processing/tests/test_publish.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,15 @@ def test_publish_fits_to_esa_incomplete(product, out_dir):
)

t = SCETime(coarse=[beg.coarse, end.coarse])
product.data = QTable({"time": t, "timedel": t - beg, "fcounts": np.array([1, 2]), "control_index": [1, 1]})
t_utc = t.to_time()
product.data = QTable(
{
"time": t_utc,
"timedel": (t - beg).as_float().to("cs"),
"fcounts": np.array([1, 2]),
"control_index": [1, 1],
}
)
product.raw = ["packet1.xml", "packet2.xml"]
product.parent = ["packet1.xml", "packet2.xml"]
product.level = "L1"
Expand All @@ -135,9 +143,9 @@ def test_publish_fits_to_esa_incomplete(product, out_dir):
product.NAME = "background"
product.obt_beg = beg
product.obt_end = end
product.date_obs = beg
product.date_beg = beg
product.date_end = end
product.date_obs = beg.to_datetime()
product.date_beg = beg.to_datetime()
product.date_end = end.to_datetime()
product.exposure = 2
product.max_exposure = 3
product.dmin = 2
Expand Down Expand Up @@ -222,10 +230,12 @@ def test_fits_incomplete_switch_over(out_dir):
)

t = SCETime(coarse=[beg.coarse, end.coarse])
t_utc = t.to_time()

product.data = QTable(
{
"time": t,
"timedel": t - beg,
"time": t_utc,
"timedel": (t - beg).as_float().to("cs"),
"fcounts": np.array([1, 2]),
"counts": np.array([1, 2]) * u.deg_C,
"control_index": [1, 1],
Expand All @@ -244,9 +254,9 @@ def test_fits_incomplete_switch_over(out_dir):
product.name = "background"
product.obt_beg = beg
product.obt_end = end
product.date_obs = beg
product.date_beg = beg
product.date_end = end
product.date_obs = beg.to_datetime()
product.date_beg = beg.to_datetime()
product.date_end = end.to_datetime()
product.exposure = 2
product.max_exposure = 3
product.dmin = 2
Expand Down Expand Up @@ -371,7 +381,10 @@ def test_publish_fits_to_esa(product, out_dir):
)

t = SCETime(coarse=[beg.coarse, end.coarse])
product.data = QTable({"time": t, "timedel": t - beg, "fcounts": np.array([1, 2]), "control_index": [1, 1]})
t_utc = t.to_time()
product.data = QTable(
{"time": t_utc, "timedel": (t - beg).as_float().to("cs"), "fcounts": np.array([1, 2]), "control_index": [1, 1]}
)
product.raw = ["packet1.xml", "packet2.xml"]
product.parent = ["packet1.xml", "packet2.xml"]
product.level = "L1"
Expand All @@ -382,9 +395,9 @@ def test_publish_fits_to_esa(product, out_dir):
product.name = "xray-spec"
product.obt_beg = beg
product.obt_end = end
product.date_obs = beg
product.date_beg = beg
product.date_end = end
product.date_obs = beg.to_datetime()
product.date_beg = beg.to_datetime()
product.date_end = end.to_datetime()
product.exposure = 2
product.max_exposure = 3
product.dmin = 2
Expand Down
14 changes: 14 additions & 0 deletions stixcore/products/level1/quicklookL1.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from stixcore.products.level0.quicklookL0 import QLProduct
from stixcore.products.product import L1Mixin
from stixcore.time import SCETimeRange
from stixcore.time.datetime import SCETime, SCETimeDelta
from stixcore.util.logging import get_logger

__all__ = ["LightCurve", "Background", "Spectra", "Variance", "FlareFlag", "EnergyCalibration", "TMStatusFlareList"]
Expand Down Expand Up @@ -242,6 +243,19 @@ def from_level0(cls, l0product, parent=""):
l1.level = "L1"
engineering.raw_to_engineering_product(l1, IDBManager.instance)

# convert SCETimes to UTC Time
if "time" in l1.data.colnames and isinstance(l1.data["time"], SCETime):
l1.data.replace_column(
"time",
l1.data["time"].to_time(),
)
# convert SCETimesDelta to Quantity (s)
if "timedel" in l1.data.colnames and isinstance(l1.data["timedel"], SCETimeDelta):
l1.data.replace_column(
"timedel",
l1.data["timedel"].as_float(),
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this an int to start with?

)

# fix for wrong calibration in IDB https://github.com/i4Ds/STIXCore/issues/432
# nix00122 was wrong assumed to be in ds but it is plain s
l1.control["integration_time"] = l1.control["integration_time"] * 10
Expand Down
95 changes: 77 additions & 18 deletions stixcore/products/product.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
from itertools import chain

import numpy as np
import pytz
from sunpy.time.timerange import TimeRange
from sunpy.util.datatype_factory_base import (
BasicRegistrationFactory,
MultipleMatchError,
Expand All @@ -17,6 +19,7 @@

import stixcore.processing.decompression as decompression
import stixcore.processing.engineering as engineering
from stixcore.ephemeris.manager import Spice
from stixcore.idb.manager import IDBManager
from stixcore.time import SCETime, SCETimeDelta, SCETimeRange
from stixcore.tmtc.packet_factory import Packet
Expand Down Expand Up @@ -47,7 +50,7 @@

# date when the min integration time was changed from 1.0s to 0.5s needed to fix count and time
# offset issue
MIN_INT_TIME_CHANGE = datetime(2021, 9, 6, 13)
MIN_INT_TIME_CHANGE = datetime(2021, 9, 6, 13, tzinfo=pytz.UTC)


def read_qtable(file, hdu, hdul=None):
Expand Down Expand Up @@ -258,8 +261,18 @@ def __call__(self, *args, **kwargs):
ssid = 34

if level not in ["LB", "LL01"] and "timedel" in data.colnames and "time" in data.colnames:
data["timedel"] = SCETimeDelta(data["timedel"])
offset = SCETime.from_float(pri_header["OBT_BEG"] * u.s)
# select the time format based on available header keywords
offset = None
if "TIMESYS" in pri_header and pri_header["TIMESYS"] == "UTC":
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
if "TIMESYS" in pri_header and pri_header["TIMESYS"] == "UTC":
if pri_header.get("TIMESYS", "") == "UTC":

try:
offset = Time(pri_header["DATE-OBS"])
except Exception:
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

very broad exception

offset = None

# fallback to OBT_BEG if no TIMESYS=UTC or DATE-OBS is present or not parseable
if offset is None:
offset = SCETime.from_float(pri_header["OBT_BEG"] * u.s)
data["timedel"] = SCETimeDelta(data["timedel"])

try:
control["time_stamp"] = SCETime.from_float(control["time_stamp"])
Expand Down Expand Up @@ -524,10 +537,22 @@ def __init__(

@property
def scet_timerange(self):
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

For this specific case we always have OBT-BEG and OBT-END so maybe this should use those?

return SCETimeRange(
start=self.data["time"][0] - self.data["timedel"][0] / 2,
end=self.data["time"][-1] + self.data["timedel"][-1] / 2,
)
if isinstance(self.data["time"], SCETime):
return SCETimeRange(
start=self.data["time"][0] - self.data["timedel"][0] / 2,
end=self.data["time"][-1] + self.data["timedel"][-1] / 2,
)
else:
start_str = Spice.instance.datetime_to_scet((self.data["time"][0] - self.data["timedel"][0] / 2).datetime)
end_str = Spice.instance.datetime_to_scet((self.data["time"][-1] + self.data["timedel"][-1] / 2).datetime)
if "/" in start_str:
start_str = start_str.split("/")[-1]
if "/" in end_str:
end_str = end_str.split("/")[-1]
return SCETimeRange(
start=SCETime.from_string(start_str),
end=SCETime.from_string(end_str),
)

@property
def raw(self):
Expand Down Expand Up @@ -645,7 +670,7 @@ def __add__(self, other):
other_data["old_index"] = [f"o{i}" for i in other_data["control_index"]]
self_data["old_index"] = [f"s{i}" for i in self_data["control_index"]]

if (self.service_type, self.service_subtype) == (3, 25):
if (self.service_type, self.service_subtype) == (3, 25) and self.level in ["L0", "LB"]:
self_data["time"] = SCETime(self_control["scet_coarse"], self_control["scet_fine"])
other_data["time"] = SCETime(other_control["scet_coarse"], other_control["scet_fine"])

Expand All @@ -656,8 +681,10 @@ def __add__(self, other):

# Fits write we do np.around(time - start_time).as_float().to(u.cs)).astype("uint32"))
# So need to do something similar here to avoid comparing un-rounded value to rounded values
data["time_float"] = np.around((data["time"] - data["time"].min()).as_float().to("cs"))

if isinstance(data["time"], SCETime):
data["time_float"] = np.around((data["time"] - data["time"].min()).as_float().to("cs"))
else:
data["time_float"] = np.around((data["time"] - data["time"].min()).to("cs"))
# remove duplicate data based on time bin and sort the data
data = unique(data, keys=["time_float"])
# data.sort(["time_float"])
Expand Down Expand Up @@ -766,12 +793,12 @@ def split_to_files(self):

yield out
else: # L1+
utc_timerange = self.scet_timerange.to_timerange()
utc_timerange = self.utc_timerange

for day in utc_timerange.get_dates():
ds = day
de = day + 1 * u.day
utc_times = self.data["time"].to_time()
utc_times = self.data["time"]
i = np.where((utc_times >= ds) & (utc_times < de))

if len(i[0]) > 0:
Expand Down Expand Up @@ -853,11 +880,17 @@ def bunit(self):

@property
def exposure(self):
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We should really call this min_exposure as I'd expect this to contain all the exposure times

return self.data["timedel"].as_float().min().to_value("s")
if isinstance(self.data["timedel"], SCETimeDelta):
return self.data["timedel"].as_float().min().to_value("s")
else:
return self.data["timedel"].min().to_value("s")

@property
def max_exposure(self):
return self.data["timedel"].as_float().max().to_value("s")
if isinstance(self.data["timedel"], SCETimeDelta):
return self.data["timedel"].as_float().max().to_value("s")
else:
return self.data["timedel"].max().to_value("s")


class EnergyChannelsMixin:
Expand Down Expand Up @@ -914,7 +947,13 @@ class L1Mixin(FitsHeaderMixin):

@property
def utc_timerange(self):
return self.scet_timerange.to_timerange()
if isinstance(self.data["time"], SCETime):
self.scet_timerange.to_timerange()
else:
return TimeRange(
(self.data["time"][0] - self.data["timedel"][0] / 2).datetime,
(self.data["time"][-1] + self.data["timedel"][-1] / 2).datetime,
Comment on lines +954 to +955
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is the .datetime really needed?

)

@classmethod
def from_level0(cls, l0product, parent=""):
Expand All @@ -940,10 +979,10 @@ def from_level0(cls, l0product, parent=""):
if idbs[0] < (2, 26, 36) and len(l1.data) > 1:
# Check if request was at min configured time resolution
if (
l1.utc_timerange.start.datetime < MIN_INT_TIME_CHANGE
l0product.scet_timerange.start.to_datetime() < MIN_INT_TIME_CHANGE
and l1.data["timedel"].as_float().min() == 1 * u.s
) or (
l1.utc_timerange.start.datetime >= MIN_INT_TIME_CHANGE
l0product.scet_timerange.start.to_datetime() >= MIN_INT_TIME_CHANGE
and l1.data["timedel"].as_float().min() == 0.5 * u.s
):
l1.data["timedel"][1:-1] = l1.data["timedel"][:-2]
Expand All @@ -955,13 +994,33 @@ def from_level0(cls, l0product, parent=""):
l1.control.replace_column("parent", [parent] * len(l1.control))
l1.level = "L1"
engineering.raw_to_engineering_product(l1, IDBManager.instance)

# convert SCETimes to UTC Time
if "time" in l1.data.colnames and isinstance(l1.data["time"], SCETime):
l1.data.replace_column(
"time",
l1.data["time"].to_time(),
)
# convert SCETimesDelta to Quantity (s)
if "timedel" in l1.data.colnames and isinstance(l1.data["timedel"], SCETimeDelta):
l1.data.replace_column(
"timedel",
l1.data["timedel"].as_float(),
)

return l1


class L2Mixin(FitsHeaderMixin):
@property
def utc_timerange(self):
return self.scet_timerange.to_timerange()
if isinstance(self.data["time"], SCETime):
self.scet_timerange.to_timerange()
else:
return TimeRange(
(self.data["time"][0] - self.data["timedel"][0] / 2).datetime,
(self.data["time"][-1] + self.data["timedel"][-1] / 2).datetime,
)

@classmethod
def from_level1(cls, l1product, parent="", idlprocessor=None):
Expand Down
Loading