diff --git a/stixcore/products/common.py b/stixcore/products/common.py
index b489ca3a..86e3615b 100644
--- a/stixcore/products/common.py
+++ b/stixcore/products/common.py
@@ -242,21 +242,6 @@ def _get_energies_from_mask(date, mask=None):
     return low, high
 
 
-def get_min_uint(values):
-    """
-    Find the smallest unsigned int that can represent max value.
-    """
-    max_value = np.array(values).max()
-    if max_value < 256:  # 2**8
-        return np.uint8
-    elif max_value < 65536:  # 2**16
-        return np.uint16
-    elif max_value < 4294967296:  # 2**32
-        return np.uint32
-    elif max_value < 18446744073709551616:  # 2**64
-        return np.uint64
-
-
 def rebin_proportional(y1, x1, x2):
     x1 = np.asarray(x1)
     y1 = np.asarray(y1)
diff --git a/stixcore/products/level0/quicklookL0.py b/stixcore/products/level0/quicklookL0.py
index 7eca3ab1..e1e7fc5c 100644
--- a/stixcore/products/level0/quicklookL0.py
+++ b/stixcore/products/level0/quicklookL0.py
@@ -15,7 +15,6 @@
     _get_energy_bins,
     _get_pixel_mask,
     _get_sub_spectrum_mask,
-    get_min_uint,
     rebin_proportional,
     unscale_triggers,
 )
@@ -203,12 +202,12 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
         data["time"] = time
         data["timedel"] = duration
         data.add_meta(name="timedel", nix="NIX00405", packets=packets)
-        data["triggers"] = triggers.astype(get_min_uint(triggers))
+        data["triggers"] = triggers.astype(np.min_scalar_type(triggers))
         data.add_meta(name="triggers", nix="NIX00274", packets=packets)
         data["triggers_comp_err"] = np.float32(np.sqrt(triggers_var))
         data["rcr"] = np.hstack(packets.get_value("NIX00276")).flatten().astype(np.ubyte)
         data.add_meta(name="rcr", nix="NIX00276", packets=packets)
-        data["counts"] = (counts.T * u.ct).astype(get_min_uint(counts))
+        data["counts"] = (counts.T * u.ct).astype(np.min_scalar_type(counts))
         data.add_meta(name="counts", nix="NIX00272", packets=packets)
         data["counts_comp_err"] = np.float32(np.sqrt(counts_var).T * u.ct)
 
@@ -316,10 +315,10 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
         data["time"] = time
         data["timedel"] = duration
         data.add_meta(name="timedel", nix="NIX00405", packets=packets)
-        data["triggers"] = triggers.astype(get_min_uint(triggers))
+        data["triggers"] = triggers.astype(np.min_scalar_type(triggers))
         data.add_meta(name="triggers", nix="NIX00274", packets=packets)
         data["triggers_comp_err"] = np.float32(np.sqrt(triggers_var))
-        data["counts"] = (counts.T * u.ct).astype(get_min_uint(counts))
+        data["counts"] = (counts.T * u.ct).astype(np.min_scalar_type(counts))
         data.add_meta(name="counts", nix="NIX00278", packets=packets)
         data["counts_comp_err"] = np.float32(np.sqrt(counts_var).T * u.ct)
 
@@ -438,10 +437,10 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
         data.add_meta(name="timedel", nix="NIX00405", packets=packets)
         data["detector_index"] = detector_index.reshape(-1, 32).astype(np.ubyte)
         data.add_meta(name="detector_index", nix="NIX00100", packets=packets)
-        data["spectra"] = (counts.reshape(-1, 32, num_energies) * u.ct).astype(get_min_uint(counts))
+        data["spectra"] = (counts.reshape(-1, 32, num_energies) * u.ct).astype(np.min_scalar_type(counts))
         data["spectra"].meta = {"NIXS": "NIX00452", "PCF_CURTX": packets.get("NIX00452")[0].idb_info.PCF_CURTX}
         data["spectra_comp_err"] = np.float32(np.sqrt(counts_var.reshape(-1, 32, num_energies)))
-        data["triggers"] = triggers.reshape(-1, num_energies).astype(get_min_uint(triggers))
+        data["triggers"] = triggers.reshape(-1, num_energies).astype(np.min_scalar_type(triggers))
         data.add_meta(name="triggers", nix="NIX00484", packets=packets)
         data["triggers_comp_err"] = np.float32(np.sqrt(triggers_var.reshape(-1, num_energies)))
         data["num_integrations"] = num_integrations.reshape(-1, num_energies).astype(np.ubyte)[:, 0]
@@ -556,7 +555,7 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
         data["timedel"] = duration
         data.add_meta(name="timedel", nix="NIX00405", packets=packets)
         data["control_index"] = control_indices
-        data["variance"] = variance.astype(get_min_uint(variance))
+        data["variance"] = variance.astype(np.min_scalar_type(variance))
         data.add_meta(name="variance", nix="NIX00281", packets=packets)
         data["variance_comp_err"] = np.float32(np.sqrt(variance_var))
 
@@ -801,7 +800,7 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
 
         data["timedel"] = duration[unique_time_indices]
         data.add_meta(name="timedel", nix="NIX00122", packets=packets)
-        data["counts"] = (full_counts * u.ct).astype(get_min_uint(full_counts))
+        data["counts"] = (full_counts * u.ct).astype(np.min_scalar_type(full_counts))
         data.add_meta(name="counts", nix="NIX00158", packets=packets)
         data["counts_comp_err"] = (np.sqrt(full_counts_var) * u.ct).astype(np.float32)
         data["control_index"] = np.arange(len(control)).astype(np.uint16)
@@ -849,7 +848,7 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
         control = Control()
         control["scet_coarse"] = packets.get("scet_coarse")
         control["scet_fine"] = packets.get("scet_fine")
-        control["index"] = np.arange(len(control)).astype(get_min_uint(len(control)))
+        control["index"] = np.arange(len(control)).astype(np.min_scalar_type(len(control)))
 
         # When the packets are parsed empty packets are dropped but in LB we don't parse so this
         # is not known need to compare control and levelb.control and only use matching rows
diff --git a/stixcore/products/level0/scienceL0.py b/stixcore/products/level0/scienceL0.py
index ff5f81e2..d6e0349e 100644
--- a/stixcore/products/level0/scienceL0.py
+++ b/stixcore/products/level0/scienceL0.py
@@ -11,7 +11,6 @@
     _get_compression_scheme,
     _get_detector_mask,
     _get_pixel_mask,
-    get_min_uint,
     unscale_triggers,
 )
 from stixcore.products.product import (
@@ -199,7 +198,7 @@ def from_levelb(cls, levelb, *, parent="", keep_parse_tree=True):
         control["index"] = np.ubyte(0)
 
         packet_ids = levelb.control["packet"].reshape(1, -1)
-        control["packet"] = packet_ids.astype(get_min_uint(packet_ids))
+        control["packet"] = packet_ids.astype(np.min_scalar_type(packet_ids))
         control["raw_file"] = np.unique(levelb.control["raw_file"]).reshape(1, -1)
         control["parent"] = parent
 
@@ -245,7 +244,7 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
         data.add_data("pixel_masks", _get_pixel_mask(packets, "NIXD0407"))
         data.add_data("detector_masks", _get_detector_mask(packets))
         triggers = np.array([packets.get_value(f"NIX00{i}") for i in range(408, 424)]).T
-        data["triggers"] = triggers.astype(get_min_uint(triggers))
+        data["triggers"] = triggers.astype(np.min_scalar_type(triggers))
         data["triggers"].meta = {"NIXS": [f"NIX00{i}" for i in range(408, 424)]}
 
         data.add_basic(name="num_samples", nix="NIX00406", packets=packets, dtype=np.uint16)
@@ -302,7 +301,7 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
         data = data[sub_index]
         data["time"] = control["time_stamp"][0] + data["start_time"] + data["integration_time"] / 2
         data["timedel"] = SCETimeDelta(data["integration_time"])
-        data["counts"] = (counts * u.ct).astype(get_min_uint(counts))
+        data["counts"] = (counts * u.ct).astype(np.min_scalar_type(counts))
         # data.add_meta(name='counts', nix='NIX00065', packets=packets)
         data["control_index"] = control["index"][0]
 
@@ -416,7 +415,7 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
                 factor=factor,
             )
 
-        data["triggers"] = triggers.T.astype(get_min_uint(triggers))
+        data["triggers"] = triggers.T.astype(np.min_scalar_type(triggers))
         data["triggers"].meta = {"NIXS": [f"NIX00{i}" for i in range(242, 258)]}
         data["triggers_comp_err"] = np.float32(np.sqrt(triggers_var).T)
         # data.add_basic(name='num_energy_groups', nix='NIX00258', packets=packets, dtype=np.ubyte)
@@ -565,7 +564,7 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
 
         data["time"] = control["time_stamp"][0] + data["delta_time"] + data["integration_time"] / 2
         data["timedel"] = data["integration_time"]
-        data["counts"] = (counts * u.ct).astype(get_min_uint(counts))
+        data["counts"] = (counts * u.ct).astype(np.min_scalar_type(counts))
         data.add_meta(name="counts", nix="NIX00260", packets=packets)
         data["counts_comp_err"] = np.float32(counts_var * u.ct)
         data["control_index"] = control["index"][0]
@@ -875,7 +874,7 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
         data["timedel"] = deltas
         data["timedel"].meta = {"NIXS": ["NIX00441", "NIX00269"]}
 
-        data["triggers"] = triggers.astype(get_min_uint(triggers))
+        data["triggers"] = triggers.astype(np.min_scalar_type(triggers))
         data.add_meta(name="triggers", nix="NIX00267", packets=packets)
         data["triggers_comp_err"] = np.float32(np.sqrt(triggers_var))
         data.add_meta(name="triggers_comp_err", nix="NIX00267", packets=packets)
@@ -885,7 +884,7 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
         data["pixel_masks"] = pixel_masks
         data.add_meta(name="pixel_masks", nix="NIXD0407", packets=packets)
 
-        data["counts"] = (counts * u.ct).astype(get_min_uint(counts))
+        data["counts"] = (counts * u.ct).astype(np.min_scalar_type(counts))
         data.add_meta(name="counts", nix="NIX00268", packets=packets)
         data["counts_comp_err"] = np.float32(np.sqrt(counts_var) * u.ct)
         data["control_index"] = np.ubyte(0)
@@ -957,7 +956,7 @@ def from_levelb(cls, levelb, parent="", keep_parse_tree=True):
         control["packet"] = levelb.control["packet"].reshape(1, -1)
         control["parent"] = parent
 
-        control["index"] = np.arange(len(control)).astype(get_min_uint(len(control)))
+        control["index"] = np.arange(len(control)).astype(np.min_scalar_type(len(control)))
 
         delta_time = ((control["summing_value"] * control["averaging_value"]) / 1000.0) * u.s
         samples = packets.get_value("NIX00089")
diff --git a/stixcore/products/product.py b/stixcore/products/product.py
index 645bcf2d..b810f910 100644
--- a/stixcore/products/product.py
+++ b/stixcore/products/product.py
@@ -38,7 +38,7 @@
 from collections import defaultdict
 
-from stixcore.products.common import _get_energies_from_mask, get_min_uint
+from stixcore.products.common import _get_energies_from_mask
 from stixcore.util.logging import get_logger
 
 logger = get_logger(__name__)
 
@@ -420,7 +420,7 @@ def from_packets(cls, packets, NIX00405_offset=0):
         control["integration_time"] = np.zeros_like(control["scet_coarse"], float) * u.s
 
         # control = unique(control)
-        control["index"] = np.arange(len(control)).astype(get_min_uint(len(control)))
+        control["index"] = np.arange(len(control)).astype(np.min_scalar_type(len(control)))
 
         return control
 
@@ -669,6 +669,8 @@ def __add__(self, other):
         # update the control index in data to a new unique sequence
         newids = dict()
 
+        data["control_index"] = data["control_index"].astype(np.min_scalar_type(len(control)))
+
         for row in data:
             oid = row["old_index"]
 
@@ -1027,7 +1029,7 @@ def from_levelb(cls, levelb, parent, keep_parse_tree=True):
         control["scet_coarse"] = packets.get("scet_coarse")
         control["scet_fine"] = packets.get("scet_fine")
         control["integration_time"] = 0
-        control["index"] = np.arange(len(control)).astype(get_min_uint(len(control)))
+        control["index"] = np.arange(len(control)).astype(np.min_scalar_type(len(control)))
         control["raw_file"] = levelb.control["raw_file"]
         control["packet"] = levelb.control["packet"]
 
@@ -1049,7 +1051,7 @@ def from_levelb(cls, levelb, parent, keep_parse_tree=True):
             name = param.idb_info.get_product_attribute_name()
             data.add_basic(name=name, nix=nix, attr="value", packets=packets, reshape=reshape)
 
-        data["control_index"] = np.arange(len(control)).astype(get_min_uint(len(control)))
+        data["control_index"] = np.arange(len(control)).astype(np.min_scalar_type(len(control)))
 
         return cls(
             service_type=packets.service_type,
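Note on the replacement: for a Python integer scalar (such as len(control)), numpy.min_scalar_type returns the smallest dtype that can hold the value, matching what the removed get_min_uint helper picked for the unsigned range; for an array argument NumPy documents it as returning the array's dtype unmodified. The standalone sketch below, using hypothetical trigger values rather than data from the products above, illustrates both call patterns; passing the array's maximum as a plain int reproduces the old helper's downcasting behaviour.

    import numpy as np

    # Hypothetical example values; in the products the arguments are
    # accumulated counts/triggers arrays or len(control), a Python int.
    triggers = np.array([120, 4500, 70000])

    # For a scalar, min_scalar_type picks the smallest dtype that holds the
    # value, mirroring the removed get_min_uint (uint8/uint16/uint32/uint64).
    print(np.min_scalar_type(70000))           # uint32
    print(np.min_scalar_type(len(triggers)))   # uint8

    # For an array argument, NumPy returns the array's own dtype unmodified,
    # so astype(np.min_scalar_type(triggers)) keeps the original dtype.
    print(np.min_scalar_type(triggers))        # e.g. int64

    # Passing the maximum as a Python int reproduces the old helper's
    # behaviour of downcasting the array itself.
    downcast = triggers.astype(np.min_scalar_type(int(triggers.max())))
    print(downcast.dtype)                      # uint32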