From e582fb567db584254c1dd56b7e15510fda8c7a67 Mon Sep 17 00:00:00 2001
From: maaijke <mevius@astron.nl>
Date: Wed, 7 May 2025 15:17:38 +0200
Subject: [PATCH] Automatically find correct beam for IDOLS-like data

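Two changes:

* Beam discovery: instead of hardcoding the 'BEAM_000' group,
  split_samples() now takes the first group under the dynspec whose
  name contains "BEAM", so IDOLS-like HDF5 files that store their
  data under a differently named beam group are handled as well.

* UTC handling: parse_datetime_str(), round_up_datetime(),
  round_down_datetime() and the per-sample start/end datetimes now
  return timezone-aware UTC values, and the plotting routines convert
  timestamps with utcfromtimestamp() instead of fromtimestamp(), so
  the results no longer depend on the local timezone of the machine
  running the pipeline. (Note: this relies on timezone being imported
  from datetime in averaging.py; the import is not visible in the
  hunks below.)

  A minimal illustration of the difference (the first result is
  machine-dependent; shown here for a CET host):

      >>> from datetime import datetime, timezone
      >>> datetime.fromtimestamp(0)   # naive, interpreted in local time
      datetime.datetime(1970, 1, 1, 1, 0)
      >>> datetime.utcfromtimestamp(0).replace(tzinfo=timezone.utc)
      datetime.datetime(1970, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)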
---
 scintillation/averaging.py | 39 +++++++++++++++++++-------------------
 1 file changed, 20 insertions(+), 19 deletions(-)

diff --git a/scintillation/averaging.py b/scintillation/averaging.py
index 247396d..d2c7836 100644
--- a/scintillation/averaging.py
+++ b/scintillation/averaging.py
@@ -165,7 +165,7 @@ def copy_attrs_to_dict(h5_leaf, dict_container: Optional[Dict] = None,
 
 
 def parse_datetime_str(datetime_str):
-    return Time(datetime_str.split(' ')[0], format='isot', scale='utc').to_datetime()
+    return Time(datetime_str.split(' ')[0], format='isot', scale='utc').to_datetime().replace(tzinfo=timezone.utc)
 
 
 def extract_root_metadata(dataset):
@@ -527,8 +527,8 @@ def make_plot(data_array, time_axis, frequency_axis, station_name, plot_full_pat
               label="Dynspec"):
     fig = plt.figure(figsize=(6, 4), dpi=120)
     ax = plt.gca()
-    start_time = datetime.fromtimestamp(time_axis[0]).strftime("%Y/%m/%d %H:%M:%S")
-    datetime_axis = [datetime.fromtimestamp(time_s) for time_s in time_axis]
+    start_time = datetime.utcfromtimestamp(time_axis[0]).strftime("%Y/%m/%d %H:%M:%S")
+    datetime_axis = [datetime.utcfromtimestamp(time_s) for time_s in time_axis]
 
     times = mdates.date2num(datetime_axis)
     title = '{label} {station_name} - {start_time}'.format(label=label,
@@ -565,8 +565,8 @@ def make_S4plot(data_array, time_axis, frequency_axis, station_name, plot_full_p
                 label="S4"):
     fig = plt.figure(figsize=(6, 4), dpi=120)
     ax = plt.gca()
-    start_time = datetime.fromtimestamp(time_axis[0]).strftime("%Y/%m/%d %H:%M:%S")
-    datetime_axis = [datetime.fromtimestamp(time_s) for time_s in time_axis]
+    start_time = datetime.utcfromtimestamp(time_axis[0]).strftime("%Y/%m/%d %H:%M:%S")
+    datetime_axis = [datetime.utcfromtimestamp(time_s) for time_s in time_axis]
 
     times = mdates.date2num(datetime_axis)
     title = '{label} {station_name} - {start_time}'.format(label=label,
@@ -601,7 +601,7 @@ def make_S4plot(data_array, time_axis, frequency_axis, station_name, plot_full_p
 def make_cross_corr_plots(plot_full_path, cross_corr, uvpp, time,vel):
     fig = plt.figure(figsize=(6, 4), dpi=120)
     ax = plt.gca()
-    #start_time = datetime.fromtimestamp(time).strftime("%Y/%m/%d %H:%M:%S")
+    #start_time = datetime.utcfromtimestamp(time).strftime("%Y/%m/%d %H:%M:%S")
     start_time = time.strftime("%Y/%m/%d %H:%M:%S")
 
     label = "Cross correlation (at delay 0)"
@@ -627,7 +627,7 @@ def make_cross_corr_plots(plot_full_path, cross_corr, uvpp, time,vel):
 def make_delay_plots(plot_full_path, delays, uvpp, time,vel):
     fig = plt.figure(figsize=(6, 4), dpi=120)
     ax = plt.gca()
-    #start_time = datetime.fromtimestamp(time).strftime("%Y/%m/%d %H:%M:%S")
+    #start_time = datetime.utcfromtimestamp(time).strftime("%Y/%m/%d %H:%M:%S")
     start_time = time.strftime("%Y/%m/%d %H:%M:%S")
 
     label = "Delay"
@@ -679,11 +679,11 @@ def create_averaged_dataset(sample_info, data_array, flags=None, bandpass=None):
 
 
 def round_up_datetime(datet, interval):
-    return datetime.fromtimestamp(numpy.ceil(datet.timestamp() / interval) * interval)
+    return datetime.utcfromtimestamp(numpy.ceil(datet.timestamp() / interval) * interval).replace(tzinfo=timezone.utc)
 
 
 def round_down_datetime(datet, interval):
-    return datetime.fromtimestamp(numpy.floor(datet.timestamp() / interval) * interval)
+    return datetime.utcfromtimestamp(numpy.floor(datet.timestamp() / interval) * interval).replace(tzinfo=timezone.utc)
 
 
 def split_samples(dynspec_name,
@@ -710,11 +710,11 @@ def split_samples(dynspec_name,
         time_delta = metadata['SAMPLING_TIME']
 
     if 'DYNSPEC_START_UTC' in metadata:
-        obs_start_time = parse_datetime_str(metadata['DYNSPEC_START_UTC'])
-        obs_end_time = parse_datetime_str(metadata['DYNSPEC_STOP_UTC'])
+        obs_start_time = parse_datetime_str(metadata['DYNSPEC_START_UTC']).replace(tzinfo=timezone.utc)
+        obs_end_time = parse_datetime_str(metadata['DYNSPEC_STOP_UTC']).replace(tzinfo=timezone.utc)
     else:
-        obs_start_time = parse_datetime_str(metadata['OBSERVATION_START_UTC'])
-        obs_end_time = parse_datetime_str(metadata['OBSERVATION_END_UTC'])
+        obs_start_time = parse_datetime_str(metadata['OBSERVATION_START_UTC']).replace(tzinfo=timezone.utc)
+        obs_end_time = parse_datetime_str(metadata['OBSERVATION_END_UTC']).replace(tzinfo=timezone.utc)
     if 'AXIS_VALUE_WORLD' in metadata['SPECTRAL']:
         frequency = metadata['SPECTRAL']['AXIS_VALUE_WORLD']
     else:
@@ -732,9 +732,10 @@ def split_samples(dynspec_name,
         data_array = dataset[dynspec_name]['DATA']
         nofch = 1 #TODO check if this is always the case for DYNSPEC data
     else:
-        data_array = dataset[dynspec_name]['BEAM_000']['STOKES_0']
+        beam_name = [ibeam for ibeam in dataset[dynspec_name].keys() if "BEAM" in ibeam][0]  # first group whose name contains "BEAM"; not necessarily BEAM_000
+        data_array = dataset[dynspec_name][beam_name]['STOKES_0']
         #take median over channels for raw data
-        nofch = dataset[dynspec_name]['BEAM_000']['STOKES_0'].attrs['NOF_CHANNELS'][0]
+        nofch = dataset[dynspec_name][beam_name]['STOKES_0'].attrs['NOF_CHANNELS'][0]
     averaging_window_in_samples = int(numpy.ceil(averaging_window / time_delta))
     averaging_window_in_seconds = averaging_window_in_samples * time_delta
     S4_60s_window_in_samples = int(60. / time_delta)
@@ -772,8 +773,8 @@ def split_samples(dynspec_name,
             'average_window_samples': averaging_window_in_samples,
             'average_window_seconds': averaging_window_in_seconds,
             'sample_time_samples': output_time_samples,
-            'sample_start_datetime': datetime.fromtimestamp(time_obs[start_index]),
-            'sample_end_datetime': datetime.fromtimestamp(time_obs[end_index]),
+            'sample_start_datetime': datetime.utcfromtimestamp(time_obs[start_index]).replace(tzinfo=timezone.utc),
+            'sample_end_datetime': datetime.utcfromtimestamp(time_obs[end_index]).replace(tzinfo=timezone.utc),
             'n_time_samples': len(indexs),
             'sample_start_frequency': start_frequency,
             'sample_end_frequency': end_frequency}
@@ -909,8 +910,8 @@ def get_velocities(metadata,
             'average_window_samples': averaging_window_in_samples,
             'average_window_seconds': averaging_window_in_seconds,
             'sample_time_samples': output_time_samples,
-            'sample_start_datetime': datetime.fromtimestamp(time_obs[start_index]),
-            'sample_end_datetime': datetime.fromtimestamp(time_obs[end_index]),
+            'sample_start_datetime': datetime.utcfromtimestamp(time_obs[start_index]).replace(tzinfo=timezone.utc),
+            'sample_end_datetime': datetime.utcfromtimestamp(time_obs[end_index]).replace(tzinfo=timezone.utc),
             'n_time_samples': len(indexs),
             'sample_start_frequency': start_frequency,
             'sample_end_frequency': end_frequency,
-- 
GitLab