Skip to content
GitLab
Explore
Sign in
Register
Primary navigation
Search or go to…
Project
S
scintillation_pipeline
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Iterations
Wiki
Requirements
Jira
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Snippets
Locked files
Build
Pipelines
Jobs
Pipeline schedules
Test cases
Artifacts
Deploy
Releases
Package registry
Container registry
Model registry
Operate
Environments
Terraform modules
Monitor
Incidents
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Code review analytics
Issue analytics
Insights
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
GitLab community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
LDV
scintillation_pipeline
Commits
e582fb56
Commit
e582fb56
authored
4 months ago
by
Maaijke Mevius
Browse files
Options
Downloads
Patches
Plain Diff
automatically find correct beam for IDOLS-like data
parent
15c42cef
No related branches found
No related tags found
No related merge requests found
Pipeline
#117587
passed
4 months ago
Stage: build
Stage: test
Changes
1
Pipelines
1
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
scintillation/averaging.py
+20
-19
20 additions, 19 deletions
scintillation/averaging.py
with
20 additions
and
19 deletions
scintillation/averaging.py
+
20
−
19
View file @
e582fb56
...
@@ -165,7 +165,7 @@ def copy_attrs_to_dict(h5_leaf, dict_container: Optional[Dict] = None,
...
@@ -165,7 +165,7 @@ def copy_attrs_to_dict(h5_leaf, dict_container: Optional[Dict] = None,
def
parse_datetime_str
(
datetime_str
):
def
parse_datetime_str
(
datetime_str
):
return
Time
(
datetime_str
.
split
(
'
'
)[
0
],
format
=
'
isot
'
,
scale
=
'
utc
'
).
to_datetime
()
return
Time
(
datetime_str
.
split
(
'
'
)[
0
],
format
=
'
isot
'
,
scale
=
'
utc
'
).
to_datetime
()
.
replace
(
tzinfo
=
timezone
.
utc
)
def
extract_root_metadata
(
dataset
):
def
extract_root_metadata
(
dataset
):
...
@@ -527,8 +527,8 @@ def make_plot(data_array, time_axis, frequency_axis, station_name, plot_full_pat
...
@@ -527,8 +527,8 @@ def make_plot(data_array, time_axis, frequency_axis, station_name, plot_full_pat
label
=
"
Dynspec
"
):
label
=
"
Dynspec
"
):
fig
=
plt
.
figure
(
figsize
=
(
6
,
4
),
dpi
=
120
)
fig
=
plt
.
figure
(
figsize
=
(
6
,
4
),
dpi
=
120
)
ax
=
plt
.
gca
()
ax
=
plt
.
gca
()
start_time
=
datetime
.
fromtimestamp
(
time_axis
[
0
]).
strftime
(
"
%Y/%m/%d %H:%M:%S
"
)
start_time
=
datetime
.
utc
fromtimestamp
(
time_axis
[
0
]).
strftime
(
"
%Y/%m/%d %H:%M:%S
"
)
datetime_axis
=
[
datetime
.
fromtimestamp
(
time_s
)
for
time_s
in
time_axis
]
datetime_axis
=
[
datetime
.
utc
fromtimestamp
(
time_s
)
for
time_s
in
time_axis
]
times
=
mdates
.
date2num
(
datetime_axis
)
times
=
mdates
.
date2num
(
datetime_axis
)
title
=
'
{label} {station_name} - {start_time}
'
.
format
(
label
=
label
,
title
=
'
{label} {station_name} - {start_time}
'
.
format
(
label
=
label
,
...
@@ -565,8 +565,8 @@ def make_S4plot(data_array, time_axis, frequency_axis, station_name, plot_full_p
...
@@ -565,8 +565,8 @@ def make_S4plot(data_array, time_axis, frequency_axis, station_name, plot_full_p
label
=
"
S4
"
):
label
=
"
S4
"
):
fig
=
plt
.
figure
(
figsize
=
(
6
,
4
),
dpi
=
120
)
fig
=
plt
.
figure
(
figsize
=
(
6
,
4
),
dpi
=
120
)
ax
=
plt
.
gca
()
ax
=
plt
.
gca
()
start_time
=
datetime
.
fromtimestamp
(
time_axis
[
0
]).
strftime
(
"
%Y/%m/%d %H:%M:%S
"
)
start_time
=
datetime
.
utc
fromtimestamp
(
time_axis
[
0
]).
strftime
(
"
%Y/%m/%d %H:%M:%S
"
)
datetime_axis
=
[
datetime
.
fromtimestamp
(
time_s
)
for
time_s
in
time_axis
]
datetime_axis
=
[
datetime
.
utc
fromtimestamp
(
time_s
)
for
time_s
in
time_axis
]
times
=
mdates
.
date2num
(
datetime_axis
)
times
=
mdates
.
date2num
(
datetime_axis
)
title
=
'
{label} {station_name} - {start_time}
'
.
format
(
label
=
label
,
title
=
'
{label} {station_name} - {start_time}
'
.
format
(
label
=
label
,
...
@@ -601,7 +601,7 @@ def make_S4plot(data_array, time_axis, frequency_axis, station_name, plot_full_p
...
@@ -601,7 +601,7 @@ def make_S4plot(data_array, time_axis, frequency_axis, station_name, plot_full_p
def
make_cross_corr_plots
(
plot_full_path
,
cross_corr
,
uvpp
,
time
,
vel
):
def
make_cross_corr_plots
(
plot_full_path
,
cross_corr
,
uvpp
,
time
,
vel
):
fig
=
plt
.
figure
(
figsize
=
(
6
,
4
),
dpi
=
120
)
fig
=
plt
.
figure
(
figsize
=
(
6
,
4
),
dpi
=
120
)
ax
=
plt
.
gca
()
ax
=
plt
.
gca
()
#start_time = datetime.fromtimestamp(time).strftime("%Y/%m/%d %H:%M:%S")
#start_time = datetime.
utc
fromtimestamp(time).strftime("%Y/%m/%d %H:%M:%S")
start_time
=
time
.
strftime
(
"
%Y/%m/%d %H:%M:%S
"
)
start_time
=
time
.
strftime
(
"
%Y/%m/%d %H:%M:%S
"
)
label
=
"
Cross correlation (at delay 0)
"
label
=
"
Cross correlation (at delay 0)
"
...
@@ -627,7 +627,7 @@ def make_cross_corr_plots(plot_full_path, cross_corr, uvpp, time,vel):
...
@@ -627,7 +627,7 @@ def make_cross_corr_plots(plot_full_path, cross_corr, uvpp, time,vel):
def
make_delay_plots
(
plot_full_path
,
delays
,
uvpp
,
time
,
vel
):
def
make_delay_plots
(
plot_full_path
,
delays
,
uvpp
,
time
,
vel
):
fig
=
plt
.
figure
(
figsize
=
(
6
,
4
),
dpi
=
120
)
fig
=
plt
.
figure
(
figsize
=
(
6
,
4
),
dpi
=
120
)
ax
=
plt
.
gca
()
ax
=
plt
.
gca
()
#start_time = datetime.fromtimestamp(time).strftime("%Y/%m/%d %H:%M:%S")
#start_time = datetime.
utc
fromtimestamp(time).strftime("%Y/%m/%d %H:%M:%S")
start_time
=
time
.
strftime
(
"
%Y/%m/%d %H:%M:%S
"
)
start_time
=
time
.
strftime
(
"
%Y/%m/%d %H:%M:%S
"
)
label
=
"
Delay
"
label
=
"
Delay
"
...
@@ -679,11 +679,11 @@ def create_averaged_dataset(sample_info, data_array, flags=None, bandpass=None):
...
@@ -679,11 +679,11 @@ def create_averaged_dataset(sample_info, data_array, flags=None, bandpass=None):
def
round_up_datetime
(
datet
,
interval
):
def
round_up_datetime
(
datet
,
interval
):
return
datetime
.
fromtimestamp
(
numpy
.
ceil
(
datet
.
timestamp
()
/
interval
)
*
interval
)
return
datetime
.
utc
fromtimestamp
(
numpy
.
ceil
(
datet
.
timestamp
()
/
interval
)
*
interval
)
.
replace
(
tzinfo
=
timezone
.
utc
)
def
round_down_datetime
(
datet
,
interval
):
def
round_down_datetime
(
datet
,
interval
):
return
datetime
.
fromtimestamp
(
numpy
.
floor
(
datet
.
timestamp
()
/
interval
)
*
interval
)
return
datetime
.
utc
fromtimestamp
(
numpy
.
floor
(
datet
.
timestamp
()
/
interval
)
*
interval
)
.
replace
(
tzinfo
=
timezone
.
utc
)
def
split_samples
(
dynspec_name
,
def
split_samples
(
dynspec_name
,
...
@@ -710,11 +710,11 @@ def split_samples(dynspec_name,
...
@@ -710,11 +710,11 @@ def split_samples(dynspec_name,
time_delta
=
metadata
[
'
SAMPLING_TIME
'
]
time_delta
=
metadata
[
'
SAMPLING_TIME
'
]
if
'
DYNSPEC_START_UTC
'
in
metadata
:
if
'
DYNSPEC_START_UTC
'
in
metadata
:
obs_start_time
=
parse_datetime_str
(
metadata
[
'
DYNSPEC_START_UTC
'
])
obs_start_time
=
parse_datetime_str
(
metadata
[
'
DYNSPEC_START_UTC
'
])
.
replace
(
tzinfo
=
timezone
.
utc
)
obs_end_time
=
parse_datetime_str
(
metadata
[
'
DYNSPEC_STOP_UTC
'
])
obs_end_time
=
parse_datetime_str
(
metadata
[
'
DYNSPEC_STOP_UTC
'
])
.
replace
(
tzinfo
=
timezone
.
utc
)
else
:
else
:
obs_start_time
=
parse_datetime_str
(
metadata
[
'
OBSERVATION_START_UTC
'
])
obs_start_time
=
parse_datetime_str
(
metadata
[
'
OBSERVATION_START_UTC
'
])
.
replace
(
tzinfo
=
timezone
.
utc
)
obs_end_time
=
parse_datetime_str
(
metadata
[
'
OBSERVATION_END_UTC
'
])
obs_end_time
=
parse_datetime_str
(
metadata
[
'
OBSERVATION_END_UTC
'
])
.
replace
(
tzinfo
=
timezone
.
utc
)
if
'
AXIS_VALUE_WORLD
'
in
metadata
[
'
SPECTRAL
'
]:
if
'
AXIS_VALUE_WORLD
'
in
metadata
[
'
SPECTRAL
'
]:
frequency
=
metadata
[
'
SPECTRAL
'
][
'
AXIS_VALUE_WORLD
'
]
frequency
=
metadata
[
'
SPECTRAL
'
][
'
AXIS_VALUE_WORLD
'
]
else
:
else
:
...
@@ -732,9 +732,10 @@ def split_samples(dynspec_name,
...
@@ -732,9 +732,10 @@ def split_samples(dynspec_name,
data_array
=
dataset
[
dynspec_name
][
'
DATA
'
]
data_array
=
dataset
[
dynspec_name
][
'
DATA
'
]
nofch
=
1
#TODO check if this is always the case for DYNSPEC data
nofch
=
1
#TODO check if this is always the case for DYNSPEC data
else
:
else
:
data_array
=
dataset
[
dynspec_name
][
'
BEAM_000
'
][
'
STOKES_0
'
]
beam_name
=
[
ibeam
for
ibeam
in
dataset
[
dynspec_name
].
keys
()
if
"
BEAM
"
in
ibeam
][
0
]
data_array
=
dataset
[
dynspec_name
][
beam_name
][
'
STOKES_0
'
]
#take median over channels for raw data
#take median over channels for raw data
nofch
=
dataset
[
dynspec_name
][
'
BEAM_000
'
][
'
STOKES_0
'
].
attrs
[
'
NOF_CHANNELS
'
][
0
]
nofch
=
dataset
[
dynspec_name
][
beam_name
][
'
STOKES_0
'
].
attrs
[
'
NOF_CHANNELS
'
][
0
]
averaging_window_in_samples
=
int
(
numpy
.
ceil
(
averaging_window
/
time_delta
))
averaging_window_in_samples
=
int
(
numpy
.
ceil
(
averaging_window
/
time_delta
))
averaging_window_in_seconds
=
averaging_window_in_samples
*
time_delta
averaging_window_in_seconds
=
averaging_window_in_samples
*
time_delta
S4_60s_window_in_samples
=
int
(
60.
/
time_delta
)
S4_60s_window_in_samples
=
int
(
60.
/
time_delta
)
...
@@ -772,8 +773,8 @@ def split_samples(dynspec_name,
...
@@ -772,8 +773,8 @@ def split_samples(dynspec_name,
'
average_window_samples
'
:
averaging_window_in_samples
,
'
average_window_samples
'
:
averaging_window_in_samples
,
'
average_window_seconds
'
:
averaging_window_in_seconds
,
'
average_window_seconds
'
:
averaging_window_in_seconds
,
'
sample_time_samples
'
:
output_time_samples
,
'
sample_time_samples
'
:
output_time_samples
,
'
sample_start_datetime
'
:
datetime
.
fromtimestamp
(
time_obs
[
start_index
]),
'
sample_start_datetime
'
:
datetime
.
utc
fromtimestamp
(
time_obs
[
start_index
])
.
replace
(
tzinfo
=
timezone
.
utc
)
,
'
sample_end_datetime
'
:
datetime
.
fromtimestamp
(
time_obs
[
end_index
]),
'
sample_end_datetime
'
:
datetime
.
utc
fromtimestamp
(
time_obs
[
end_index
])
.
replace
(
tzinfo
=
timezone
.
utc
)
,
'
n_time_samples
'
:
len
(
indexs
),
'
n_time_samples
'
:
len
(
indexs
),
'
sample_start_frequency
'
:
start_frequency
,
'
sample_start_frequency
'
:
start_frequency
,
'
sample_end_frequency
'
:
end_frequency
}
'
sample_end_frequency
'
:
end_frequency
}
...
@@ -909,8 +910,8 @@ def get_velocities(metadata,
...
@@ -909,8 +910,8 @@ def get_velocities(metadata,
'
average_window_samples
'
:
averaging_window_in_samples
,
'
average_window_samples
'
:
averaging_window_in_samples
,
'
average_window_seconds
'
:
averaging_window_in_seconds
,
'
average_window_seconds
'
:
averaging_window_in_seconds
,
'
sample_time_samples
'
:
output_time_samples
,
'
sample_time_samples
'
:
output_time_samples
,
'
sample_start_datetime
'
:
datetime
.
fromtimestamp
(
time_obs
[
start_index
]),
'
sample_start_datetime
'
:
datetime
.
utc
fromtimestamp
(
time_obs
[
start_index
])
.
replace
(
tzinfo
=
timezone
.
utc
)
,
'
sample_end_datetime
'
:
datetime
.
fromtimestamp
(
time_obs
[
end_index
]),
'
sample_end_datetime
'
:
datetime
.
utc
fromtimestamp
(
time_obs
[
end_index
])
.
replace
(
tzinfo
=
timezone
.
utc
)
,
'
n_time_samples
'
:
len
(
indexs
),
'
n_time_samples
'
:
len
(
indexs
),
'
sample_start_frequency
'
:
start_frequency
,
'
sample_start_frequency
'
:
start_frequency
,
'
sample_end_frequency
'
:
end_frequency
,
'
sample_end_frequency
'
:
end_frequency
,
...
...
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment