logger.info(f"Skipping statistic with timestamp: {timestamp}. Only writing 1/{self.decimation_factor} statistics")
logger.debug(f"Skipping statistic with timestamp: {timestamp}. Only writing 1/{self.decimation_factor} statistics")
# increment even though its skipped
# increment even though its skipped
self.statistics_counter+=1
self.statistics_counter+=1
...
@@ -109,7 +108,7 @@ class hdf5_writer:
...
@@ -109,7 +108,7 @@ class hdf5_writer:
# received new statistic, so increment counter
# received new statistic, so increment counter
self.statistics_counter+=1
self.statistics_counter+=1
logger.info(f"starting new matrix with timestamp: {timestamp}")
logger.debug(f"starting new matrix with timestamp: {timestamp}")
# write the finished (and checks if its the first matrix)
# write the finished (and checks if its the first matrix)
ifself.current_matrixisnotNone:
ifself.current_matrixisnotNone:
...
@@ -128,7 +127,7 @@ class hdf5_writer:
...
@@ -128,7 +127,7 @@ class hdf5_writer:
self.statistics_header=None
self.statistics_header=None
defwrite_matrix(self):
defwrite_matrix(self):
logger.info("writing matrix to file")
logger.debug("writing matrix to file")
"""
"""
Writes the finished matrix to the hdf5 file
Writes the finished matrix to the hdf5 file
"""
"""
...
@@ -170,7 +169,6 @@ class hdf5_writer:
...
@@ -170,7 +169,6 @@ class hdf5_writer:
defprocess_packet(self,packet):
defprocess_packet(self,packet):
logger.debug(f"Processing packet")
"""
"""
Adds the newly received statistics packet to the statistics matrix
Adds the newly received statistics packet to the statistics matrix
"""
"""
...
@@ -238,5 +236,7 @@ class hdf5_writer:
...
@@ -238,5 +236,7 @@ class hdf5_writer:
try:
try:
self.write_matrix()
self.write_matrix()
finally:
finally:
filename=str(self.file)
self.file.close()
self.file.close()
logger.debug(f"{self.file} closed")
logger.debug(f"{filename} closed")
logger.debug(f"Received a total of {self.statistics_counter} statistics while running. With {int(self.statistics_counter/self.decimation_factor)} written to disk ")