Commit 5cf84939 authored by Jan David Mol

L2SS-2120: Use static IP range for statistics record jobs

parent f8d7e98d
Tag: v0.0.23
1 merge request: !21 L2SS-2120: Use static IP range for statistics record jobs
 stingray:
   lba:
-    sst: 5101
-    xst: 5102
-    bst: 5103
+    sst: 10.99.76.1/16
+    xst: 10.99.76.2/16
+    bst: 10.99.76.3/16
   hba0:
-    sst: 5111
-    xst: 5112
-    bst: 5113
+    sst: 10.99.76.4/16
+    xst: 10.99.76.5/16
+    bst: 10.99.76.6/16
   hba1:
-    sst: 5121
-    xst: 5122
-    bst: 5123
+    sst: 10.99.76.7/16
+    xst: 10.99.76.8/16
+    bst: 10.99.76.9/16
 stingray:
   lba:
-    sst: 5101
-    xst: 5102
-    bst: 5103
+    sst: 10.99.76.1/16
+    xst: 10.99.76.2/16
+    bst: 10.99.76.3/16
   hba:
-    sst: 5111
-    xst: 5112
-    bst: 5113
+    sst: 10.99.76.4/16
+    xst: 10.99.76.5/16
+    bst: 10.99.76.6/16
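Note: the addresses above all sit in a single /16 together with the gateway configured in the Nomad job below. A minimal standalone sketch (Python stdlib only; not part of this commit) that checks the assignments are unique and fall inside that subnet:

# Sketch (assumption: a plain stdlib sanity check, not this repo's code).
from ipaddress import ip_interface, ip_network

GATEWAY = "10.99.250.250"  # from the Nomad job below
ASSIGNED = [
    "10.99.76.1/16", "10.99.76.2/16", "10.99.76.3/16",  # lba sst/xst/bst
    "10.99.76.4/16", "10.99.76.5/16", "10.99.76.6/16",  # hba0 (or hba)
    "10.99.76.7/16", "10.99.76.8/16", "10.99.76.9/16",  # hba1
]

subnet = ip_network("10.99.0.0/16")
assert len(set(ASSIGNED)) == len(ASSIGNED), "each job needs a unique IP"
for addr in ASSIGNED:
    assert ip_interface(addr).ip in subnet, f"{addr} outside {subnet}"
assert ip_interface(f"{GATEWAY}/16").ip in subnet
print("all static IPs are unique and share a subnet with", GATEWAY)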
@@ -43,12 +43,19 @@ job "statistics" {
   }

 [[ range $af, $fields := $.stingray ]]
-[[ range $st, $port := $fields ]]
+[[ range $st, $ip := $fields ]]
   group "stingray-[[ $af ]]-[[ $st ]]" {
     count = 1

     network {
-      mode = "cni/station"
+      mode = "cni/statistics"
+
+      cni {
+        args {
+          IP = "[[ $ip ]]",
+          GATEWAY = "10.99.250.250"
+        }
+      }
     }

     service {
@@ -66,7 +73,7 @@ job "statistics" {
     service {
       name = "stingray-[[ $af ]]-[[ $st ]]-udp"
-      port = [[ $port ]]
+      port = 5001
       address_mode = "alloc"
     }
@@ -95,7 +102,7 @@ job "statistics" {
         "[[ $.station ]]",
         "[[ $af ]]",
         "[[ $st ]]",
-        "udp://0.0.0.0:[[ $port ]]",
+        "udp://0.0.0.0:5001",
         "--port=6001"
       ]
     }
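With every group pinned to its own static IP, the per-job UDP ports (5101, 5102, ...) are no longer needed to disambiguate endpoints: each recorder can bind the same fixed port 5001 inside its own network namespace. A sketch of that idea (Python; an assumption about the recorder's behaviour, not this repo's code):

# Sketch: each alloc binds the same well-known port; the CNI-assigned
# static IP (IP = "[[ $ip ]]" above) is what makes the endpoint unique.
import socket

def open_statistics_socket(port: int = 5001) -> socket.socket:
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # 0.0.0.0 here is scoped to the alloc's namespace, i.e. its static IP.
    sock.bind(("0.0.0.0", port))
    return sock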
@@ -83,21 +83,24 @@ def main(argv=None):
     with streams.create(args.destination, True, minio_client) as writer:
         with streams.create(args.source, False, minio_client) as reader:
-            if args.datatype == "packet":
-                for packet in reader:
-                    writer.put_packet(packet)
-                    metric_nr_packets_processed.inc()
-                    metric_nr_bytes_read.set(reader.num_bytes_read)
-                    metric_nr_bytes_written.set(writer.num_bytes_written)
-            elif args.datatype == "json":
-                while data := reader.get_json():
-                    writer.put_json(data)
-                    metric_nr_packets_processed.inc()
-                    metric_nr_bytes_read.set(reader.num_bytes_read)
-                    metric_nr_bytes_written.set(writer.num_bytes_written)
+            try:
+                if args.datatype == "packet":
+                    for packet in reader:
+                        writer.put_packet(packet)
+                        metric_nr_packets_processed.inc()
+                        metric_nr_bytes_read.set(reader.num_bytes_read)
+                        metric_nr_bytes_written.set(writer.num_bytes_written)
+                elif args.datatype == "json":
+                    while data := reader.get_json():
+                        writer.put_json(data)
+                        metric_nr_packets_processed.inc()
+                        metric_nr_bytes_read.set(reader.num_bytes_read)
+                        metric_nr_bytes_written.set(writer.num_bytes_written)
+            except Exception:  # pylint: disable=broad-exception-caught
+                logger.exception("Caught exception while forwarding packets")

     logger.info("End of packet stream. Shutting down.")
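The new try/except turns any error during forwarding into a logged traceback followed by the normal shutdown path, so the process still exits cleanly instead of crashing. Distilled into a standalone sketch (an illustration of the pattern, not this repo's code):

# Sketch of the shutdown pattern above.
import logging

logger = logging.getLogger(__name__)

def forward(reader, writer):
    try:
        for packet in reader:
            writer.put_packet(packet)
    except Exception:  # pylint: disable=broad-exception-caught
        # logger.exception() logs at ERROR level and appends the traceback.
        logger.exception("Caught exception while forwarding packets")
    logger.info("End of packet stream. Shutting down.")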
@@ -229,10 +229,12 @@ def main(argv=None):
     zmq_url = f"tcp://*:{args.port}"
     topic = f"{args.type}/{args.antenna_field}/{args.station}"
     logger.info("Publishing on %s with topic %s", zmq_url, topic)

     with ZeroMQPublisher(zmq_url, [topic]) as publisher:
+        logger.info("Waiting for publisher to start...")
         while not publisher.is_running:
             time.sleep(1)
+        logger.info("Publisher started")

         collector = CollectPacketsPerTimestamp()
@@ -246,15 +248,18 @@ def main(argv=None):
             if send_message(publisher, message):
                 metric_nr_messages_published.inc()

-        # process stream
-        with streams.create(args.source) as stream:
-            for packet in read_packets(stream, metric_labels):
-                for packets_of_same_timestamp in collector.put_packet(packet):
-                    process_packets(packets_of_same_timestamp)
-
-        # process remainder
-        for packets_of_same_timestamp in collector.done():
-            process_packets(packets_of_same_timestamp)
+        try:
+            # process stream
+            with streams.create(args.source) as stream:
+                for packet in read_packets(stream, metric_labels):
+                    for packets_of_same_timestamp in collector.put_packet(packet):
+                        process_packets(packets_of_same_timestamp)
+
+            # process remainder
+            for packets_of_same_timestamp in collector.done():
+                process_packets(packets_of_same_timestamp)
+        except Exception:  # pylint: disable=broad-exception-caught
+            logger.exception("Caught exception while processing packets")

         logger.info("End of packet stream. Shutting down.")
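The startup wait in the first hunk polls the publisher until its socket is actually up, so no packets are consumed before there is somewhere to publish them. Lifted out as a standalone sketch (the polling pattern from the diff; the ZeroMQPublisher is_running attribute is assumed from the code above):

import logging
import time

logger = logging.getLogger(__name__)

def wait_until_running(publisher, poll_interval: float = 1.0) -> None:
    logger.info("Waiting for publisher to start...")
    while not publisher.is_running:  # is_running as used in the diff above
        time.sleep(poll_interval)
    logger.info("Publisher started")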
@@ -3,7 +3,6 @@
 """Implements a storage class to write text data to a S3 backend in blocks"""

-import asyncio
 import io
 import logging

 from datetime import datetime, timezone, timedelta
@@ -58,7 +57,7 @@ class Storage:
     def __exit__(self, *args):
         if self.current_block:
             block = self.current_block
-            asyncio.run(self._complete_current_block(block))
+            self._complete_current_block(block)
             self.current_block = None

     def _init_bucket(self):
@@ -79,7 +78,7 @@ class Storage:
             ),
         )

-    async def _complete_current_block(self, block):
+    def _complete_current_block(self, block):
         block.seek(io.SEEK_SET, 0)
         timestamp = datetime.now(timezone.utc)
         size = len(block.getvalue())
@@ -107,7 +106,7 @@ class Storage:
             logger.debug("Current block is expired, complete block and start new")
             block = self.current_block
             self.current_block = None
-            asyncio.run(self._complete_current_block(block))
+            self._complete_current_block(block)
             self.current_block = Block(self.duration)

         data = line.encode() + b"\n"
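Dropping asyncio.run() in favour of a plain synchronous call avoids spinning up a fresh event loop per completed block; it also sidesteps a classic failure mode, since asyncio.run() raises if an event loop is already running in the calling thread. A minimal demonstration of that failure mode (an assumption about the motivation, which the commit itself does not state):

import asyncio

async def complete_block():
    pass  # stand-in for the S3 upload

async def caller_inside_loop():
    coro = complete_block()
    try:
        asyncio.run(coro)  # RuntimeError: asyncio.run() cannot be called
                           # from a running event loop
    finally:
        coro.close()  # keep the demo itself warning-free

try:
    asyncio.run(caller_inside_loop())
except RuntimeError as exc:
    print(exc)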