diff --git a/docker-compose/logstash/README.md b/docker-compose/logstash/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..156aaae85a210ee963a83baadc39ccdadbbef756
--- /dev/null
+++ b/docker-compose/logstash/README.md
@@ -0,0 +1,41 @@
+# Logstash
+
+Grafana Loki has a Logstash output plugin, `logstash-output-loki`, that ships logs to a Loki instance.
+
+## Usage and configuration
+
+To forward logs from Logstash to Loki, add the `loki` output to your Logstash configuration file. The available options are documented below:
+
+    output {
+        loki {
+            [url => "" | default = none | required=true]
+
+            [tenant_id => string | default = nil | required=false]
+
+            [message_field => string | default = "message" | required=false]
+
+            [include_fields => array | default = [] | required=false]
+
+            [batch_wait => number | default = 1(s) | required=false]
+
+            [batch_size => number | default = 102400(bytes) | required=false]
+
+            [min_delay => number | default = 1(s) | required=false]
+
+            [max_delay => number | default = 300(s) | required=false]
+
+            [retries => number | default = 10 | required=false]
+
+            [username => string | default = nil | required=false]
+
+            [password => secret | default = nil | required=false]
+
+            [cert => path | default = nil | required=false]
+
+            [key => path | default = nil | required=false]
+
+            [ca_cert => path | default = nil | required=false]
+
+            [insecure_skip_verify => boolean | default = false | required=false]
+        }
+    }
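+
+For example, a minimal configuration pointing at the Loki instance used in this compose setup (reachable at the `loki` hostname on port 3100) could look like the following; the optional settings shown carry their default values and can be omitted:
+
+    output {
+        loki {
+            url => "http://loki:3100/loki/api/v1/push"
+            message_field => "message"
+            batch_wait => 1
+            batch_size => 102400
+        }
+    }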
diff --git a/docker-compose/logstash/logstash/conf.d/30-output.conf b/docker-compose/logstash/logstash/conf.d/30-output.conf
index 9bad161d264c5c42268f9d94d40c1a39d5cae691..7ade7b0f01d59e5cbaf63f814043a2914759d277 100644
--- a/docker-compose/logstash/logstash/conf.d/30-output.conf
+++ b/docker-compose/logstash/logstash/conf.d/30-output.conf
@@ -5,6 +5,6 @@ output {
   #   index => "logstash-%{+YYYY.MM.dd}"
   # }
   loki {
-    url => "http://localhost:3100/loki/api/v1/push"
+    url => "http://loki:3100/loki/api/v1/push"
   }
 }
diff --git a/docker-compose/logstash/loki.conf b/docker-compose/logstash/loki.conf
new file mode 100644
index 0000000000000000000000000000000000000000..31d534391a957f3e3dea7c5e6cad54aa8e614bf3
--- /dev/null
+++ b/docker-compose/logstash/loki.conf
@@ -0,0 +1,152 @@
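+# Receive logs from Beats shippers (e.g. Filebeat) over TLS on port 5044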
+input {
+  beats {
+    port => 5044
+    ssl => true
+    ssl_certificate => "/etc/pki/tls/certs/logstash-beats.crt"
+    ssl_key => "/etc/pki/tls/private/logstash-beats.key"
+  }
+}
+
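+# Receive syslog messages on port 1514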
+input {
+  syslog {
+    port => 1514
+  }
+}
+
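+# Receive JSON-encoded events over TCP on port 5959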
+input {
+  tcp {
+    port => 5959
+    codec => json
+  }
+}
+
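+# Parse syslog events into structured fields and set the event timestamp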
+filter {
+  if [type] == "syslog" {
+    grok {
+      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
+      add_field => [ "received_at", "%{@timestamp}" ]
+      add_field => [ "received_from", "%{host}" ]
+    }
+    syslog_pri { }
+    date {
+      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
+    }
+  }
+}
+
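+# Parse nginx access logs with the NGINXACCESS grok pattern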
+filter {
+  if [type] == "nginx-access" {
+    grok {
+      match => { "message" => "%{NGINXACCESS}" }
+    }
+  }
+}
+
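+# Grafana logs use key=value (logfmt) output; rename its fields to the common schema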
+filter {
+  if [program] == "grafana" {
+    kv { }
+    mutate {
+      rename => {
+        "t" => "timestamp"
+        "lvl" => "level"
+        "msg" => "message"
+      }
+      uppercase => [ "level" ]
+    }
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+  }
+}
+
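+# Prometheus logs use key=value output as well; normalise its field names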
+filter {
+  if [program] == "prometheus" {
+    kv { }
+    mutate {
+      rename => {
+        "ts" => "timestamp"
+        "msg" => "message"
+      }
+      uppercase => [ "level" ]
+    }
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+  }
+}
+
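+# Extract timestamp, log level and message from tango-rest log lines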
+filter {
+  if [program] == "tango-rest" {
+    grok {
+      match => {
+        "message" => "%{TIMESTAMP_ISO8601:timestamp} %{WORD:level} %{GREEDYDATA:message}"
+      }
+      "overwrite" => [ "timestamp", "level", "message" ]
+    }
+    date {
+      match => [ "timestamp", "YYYY-MM-dd HH:mm:ss,SSS" ]
+      timezone => "UTC"
+    }
+  }
+}
+
+filter {
+  # mark all our mariadb instances
+  grok {
+    match => {
+      "program" => [ "archiver-maria-db", "tangodb" ]
+    }
+    add_tag => [ "mariadb" ]
+  }
+
+  # parse mariadb output
+  if "mariadb" in [tags] {
+    grok {
+      match => {
+        "message" => [
+          "%{TIMESTAMP_ISO8601:timestamp} .%{WORD:level}. %{GREEDYDATA:message}",
+          "%{TIMESTAMP_ISO8601:timestamp} 0 .%{WORD:level}. %{GREEDYDATA:message}"
+        ]
+      }
+      "overwrite" => [ "timestamp", "level", "message" ]
+    }
+    mutate {
+      gsub => [
+        "level", "Note", "Info"
+      ]
+      uppercase => [ "level" ]
+    }
+    date {
+      match => [ "timestamp", "YYYY-MM-dd HH:mm:ssZZ", "YYYY-MM-dd HH:mm:ss", "YYYY-MM-dd  H:mm:ss"  ]
+      timezone => "UTC"
+    }
+  }
+}
+
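+# Ship all processed events to Loki (the Elasticsearch output is kept for reference)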
+output {
+  # elasticsearch {
+  #   hosts => ["localhost"]
+  #   manage_template => false
+  #   index => "logstash-%{+YYYY.MM.dd}"
+  # }
+  loki {
+    url => "http://loki:3100/loki/api/v1/push"
+  }
+}
+