diff --git a/doc/source/logstash.rst b/doc/source/logstash.rst
index 83c4b75a17..50db5842fc 100644
--- a/doc/source/logstash.rst
+++ b/doc/source/logstash.rst
@@ -172,23 +172,23 @@ schema.
 
 The config file that tells Logstash how to do this flattening can be
 found at
-:cgit_file:`modules/openstack_project/templates/logstash/indexer.conf.erb`
+https://git.openstack.org/cgit/openstack-infra/logstash-filters/tree/filters/openstack-filters.conf
 
 This works via the tags that are associated with a given message.
 
 The tags in
-:cgit_file:`modules/openstack_project/templates/logstash/indexer.conf.erb`
+https://git.openstack.org/cgit/openstack-infra/logstash-filters/tree/filters/openstack-filters.conf
 are used to tell logstash how to parse a given file's messages, based
 on the file's message format.
 
 When adding a new file to be indexed to
 http://git.openstack.org/cgit/openstack-infra/project-config/tree/roles/submit-logstash-jobs/defaults/main.yaml
-at least one tag from the indexer.conf.erb file should be associated
+at least one tag from the openstack-filters.conf file should be associated
 with the new file.  If indexing is not working properly, one can expect
 to see '{%logmessage%}' instead of the actual message data.
 
 In the event a new file's format is not covered, a patch for
-:cgit_file:`modules/openstack_project/templates/logstash/indexer.conf.erb`
+https://git.openstack.org/cgit/openstack-infra/logstash-filters/tree/filters/openstack-filters.conf
 should be submitted with an appropriate parsing pattern.
 
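As a rough sketch of what such a parsing pattern looks like: the filters in
openstack-filters.conf are keyed on tags, in the same shape as the indexer
template removed further below. A stanza for a hypothetical "mysvc" tag (the
tag name and grok pattern here are illustrative only, not part of the real
filter set) might look like::

    filter {
      # Only runs for files that were submitted for indexing with the
      # hypothetical "mysvc" tag.
      if "mysvc" in [tags] {
        grok {
          # Pull out a timestamp, log level and message body so that the
          # common date/mutate filters can normalize the event afterwards.
          match => { "message" => "^%{TIMESTAMP_ISO8601:logdate} %{LOGLEVEL:loglevel} %{GREEDYDATA:logmessage}" }
          add_field => { "received_at" => "%{@timestamp}" }
        }
      }
    }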
 ElasticSearch
diff --git a/modules/openstack_project/templates/logstash/agent.conf.erb b/modules/openstack_project/templates/logstash/agent.conf.erb
deleted file mode 100644
index ba67e52504..0000000000
--- a/modules/openstack_project/templates/logstash/agent.conf.erb
+++ /dev/null
@@ -1,47 +0,0 @@
-input {
-  pipe {
-    command => "python3 /usr/local/bin/log-pusher.py -r -z tcp://jenkins.openstack.org:8888 -l http://logs.openstack.org -f console.html"
-    format => "json"
-    message_format => "%{event_message}"
-    tags => ["jenkins", "console"]
-    type => "jenkins_console"
-  }
-}
-
-# You can check grok patterns at http://grokdebug.herokuapp.com/
-filter {
-  grep {
-    type => "jenkins_console"
-    # Drop matches.
-    negate => true
-    match => ["@message", "^</?pre>$"]
-  }
-  multiline {
-    type => "jenkins_console"
-    negate => true
-    pattern => "^%{DATESTAMP} \|"
-    what => "previous"
-  }
-  grok {
-    type => "jenkins_console"
-    pattern => [ "^%{DATESTAMP:logdate} \| %{GREEDYDATA:logmessage}" ]
-    add_field => [ "received_at", "%{@timestamp}" ]
-  }
-  date {
-    type => "jenkins_console"
-    exclude_tags => "_grokparsefailure"
-    match => [ "logdate", "yyyy-MM-dd HH:mm:ss.SSS" ]
-  }
-  mutate {
-    type => "jenkins_console"
-    exclude_tags => "_grokparsefailure"
-    replace => [ "@message", "%{logmessage}" ]
-  }
-  mutate {
-    type => "jenkins_console"
-    exclude_tags => "_grokparsefailure"
-    remove => [ "logdate", "logmessage" ]
-  }
-}
-
-<%= scope.function_template(['openstack_project/logstash/redis-output.conf.erb']) %>
diff --git a/modules/openstack_project/templates/logstash/indexer.conf.erb b/modules/openstack_project/templates/logstash/indexer.conf.erb
deleted file mode 100644
index 6065d44190..0000000000
--- a/modules/openstack_project/templates/logstash/indexer.conf.erb
+++ /dev/null
@@ -1,124 +0,0 @@
-input {
-  tcp {
-    host => "localhost"
-    port => 9999
-    codec => json_lines {}
-    type => "jenkins"
-  }
-}
-
-# You can check grok patterns at http://grokdebug.herokuapp.com/
-filter {
-  if "screen" in [tags] and [message] =~ "^\+ " {
-    drop {}
-  }
-  if "console" in [tags] or "console.html" in [tags] {
-    if [message] == "<pre>" or [message] == "</pre>" {
-      drop {}
-    }
-    multiline {
-      negate => true
-      pattern => "^%{TIMESTAMP_ISO8601} \|"
-      what => "previous"
-      stream_identity => "%{host}.%{filename}"
-    }
-    grok {
-      # Do multiline matching as the above mutliline filter may add newlines
-      # to the log messages.
-      match => { "message" => "(?m)^%{TIMESTAMP_ISO8601:logdate} \| %{GREEDYDATA:logmessage}" }
-      add_field => { "received_at" => "%{@timestamp}" }
-    }
-  } else if "oslofmt" in [tags] {
-    multiline {
-      negate => true
-      pattern => "^(%{TIMESTAMP_ISO8601}|%{SYSLOGTIMESTAMP}) "
-      what => "previous"
-      stream_identity => "%{host}.%{filename}"
-    }
-    multiline {
-      negate => false
-      # NOTE(mriedem): oslo.log 1.2.0 changed the logging_exception_prefix
-      # config option from using TRACE to ERROR so we have to handle both.
-      #
-      # NOTE(sdague): stack traces always include process id, so
-      # NUMBER being required element here is important, otherwise
-      # ERROR messages just fold into the previous messages, which are
-      # typically INFO.
-      pattern => "^(%{TIMESTAMP_ISO8601}|%{SYSLOGTIMESTAMP})%{SPACE}%{NUMBER}%{SPACE}(TRACE|ERROR)"
-      what => "previous"
-      stream_identity => "%{host}.%{filename}"
-    }
-    grok {
-      # Do multiline matching as the above mutliline filter may add newlines
-      # to the log messages.
-      # TODO move the LOGLEVELs into a proper grok pattern.
-      match => { "message" => "(?m)^(%{TIMESTAMP_ISO8601:logdate}|%{SYSLOGTIMESTAMP:logdate})%{SPACE}(%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?:|%{NUMBER:pid})?%{SPACE}?(?<loglevel>AUDIT|CRITICAL|DEBUG|INFO|TRACE|WARNING|ERROR) \[?\b%{NOTSPACE:module}\b\]?%{SPACE}?%{GREEDYDATA:logmessage}?" }
-      add_field => { "received_at" => "%{@timestamp}" }
-    }
-  } else if "apachecombined" in [tags] {
-    grok {
-      match => { "message" => "%{COMBINEDAPACHELOG}" }
-      add_field => { "received_at" => "%{@timestamp}" }
-      add_field => { "logdate" => "%{timestamp}" }
-      add_field => { "logmessage" => "%{verb} %{request} %{response}" }
-    }
-  } else if "apacheerror" in [tags] {
-    grok {
-      match => { "message" => "\[(?<logdate>%{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}%{SPACE}%{TZ}?)\]%{SPACE}\[%{LOGLEVEL:loglevel}\]%{SPACE}%{GREEDYDATA:logmessage}" }
-      add_field => { "received_at" => "%{@timestamp}" }
-    }
-  } else if "libvirt" in [tags] {
-    grok {
-      # libvirtd grok filter adapted from
-      # https://github.com/OpenStratus/openstack-logstash/blob/master/agent.conf
-      match => { "message" => "%{TIMESTAMP_ISO8601:logdate}:%{SPACE}%{NUMBER:pid}:%{SPACE}%{LOGLEVEL:loglevel}%{SPACE}:%{SPACE}%{GREEDYDATA:logmessage}" }
-      add_field => { "received_at" => "%{@timestamp}" }
-    }
-  } else if "syslog" in [tags] {
-    grok {
-      # Syslog grok filter adapted from
-      # http://cookbook.logstash.net/recipes/syslog-pri/syslog.conf
-      match => { "message" => "%{SYSLOGTIMESTAMP:logdate}%{SPACE}%{SYSLOGHOST:syslog_host}?%{SPACE}%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?:? %{GREEDYDATA:logmessage}" }
-      add_field => { "received_at" => "%{@timestamp}" }
-    }
-  }
-
-  # Filters below here should be consistent for all Jenkins log formats.
-  # Remove DEBUG logs to reduce the amount of data that needs to be processed.
-  if [loglevel] == "DEBUG" {
-    drop {}
-  }
-
-  if ! ("_grokparsefailure" in [tags]) {
-    date {
-      match => [ "logdate",
-                 "yyyy-MM-dd HH:mm:ss.SSS",
-                 "yyyy-MM-dd HH:mm:ss.SSSSSS",
-                 "yyyy-MM-dd HH:mm:ss,SSS",
-                 "yyyy-MM-dd HH:mm:ss",
-                 "MMM  d HH:mm:ss",
-                 "MMM dd HH:mm:ss",
-                 "dd/MMM/yyyy:HH:mm:ss Z",
-                 "yyyy-MM-dd HH:mm:ss.SSSZ",
-                 "E MMM dd HH:mm:ss yyyy Z",
-                 "E MMM dd HH:mm:ss yyyy",
-                 "ISO8601"
-               ]
-      timezone => "UTC"
-    }
-    mutate {
-      replace => { "message" => "%{logmessage}" }
-    }
-    mutate {
-      remove_field => [ "logdate", "logmessage" ]
-    }
-  }
-}
-
-output {
-  elasticsearch {
-    hosts => <%= @elasticsearch_nodes.map { |node| node + ":9200" }.inspect %>
-    manage_template => false
-    flush_size => 1024
-  }
-}
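For anyone iterating on a pattern like the hypothetical "mysvc" stanza
sketched after the documentation hunk above, a filter can be exercised
locally with a throwaway config (call it test.conf) that reads from stdin
instead of the tcp/json_lines input shown in the removed indexer template;
this is only a development aid, not part of the production pipeline::

    input {
      stdin {
        # Give every line pasted on stdin the hypothetical "mysvc" tag so
        # that the matching filter branch runs.
        tags => ["mysvc"]
      }
    }

    filter {
      if "mysvc" in [tags] {
        grok {
          # Same illustrative pattern as in the sketch above.
          match => { "message" => "^%{TIMESTAMP_ISO8601:logdate} %{LOGLEVEL:loglevel} %{GREEDYDATA:logmessage}" }
        }
      }
    }

    output {
      # Print each event with its extracted fields for inspection.
      stdout { codec => rubydebug }
    }

Running it (e.g. ``logstash -f test.conf``) and pasting sample log lines
shows which fields the pattern extracts before a patch is submitted.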