# CONFIGURED BY ANSIBLE
# We classify Suricata stats by metric type (errors, memuse, flow, bytes, packets).
# The problem is that every log_to_metrics filter below sees every record in the
# suricata_stats stream, so without extra filtering we would generate duplicates.
# The trick is to use the regex option of the log_to_metrics filter against a
# parser-specific label_field: each parser emits its own label field
# (e.g. suricata_errors) instead of a shared one (e.g. suricata_stat), so each
# filter only picks up the records produced by its own parser.
# An illustrative parser sketch is included, commented out, at the end of this file.

[INPUT]
    name  tail
{% if stats is defined %}
    path  {{stats}}
{% else %}
    path  /var/log/suricata/stats.log
{% endif %}
    tag   suricata_stats
    alias suricata_stats

# errors first and before flow
[FILTER]
    name         parser
    match        suricata_stats
    key_name     log
    parser       suricata_errors
    preserve_key false
    reserve_data false

# memuse before flow
[FILTER]
    name         parser
    match        suricata_stats
    key_name     log
    parser       suricata_memuse
    preserve_key false
    reserve_data false

# flow before packets
[FILTER]
    name         parser
    match        suricata_stats
    key_name     log
    parser       suricata_flow
    preserve_key false
    reserve_data false

# bytes before packets
[FILTER]
    name         parser
    match        suricata_stats
    key_name     log
    parser       suricata_bytes
    preserve_key false
    reserve_data false

[FILTER]
    name         parser
    match        suricata_stats
    key_name     log
    parser       suricata_packets
    preserve_key false
    reserve_data false

# remove unparsed and leftover lines from the suricata_stats stream
# (disable this filter if the suricata_stats tag is still needed for some reason)
[FILTER]
    name    grep
    match   suricata_stats
    exclude log .*

# errors first and before flow
[FILTER]
    name               log_to_metrics
    match              suricata_stats
    tag                suricata_metrics.errors
    metric_mode        gauge
    metric_name        suricata_errors
    # multiple stats are included in this metric, distinguished by the label
    label_field        suricata_errors
    regex              suricata_errors .*
    value_field        value
    metric_description suricata errors
    #flush_interval_sec 1

# memuse before flow
[FILTER]
    name               log_to_metrics
    match              suricata_stats
    tag                suricata_metrics.memuse
    metric_mode        gauge
    metric_name        suricata_memuse
    # multiple stats are included in this metric, distinguished by the label
    label_field        suricata_memuse
    regex              suricata_memuse .*
    value_field        value
    metric_description suricata memory usage
    #flush_interval_sec 1

# flow before packets
[FILTER]
    name               log_to_metrics
    match              suricata_stats
    tag                suricata_metrics.flow
    metric_mode        gauge
    metric_name        suricata_flow
    # multiple stats are included in this metric, distinguished by the label
    label_field        suricata_flow
    regex              suricata_flow .*
    value_field        value
    metric_description suricata flow sessions
    #flush_interval_sec 1

# bytes before packets
[FILTER]
    name               log_to_metrics
    match              suricata_stats
    tag                suricata_metrics.bytes
    metric_mode        gauge
    metric_name        suricata_bytes
    # multiple stats are included in this metric, distinguished by the label
    label_field        suricata_bytes
    regex              suricata_bytes .*
    value_field        value
    metric_description suricata traffic bytes
    #flush_interval_sec 1

[FILTER]
    name               log_to_metrics
    match              suricata_stats
    tag                suricata_metrics.packets
    metric_mode        gauge
    metric_name        suricata_packets
    # multiple stats are included in this metric, distinguished by the label
    label_field        suricata_packets
    regex              suricata_packets .*
    value_field        value
    metric_description suricata packets
    #flush_interval_sec 1

#[OUTPUT]
#    name  file
#    #match suricata_stats
#    match suricata_metrics.*
#    path  /var/log
#    file  fluent-bit.log
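
# Alternative to the file output above for local debugging: a sketch only, not
# part of the deployed pipeline. Exposing the generated gauges on a local scrape
# endpoint lets you inspect metric names and labels before shipping them via
# prometheus_remote_write below. The host/port values here are illustrative.
#[OUTPUT]
#    name  prometheus_exporter
#    match suricata_metrics.*
#    host  0.0.0.0
#    port  2021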

[OUTPUT]
    name                 prometheus_remote_write
    match                suricata_metrics.*
    host                 {{tsdb_host}}
    port                 {{tsdb_port}}
{% if metrics_ssl == 'yes' %}
    tls                  on
    tls.verify           {{ssl_verify}}
    http_user            {{tsdb_http_user}}
    http_passwd          {{tsdb_http_passwd}}
{% endif %}
{% if metrics_type == 'cluster' %}
    uri                  /insert/0/prometheus/api/v1/write
{% elif metrics_type == 'standalone' %}
    uri                  /api/v1/write
{% endif %}
    log_response_payload true
    add_label            sensor flb@{{inventory_hostname_short}}
    add_label            instance {{inventory_hostname_short}}
    add_label            host {{inventory_hostname_short}}
    add_label            site {{site}}
    add_label            job flb-metrics-suricata
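
# --------------------------------------------------------------------------
# Illustrative sketch only (not loaded from this file): the suricata_* parsers
# referenced above live in the parsers file configured for this Fluent Bit
# instance. A parser along these lines would produce the per-type label field
# (here: suricata_errors) and the numeric value field consumed by log_to_metrics
# from Suricata stats.log lines of the form "counter | TM Name | Value". The
# exact regex and counter selection below are assumptions, not the deployed ones.
#
#[PARSER]
#    name   suricata_errors
#    format regex
#    regex  ^(?<suricata_errors>[\w\.]*error[\w\.]*)\s*\|\s*(?<tm_name>[\w\-\.]+)\s*\|\s*(?<value>\d+)\s*$
#    types  value:integer
#
# The types entry converts value to an integer so that value_field in the
# log_to_metrics filters receives a number rather than a string.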