job "vector" { datacenters = ["alo"] type = "system" group "vector" { network { port "api" { #host_network = "tailscale" to = 8686 } port "metrics" { to = 9598 } } ephemeral_disk { size = 500 sticky = true } task "vector" { driver = "docker" config { # debian instead of alpine to get journalctl image = "timberio/vector:0.31.X-debian" ports = ["api", "metrics"] volumes = [ "/var/run/docker.sock:/var/run/docker.sock:ro", "/var/log/glusterfs:/var/log/glusterfs:ro", "/var/log/journal:/var/log/journal:ro", "/etc/machine-id:/etc/machine-id:ro", ] hostname = "${node.unique.name}" } env { VECTOR_CONFIG = "local/vector.toml" VECTOR_REQUIRE_HEALTHY = "true" } service { name = "vector-api" check { port = "api" type = "http" path = "/health" interval = "30s" timeout = "5s" } } service { name = "vector" port = "metrics" tags = ["metrics"] } resources { cpu = 500 # 500 MHz memory = 256 # 256MB } template { destination = "local/vector.toml" change_mode = "signal" change_signal = "SIGHUP" # overriding the delimiters to [[ ]] to avoid conflicts with Vector's native templating, which also uses {{ }} left_delimiter = "[[" right_delimiter = "]]" data=<.+) (?P\d+) "(?P.+)" "(?P.+)" (?P\d+)ms$') if length(result) > 0 { . |= parse_apache_log!(result[0].log, "combined") .remote_host = del(.host) .backend_url = result[0].backend_url .frontend_name = result[0].frontend_name .duration_ms = to_int!(result[0].duration_ms) .total_requests = to_int!(result[0].total_requests) } .host = del(.label."com.hashicorp.nomad.node_name") ''' [sources.glusterfs_raw] type = "file" include = [ "/var/log/glusterfs/**/*.log" ] [sources.journald_raw] type = "journald" [sinks.loki] type = "loki" inputs = [ "vector_raw", "docker_transformed", "glusterfs_raw", "journald_raw", ] endpoint = "http://[[ range service "loki" ]][[ .Address ]]:[[ .Port ]][[ end ]]" encoding.codec = "json" buffer.type = "memory" out_of_order_action = "accept" request.concurrency = "adaptive" remove_label_fields = true healthcheck.enabled = true [sinks.loki.labels] host = "{{host}}" [sources.internal_metrics] type = "internal_metrics" [sources.host_metrics] type = "host_metrics" [sinks.prometheus] type = "prometheus_exporter" inputs = [ "internal_metrics", "host_metrics", ] EOH } kill_timeout = "30s" } } }