Tools: How to Set Up Log Aggregation for Multiple Servers (ELK Stack Basics) - Expert Insights
How to Set Up Log Aggregation for Multiple Servers (ELK Stack Basics)
The Lightweight Alternative First: Loki + Grafana
ELK Stack Setup (More Powerful)
Install with Docker Compose
Logstash Pipeline Config
Install Filebeat on Each Server
What to Search for in Kibana

When you have one server, tail -f /var/log/app.log works fine. When you have three servers and something goes wrong, you need to search logs across all of them simultaneously. That's what log aggregation solves.

Before we go full ELK, consider Loki — it's significantly lighter and integrates with Grafana (which you may already have from monitoring).

ELK = Elasticsearch + Logstash + Kibana. Once data is flowing, there are a few useful queries to know. I built ARIA to solve exactly this.
Try it free at step2dev.com — no credit card needed.
# docker-compose.yml for the Loki stack.
# Loki stores the logs, Promtail ships them from /var/log,
# and Grafana is the query/visualization UI.
version: '3.8'

services:
  loki:
    image: grafana/loki:2.9.0
    ports:
      - "3100:3100"          # Loki push/query API
    volumes:
      - loki-data:/loki      # persist log chunks across restarts

  promtail:
    image: grafana/promtail:2.9.0
    volumes:
      - /var/log:/var/log:ro # read-only: promtail only tails, never writes
      - ./promtail-config.yml:/etc/promtail/config.yml
    command: -config.file=/etc/promtail/config.yml

  grafana:
    image: grafana/grafana:latest
    ports:
      - "3000:3000"          # Grafana UI

volumes:
  loki-data:
# promtail-config.yml
# Tails local log files and pushes them to Loki with static labels.
server:
  http_listen_port: 9080

positions:
  # Remembers how far each file has been read, so restarts don't re-ship logs.
  filename: /tmp/positions.yaml

clients:
  - url: http://loki:3100/loki/api/v1/push

scrape_configs:
  # System logs from /var/log
  - job_name: system
    static_configs:
      - targets:
          - localhost
        labels:
          job: varlogs
          host: server-1        # change per server so logs are distinguishable
          __path__: /var/log/*.log

  # Application logs
  - job_name: app
    static_configs:
      - targets:
          - localhost
        labels:
          job: app
          host: server-1
          __path__: /var/log/myapp/*.log
# docker-compose.yml for a single-node ELK stack.
# Elasticsearch stores/indexes, Logstash ingests from Beats on 5044,
# Kibana is the search UI on 5601.
version: '3.8'

services:
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:8.12.0
    environment:
      - discovery.type=single-node
      # Dev/demo setting — enable security before exposing this anywhere.
      - xpack.security.enabled=false
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"   # cap JVM heap for small hosts
    ports:
      - "9200:9200"
    volumes:
      - esdata:/usr/share/elasticsearch/data

  logstash:
    image: docker.elastic.co/logstash/logstash:8.12.0
    volumes:
      - ./logstash.conf:/usr/share/logstash/pipeline/logstash.conf
    ports:
      - "5044:5044"            # Beats input (Filebeat ships here)
    depends_on:
      - elasticsearch

  kibana:
    image: docker.elastic.co/kibana/kibana:8.12.0
    ports:
      - "5601:5601"
    environment:
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
    depends_on:
      - elasticsearch

volumes:
  esdata:
# logstash.conf
# Pipeline: receive from Filebeat -> parse -> index into Elasticsearch.
input {
  beats {
    port => 5044
  }
}

filter {
  # Node.js apps ship JSON log lines (tagged via Filebeat's fields.app).
  if [fields][app] == "nodejs" {
    json {
      source => "message"
    }
    date {
      match => ["timestamp", "ISO8601"]
    }
  }
  # Everything else: try to parse as an Apache/Nginx combined access log.
  grok {
    match => { "message" => "%{COMBINEDAPACHELOG}" }
  }
}

output {
  elasticsearch {
    hosts => ["elasticsearch:9200"]
    # Daily indices make retention/cleanup easy (delete old indices).
    index => "logs-%{+YYYY.MM.dd}"
  }
}
# Install on each server you want to collect logs from
curl -L -O https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-8.12.0-amd64.deb
sudo dpkg -i filebeat-8.12.0-amd64.deb
sudo nano /etc/filebeat/filebeat.yml
# /etc/filebeat/filebeat.yml
# Tails app logs and ships them to Logstash; the custom fields let the
# Logstash filter (fields.app) and Kibana searches (server:) identify them.
filebeat.inputs:
  - type: log
    enabled: true
    paths:
      - /var/log/myapp/*.log
    fields:
      app: nodejs
      server: production-1   # change per server

output.logstash:
  hosts: ["your-elk-server:5044"]
# Start Filebeat now and on every boot
sudo systemctl enable filebeat
sudo systemctl start filebeat
# All errors in last hour
level:error

# Errors from specific server
level:error AND server:production-1

# 5xx responses
response:>499

# Slow requests (>1000ms)
duration:>1000