Add: docker-compose.yml, config yml(s)

This commit is contained in:
Jay 2023-01-06 18:19:38 +09:00
parent 5bfcd73011
commit 8ca41adfa0
6 changed files with 222 additions and 0 deletions

85
docker-compose.yml Normal file
View File

@ -0,0 +1,85 @@
# Single-host ELK + Filebeat stack: Elasticsearch <- Logstash <- Filebeat,
# with Kibana for visualization. All images pinned to the 7.3.1 OSS builds
# and attached to one shared bridge network (`elk_network`).
#
# NOTE(review): `pull_policy` is a Compose-Spec (v3.9+) option; classic
# docker-compose with `version: '2'` may reject it as an unsupported config
# option — confirm against the Compose binary actually in use.
version: '2'
services:
  es:
    image: 'docker.elastic.co/elasticsearch/elasticsearch-oss:7.3.1'
    pull_policy: if_not_present
    container_name: elasticsearch_container
    ports:
      - "9200:9200"   # HTTP API
      - "9300:9300"   # transport
    volumes:
      - ./elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
      - ./elasticsearch/data:/usr/share/elasticsearch/data   # persist indices across restarts
      - /etc/localtime:/etc/localtime:ro                     # share host timezone
    environment:
      ES_JAVA_OPTS: "-Xmx1024m -Xms1024m"   # fixed 1 GiB JVM heap
    networks:
      - elk_network
    # links:
    #   - kb
    #   - ls
    #   - fb
  kb:
    image: 'docker.elastic.co/kibana/kibana-oss:7.3.1'
    pull_policy: if_not_present
    container_name: kibana_container
    ports:
      - "5601:5601"   # Kibana web UI
    volumes:
      - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml:ro
      - /etc/localtime:/etc/localtime:ro
    depends_on:
      - fb
    networks:
      - elk_network
    # links:
    #   - es
    #   - ls
    #   - fb
  ls:
    image: 'docker.elastic.co/logstash/logstash-oss:7.3.1'
    pull_policy: if_not_present
    container_name: logstash_container
    ports:
      - "5000:5000"   # beats input (see logstash pipeline config)
      - "9600:9600"   # monitoring API
      - "5044:5044"   # conventional beats port (exposed but unused by the pipeline)
    volumes:
      - ./logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml:ro
      - ./logstash/pipeline:/usr/share/logstash/pipeline:ro
      - /etc/localtime:/etc/localtime:ro
    environment:
      LS_JAVA_OPTS: "-Xmx1024m -Xms1024m"   # fixed 1 GiB JVM heap
    depends_on:
      - es
    networks:
      - elk_network
    # links:
    #   - kb
    #   - es
    #   - fb
  fb:
    image: 'docker.elastic.co/beats/filebeat-oss:7.3.1'
    pull_policy: if_not_present
    container_name: filebeat_container
    depends_on:
      - ls
    networks:
      - elk_network
    # links:
    #   - kb
    #   - ls
    #   - es
networks:
  elk_network:
    driver: bridge

View File

@ -0,0 +1,9 @@
---
## Default Elasticsearch configuration from the Elasticsearch base image.
## https://github.com/elastic/elasticsearch/blob/main/distribution/docker/src/docker/config/elasticsearch.yml
#
cluster.name: docker-cluster
# Bind on all interfaces so the container port mapping works; quoted to keep
# the value an unambiguous string for any YAML parser.
network.host: "0.0.0.0"
# Single node — skip cluster discovery/bootstrap checks.
discovery.type: single-node

View File

@ -0,0 +1,59 @@
#=========================== Filebeat inputs =============================
filebeat.inputs:
  # Each - is an input. Most options can be set at the input level, so
  # you can use different inputs for various configurations.
  # Below are the input specific configurations.
  - type: log
    # Change to true to enable this input configuration.
    enabled: true
    # Paths that should be crawled and fetched. Glob based paths.
    # Location of the Apache logs; the * wildcard may be used.
    # Tagged because Logstash applies its filter rules based on tags.
    paths:
      - /var/log/httpd/access_*
    tags: ["apache"]

#============================== Kibana =====================================
# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API.
# This requires a Kibana endpoint configuration.
setup.kibana:
  # Kibana Host
  # Scheme and port can be left out and will be set to the default (http and 5601)
  # In case you specify and additional path, the scheme is required: http://localhost:5601/path
  # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601
  # Kibana server address (the `kb` compose service).
  host: "kb:5601"

# Everything below is commented out because logs are not sent directly to
# Elasticsearch; they go through Logstash instead.
#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  #hosts: ["0.0.0.0:9200"]
  #Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "changeme"

#----------------------------- Logstash output --------------------------------
output.logstash:
  # The Logstash hosts (the `ls` compose service; its beats input listens on 5000).
  hosts: ["ls:5000"]

#================================ Processors =====================================
# Controls whether host and cloud metadata are attached to shipped events;
# comment or uncomment as needed.
# Configure processors to enhance or manipulate events generated by the beat.
#processors:
#  - add_host_metadata: ~
#  - add_cloud_metadata: ~

9
kibana/config/kibana.yml Normal file
View File

@ -0,0 +1,9 @@
---
## Default Kibana configuration from Kibana base image.
## https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/templates/kibana_yml.template.ts
#
server.name: kibana
# Bind on all interfaces so the container port mapping works; quoted so the
# value is an unambiguous string for any YAML parser.
server.host: "0.0.0.0"
# Elasticsearch endpoint (the `es` compose service). The URL is quoted — a
# plain scalar containing colons inside a flow sequence is fragile across parsers.
elasticsearch.hosts: [ "http://es:9200" ]

View File

@ -0,0 +1,9 @@
---
## Default Logstash configuration from Logstash base image.
## https://github.com/elastic/logstash/blob/main/docker/data/logstash/config/logstash-full.yml
#
# Bind the monitoring/HTTP API on all interfaces; quoted to keep the value an
# unambiguous string for any YAML parser.
http.host: "0.0.0.0"
node.name: logstash

View File

@ -0,0 +1,51 @@
# Previous stock example pipeline, kept for reference:
# input {
#   stdin {}
#   beats {
#     port => 5044
#   }
#   tcp {
#     port => 5000
#   }
# }
# ## Add your filters / logstash plugins configuration here
# output {
#   elasticsearch {
#     hosts => ["es_ctnr:9200"]
#   }
#   stdout { codec => rubydebug }
# }

# Declare the Beats input and the address/port to listen on
# (Filebeat's output.logstash points at this port).
input {
  beats {
    port => 5000
    host => "0.0.0.0"
  }
}

# Grok filter to parse incoming log lines.
# When "apache" is present in the event tags, parse the message field as a
# Common Apache Log and enrich the event with GeoIP data derived from the
# client IP.
filter {
  if "apache" in [tags]{
    grok {
      match => { "message" => "%{COMMONAPACHELOG}" }
    }
    geoip {
      source => "clientip"
      target => "geoip"
    }
  }
}

# Where Logstash ships the processed events: everything goes to Elasticsearch
# under a daily index named elk-%{+YYYY.MM.dd}.
output {
  elasticsearch {
    hosts => "http://es:9200"
    index => "elk-%{+YYYY.MM.dd}"
  }
}