ELK Stack 구축 & Elastic APM 구축 (feat. docker-compose)

이준섭·2023년 4월 3일

Elastic

목록 보기
1/3
post-thumbnail

1. Docker-compose 설치

※ Windows는 Docker Desktop을 설치하면 docker-compose가 자동으로 함께 설치됨

sudo curl -L https://github.com/docker/compose/releases/download/v2.1.0/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose

sudo chmod +x /usr/local/bin/docker-compose

docker-compose --version

2. Git 에서 ELK Stack clone 하기

mkdir elk

cd elk

git clone https://github.com/teichae/docker-elk

3. elasticsearch.yml 파일 수정

path : /elasticsearch/config/elasticsearch.yml

---
## Default Elasticsearch configuration from Elasticsearch base image.
## https://github.com/elastic/elasticsearch/blob/master/distribution/docker/src/docker/config/elasticsearch.yml
#
cluster.name: "docker-cluster"
network.host: 0.0.0.0

## Use single node discovery in order to disable production mode and avoid bootstrap checks
## see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
#
discovery.type: single-node

## X-Pack settings
## see https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-xpack.html
#
#xpack.license.self_generated.type: trial
# The setting below turns on X-Pack security so user accounts/authentication can be configured.
xpack.security.enabled: true
#xpack.monitoring.collection.enabled: true

4. Kibana.yml 파일 수정

path: /kibana/config/kibana.yml

---
## Default Kibana configuration from Kibana base image.
## https://github.com/elastic/kibana/blob/master/src/dev/build/tasks/os_packages/docker_generator/templates/kibana_yml.template.js
#
server.name: kibana
server.host: "0"
elasticsearch.hosts: [ "http://elasticsearch:9200" ]
#xpack.monitoring.ui.container.elasticsearch.enabled: true

## X-Pack security credentials
# Elastic account credentials supplied via the .env file
elasticsearch.username: ${ELASTIC_USERNAME}
elasticsearch.password: ${ELASTIC_PASSWORD}

5. logstash.conf 파일 수정

path : /logstash/pipeline/logstash.conf

# TCP input on port 5000 receiving newline-delimited JSON events
# (matches the logstash-logback-encoder TCP appender configured in the Spring Boot app).
input {
        tcp {
                port => 5000
                codec => json_lines
        }
}

output {
        elasticsearch {
                hosts => "elasticsearch:9200"
                index => "<사용할 인덱스 >"
                ## Elastic account credentials supplied via the .env file
                user => "${ELASTIC_USERNAME}"
                password => "${ELASTIC_PASSWORD}" 
                ## Index Lifecycle Management settings
                ilm_enabled => true
                ilm_rollover_alias => "<사용할 ILM Alias >"
                ilm_pattern => "000001"
                ilm_policy => "<사용할 ILM Policy >"
       }
}

※ 실행 후 ILM이 생성되어 있지 않으면 Logstash가 뜨지 않음. 따라서 docker-compose 실행 후 Kibana에 접속하여 ILM을 먼저 만들어 줘야 함.

수정 후
docker restart logstash

6. docker-compose.yml 변경

version: '2'

services:
  # Single-node Elasticsearch, built from ./elasticsearch with the version pinned by ELK_VERSION (.env).
  elasticsearch:
    container_name: elasticsearch
    build:
      context: elasticsearch/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      # Persist index data on the host so it survives container recreation.
      - ./data:/var/lib/elasticsearch/nodes/0
      - ./elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
      - /etc/localtime:/etc/localtime:ro
    ports:
      - "9200:9200"   # HTTP REST API
      - "9300:9300"   # transport (node-to-node)
    environment:
      TZ: "Asia/Seoul"
      ES_JAVA_OPTS: "-Xmx1G -Xms1G"
      ELASTIC_PASSWORD: $ELASTIC_PASSWORD
    networks:
      - elk

  # Logstash pipeline: TCP/JSON input on 5000, output to Elasticsearch (see logstash.conf).
  logstash:
    container_name: logstash
    build:
      context: logstash/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - ./logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml:ro
      - ./logstash/pipeline:/usr/share/logstash/pipeline:ro
      - /etc/localtime:/etc/localtime:ro
    ports:
      - "5000:5000"   # TCP input for application logs
      - "9600:9600"   # Logstash monitoring API
    environment:
      TZ: "Asia/Seoul"
      LS_JAVA_OPTS: "-Xmx1G -Xms1G"
      ELASTIC_USERNAME: $ELASTIC_USERNAME
      ELASTIC_PASSWORD: $ELASTIC_PASSWORD
    networks:
      - elk
    depends_on:
      - elasticsearch

  # Kibana UI, reachable on host port 5601; authenticates against Elasticsearch with .env credentials.
  kibana:
    container_name: kibana
    build:
      context: kibana/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml:ro
      - /etc/localtime:/etc/localtime:ro
    ports:
      - "5601:5601"
    environment:
      TZ: "Asia/Seoul"
      ELASTIC_USERNAME: $ELASTIC_USERNAME
      ELASTIC_PASSWORD: $ELASTIC_PASSWORD
    networks:
      - elk
    depends_on:
      - elasticsearch

  # Elastic APM Server (official image, same ELK_VERSION); APM agents send data to port 8200.
  apm-server:
    container_name: apm-server
    image: docker.elastic.co/apm/apm-server:$ELK_VERSION
    logging:
      driver: "json-file"
      options:
        max-file: "5"
        max-size: "1024m"
    ports:
      - 8200:8200
    depends_on:
      - elasticsearch
      - kibana
    networks:
      - elk
    # All settings are passed as -E overrides instead of a mounted apm-server.yml.
    command: >
      apm-server -e
        -E apm-server.rum.enabled=true
        -E apm-server.rum.allow_origins=["*"]
        -E apm-server.rum.allow_headers=[]
        -E setup.kibana.host=kibana:5601
        -E apm-server.kibana.username=$ELASTIC_USERNAME
        -E apm-server.kibana.password=$ELASTIC_PASSWORD
        -E setup.template.settings.index.number_of_replicas=0
        -E apm-server.kibana.enabled=true
        -E apm-server.kibana.host=kibana:5601
        -E output.elasticsearch.hosts=["elasticsearch:9200"]
        -E output.elasticsearch.username=$ELASTIC_USERNAME
        -E output.elasticsearch.password=$ELASTIC_PASSWORD
networks:
  elk:
    driver: bridge

7. .env 파일 변경

path : /.env

# Stack version and Elastic credentials consumed by docker-compose.yml and the config files above.
ELK_VERSION=7.16.1
ELASTIC_USERNAME=<Elastic ID>
ELASTIC_PASSWORD=<Elastic PW>

8. docker-compose 실행

docker-compose up -d

9. Spring Boot Logback 설정

현재는 Logback 파일을 Java Config로 설정하였음

# logstash dependency 추가

implementation 'net.logstash.logback:logstash-logback-encoder:6.6'

LogbackConfig 파일 설정

@Configuration
public class LogbackConfig {

    /** Logstash TCP destination ("host:port") matching the tcp input in logstash.conf. */
    @Value("${elastic.logstash.tcp.url}")
    private String logstashUrl;
    /** Application name attached as a field to every JSON log event. */
    @Value("${spring.application.name}")
    private String springAppName;
    /** Deployment profile ("dev", "stg", "prod", ...) selecting appenders and log level. */
    @Value("${spring.application.profile}")
    private String profile;

    private static final String CONSOLE_APPENDER_NAME = "console";
    private static final String LOGSTASH_APPENDER_NAME = "logstash";

    // Logback context backing SLF4J; all appenders/encoders are registered against it.
    private final LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
    private ConsoleAppender<ILoggingEvent> consoleAppender;
    private LogstashTcpSocketAppender logstashTcpSocketAppender;

    /**
     * Log configuration entry point: builds the console and Logstash appenders and
     * wires them onto the root logger.
     * <p>
     * Fixed: the original declared this as a void {@code @Bean} method, which Spring
     * rejects at startup ("Invalid factory method ... needs to have a non-void return
     * type"). The configured {@link LoggerContext} is returned so the factory method
     * is valid; initialization still runs exactly once after the {@code @Value}
     * fields above are injected.
     *
     * @return the shared Logback {@link LoggerContext} after configuration
     */
    @Bean
    public LoggerContext logConfig() {
        consoleAppender = getLogConsoleAppender();
        logstashTcpSocketAppender = getLogstashTcpSocketAppender();
        createLoggers();
        return loggerContext;
    }

    /**
     * Attaches appenders and sets the level on the root logger per profile:
     * dev/stg -> INFO to console + Logstash; prod -> WARN to console + Logstash;
     * anything else -> INFO to console only.
     */
    private void createLoggers() {
        Logger logger = loggerContext.getLogger("root");
        // NOTE(review): additivity has no effect on the root logger (it has no
        // parent); kept to preserve the original behavior.
        logger.setAdditive(true);
        switch (profile) {
            case "dev", "stg" -> {
                logger.setLevel(INFO);
                logger.addAppender(consoleAppender);
                logger.addAppender(logstashTcpSocketAppender);
            }
            case "prod" -> {
                logger.setLevel(WARN);
                logger.addAppender(consoleAppender);
                logger.addAppender(logstashTcpSocketAppender);
            }
            default -> {
                logger.setLevel(INFO);
                logger.addAppender(consoleAppender);
            }
        }
    }

    /**
     * Builds and starts the TCP appender that ships JSON-encoded events to Logstash.
     *
     * @return a started {@link LogstashTcpSocketAppender}
     */
    private LogstashTcpSocketAppender getLogstashTcpSocketAppender() {
        LogstashTcpSocketAppender logstashAppender = new LogstashTcpSocketAppender();
        logstashAppender.setName(LOGSTASH_APPENDER_NAME);
        logstashAppender.setContext(loggerContext);
        logstashAppender.addDestination(logstashUrl);
        logstashAppender.setEncoder(createLogstashEncoder());
        logstashAppender.start();
        return logstashAppender;
    }

    /**
     * Builds the plain-text console appender.
     *
     * @return a started {@link ConsoleAppender}
     */
    private ConsoleAppender<ILoggingEvent> getLogConsoleAppender() {
        PatternLayoutEncoder consoleLogEncoder = createLogEncoder();
        return createLogConsoleAppender(consoleLogEncoder);
    }

    private ConsoleAppender<ILoggingEvent> createLogConsoleAppender(PatternLayoutEncoder consoleLogEncoder) {
        ConsoleAppender<ILoggingEvent> logConsoleAppender = new ConsoleAppender<>();
        logConsoleAppender.setName(CONSOLE_APPENDER_NAME);
        logConsoleAppender.setContext(loggerContext);
        logConsoleAppender.setEncoder(consoleLogEncoder);
        logConsoleAppender.start();
        return logConsoleAppender;
    }

    /**
     * Builds the composite JSON encoder defining the fields sent to Logstash:
     * timestamp, app name, profile, MDC, context, level, logger name, thread,
     * message, Logstash markers, and stack trace.
     *
     * @return a started {@link LoggingEventCompositeJsonEncoder}
     */
    private LoggingEventCompositeJsonEncoder createLogstashEncoder() {
        LoggingEventCompositeJsonEncoder logstashEncoder = new LoggingEventCompositeJsonEncoder();
        logstashEncoder.setContext(loggerContext);
        logstashEncoder.getProviders().addProvider(createTimestampProvider());
        logstashEncoder.getProviders().addProvider(withPattern(String.format("{ \"springAppName\": \"%s\" }", springAppName), new LoggingEventPatternJsonProvider()));
        logstashEncoder.getProviders().addProvider(withPattern(String.format("{ \"profile\": \"%s\" }", profile), new LoggingEventPatternJsonProvider()));
        logstashEncoder.getProviders().addProvider(new MdcJsonProvider());
        logstashEncoder.getProviders().addProvider(new ContextJsonProvider<>());
        logstashEncoder.getProviders().addProvider(new LogLevelJsonProvider());
        logstashEncoder.getProviders().addProvider(new LoggerNameJsonProvider());
        logstashEncoder.getProviders().addProvider(new ThreadNameJsonProvider());
        logstashEncoder.getProviders().addProvider(new MessageJsonProvider());
        logstashEncoder.getProviders().addProvider(new LogstashMarkersJsonProvider());
        logstashEncoder.getProviders().addProvider(new StackTraceJsonProvider());
        logstashEncoder.start();
        return logstashEncoder;
    }

    /** Applies the shared context and a JSON pattern to a pattern-based provider. */
    private AbstractPatternJsonProvider<ILoggingEvent> withPattern(
            String pattern,
            AbstractPatternJsonProvider<ILoggingEvent> provider
    ) {
        provider.setContext(loggerContext);
        provider.setPattern(pattern);
        return provider;
    }

    /**
     * Builds the "log_timestamp" field provider (Asia/Seoul time zone).
     *
     * @return the configured timestamp provider
     */
    private LoggingEventFormattedTimestampJsonProvider createTimestampProvider() {
        LoggingEventFormattedTimestampJsonProvider timestampJsonProvider = new LoggingEventFormattedTimestampJsonProvider();
        timestampJsonProvider.setFieldName("log_timestamp");
        // Fixed: original pattern had "yyy" (three y's), a typo for "yyyy".
        timestampJsonProvider.setPattern("yyyy-MM-dd HH:mm:ss:SSS");
        timestampJsonProvider.setTimeZone("Asia/Seoul");
        return timestampJsonProvider;
    }

    private PatternLayoutEncoder createLogEncoder() {
        PatternLayoutEncoder encoder = new PatternLayoutEncoder();
        encoder.setContext(loggerContext);
        // Fixed: PatternLayoutEncoder requires a pattern before start(); without one
        // Logback reports "Empty or null pattern." and the console appender emits nothing.
        encoder.setPattern("%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{36} - %msg%n");
        encoder.start();
        return encoder;
    }
}

0개의 댓글