无法在Docker中的日志文件中写入日志信息

发布于 2025-01-21 18:18:24 字数 4283 浏览 3 评论 0原文

我在Docker工作时生成日志信息有问题。在Localhost中的日志文件中编写日志没有问题。

在Docker期间实现CRUD过程时,我看不到任何新日志。

如何将日志文件( springboot-elk.log )连接到docker?

我该如何修复?

这是显示屏幕截图的文件:

这是我的项目链接:我的项目

这是 docker-compose.yml 显示下面

version: '3.8'
services:
  logstash:
    image: docker.elastic.co/logstash/logstash:7.15.2
    user: root
    command: -f /etc/logstash/conf.d/
    volumes:
      - ./elk/logstash/:/etc/logstash/conf.d/
      - ./Springboot-Elk.log:/tmp/logs/Springboot-Elk.log
    ports:
      - "5000:5000"
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    depends_on:
      - elasticsearch

  filebeat:
    build:
      context: ./filebeat
      dockerfile: Dockerfile
    links:
      - "logstash:logstash"
    volumes:
      - /var/run/docker.sock:/host_docker/docker.sock
      - /var/lib/docker:/host_docker/var/lib/docker
    depends_on:
      - logstash
  kibana:
    image: docker.elastic.co/kibana/kibana:7.15.2
    user: root
    volumes:
      - ./elk/kibana/:/usr/share/kibana/config/
    ports:
      - "5601:5601"
    depends_on:
      - elasticsearch
    entrypoint: ["./bin/kibana", "--allow-root"]

  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.15.2
    user: root
    volumes:
      - ./elk/elasticsearch/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
    ports:
      - "9200:9200"
      - "9300:9300"
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
  app:
    image: 'springbootelk:latest'
    build:
      context: .
      dockerfile: Dockerfile
    container_name: SpringBootElk
    depends_on:
      - db
      - logstash
    ports:
      - '8077:8077'
    environment:
      - SPRING_DATASOURCE_URL=jdbc:mysql://db:3306/springbootexample?useSSL=false&allowPublicKeyRetrieval=true&serverTimezone=Turkey
      - SPRING_DATASOURCE_USERNAME=springexample
      - SPRING_DATASOURCE_PASSWORD=111111
      - SPRING_JPA_HIBERNATE_DDL_AUTO=update
  db:
    container_name: db
    image: 'mysql:latest'
    ports:
      - "3366:3306"
    restart: always
    environment:
      MYSQL_DATABASE: ${MYSQL_DATABASE}
      MYSQL_USER: ${MYSQL_USER}
      MYSQL_PASSWORD: ${MYSQL_PASSWORD}
      MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD}
    volumes:
      - db-data:/var/lib/mysql

# Volumes
volumes:
  db-data:

logstash.conf 如下所示

    input {
        beats {
                port => 5000
        }
        file {
                path => "/tmp/logs/Springboot-Elk.log"
                sincedb_path => "/dev/null"
                start_position => "beginning"
        }
}
output {
        stdout{
                codec => rubydebug
        }
        elasticsearch {
                hosts => "elasticsearch:9200"
                index => "dockerlogs"
        }
}

filebeat.yml 文件如下所示。

filebeat.inputs:
  - type: docker
    enabled: true
    containers:
      ids:
        - "*"
      path: "/host_docker/var/lib/docker/containers"


processors:
  - add_docker_metadata:
      host: "unix:///host_docker/docker.sock"

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

output.logstash:
  hosts: ["logstash:5000"]

log files:
logging.level: info
logging.to_files: false
logging.to_syslog: false
logging.metrics.enabled: false
logging.files:
  path: /var/log/filebeat
  name: filebeat
  keepfiles: 7
  permissions: 0644
ssl.verification_mode: none

这是 filebeat 的 Dockerfile

FROM docker.elastic.co/beats/filebeat:7.15.2

COPY filebeat.yml /usr/share/filebeat/filebeat.yml

USER root
RUN mkdir /usr/share/filebeat/dockerlogs
RUN chown -R root /usr/share/filebeat/
RUN chmod -R go-w /usr/share/filebeat/

因为我想在 logstash 中看到日志,我运行了命令 docker container logs -f。我在那里看不到在 PersonController 和 PersonService 中定义的任何日志。这是屏幕截图

I have a problem about generating log info when I work in Docker. There is no issue to write logs in log file in localhost.

I cannot see any new logs when I implement the CRUD process during docker.

How can I connect log file(Springboot-Elk.log) to Docker?

How can I fix it?

Here is the file showing screenshots : Link

Here is my project link : My Project

Here is the docker-compose.yml shown below

version: '3.8'
services:
  logstash:
    image: docker.elastic.co/logstash/logstash:7.15.2
    user: root
    command: -f /etc/logstash/conf.d/
    volumes:
      - ./elk/logstash/:/etc/logstash/conf.d/
      - ./Springboot-Elk.log:/tmp/logs/Springboot-Elk.log
    ports:
      - "5000:5000"
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    depends_on:
      - elasticsearch

  filebeat:
    build:
      context: ./filebeat
      dockerfile: Dockerfile
    links:
      - "logstash:logstash"
    volumes:
      - /var/run/docker.sock:/host_docker/docker.sock
      - /var/lib/docker:/host_docker/var/lib/docker
    depends_on:
      - logstash
  kibana:
    image: docker.elastic.co/kibana/kibana:7.15.2
    user: root
    volumes:
      - ./elk/kibana/:/usr/share/kibana/config/
    ports:
      - "5601:5601"
    depends_on:
      - elasticsearch
    entrypoint: ["./bin/kibana", "--allow-root"]

  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.15.2
    user: root
    volumes:
      - ./elk/elasticsearch/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
    ports:
      - "9200:9200"
      - "9300:9300"
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
  app:
    image: 'springbootelk:latest'
    build:
      context: .
      dockerfile: Dockerfile
    container_name: SpringBootElk
    depends_on:
      - db
      - logstash
    ports:
      - '8077:8077'
    environment:
      - SPRING_DATASOURCE_URL=jdbc:mysql://db:3306/springbootexample?useSSL=false&allowPublicKeyRetrieval=true&serverTimezone=Turkey
      - SPRING_DATASOURCE_USERNAME=springexample
      - SPRING_DATASOURCE_PASSWORD=111111
      - SPRING_JPA_HIBERNATE_DDL_AUTO=update
  db:
    container_name: db
    image: 'mysql:latest'
    ports:
      - "3366:3306"
    restart: always
    environment:
      MYSQL_DATABASE: ${MYSQL_DATABASE}
      MYSQL_USER: ${MYSQL_USER}
      MYSQL_PASSWORD: ${MYSQL_PASSWORD}
      MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD}
    volumes:
      - db-data:/var/lib/mysql

# Volumes
volumes:
  db-data:

Here is logstash.conf shown below

    input {
        beats {
                port => 5000
        }
        file {
                path => "/tmp/logs/Springboot-Elk.log"
                sincedb_path => "/dev/null"
                start_position => "beginning"
        }
}
output {
        stdout{
                codec => rubydebug
        }
        elasticsearch {
                hosts => "elasticsearch:9200"
                index => "dockerlogs"
        }
}

filebeat.yml file is shown below.

filebeat.inputs:
  - type: docker
    enabled: true
    containers:
      ids:
        - "*"
      path: "/host_docker/var/lib/docker/containers"


processors:
  - add_docker_metadata:
      host: "unix:///host_docker/docker.sock"

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

output.logstash:
  hosts: ["logstash:5000"]

log files:
logging.level: info
logging.to_files: false
logging.to_syslog: false
logging.metrics.enabled: false
logging.files:
  path: /var/log/filebeat
  name: filebeat
  keepfiles: 7
  permissions: 0644
ssl.verification_mode: none

Here is the Dockerfile of filebeat.yml

FROM docker.elastic.co/beats/filebeat:7.15.2

COPY filebeat.yml /usr/share/filebeat/filebeat.yml

USER root
RUN mkdir /usr/share/filebeat/dockerlogs
RUN chown -R root /usr/share/filebeat/
RUN chmod -R go-w /usr/share/filebeat/

As I want to see logs in logstash , I run this command docker container logs -f .
I cannot see any logs defined in PersonController and PersonService there.
Here is the screenshot
enter image description here

如果你对这篇内容有疑问,欢迎到本站社区发帖提问 参与讨论,获取更多帮助,或者扫码二维码加入 Web 技术交流群。

扫码二维码加入Web技术交流群

发布评论

需要 登录 才能够评论, 你可以免费 注册 一个本站的账号。

评论(4)

浅浅淡淡 2025-01-28 18:18:24

这是我的回答下面所示。

在我按下面所示修改了 logstash.conf 文件之后,我的问题就消失了。

input {
        tcp {
                port => 5000
        }
        beats {
                port => 5044
        }
        file {
                path => "/tmp/logs/Springboot-Elk.log"
                sincedb_path => "/dev/null"
                start_position => "beginning"
        }
}
output {
        stdout{
                codec => rubydebug
        }
        elasticsearch {
                hosts => "elasticsearch:9200"
                index => "dockerlogs"
        }
}

Here is my answer shown below.

After I revised the logstash.conf file as shown below, my issue disappeared.

input {
        tcp {
                port => 5000
        }
        beats {
                port => 5044
        }
        file {
                path => "/tmp/logs/Springboot-Elk.log"
                sincedb_path => "/dev/null"
                start_position => "beginning"
        }
}
output {
        stdout{
                codec => rubydebug
        }
        elasticsearch {
                hosts => "elasticsearch:9200"
                index => "dockerlogs"
        }
}
¢蛋碎的人ぎ生 2025-01-28 18:18:24

使用 Docker 时,最好将所有日志都写到控制台。这样在 Kubernetes 或其他编排器中运行时就能暴露日志。在 Spring 框架中,你可以通过改用 ConsoleAppender 来实现这一点。下面的示例展示了如何在 log4j.xml 中实现。将该文件放到你的 resources 文件夹中并添加 log4j 依赖(参考:https://www.baeldung.com/spring-boot-logging ):

<configuration>
<appender name="consoleAppender" class="ch.qos.logback.core.ConsoleAppender">
    <encoder class="net.logstash.logback.encoder.LogstashEncoder">
        <timeZone>UTC</timeZone>
    </encoder>
</appender>
<logger name="com.yourcompany.packagename" level="INFO">
    <appender-ref ref="consoleAppender" />
</logger>
<root level="ERROR">
    <appender-ref ref="consoleAppender" />
</root>
</configuration>

您仍然可以通过在上面的配置中添加另一个appender将日志配置为磁盘,但是您需要在docker-compose文件中添加一个安装点,以指向日志目录在您的申请上。

很高兴注意到Docker是无状态的,因此当您重新启动容器时,日志会丢失。

When working with Docker, it's good to write all logs to the console. This will allow it to expose logs when run in Kubernetes or other orchestrators. On the Spring framework, you can achieve this by changing to ConsoleAppender. The example below shows how to achieve this in log4j.xml. Place the file in your resources folder and add the log4j dependencies (Ref: https://www.baeldung.com/spring-boot-logging ):

<configuration>
<appender name="consoleAppender" class="ch.qos.logback.core.ConsoleAppender">
    <encoder class="net.logstash.logback.encoder.LogstashEncoder">
        <timeZone>UTC</timeZone>
    </encoder>
</appender>
<logger name="com.yourcompany.packagename" level="INFO">
    <appender-ref ref="consoleAppender" />
</logger>
<root level="ERROR">
    <appender-ref ref="consoleAppender" />
</root>
</configuration>

You can still configure log to disk by adding another appender in the configuration above, but you need to add a mount point to your docker-compose file to point to the logs directory on your application.

It is good to note that Docker is stateless and therefore logs are lost when you restart the container.

我一直都在从未离去 2025-01-28 18:18:24

对我来说,它是有效的。但是我的 ELK 堆栈不是运行在 Docker 中的。

这是我的LogStash配置(TCP和UDP的相同配置):

input {
    tcp {
        port => 5144
        codec => "json"
        type => "logback"
    }

    udp {
        port => 5144
        codec => "json"
        type => "logback"
    }
}

output {
    if [type]=="logback" {
         elasticsearch {
             hosts => ["localhost:9200"]
             index => "logback-%{+YYYY.MM.dd}"
        }
    }
}

您也必须在Kibana中设置记录索引。

这是我的logback-spring.xml:

<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <springProperty name="mvnVersion" source="info.app.version"/>
    <springProperty name="appName" source="info.app.name"/>
    <springProperty name="tcpLogHost" source="logstash.tcpHost"/>
    <springProperty name="udpLogHost" source="logstash.udpHost"/>
    <springProperty name="udpLogPort" source="logstash.udpPort"/>

    <appender name="stashUdp" class="net.logstash.logback.appender.LogstashUdpSocketAppender">
        <host>${udpLogHost}</host>
        <port>${udpLogPort}</port>
        <layout class="net.logstash.logback.layout.LogstashLayout">
            <customFields>{"appName":"${appName}","env":"${env}","mvnVersion":"${mvnVersion}"}</customFields>
        </layout>
    </appender>

    <appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{100} - %msg%n</pattern>
        </encoder>
    </appender>

    <root level="debug">
        <appender-ref ref="stashUdp" />
        <appender-ref ref="stdout" />
    </root>

</configuration>

我直接通过 UDP 将日志发送到 ELK 堆栈。

你需要

<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>${logstash.logback.version}</version>
</dependency>

For me it works. But my ELK Stack is running not in docker.

This is my logstash config (same config for TCP and UDP):

input {
    tcp {
        port => 5144
        codec => "json"
        type => "logback"
    }

    udp {
        port => 5144
        codec => "json"
        type => "logback"
    }
}

output {
    if [type]=="logback" {
         elasticsearch {
             hosts => ["localhost:9200"]
             index => "logback-%{+YYYY.MM.dd}"
        }
    }
}

And you have to setup the logback index in Kibana too.

This is my logback-spring.xml:

<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <springProperty name="mvnVersion" source="info.app.version"/>
    <springProperty name="appName" source="info.app.name"/>
    <springProperty name="tcpLogHost" source="logstash.tcpHost"/>
    <springProperty name="udpLogHost" source="logstash.udpHost"/>
    <springProperty name="udpLogPort" source="logstash.udpPort"/>

    <appender name="stashUdp" class="net.logstash.logback.appender.LogstashUdpSocketAppender">
        <host>${udpLogHost}</host>
        <port>${udpLogPort}</port>
        <layout class="net.logstash.logback.layout.LogstashLayout">
            <customFields>{"appName":"${appName}","env":"${env}","mvnVersion":"${mvnVersion}"}</customFields>
        </layout>
    </appender>

    <appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{100} - %msg%n</pattern>
        </encoder>
    </appender>

    <root level="debug">
        <appender-ref ref="stashUdp" />
        <appender-ref ref="stdout" />
    </root>

</configuration>

I'm logging directly (UDP) to ELK Stack.

And you need

<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>${logstash.logback.version}</version>
</dependency>
晨与橙与城 2025-01-28 18:18:24

根据你的代码仓库,你使用了 logback 的日志配置,但在 Maven 依赖中排除了 logback 并加入了 log4j 支持。要让 logback 正常工作,你只需要不排除默认的日志库,并添加较新版本的 logback 相关依赖(因为 Spring Boot 依赖管理指定的 logback 版本不包含你的配置中使用的 logstash appender —— net.logstash.logback.appender.LogstashTcpSocketAppender)。
FE:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.6.6</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.spingbootelk</groupId>
    <artifactId>main</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>main</name>
    <description>The usage of ELK in Spring Boot</description>
    <properties>
        <java.version>11</java.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <scope>runtime</scope>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-jpa</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>

        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>

        <dependency>
            <groupId>net.logstash.logback</groupId>
            <artifactId>logstash-logback-encoder</artifactId>
            <version>7.1.1</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>

</project>

但是最好在Internet上找到一个好的指南如何将Spring Boot应用程序记录到正确的LogStash。而且我不建议您将存储库中的配置用于生产目的。

According to your repository, you have a logback configuration for logging, but excluded logback and added log4j support in your Maven dependencies. To make logback work, you just need to not exclude the default logging library, and add a newer version of the logback dependency (because the version of logback specified by the Spring Boot dependency management doesn't contain the logstash appender specified in your configuration — net.logstash.logback.appender.LogstashTcpSocketAppender).
E.g.:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.6.6</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.spingbootelk</groupId>
    <artifactId>main</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>main</name>
    <description>The usage of ELK in Spring Boot</description>
    <properties>
        <java.version>11</java.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <scope>runtime</scope>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-jpa</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>

        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>

        <dependency>
            <groupId>net.logstash.logback</groupId>
            <artifactId>logstash-logback-encoder</artifactId>
            <version>7.1.1</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>

</project>

But it is better to find a good guide on the internet how to setup logging of your spring boot application to logstash properly. And I don't suggest you to use configuration in your repository for production purposes.

~没有更多了~
我们使用 Cookies 和其他技术来定制您的体验包括您的登录状态等。通过阅读我们的 隐私政策 了解更多相关信息。 单击 接受 或继续使用网站,即表示您同意使用 Cookies 和您的相关数据。
原文