Elasticsearch 无法访问:[http://kafka:9200/][Manticore::SocketException] 连接被拒绝

发布于 2025-02-10 22:30:33 字数 2956 浏览 1 评论 0原文

我正在尝试使用Logstash,Elastic Search,Kafka和Kibana运行Spring应用程序。

[main] Attempted to resurrect connection to dead ES instance, but got an error. {:url=>"http://kafka:9200/", :error_type=>LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError, :error=>"Elasticsearch Unreachable: [http://kafka:9200/][Manticore::SocketException] Connection refused (Connection refused)"}

上面的错误在 Logstash 容器中反复出现。

Docker-Compose.yml

version: '3.7'
services:

  zookeeper:
    image: wurstmeister/zookeeper
    container_name: zookeeper
    ports:
      - "2181:2181"
    #restart: always
    networks:
      - tweetapp-network
  
  kafka:
    image: wurstmeister/kafka
    container_name: kafka
    #restart: always
    ports: 
      - "9092:9092"
    depends_on:
      - zookeeper
    environment:
      KAFKA_ADVERTISED_HOST_NAME: kafka
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_CREATE_TOPICS: "tweetapp-logs:1:1, Tweets:1:1"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    networks:
      - tweetapp-network
  
  mongodb:
    image: mongo
    container_name: mongodb
   # restart: always
    ports:
      - "27017:27017"
#    volumes:
#      - mongodb-volume:/data/db
    networks:
      - tweetapp-network
  
  springboot:
    image: tweetapp
    #restart: always
    ports:
      - "8080:8080"
    depends_on:
      - mongodb
      - kafka
      - elasticsearch
      - logstash
      - kibana
    networks:
      - tweetapp-network
  
  logstash:
    image: logstash:7.7.0
    container_name: logstash 
    hostname: logstash
    ports:
      - "9600:9600"
    volumes:
      - .\logstash:/usr/share/logstash/pipeline/
    links:
      - elasticsearch:elasticsearch
    depends_on:
      - elasticsearch
    networks:
      - tweetapp-network
  
  elasticsearch:
    image: elasticsearch:7.7.0
    container_name: elasticsearch
    hostname: elasticsearch
    ports:
      - "9200:9200"
    environment:
      - discovery.type=single-node
    networks:
      - tweetapp-network
      
  kibana:
    image: kibana:7.7.0
    container_name: kibana
    hostname: kibana
    ports:
      - "5601:5601"
    links:
      - elasticsearch:elasticsearch
    depends_on:
      - elasticsearch
    networks:
      - tweetapp-network
     
# Networks to be created to facilitate communication between containers
networks:
  tweetapp-network:

我已确认 Elasticsearch 正常工作:访问 http://localhost:9200/ 可以得到 JSON 输出。

logstash.config

input {
    kafka {
            bootstrap_servers => "kafka:9092"
            topics => ["tweetapp-logs"]
    }
}
filter {
grok {
    match => [ "message", "%{GREEDYDATA}" ]
  } 
}
output {
   elasticsearch {
      hosts => ["kafka:9200"]
      index => "tweetapp"
      workers => 1
    }
}

我是Docker-Compose,Elastic Search和Kafka的新手。任何帮助将不胜感激。

I am trying to run Spring application with logstash, Elastic Search, Kafka and Kibana.

[main] Attempted to resurrect connection to dead ES instance, but got an error. {:url=>"http://kafka:9200/", :error_type=>LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError, :error=>"Elasticsearch Unreachable: [http://kafka:9200/][Manticore::SocketException] Connection refused (Connection refused)"}

The above error is being repeated under logstash container

docker-compose.yml

version: '3.7'
services:

  zookeeper:
    image: wurstmeister/zookeeper
    container_name: zookeeper
    ports:
      - "2181:2181"
    #restart: always
    networks:
      - tweetapp-network
  
  kafka:
    image: wurstmeister/kafka
    container_name: kafka
    #restart: always
    ports: 
      - "9092:9092"
    depends_on:
      - zookeeper
    environment:
      KAFKA_ADVERTISED_HOST_NAME: kafka
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_CREATE_TOPICS: "tweetapp-logs:1:1, Tweets:1:1"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    networks:
      - tweetapp-network
  
  mongodb:
    image: mongo
    container_name: mongodb
   # restart: always
    ports:
      - "27017:27017"
#    volumes:
#      - mongodb-volume:/data/db
    networks:
      - tweetapp-network
  
  springboot:
    image: tweetapp
    #restart: always
    ports:
      - "8080:8080"
    depends_on:
      - mongodb
      - kafka
      - elasticsearch
      - logstash
      - kibana
    networks:
      - tweetapp-network
  
  logstash:
    image: logstash:7.7.0
    container_name: logstash 
    hostname: logstash
    ports:
      - "9600:9600"
    volumes:
      - .\logstash:/usr/share/logstash/pipeline/
    links:
      - elasticsearch:elasticsearch
    depends_on:
      - elasticsearch
    networks:
      - tweetapp-network
  
  elasticsearch:
    image: elasticsearch:7.7.0
    container_name: elasticsearch
    hostname: elasticsearch
    ports:
      - "9200:9200"
    environment:
      - discovery.type=single-node
    networks:
      - tweetapp-network
      
  kibana:
    image: kibana:7.7.0
    container_name: kibana
    hostname: kibana
    ports:
      - "5601:5601"
    links:
      - elasticsearch:elasticsearch
    depends_on:
      - elasticsearch
    networks:
      - tweetapp-network
     
# Networks to be created to facilitate communication between containers
networks:
  tweetapp-network:

I made sure that Elastic Search is working http://localhost:9200/ , for this URL I get JSON output.

logstash.config

input {
    kafka {
            bootstrap_servers => "kafka:9092"
            topics => ["tweetapp-logs"]
    }
}
filter {
grok {
    match => [ "message", "%{GREEDYDATA}" ]
  } 
}
output {
   elasticsearch {
      hosts => ["kafka:9200"]
      index => "tweetapp"
      workers => 1
    }
}

I am new to docker-compose, Elastic search and Kafka. Any help will be appreciated.

如果你对这篇内容有疑问,欢迎到本站社区发帖提问 参与讨论,获取更多帮助,或者扫码二维码加入 Web 技术交流群。

扫码二维码加入Web技术交流群

发布评论

需要 登录 才能够评论, 你可以免费 注册 一个本站的账号。

评论(1)

说不完的你爱 2025-02-17 22:30:33

看起来你的配置里有个小疏忽。

在 logstash 输出配置的 hosts 中,主机名应该用 elasticsearch 替换 kafka。

尝试以下方法:

input { kafka { bootstrap_servers => "kafka:9092" topics => ["tweetapp-logs"] } } filter { grok { match => [ "message", "%{GREEDYDATA}" ] } } output { elasticsearch { hosts => ["elasticsearch:9200"] index => "tweetapp" workers => 1 } }

Seems like you are unlucky.

Replace kafka with elasticsearch in the host name of the elasticsearch output.

Try this:

input { kafka { bootstrap_servers => "kafka:9092" topics => ["tweetapp-logs"] } } filter { grok { match => [ "message", "%{GREEDYDATA}" ] } } output { elasticsearch { hosts => ["elasticsearch:9200"] index => "tweetapp" workers => 1 } }

~没有更多了~
我们使用 Cookies 和其他技术来定制您的体验包括您的登录状态等。通过阅读我们的 隐私政策 了解更多相关信息。 单击 接受 或继续使用网站,即表示您同意使用 Cookies 和您的相关数据。
原文