docker-compose java.net.UnknownHostException

When the Hadoop cluster is brought up with docker-compose, the historyserver container fails to start with the following error:

historyserver      | Caused by: java.lang.RuntimeException: Could not resolve Kerberos principal name: java.net.UnknownHostException: historyserver: historyserver: Name or service not known
historyserver      |    at org.apache.hadoop.security.AuthenticationFilterInitializer.getFilterConfigMap(AuthenticationFilterInitializer.java:90)
historyserver      |    at org.apache.hadoop.http.HttpServer2.getFilterProperties(HttpServer2.java:454)
historyserver      |    at org.apache.hadoop.http.HttpServer2.constructSecretProvider(HttpServer2.java:445)
historyserver      |    at org.apache.hadoop.http.HttpServer2.<init>(HttpServer2.java:339)
historyserver      |    ... 8 more
historyserver      | Caused by: java.net.UnknownHostException: historyserver: historyserver: Name or service not known
historyserver      |    at java.net.InetAddress.getLocalHost(InetAddress.java:1496)
historyserver      |    at org.apache.hadoop.security.SecurityUtil.getLocalHostName(SecurityUtil.java:190)
historyserver      |    at 
...
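The trace bottoms out in InetAddress.getLocalHost(): the JVM inside the historyserver container cannot resolve the container's own hostname. Below is a minimal sketch of that same lookup, useful for checking resolution from inside the container (the bde2020 images ship Java 8 per their tags; the class name here is made up for illustration):

import java.net.InetAddress;
import java.net.UnknownHostException;

// Reproduces the lookup Hadoop performs via SecurityUtil.getLocalHostName():
// resolve the local hostname of the machine (here, the container) we run on.
public class LocalHostCheck {
    public static void main(String[] args) {
        try {
            InetAddress addr = InetAddress.getLocalHost();
            System.out.println("Resolved " + addr.getHostName() + " -> " + addr.getHostAddress());
        } catch (UnknownHostException e) {
            // Same failure mode as in the historyserver log above.
            System.err.println("Cannot resolve local hostname: " + e.getMessage());
        }
    }
}

If this throws, the container's hostname has no matching entry in /etc/hosts or DNS.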

Fix: comment out network_mode: 'host' for every service. With host networking the containers share the host's network namespace and its name resolution, so the hostname set in the compose file (historyserver, namenode, ...) has no matching entry anywhere and InetAddress.getLocalHost() throws UnknownHostException. Without network_mode: 'host', the services run on the default compose bridge network, where Docker writes each container's hostname into its /etc/hosts and the embedded DNS resolves the service names, so the lookup succeeds.

[root@localhost module]# cat docker-compose-hadoop-cluster.yaml
version: "2.2"
services:
  namenode:
    image: bde2020/hadoop-namenode:1.1.0-hadoop2.7.1-java8
    hostname: namenode
    container_name: namenode
    ports:
      - 9000:9000
      - 50070:50070
    restart: always
    #network_mode: 'host'
    environment:
      - CLUSTER_NAME=test
      - HDFS_CONF_dfs_permissions=false
    env_file:
      - ./hadoop.env

  resourcemanager:
    image: bde2020/hadoop-resourcemanager:1.1.0-hadoop2.7.1-java8
    hostname: resourcemanager
    container_name: resourcemanager
    ports:
      - 8030:8030
      - 8031:8031
      - 8032:8032
      - 8033:8033
      - 8088:8088
    restart: always
    #network_mode: 'host'
    depends_on:
      - namenode
      - datanode1
      - datanode2
      - datanode3
    env_file:
      - ./hadoop.env

  historyserver:
    image: bde2020/hadoop-historyserver:1.1.0-hadoop2.7.1-java8
    hostname: historyserver
    container_name: historyserver
    ports:
      - 8188:8188
    restart: always
    #network_mode: 'host'
    depends_on:
      - namenode
      - datanode1
      - datanode2
      - datanode3
    #volumes:
    #  - ./hadoop/historyserver:/hadoop/yarn/timeline
    env_file:
      - ./hadoop.env

  nodemanager1:
    image: bde2020/hadoop-nodemanager:1.1.0-hadoop2.7.1-java8
    hostname: nodemanager1
    container_name: nodemanager1
    ports:
      - 8040:8040
      - 8041:8041
      - 8042:8042
    restart: always
    #network_mode: 'host'
    depends_on:
      - namenode
      - datanode1
      - datanode2
      - datanode3
    env_file:
      - ./hadoop.env


  datanode1:
    image: bde2020/hadoop-datanode:1.1.0-hadoop2.7.1-java8
    hostname: datanode1
    container_name: datanode1
    restart: always
    #network_mode: 'host'
    environment:
      - HDFS_CONF_dfs_datanode_address=0.0.0.0:50010
      - HDFS_CONF_dfs_datanode_ipc_address=0.0.0.0:50020
      - HDFS_CONF_dfs_datanode_http_address=0.0.0.0:50075
    ports:
      - 50010:50010
      - 50020:50020
      - 50075:50075
    depends_on:
      - namenode
    #volumes:
    #  - ./hadoop/datanode1:/hadoop/dfs/data
    env_file:
      - ./hadoop.env

  datanode2:
    image: bde2020/hadoop-datanode:1.1.0-hadoop2.7.1-java8
    hostname: datanode2
    container_name: datanode2
    restart: always
    #network_mode: 'host'
    environment:
      - HDFS_CONF_dfs_datanode_address=0.0.0.0:50012
      - HDFS_CONF_dfs_datanode_ipc_address=0.0.0.0:50022
      - HDFS_CONF_dfs_datanode_http_address=0.0.0.0:50072
    ports:
      - 50012:50012
      - 50022:50022
      - 50072:50072
    depends_on:
      - namenode
    #volumes:
    #  - ./hadoop/datanode2:/hadoop/dfs/data
    env_file:
      - ./hadoop.env

  datanode3:
    image: bde2020/hadoop-datanode:1.1.0-hadoop2.7.1-java8
    hostname: datanode3
    container_name: datanode3
    restart: always
    #network_mode: 'host'
    environment:
      - HDFS_CONF_dfs_datanode_address=0.0.0.0:50013
      - HDFS_CONF_dfs_datanode_ipc_address=0.0.0.0:50023
      - HDFS_CONF_dfs_datanode_http_address=0.0.0.0:50073
    ports:
      - 50013:50013
      - 50023:50023
      - 50073:50073
    depends_on:
      - namenode
    #volumes:
    #  - ./hadoop/datanode3:/hadoop/dfs/data
    env_file:
      - ./hadoop.env
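With the services on the default compose network, every service name above should be resolvable from any container in the cluster. A quick sketch to verify this from inside one of the containers (hostnames taken from the compose file above; the class name is invented for illustration):

import java.net.InetAddress;
import java.net.UnknownHostException;

// Resolve the peer services' hostnames through the compose network's embedded DNS.
public class ClusterDnsCheck {
    public static void main(String[] args) {
        String[] hosts = {
            "namenode", "resourcemanager", "historyserver",
            "datanode1", "datanode2", "datanode3"
        };
        for (String host : hosts) {
            try {
                InetAddress addr = InetAddress.getByName(host);
                System.out.println(host + " -> " + addr.getHostAddress());
            } catch (UnknownHostException e) {
                System.err.println(host + " could not be resolved: " + e.getMessage());
            }
        }
    }
}

If every name resolves, the UnknownHostException from the historyserver should no longer appear on startup.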


Original article: https://outofmemory.cn/langs/874170.html
