Star

nebula v2.0.0-rc1版docker swarm文件配置问题

  • nebula 版本:v2.0.0-rc1
  • 部署方式(分布式 / 单机 / Docker / DBaaS):docker swarm
  • 硬件信息
    • 磁盘( 必须为 SSD ,不支持 HDD)
    • CPU、内存信息:

如下,是我的启动脚本

version: '3.8'
services:
  # metad0: first of three nebula-metad peers, pinned to VM-1-150-centos.
  metad0:
    image: vesoft/nebula-metad:v2.0.0-rc1
    env_file:
      - ./nebula.env
    command:
      # All three metad peers must list the identical meta_server_addrs.
      - --meta_server_addrs=9.134.1.150:9559,9.134.3.25:9559,9.134.55.213:9559
      - --local_ip=9.134.1.150
      - --ws_ip=9.134.1.150
      - --port=9559
      - --data_path=/data/meta
      - --log_dir=/logs
      - --v=0
      - --minloglevel=2
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          # Pin to a fixed node so the host-bound data path and IP stay stable.
          - node.hostname == VM-1-150-centos
    healthcheck:
      # 19559 is metad's HTTP status port; curl -f fails on non-2xx.
      test: ["CMD", "curl", "-f", "http://9.134.1.150:19559/status"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 20s
    ports:
      # NOTE(review): the stack joins the external "host" network (see the
      # networks section), so these host-mode port publications are no-ops;
      # the daemon already binds directly on the host.
      - target: 19559
        published: 19559
        protocol: tcp
        mode: host
      - target: 19560
        published: 19560
        protocol: tcp
        mode: host
      - target: 9559
        published: 9559
        protocol: tcp
        mode: host
    volumes:
      - /data/nebula/data/meta:/data/meta
      - /data/nebula/logs/meta:/logs
    networks:
      - nebula-net

  # metad1: second nebula-metad peer, pinned to VM-3-25-centos.
  metad1:
    image: vesoft/nebula-metad:v2.0.0-rc1
    env_file:
      - ./nebula.env
    command:
      # Same peer list as metad0/metad2; local_ip/ws_ip select this node.
      - --meta_server_addrs=9.134.1.150:9559,9.134.3.25:9559,9.134.55.213:9559
      - --local_ip=9.134.3.25
      - --ws_ip=9.134.3.25
      - --port=9559
      - --data_path=/data/meta
      - --log_dir=/logs
      - --v=0
      - --minloglevel=2
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.hostname == VM-3-25-centos
    healthcheck:
      # metad HTTP status endpoint on this node's IP.
      test: ["CMD", "curl", "-f", "http://9.134.3.25:19559/status"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 20s
    ports:
      # NOTE(review): redundant while the service runs on the host network.
      - target: 19559
        published: 19559
        protocol: tcp
        mode: host
      - target: 19560
        published: 19560
        protocol: tcp
        mode: host
      - target: 9559
        published: 9559
        protocol: tcp
        mode: host
    volumes:
      - /data/nebula/data/meta:/data/meta
      - /data/nebula/logs/meta:/logs
    networks:
      - nebula-net

  # metad2: third nebula-metad peer, pinned to VM_55_213_centos.
  metad2:
    image: vesoft/nebula-metad:v2.0.0-rc1
    env_file:
      - ./nebula.env
    command:
      - --meta_server_addrs=9.134.1.150:9559,9.134.3.25:9559,9.134.55.213:9559
      - --local_ip=9.134.55.213
      - --ws_ip=9.134.55.213
      - --port=9559
      - --data_path=/data/meta
      - --log_dir=/logs
      - --v=0
      - --minloglevel=2
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          # NOTE(review): this hostname uses underscores unlike the other two
          # nodes' hyphenated names — confirm it matches `docker node ls`.
          - node.hostname == VM_55_213_centos
    healthcheck:
      test: ["CMD", "curl", "-f", "http://9.134.55.213:19559/status"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 20s
    ports:
      # NOTE(review): redundant while the service runs on the host network.
      - target: 19559
        published: 19559
        protocol: tcp
        mode: host
      - target: 19560
        published: 19560
        protocol: tcp
        mode: host
      - target: 9559
        published: 9559
        protocol: tcp
        mode: host
    volumes:
      - /data/nebula/data/meta:/data/meta
      - /data/nebula/logs/meta:/logs
    networks:
      - nebula-net

  # storaged0: nebula-storaged on VM-1-150-centos.
  storaged0:
    image: vesoft/nebula-storaged:v2.0.0-rc1
    env_file:
      - ./nebula.env
    command:
      - --meta_server_addrs=9.134.1.150:9559,9.134.3.25:9559,9.134.55.213:9559
      - --local_ip=9.134.1.150
      - --ws_ip=9.134.1.150
      - --port=9779
      - --data_path=/data/storaged
      - --log_dir=/logs
      - --v=0
      - --minloglevel=2
      - --raft_rpc_timeout_ms=5000
      - --heartbeat_interval_secs=30
      # Fixed: the original line carried a trailing tab+spaces, which a YAML
      # parser keeps as part of the flag value passed to the daemon.
      - --enable_rocksdb_prefix_filtering=true
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.hostname == VM-1-150-centos
    depends_on:
      # NOTE(review): `docker stack deploy` ignores depends_on; it only
      # orders startup under plain docker-compose.
      - metad0
      - metad1
      - metad2
    healthcheck:
      # storaged HTTP status endpoint.
      test: ["CMD", "curl", "-f", "http://9.134.1.150:19779/status"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 20s
    ports:
      # NOTE(review): redundant while the service runs on the host network.
      - target: 19779
        published: 19779
        protocol: tcp
        mode: host
      - target: 19780
        published: 19780
        protocol: tcp
        mode: host
      - target: 9779
        published: 9779
        protocol: tcp
        mode: host
    volumes:
      - /data/nebula/data/storaged:/data/storaged
      - /data/nebula/logs/storaged:/logs
    networks:
      - nebula-net
      
  # storaged1: nebula-storaged on VM-3-25-centos.
  storaged1:
    image: vesoft/nebula-storaged:v2.0.0-rc1
    env_file:
      - ./nebula.env
    command:
      - --meta_server_addrs=9.134.1.150:9559,9.134.3.25:9559,9.134.55.213:9559
      - --local_ip=9.134.3.25
      - --ws_ip=9.134.3.25
      - --port=9779
      - --data_path=/data/storaged
      - --log_dir=/logs
      - --v=0
      - --minloglevel=2
      - --raft_rpc_timeout_ms=5000
      - --heartbeat_interval_secs=30
      - --enable_rocksdb_prefix_filtering=true
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.hostname == VM-3-25-centos
    depends_on:
      # NOTE(review): ignored by `docker stack deploy`.
      - metad0
      - metad1
      - metad2
    healthcheck:
      test: ["CMD", "curl", "-f", "http://9.134.3.25:19779/status"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 20s
    ports:
      # NOTE(review): redundant while the service runs on the host network.
      - target: 19779
        published: 19779
        protocol: tcp
        mode: host
      - target: 19780
        published: 19780
        protocol: tcp
        mode: host
      - target: 9779
        published: 9779
        protocol: tcp
        mode: host
    volumes:
      - /data/nebula/data/storaged:/data/storaged
      - /data/nebula/logs/storaged:/logs
    networks:
      - nebula-net

  # storaged2: nebula-storaged on VM_55_213_centos.
  storaged2:
    image: vesoft/nebula-storaged:v2.0.0-rc1
    env_file:
      - ./nebula.env
    command:
      - --meta_server_addrs=9.134.1.150:9559,9.134.3.25:9559,9.134.55.213:9559
      - --local_ip=9.134.55.213
      - --ws_ip=9.134.55.213
      - --port=9779
      - --data_path=/data/storaged
      - --log_dir=/logs
      - --v=0
      - --minloglevel=2
      - --raft_rpc_timeout_ms=5000
      - --heartbeat_interval_secs=30
      - --enable_rocksdb_prefix_filtering=true
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          # NOTE(review): underscore hostname — confirm against `docker node ls`.
          - node.hostname == VM_55_213_centos
    depends_on:
      # NOTE(review): ignored by `docker stack deploy`.
      - metad0
      - metad1
      - metad2
    healthcheck:
      test: ["CMD", "curl", "-f", "http://9.134.55.213:19779/status"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 20s
    ports:
      # NOTE(review): redundant while the service runs on the host network.
      - target: 19779
        published: 19779
        protocol: tcp
        mode: host
      - target: 19780
        published: 19780
        protocol: tcp
        mode: host
      - target: 9779
        published: 9779
        protocol: tcp
        mode: host
    volumes:
      - /data/nebula/data/storaged:/data/storaged
      - /data/nebula/logs/storaged:/logs
    networks:
      - nebula-net
      
  # graphd0: nebula-graphd on VM-1-150-centos; clients connect on port 9699.
  graphd0:
    image: vesoft/nebula-graphd:v2.0.0-rc1
    env_file:
      - ./nebula.env
    command:
      - --meta_server_addrs=9.134.1.150:9559,9.134.3.25:9559,9.134.55.213:9559
      - --port=9699
      - --ws_ip=9.134.1.150
      - --log_dir=/logs
      - --v=0
      - --minloglevel=2
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.hostname == VM-1-150-centos
    depends_on:
      # NOTE(review): ignored by `docker stack deploy`.
      - metad0
      - metad1
      - metad2
    healthcheck:
      # Fixed: graphd's web service listens on 19669 (its own log prints
      # "Web service started on HTTP[19669], HTTP2[19670]"); the old check
      # against 19699 always got "Connection refused", so swarm marked the
      # task unhealthy and killed it.
      test: ["CMD", "curl", "-f", "http://9.134.1.150:19669/status"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 20s
    ports:
      - target: 9699
        published: 9699
        protocol: tcp
        mode: host
      # Fixed to the actual ws HTTP port (19669, not 19699).
      - target: 19669
        published: 19669
        protocol: tcp
        mode: host
      - target: 19670
        published: 19670
        protocol: tcp
        mode: host
    volumes:
      - /data/nebula/logs/graphd:/logs
    networks:
      - nebula-net

  # graphd1: nebula-graphd on VM-3-25-centos; clients connect on port 9699.
  graphd1:
    image: vesoft/nebula-graphd:v2.0.0-rc1
    env_file:
      - ./nebula.env
    command:
      - --meta_server_addrs=9.134.1.150:9559,9.134.3.25:9559,9.134.55.213:9559
      - --port=9699
      - --ws_ip=9.134.3.25
      - --log_dir=/logs
      # NOTE(review): this replica logs at --v=2 (the others use 0),
      # apparently left on for debugging.
      - --v=2
      - --minloglevel=2
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          - node.hostname == VM-3-25-centos
    depends_on:
      # NOTE(review): ignored by `docker stack deploy`.
      - metad0
      - metad1
      - metad2
    healthcheck:
      # Fixed: graphd's web service listens on 19669 (its own log prints
      # "Web service started on HTTP[19669]"); the inspect health log shows
      # curl failing on 19699 with Connection refused, which made swarm
      # kill this task every ~90s.
      test: ["CMD", "curl", "-f", "http://9.134.3.25:19669/status"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 20s
    ports:
      - target: 9699
        published: 9699
        protocol: tcp
        mode: host
      # Fixed to the actual ws HTTP port (19669, not 19699).
      - target: 19669
        published: 19669
        protocol: tcp
        mode: host
      - target: 19670
        published: 19670
        protocol: tcp
        mode: host
    volumes:
      - /data/nebula/logs/graphd:/logs
    networks:
      - nebula-net
      
  # graphd2: nebula-graphd on VM_55_213_centos; clients connect on port 9699.
  graphd2:
    image: vesoft/nebula-graphd:v2.0.0-rc1
    env_file:
      - ./nebula.env
    command:
      - --meta_server_addrs=9.134.1.150:9559,9.134.3.25:9559,9.134.55.213:9559
      - --port=9699
      - --ws_ip=9.134.55.213
      - --log_dir=/logs
      - --v=0
      - --minloglevel=2
    deploy:
      replicas: 1
      restart_policy:
        condition: on-failure
      placement:
        constraints:
          # NOTE(review): underscore hostname — confirm against `docker node ls`.
          - node.hostname == VM_55_213_centos
    depends_on:
      # NOTE(review): ignored by `docker stack deploy`.
      - metad0
      - metad1
      - metad2
    healthcheck:
      # Fixed: graphd's web service listens on 19669 (its own log prints
      # "Web service started on HTTP[19669]"); checking 19699 always failed
      # with Connection refused and got the task killed as unhealthy.
      test: ["CMD", "curl", "-f", "http://9.134.55.213:19669/status"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 20s
    ports:
      - target: 9699
        published: 9699
        protocol: tcp
        mode: host
      # Fixed to the actual ws HTTP port (19669, not 19699).
      - target: 19669
        published: 19669
        protocol: tcp
        mode: host
      - target: 19670
        published: 19670
        protocol: tcp
        mode: host
    volumes:
      - /data/nebula/logs/graphd:/logs
    networks:
      - nebula-net
      
# Every service joins the node's pre-existing "host" network, so containers
# bind ports directly on the host (which is why the per-service `ports`
# sections are effectively no-ops).
networks:
  nebula-net:
    # Fixed: for an external network only `name` is meaningful; `attachable`
    # applies to networks this stack creates and is rejected/ignored here.
    external: true
    name: host

meta和storage可以启动,但是graph无法启动

graph报如下错误日志

E0112 08:07:25.332816    13 MetaClient.cpp:581] Send request to [9.134.3.25:9559], exceed retry limit
E0112 08:07:25.334055     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
E0112 08:07:30.348592    14 MetaClient.cpp:581] Send request to [9.134.55.213:9559], exceed retry limit
E0112 08:07:30.348659     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
E0112 08:07:35.358232    15 MetaClient.cpp:581] Send request to [9.134.55.213:9559], exceed retry limit
E0112 08:07:35.358297     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
E0112 08:07:40.371402    16 MetaClient.cpp:581] Send request to [9.134.1.150:9559], exceed retry limit
E0112 08:07:40.371459     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
E0112 08:07:46.617467    17 MetaClient.cpp:581] Send request to [9.134.55.213:9559], exceed retry limit
E0112 08:07:46.617542    21 MetaClient.cpp:121] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused


Running on machine: VM-1-150-centos
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
E0112 10:08:24.978436    13 MetaClient.cpp:581] Send request to [9.134.55.213:9559], exceed retry limit
E0112 10:08:24.978682     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
E0112 10:08:29.992230    14 MetaClient.cpp:581] Send request to [9.134.3.25:9559], exceed retry limit
E0112 10:08:29.992303     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused

使用的是host模式,我使用telnet 9.134.55.213 9559 到对应机器是通的,请帮忙看下什么问题

额,你为啥把graphd端口都改了

graphd 这个报错不会导致graphd起不来的。还有你的日志级别最好不要把–minloglevel设置为2,设置为0就好,有问题好看日志。还有v可以改为0。请问这个打印是一直都打吗?你确认下你的graphd所在容器能不能和meta的容器通信。

是可以通信的,我beta版本都是正常的,rc1版本我只是更新了下端口,我改为0看下

改成0并没有额外的日志

docker ps -a 截图下

可以把图截全点吗,把port也给我截出来,谢谢

CONTAINER ID        IMAGE                                                 COMMAND                  CREATED             STATUS                      PORTS               NAMES
a563b8369d49        vesoft/nebula-graphd:v2.0.0-rc1                       "./bin/nebula-graphd…"   2 hours ago         Exited (0) 2 hours ago                          nebula_graphd1.1.3vfszlduwlpp05793bo63n8z0
53247005fa05        vesoft/nebula-storaged:v2.0.0-rc1                     "./bin/nebula-storag…"   2 hours ago         Up 2 hours (healthy)                            nebula_storaged1.1.1tvxybz26jqzqkfvgvr0i2c26
158f5f76618f        vesoft/nebula-metad:v2.0.0-rc1                        "./bin/nebula-metad …"   2 hours ago         Up 2 hours (healthy)                            nebula_metad1.1.8ewiza5into8jpq82niey1yp5

docker inspect graph-id 的数据

[
    {
        "Id": "a563b8369d4933c71d7d64c4bdbc2a39c9c9e6d2012836cac9cf6c8c5d32aa3c",
        "Created": "2021-01-12T10:08:39.919176612Z",
        "Path": "./bin/nebula-graphd",
        "Args": [
            "--flagfile=./etc/nebula-graphd.conf",
            "--daemonize=false",
            "--meta_server_addrs=9.134.1.150:9559,9.134.3.25:9559,9.134.55.213:9559",
            "--port=9699",
            "--ws_ip=9.134.3.25",
            "--log_dir=/logs",
            "--v=2",
            "--minloglevel=0"
        ],
        "State": {
            "Status": "exited",
            "Running": false,
            "Paused": false,
            "Restarting": false,
            "OOMKilled": false,
            "Dead": false,
            "Pid": 0,
            "ExitCode": 0,
            "Error": "",
            "StartedAt": "2021-01-12T10:08:40.069661927Z",
            "FinishedAt": "2021-01-12T10:10:12.80554888Z",
            "Health": {
                "Status": "unhealthy",
                "FailingStreak": 3,
                "Log": [
                    {
                        "Start": "2021-01-12T18:09:10.069839075+08:00",
                        "End": "2021-01-12T18:09:10.312778237+08:00",
                        "ExitCode": 7,
                        "Output": "  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current\n                                 Dload  Upload   Total   Spent    Left  Speed\n\r  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0curl: (7) Failed connect to 9.134.3.25:19699; Connection refused\n"
                    },
                    {
                        "Start": "2021-01-12T18:09:40.323965289+08:00",
                        "End": "2021-01-12T18:09:40.537800339+08:00",
                        "ExitCode": 7,
                        "Output": "  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current\n                                 Dload  Upload   Total   Spent    Left  Speed\n\r  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0curl: (7) Failed connect to 9.134.3.25:19699; Connection refused\n"
                    },
                    {
                        "Start": "2021-01-12T18:10:10.547992013+08:00",
                        "End": "2021-01-12T18:10:10.745305733+08:00",
                        "ExitCode": 7,
                        "Output": "  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current\n                                 Dload  Upload   Total   Spent    Left  Speed\n\r  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0curl: (7) Failed connect to 9.134.3.25:19699; Connection refused\n"
                    }
                ]
            }
        },
        "Image": "sha256:52dc9e59d0a7a0d690bec90221de3a346f14187e340a7bf57c726f605504e5de",
        "ResolvConfPath": "/var/lib/docker/containers/a563b8369d4933c71d7d64c4bdbc2a39c9c9e6d2012836cac9cf6c8c5d32aa3c/resolv.conf",
        "HostnamePath": "/var/lib/docker/containers/a563b8369d4933c71d7d64c4bdbc2a39c9c9e6d2012836cac9cf6c8c5d32aa3c/hostname",
        "HostsPath": "/var/lib/docker/containers/a563b8369d4933c71d7d64c4bdbc2a39c9c9e6d2012836cac9cf6c8c5d32aa3c/hosts",
        "LogPath": "/var/lib/docker/containers/a563b8369d4933c71d7d64c4bdbc2a39c9c9e6d2012836cac9cf6c8c5d32aa3c/a563b8369d4933c71d7d64c4bdbc2a39c9c9e6d2012836cac9cf6c8c5d32aa3c-json.log",
        "Name": "/nebula_graphd1.1.3vfszlduwlpp05793bo63n8z0",
        "RestartCount": 0,
        "Driver": "overlay2",
        "Platform": "linux",
        "MountLabel": "",
        "ProcessLabel": "",
        "AppArmorProfile": "",
        "ExecIDs": null,
        "HostConfig": {
            "Binds": null,
            "ContainerIDFile": "",
            "LogConfig": {
                "Type": "json-file",
                "Config": {}
            },
            "NetworkMode": "host",
            "PortBindings": {
                "19670/tcp": [
                    {
                        "HostIp": "",
                        "HostPort": "19670"
                    }
                ],
                "19699/tcp": [
                    {
                        "HostIp": "",
                        "HostPort": "19699"
                    }
                ],
                "9699/tcp": [
                    {
                        "HostIp": "",
                        "HostPort": "9699"
                    }
                ]
            },
            "RestartPolicy": {
                "Name": "",
                "MaximumRetryCount": 0
            },
            "AutoRemove": false,
            "VolumeDriver": "",
            "VolumesFrom": null,
            "CapAdd": null,
            "CapDrop": null,
            "Capabilities": null,
            "Dns": null,
            "DnsOptions": null,
            "DnsSearch": null,
            "ExtraHosts": null,
            "GroupAdd": null,
            "IpcMode": "private",
            "Cgroup": "",
            "Links": null,
            "OomScoreAdj": 0,
            "PidMode": "",
            "Privileged": false,
            "PublishAllPorts": false,
            "ReadonlyRootfs": false,
            "SecurityOpt": null,
            "UTSMode": "",
            "UsernsMode": "",
            "ShmSize": 67108864,
            "Runtime": "runc",
            "ConsoleSize": [
                0,
                0
            ],
            "Isolation": "default",
            "CpuShares": 0,
            "Memory": 0,
            "NanoCpus": 0,
            "CgroupParent": "",
            "BlkioWeight": 0,
            "BlkioWeightDevice": null,
            "BlkioDeviceReadBps": null,
            "BlkioDeviceWriteBps": null,
            "BlkioDeviceReadIOps": null,
            "BlkioDeviceWriteIOps": null,
            "CpuPeriod": 0,
            "CpuQuota": 0,
            "CpuRealtimePeriod": 0,
            "CpuRealtimeRuntime": 0,
            "CpusetCpus": "",
            "CpusetMems": "",
            "Devices": null,
            "DeviceCgroupRules": null,
            "DeviceRequests": null,
            "KernelMemory": 0,
            "KernelMemoryTCP": 0,
            "MemoryReservation": 0,
            "MemorySwap": 0,
            "MemorySwappiness": null,
            "OomKillDisable": false,
            "PidsLimit": null,
            "Ulimits": null,
            "CpuCount": 0,
            "CpuPercent": 0,
            "IOMaximumIOps": 0,
            "IOMaximumBandwidth": 0,
            "Mounts": [
                {
                    "Type": "bind",
                    "Source": "/data/nebula/logs/graphd",
                    "Target": "/logs"
                }
            ],
            "MaskedPaths": [
                "/proc/asound",
                "/proc/acpi",
                "/proc/kcore",
                "/proc/keys",
                "/proc/latency_stats",
                "/proc/timer_list",
                "/proc/timer_stats",
                "/proc/sched_debug",
                "/proc/scsi",
                "/sys/firmware"
            ],
            "ReadonlyPaths": [
                "/proc/bus",
                "/proc/fs",
                "/proc/irq",
                "/proc/sys",
                "/proc/sysrq-trigger"
            ]
        },
        "GraphDriver": {
            "Data": {
                "LowerDir": "/var/lib/docker/overlay2/e2ec0b39632c91c2dadd2197f223f8a08deeb1530479a219c6cb78d09d3ed310-init/diff:/var/lib/docker/overlay2/2137deb69b385d913cd80e522fc560a91fe8ef2778499d193f4725dd9d0c72f6/diff:/var/lib/docker/overlay2/fdeb6f03aeac61d48547bc681341a3ef1e308ef91c08697b054a8dc69056149d/diff:/var/lib/docker/overlay2/12712ae0e4ad038751dd64ad16e34b9be1f325849927e697f7e4000a8ed6c2cf/diff:/var/lib/docker/overlay2/52dd0a8dc2a841cb9a217fe2d9e72b031d5ee0f51cd28b8ab40cde96362382f4/diff",
                "MergedDir": "/var/lib/docker/overlay2/e2ec0b39632c91c2dadd2197f223f8a08deeb1530479a219c6cb78d09d3ed310/merged",
                "UpperDir": "/var/lib/docker/overlay2/e2ec0b39632c91c2dadd2197f223f8a08deeb1530479a219c6cb78d09d3ed310/diff",
                "WorkDir": "/var/lib/docker/overlay2/e2ec0b39632c91c2dadd2197f223f8a08deeb1530479a219c6cb78d09d3ed310/work"
            },
            "Name": "overlay2"
        },
        "Mounts": [
            {
                "Type": "bind",
                "Source": "/data/nebula/logs/graphd",
                "Destination": "/logs",
                "Mode": "",
                "RW": true,
                "Propagation": "rprivate"
            }
        ],
        "Config": {
            "Hostname": "VM-3-25-centos",
            "Domainname": "",
            "User": "",
            "AttachStdin": false,
            "AttachStdout": false,
            "AttachStderr": false,
            "ExposedPorts": {
                "13000/tcp": {},
                "13002/tcp": {},
                "19670/tcp": {},
                "19699/tcp": {},
                "3699/tcp": {},
                "9699/tcp": {}
            },
            "Tty": false,
            "OpenStdin": false,
            "StdinOnce": false,
            "Env": [
                "TZ=UTC",
                "USER=root",
                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
            ],
            "Cmd": [
                "--meta_server_addrs=9.134.1.150:9559,9.134.3.25:9559,9.134.55.213:9559",
                "--port=9699",
                "--ws_ip=9.134.3.25",
                "--log_dir=/logs",
                "--v=2",
                "--minloglevel=0"
            ],
            "Healthcheck": {
                "Test": [
                    "CMD",
                    "curl",
                    "-f",
                    "http://9.134.3.25:19699/status"
                ],
                "Interval": 30000000000,
                "Timeout": 10000000000,
                "StartPeriod": 20000000000,
                "Retries": 3
            },
            "Image": "vesoft/nebula-graphd:v2.0.0-rc1@sha256:297e9a653776d4f34bc5be1731c9a6c7cb0831fb07bcfcba23065aed46822d23",
            "Volumes": null,
            "WorkingDir": "/usr/local/nebula",
            "Entrypoint": [
                "./bin/nebula-graphd",
                "--flagfile=./etc/nebula-graphd.conf",
                "--daemonize=false"
            ],
            "OnBuild": null,
            "Labels": {
                "com.docker.stack.namespace": "nebula",
                "com.docker.swarm.node.id": "85a1r4rigtrxl5xa9oc67z1os",
                "com.docker.swarm.service.id": "ih7kmifeys91ffy48hzv8wzff",
                "com.docker.swarm.service.name": "nebula_graphd1",
                "com.docker.swarm.task": "",
                "com.docker.swarm.task.id": "3vfszlduwlpp05793bo63n8z0",
                "com.docker.swarm.task.name": "nebula_graphd1.1.3vfszlduwlpp05793bo63n8z0",
                "org.label-schema.build-date": "20201113",
                "org.label-schema.license": "GPLv2",
                "org.label-schema.name": "CentOS Base Image",
                "org.label-schema.schema-version": "1.0",
                "org.label-schema.vendor": "CentOS",
                "org.opencontainers.image.created": "2020-11-13 00:00:00+00:00",
                "org.opencontainers.image.licenses": "GPL-2.0-only",
                "org.opencontainers.image.title": "CentOS Base Image",
                "org.opencontainers.image.vendor": "CentOS"
            }
        },
        "NetworkSettings": {
            "Bridge": "",
            "SandboxID": "5d530f6d3d5961e4d99f00c3fa281a1d6dbf6ff1b762706b33d1470aba0955a7",
            "HairpinMode": false,
            "LinkLocalIPv6Address": "",
            "LinkLocalIPv6PrefixLen": 0,
            "Ports": {},
            "SandboxKey": "/var/run/docker/netns/default",
            "SecondaryIPAddresses": null,
            "SecondaryIPv6Addresses": null,
            "EndpointID": "",
            "Gateway": "",
            "GlobalIPv6Address": "",
            "GlobalIPv6PrefixLen": 0,
            "IPAddress": "",
            "IPPrefixLen": 0,
            "IPv6Gateway": "",
            "MacAddress": "",
            "Networks": {
                "host": {
                    "IPAMConfig": {},
                    "Links": null,
                    "Aliases": null,
                    "NetworkID": "1909c106459268dc26f0e833b1bee03327612585da932bb675b38bb4b4efae61",
                    "EndpointID": "",
                    "Gateway": "",
                    "IPAddress": "",
                    "IPPrefixLen": 0,
                    "IPv6Gateway": "",
                    "GlobalIPv6Address": "",
                    "GlobalIPv6PrefixLen": 0,
                    "MacAddress": "",
                    "DriverOpts": null
                }
            }
        }
    }
]

你再启动下graphd,然后把graphd的日志再贴下,还有把和他同个ip的meta和storage的日志也贴下

graph日志

Log file created at: 2021/01/12 12:28:48
Running on machine: VM-3-25-centos
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
I0112 12:28:48.858804     1 GraphDaemon.cpp:106] Starting Graph HTTP Service
I0112 12:28:48.860983     6 WebService.cpp:143] Web service started on HTTP[19669], HTTP2[19670]
I0112 12:28:48.861039     1 GraphDaemon.cpp:120] Number of networking IO threads: 8
I0112 12:28:48.861060     1 GraphDaemon.cpp:129] Number of worker threads: 8
I0112 12:28:48.861600     1 MetaClient.cpp:47] Create meta client to [9.134.55.213:9559]
I0112 12:28:48.861932     1 GflagsManager.cpp:138] Prepare to register 5 gflags to meta
I0112 12:28:48.861954     1 MetaClient.cpp:2275] Send heartbeat to [9.134.55.213:9559], clusterId 0
I0112 12:28:48.862226    13 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:28:48.862244    13 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0112 12:28:48.862272    13 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:28:48.862294    13 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 1 times
I0112 12:28:48.862694    13 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f4a2d010, fd=48 host=9.134.55.213:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:28:49.864410    13 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:28:49.864445    13 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:28:49.864455    13 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 2 times
I0112 12:28:49.864861    13 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f4a2d010, fd=52 host=9.134.55.213:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:28:50.866274    13 ThriftClientManager.inl:21] Getting a client to [9.134.3.25:9559]
I0112 12:28:50.866302    13 ThriftClientManager.inl:33] There is no existing client to [9.134.3.25:9559], trying to create one
I0112 12:28:50.866313    13 MetaClient.cpp:542] Send request to meta [9.134.3.25:9559]
I0112 12:28:50.866322    13 ThriftClientManager.inl:55] Connecting to [9.134.3.25:9559] for 3 times
I0112 12:28:50.866520    13 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f4a2d010, fd=56 host=9.134.3.25:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:28:51.867955    13 ThriftClientManager.inl:21] Getting a client to [9.134.1.150:9559]
I0112 12:28:51.867987    13 ThriftClientManager.inl:33] There is no existing client to [9.134.1.150:9559], trying to create one
I0112 12:28:51.868000    13 MetaClient.cpp:542] Send request to meta [9.134.1.150:9559]
I0112 12:28:51.868010    13 ThriftClientManager.inl:55] Connecting to [9.134.1.150:9559] for 4 times
I0112 12:28:51.868347    13 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f4a2d010, fd=60 host=9.134.1.150:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
E0112 12:28:51.868424    13 MetaClient.cpp:581] Send request to [9.134.1.150:9559], exceed retry limit
E0112 12:28:51.868646     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0112 12:28:51.868659     1 MetaClient.cpp:85] Waiting for the metad to be ready!
I0112 12:28:53.868741     1 MetaClient.cpp:2275] Send heartbeat to [9.134.3.25:9559], clusterId 0
I0112 12:28:53.869113    14 ThriftClientManager.inl:21] Getting a client to [9.134.3.25:9559]
I0112 12:28:53.869139    14 ThriftClientManager.inl:33] There is no existing client to [9.134.3.25:9559], trying to create one
I0112 12:28:53.869165    14 MetaClient.cpp:542] Send request to meta [9.134.3.25:9559]
I0112 12:28:53.869174    14 ThriftClientManager.inl:55] Connecting to [9.134.3.25:9559] for 1 times
I0112 12:28:53.869573    14 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f3c2c010, fd=66 host=9.134.3.25:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:28:54.871345    14 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:28:54.871387    14 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0112 12:28:54.871454    14 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:28:54.871515    14 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 2 times
I0112 12:28:54.871888    14 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f3c2c010, fd=70 host=9.134.55.213:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:28:55.873366    14 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:28:55.873445    14 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:28:55.873497    14 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 3 times
I0112 12:28:55.873867    14 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f3c2c010, fd=74 host=9.134.55.213:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:28:56.875340    14 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:28:56.875439    14 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:28:56.875505    14 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 4 times
I0112 12:28:56.875980    14 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f3c2c010, fd=78 host=9.134.55.213:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
E0112 12:28:56.876121    14 MetaClient.cpp:581] Send request to [9.134.55.213:9559], exceed retry limit
E0112 12:28:56.876202     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0112 12:28:56.876267     1 MetaClient.cpp:85] Waiting for the metad to be ready!
I0112 12:28:58.876381     1 MetaClient.cpp:2275] Send heartbeat to [9.134.3.25:9559], clusterId 0
I0112 12:28:58.876535    15 ThriftClientManager.inl:21] Getting a client to [9.134.3.25:9559]
I0112 12:28:58.876598    15 ThriftClientManager.inl:33] There is no existing client to [9.134.3.25:9559], trying to create one
I0112 12:28:58.876659    15 MetaClient.cpp:542] Send request to meta [9.134.3.25:9559]
I0112 12:28:58.876708    15 ThriftClientManager.inl:55] Connecting to [9.134.3.25:9559] for 1 times
I0112 12:28:58.877045    15 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f2e2c010, fd=78 host=9.134.3.25:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:28:59.878481    15 ThriftClientManager.inl:21] Getting a client to [9.134.1.150:9559]
I0112 12:28:59.878587    15 ThriftClientManager.inl:33] There is no existing client to [9.134.1.150:9559], trying to create one
I0112 12:28:59.878640    15 MetaClient.cpp:542] Send request to meta [9.134.1.150:9559]
I0112 12:28:59.878697    15 ThriftClientManager.inl:55] Connecting to [9.134.1.150:9559] for 2 times
I0112 12:28:59.879142    15 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f2e2c010, fd=78 host=9.134.1.150:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:29:00.879562    15 ThriftClientManager.inl:21] Getting a client to [9.134.1.150:9559]
I0112 12:29:00.879664    15 MetaClient.cpp:542] Send request to meta [9.134.1.150:9559]
I0112 12:29:00.879719    15 ThriftClientManager.inl:55] Connecting to [9.134.1.150:9559] for 3 times
I0112 12:29:00.880117    15 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f2e2c010, fd=78 host=9.134.1.150:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:29:01.881300    15 ThriftClientManager.inl:21] Getting a client to [9.134.3.25:9559]
I0112 12:29:01.881445    15 MetaClient.cpp:542] Send request to meta [9.134.3.25:9559]
I0112 12:29:01.881513    15 ThriftClientManager.inl:55] Connecting to [9.134.3.25:9559] for 4 times
I0112 12:29:01.881834    15 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f2e2c010, fd=78 host=9.134.3.25:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
E0112 12:29:01.881994    15 MetaClient.cpp:581] Send request to [9.134.3.25:9559], exceed retry limit
E0112 12:29:01.882083     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0112 12:29:01.882138     1 MetaClient.cpp:85] Waiting for the metad to be ready!
I0112 12:29:03.882266     1 MetaClient.cpp:2275] Send heartbeat to [9.134.1.150:9559], clusterId 0
I0112 12:29:03.882424    16 ThriftClientManager.inl:21] Getting a client to [9.134.1.150:9559]
I0112 12:29:03.882483    16 ThriftClientManager.inl:33] There is no existing client to [9.134.1.150:9559], trying to create one
I0112 12:29:03.882542    16 MetaClient.cpp:542] Send request to meta [9.134.1.150:9559]
I0112 12:29:03.882603    16 ThriftClientManager.inl:55] Connecting to [9.134.1.150:9559] for 1 times
I0112 12:29:03.882995    16 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f202c010, fd=78 host=9.134.1.150:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:29:04.884446    16 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:04.884548    16 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0112 12:29:04.884611    16 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:04.884665    16 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 2 times
I0112 12:29:04.885100    16 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f202c010, fd=78 host=9.134.55.213:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:29:05.886267    16 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:05.886399    16 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:05.886456    16 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 3 times
I0112 12:29:05.886909    16 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f202c010, fd=78 host=9.134.55.213:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:29:06.888073    16 ThriftClientManager.inl:21] Getting a client to [9.134.1.150:9559]
I0112 12:29:06.888180    16 MetaClient.cpp:542] Send request to meta [9.134.1.150:9559]
I0112 12:29:06.888233    16 ThriftClientManager.inl:55] Connecting to [9.134.1.150:9559] for 4 times
I0112 12:29:06.888612    16 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f202c010, fd=78 host=9.134.1.150:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
E0112 12:29:06.888736    16 MetaClient.cpp:581] Send request to [9.134.1.150:9559], exceed retry limit
E0112 12:29:06.888829     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0112 12:29:06.888979     1 MetaClient.cpp:96] Register time task for heartbeat!
W0112 12:29:06.889078     1 QueryEngine.cpp:45] Failed to synchronously wait for meta service ready
I0112 12:29:06.889227     1 GraphDaemon.cpp:161] Starting nebula-graphd on 0.0.0.0:9699
I0112 12:29:06.889309     1 ThriftServer.cpp:266] libevent 2.1.11-stable method epoll
I0112 12:29:06.890086     1 ThriftServer.cpp:397] Using 0 SSL handshake threads
I0112 12:29:06.890456    23 PosixThreadFactory.cpp:194] setpriority failed (are you root?) with error 13: Permission denied
I0112 12:29:06.890534    24 PosixThreadFactory.cpp:194] setpriority failed (are you root?) with error 13: Permission denied
I0112 12:29:06.891582    22 PosixThreadFactory.cpp:194] setpriority failed (are you root?) with error 13: Permission denied
I0112 12:29:06.890614    27 PosixThreadFactory.cpp:194] setpriority failed (are you root?) with error 13: Permission denied
I0112 12:29:06.891566    25 PosixThreadFactory.cpp:194] setpriority failed (are you root?) with error 13: Permission denied
I0112 12:29:06.890566    26 PosixThreadFactory.cpp:194] setpriority failed (are you root?) with error 13: Permission denied
I0112 12:29:10.695664    21 MetaClient.cpp:2275] Send heartbeat to [9.134.55.213:9559], clusterId 0
I0112 12:29:10.696406    17 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:10.696476    17 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0112 12:29:10.696552    17 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:10.696602    17 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 1 times
I0112 12:29:10.697284    17 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f122d010, fd=99 host=9.134.55.213:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:29:11.698732    17 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:11.698830    17 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:11.698881    17 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 2 times
I0112 12:29:11.699312    17 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f122d010, fd=99 host=9.134.55.213:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:29:12.700485    17 ThriftClientManager.inl:21] Getting a client to [9.134.1.150:9559]
I0112 12:29:12.700592    17 ThriftClientManager.inl:33] There is no existing client to [9.134.1.150:9559], trying to create one
I0112 12:29:12.700659    17 MetaClient.cpp:542] Send request to meta [9.134.1.150:9559]
I0112 12:29:12.700723    17 ThriftClientManager.inl:55] Connecting to [9.134.1.150:9559] for 3 times
I0112 12:29:12.701160    17 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f122d010, fd=99 host=9.134.1.150:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:29:13.702330    17 ThriftClientManager.inl:21] Getting a client to [9.134.3.25:9559]
I0112 12:29:13.702430    17 ThriftClientManager.inl:33] There is no existing client to [9.134.3.25:9559], trying to create one
I0112 12:29:13.702488    17 MetaClient.cpp:542] Send request to meta [9.134.3.25:9559]
I0112 12:29:13.702535    17 ThriftClientManager.inl:55] Connecting to [9.134.3.25:9559] for 4 times
I0112 12:29:13.702787    17 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f122d010, fd=99 host=9.134.3.25:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
E0112 12:29:13.702924    17 MetaClient.cpp:581] Send request to [9.134.3.25:9559], exceed retry limit
E0112 12:29:13.703019    21 MetaClient.cpp:121] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0112 12:29:16.704267    21 MetaClient.cpp:2275] Send heartbeat to [9.134.55.213:9559], clusterId 0
I0112 12:29:16.705015    18 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:16.705083    18 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0112 12:29:16.705149    18 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:16.705200    18 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 1 times
I0112 12:29:16.705647    18 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f042d010, fd=99 host=9.134.55.213:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:29:17.707113    18 ThriftClientManager.inl:21] Getting a client to [9.134.3.25:9559]
I0112 12:29:17.707221    18 ThriftClientManager.inl:33] There is no existing client to [9.134.3.25:9559], trying to create one
I0112 12:29:17.707289    18 MetaClient.cpp:542] Send request to meta [9.134.3.25:9559]
I0112 12:29:17.707340    18 ThriftClientManager.inl:55] Connecting to [9.134.3.25:9559] for 2 times
I0112 12:29:17.707595    18 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f042d010, fd=99 host=9.134.3.25:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:29:18.708772    18 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:18.708868    18 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:18.708938    18 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 3 times
I0112 12:29:18.709388    18 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7fa1f042d010, fd=99 host=9.134.55.213:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0112 12:29:19.710572    18 ThriftClientManager.inl:21] Getting a client to [9.134.1.150:9559]
I0112 12:29:19.710676    18 ThriftClientManager.inl:33] There is no existing client to [9.134.1.150:9559], trying to create one
I0112 12:29:19.710732    18 MetaClient.cpp:542] Send request to meta [9.134.1.150:9559]
I0112 12:29:19.710798    18 ThriftClientManager.inl:55] Connecting to [9.134.1.150:9559] for 4 times
E0112 12:29:19.713142    21 MetaClient.cpp:121] Heartbeat failed, status:LeaderChanged: Leader changed!
I0112 12:29:22.716150    21 MetaClient.cpp:2275] Send heartbeat to [9.134.55.213:9559], clusterId 0
I0112 12:29:22.716889    19 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.716967    19 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0112 12:29:22.717051    19 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.717106    19 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 1 times
I0112 12:29:22.720376    19 MetaClient.cpp:2294] Metad last update time: 1609922881976
I0112 12:29:22.720577    20 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.720659    20 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0112 12:29:22.720726    20 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.720780    20 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 1 times
I0112 12:29:22.721753    13 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.721817    13 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.721869    13 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 5 times
I0112 12:29:22.722664    14 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.722745    14 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.722801    14 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 5 times
I0112 12:29:22.724390    15 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.724455    15 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0112 12:29:22.724545    15 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.724612    15 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 5 times
I0112 12:29:22.725486    16 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.725553    16 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.725600    16 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 5 times
I0112 12:29:22.726524    21 MetaClient.cpp:211] Load space 1, parts num:100
I0112 12:29:22.726596    17 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.726653    17 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.726716    17 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 5 times
I0112 12:29:22.727581    18 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.727639    18 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.727689    18 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 5 times
I0112 12:29:22.728425    19 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.728477    19 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.728945    20 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.729023    20 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.729528    13 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.729580    13 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.730060    14 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.730123    14 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.730576    15 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.730628    15 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.731092    21 MetaClient.cpp:211] Load space 6, parts num:10
I0112 12:29:22.731178    16 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.731237    16 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.731704    17 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.731755    17 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.732172    18 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.732219    18 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.732607    19 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.732662    19 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.733073    20 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.733132    20 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.733649    13 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.733698    13 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.734148    14 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.734205    14 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.734668    21 MetaClient.cpp:211] Load space 29, parts num:15
I0112 12:29:22.734740    15 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.734797    15 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.735275    16 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.735334    16 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.735832    17 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.735888    17 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.736358    18 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.736411    18 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.736826    19 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.736887    19 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.737356    20 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.737411    20 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.737856    13 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.737916    13 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.738469    21 MetaClient.cpp:211] Load space 45, parts num:100
I0112 12:29:22.738538    14 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.738587    14 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.738992    15 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.739045    15 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.739444    16 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.739498    16 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.739886    17 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.739944    17 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.740319    18 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.740368    18 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.740974    19 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.741031    19 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.741456    20 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.741509    20 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.741968    21 MetaClient.cpp:2777] Register gflags ok 5
I0112 12:29:22.742043    13 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0112 12:29:22.742096    13 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0112 12:29:22.742555    21 MetaClient.cpp:2940] Update config v from 2 to 0

meta日志

Running on machine: VM-3-25-centos
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
I0112 12:29:52.503140     1 MetaDaemon.cpp:214] identify myself as [9.134.3.25:9559]
I0112 12:29:52.508020     1 NebulaStore.cpp:47] Start the raft service...
I0112 12:29:52.508395     1 RaftexService.cpp:65] Init thrift server for raft service, port: 9560
I0112 12:29:52.508471    50 RaftexService.cpp:99] Starting the Raftex Service
I0112 12:29:52.512262    50 RaftexService.cpp:87] Starting the Raftex Service on 9560
I0112 12:29:52.512290    50 RaftexService.cpp:111] Start the Raftex Service successfully
I0112 12:29:52.512415     1 NebulaStore.cpp:75] Scan the local path, and init the spaces_
I0112 12:29:52.512461     1 NebulaStore.cpp:81] Scan path "/data/meta/0"
I0112 12:29:52.548903    68 EventListner.h:18] Rocksdb start compaction column family: default because of LevelL0FilesNum, status: OK, compacted 5 files into 0, base level is 0, output level is 1
I0112 12:29:52.549932     1 RocksEngine.cpp:105] open rocksdb on /data/meta/nebula/0/data
I0112 12:29:52.549948     1 NebulaStore.cpp:111] Load space 0 from disk
I0112 12:29:52.549968     1 NebulaStore.cpp:146] Need to open 1 parts of space 0
I0112 12:29:52.561275    68 EventListner.h:30] Rocksdb compaction completed column family: default because of LevelL0FilesNum, status: OK, compacted 5 files into 1, base level is 0, output level is 1
W0112 12:29:52.804730    43 RaftexService.cpp:182] Cannot find the part 0 in the graph space 0
I0112 12:29:52.820319    46 FileBasedWal.cpp:65] [Port: 9560, Space: 0, Part: 0] lastLogId in wal is 3493517, lastLogTerm is 35, path is /data/meta/nebula/0/wal/0/0000000000003341908.wal
I0112 12:29:52.820427    46 RaftPart.cpp:295] [Port: 9560, Space: 0, Part: 0] There are 2 peer hosts, and total 3 copies. The quorum is 2, as learner 0, lastLogId 3493517, lastLogTerm 35, committedLogId 3493517, term 35
I0112 12:29:52.820439    46 RaftPart.cpp:308] [Port: 9560, Space: 0, Part: 0] Add peer [9.134.1.150:9560]
I0112 12:29:52.820456    46 RaftPart.cpp:308] [Port: 9560, Space: 0, Part: 0] Add peer [9.134.55.213:9560]
I0112 12:29:52.820755    46 NebulaStore.cpp:177] Load part 0, 0 from disk
I0112 12:29:52.820775     1 NebulaStore.cpp:192] Load space 0 complete
I0112 12:29:52.820781     1 NebulaStore.cpp:201] Init data from partManager for [9.134.3.25:9559]
I0112 12:29:52.820793     1 NebulaStore.cpp:293] Data space 0 has existed!
I0112 12:29:52.820801     1 NebulaStore.cpp:322] [Space: 0, Part: 0] has existed!
I0112 12:29:52.820811     1 NebulaStore.cpp:68] Register handler...
I0112 12:29:52.820816     1 MetaDaemon.cpp:99] Waiting for the leader elected...
I0112 12:29:52.820822     1 MetaDaemon.cpp:112] Leader has not been elected, sleep 1s
I0112 12:29:53.820906     1 MetaDaemon.cpp:112] Leader has not been elected, sleep 1s
I0112 12:29:54.821034     1 MetaDaemon.cpp:112] Leader has not been elected, sleep 1s
I0112 12:29:55.033748    43 RaftPart.cpp:1747] [Port: 9560, Space: 0, Part: 0] The current role is Follower. Will follow the new leader 9.134.55.213:9560 [Term: 39]
I0112 12:29:55.033787    43 RaftPart.cpp:1625] [Port: 9560, Space: 0, Part: 0] Local is missing logs from id 3493517. Need to catch up
I0112 12:29:55.033824    47 Part.cpp:191] [Port: 9560, Space: 0, Part: 0] Find the new leader [9.134.55.213:9560]
I0112 12:29:55.821226     1 MetaDaemon.cpp:144] Nebula store init succeeded, clusterId 6252170666233338811
I0112 12:29:55.821261     1 MetaDaemon.cpp:227] Start http service
I0112 12:29:55.821494     1 MetaDaemon.cpp:152] Starting Meta HTTP Service
I0112 12:29:55.823123    92 WebService.cpp:143] Web service started on HTTP[19559], HTTP2[19560]
I0112 12:29:55.823742     1 JobManager.cpp:58] JobManager initialized
I0112 12:29:55.823774   108 JobManager.cpp:81] JobManager::runJobBackground() enter
I0112 12:29:55.826138     1 StatsManager.cpp:94] registerHisto, bucketSize: 1000, min: 1, max: 1000000
I0112 12:29:55.826155     1 MetaDaemon.cpp:284] The meta deamon start on [9.134.3.25:9559]
I0112 12:29:59.646749   122 HBProcessor.cpp:29] Receive heartbeat from [0.0.0.0:9699], role = GRAPH
E0112 12:29:59.646935   122 RaftPart.cpp:367] [Port: 9560, Space: 0, Part: 0] The partition is not a leader
E0112 12:29:59.647140   122 RaftPart.cpp:687] [Port: 9560, Space: 0, Part: 0] Cannot append logs, clean the buffer

storaged日志

Log file created at: 2021/01/12 12:28:58
Running on machine: VM-3-25-centos
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
I0112 12:28:58.520768     1 StorageDaemon.cpp:82] host = [9.134.3.25:9779]
I0112 12:28:58.525691     1 MetaClient.cpp:47] Create meta client to [9.134.3.25:9559]
I0112 12:28:58.526126     1 GflagsManager.cpp:138] Prepare to register 12 gflags to meta
W0112 12:28:58.526160     1 FileBasedClusterIdMan.cpp:46] Open file failed, error No such file or directory
E0112 12:29:01.531749    46 MetaClient.cpp:581] Send request to [9.134.1.150:9559], exceed retry limit
E0112 12:29:01.531956     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0112 12:29:01.531986     1 MetaClient.cpp:85] Waiting for the metad to be ready!
W0112 12:29:03.532081     1 FileBasedClusterIdMan.cpp:46] Open file failed, error No such file or directory
E0112 12:29:06.537518    47 MetaClient.cpp:581] Send request to [9.134.55.213:9559], exceed retry limit
E0112 12:29:06.537593     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0112 12:29:06.537606     1 MetaClient.cpp:85] Waiting for the metad to be ready!
W0112 12:29:08.537693     1 FileBasedClusterIdMan.cpp:46] Open file failed, error No such file or directory
E0112 12:29:11.543298    48 MetaClient.cpp:581] Send request to [9.134.1.150:9559], exceed retry limit
E0112 12:29:11.543368     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0112 12:29:11.543380     1 MetaClient.cpp:85] Waiting for the metad to be ready!
W0112 12:29:13.543478     1 FileBasedClusterIdMan.cpp:46] Open file failed, error No such file or directory
E0112 12:29:16.549551    49 MetaClient.cpp:581] Send request to [9.134.1.150:9559], exceed retry limit
E0112 12:29:16.549628     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0112 12:29:16.549643     1 MetaClient.cpp:85] Waiting for the metad to be ready!
W0112 12:29:18.549729     1 FileBasedClusterIdMan.cpp:46] Open file failed, error No such file or directory
E0112 12:29:21.554584    50 MetaClient.cpp:581] Send request to [9.134.3.25:9559], exceed retry limit
E0112 12:29:21.554664     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0112 12:29:21.554679     1 MetaClient.cpp:85] Waiting for the metad to be ready!
W0112 12:29:23.554770     1 FileBasedClusterIdMan.cpp:46] Open file failed, error No such file or directory
I0112 12:29:24.560086    51 MetaClient.cpp:2283] Persisit the cluster Id from metad 6252170666233338811
I0112 12:29:24.560132    51 FileBasedClusterIdMan.cpp:23] Remove the existed file cluster.id
I0112 12:29:24.560173    51 FileBasedClusterIdMan.cpp:36] Persiste clusterId 6252170666233338811 succeeded!
I0112 12:29:24.580257     1 MetaClient.cpp:2777] Register gflags ok 12
I0112 12:29:24.580881     1 MetaClient.cpp:2940] Update config rocksdb_column_family_options from {"write_buffer_size":"67108864","max_write_buffer_number":"4","max_bytes_for_level_base":"268435456"} to {"disable_auto_compactions":"true"}
I0112 12:29:24.580922     1 MetaClient.cpp:2940] Update config wal_ttl from 14400 to 3600

现在不是正常了吗,我看meta收到graph的心跳了,而且graph也向meta注册了数据。

并不行哈,


console也无法访问

你上面截图的graph的日志表示它已经启动了,而且没有任何报错,你确定那个日志就是全部了吗?后面是不是还有日志,有的话贴出来下。

都是tail -n100 *.INFO截取的日志,还有日志时间为什么和机器时间不一致?
graph 日志

# tail -n100 nebula-graphd.INFO 
I0113 02:00:56.267719    15 ThriftClientManager.inl:33] There is no existing client to [9.134.1.150:9559], trying to create one
I0113 02:00:56.267792    15 MetaClient.cpp:542] Send request to meta [9.134.1.150:9559]
I0113 02:00:56.267843    15 ThriftClientManager.inl:55] Connecting to [9.134.1.150:9559] for 1 times
I0113 02:00:56.268271    15 AsyncSocket.cpp:2229] AsyncSocket::handleConnect(this=0x7f915762c010, fd=78 host=9.134.1.150:9559) exception: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused)
I0113 02:00:57.273788    15 ThriftClientManager.inl:21] Getting a client to [9.134.3.25:9559]
I0113 02:00:57.273910    15 ThriftClientManager.inl:33] There is no existing client to [9.134.3.25:9559], trying to create one
I0113 02:00:57.273998    15 MetaClient.cpp:542] Send request to meta [9.134.3.25:9559]
I0113 02:00:57.274053    15 ThriftClientManager.inl:55] Connecting to [9.134.3.25:9559] for 2 times
I0113 02:00:58.280983    15 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.281083    15 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0113 02:00:58.281149    15 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.281199    15 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 3 times
I0113 02:00:58.285775    15 MetaClient.cpp:2294] Metad last update time: 1609922881976
I0113 02:00:58.285939    16 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.286043    16 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0113 02:00:58.286116    16 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.286175    16 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 1 times
I0113 02:00:58.287809    17 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.287895    17 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0113 02:00:58.287963    17 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.288017    17 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 1 times
I0113 02:00:58.289535    18 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.289605    18 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0113 02:00:58.289669    18 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.289722    18 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 1 times
I0113 02:00:58.290673    19 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.290750    19 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0113 02:00:58.290812    19 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.290868    19 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 1 times
I0113 02:00:58.291864    20 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.291952    20 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0113 02:00:58.292023    20 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.292080    20 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 1 times
I0113 02:00:58.293752     1 MetaClient.cpp:211] Load space 1, parts num:100
I0113 02:00:58.293843    13 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.293917    13 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.293979    13 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 5 times
I0113 02:00:58.295040    14 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.295114    14 ThriftClientManager.inl:33] There is no existing client to [9.134.55.213:9559], trying to create one
I0113 02:00:58.295169    14 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.295219    14 ThriftClientManager.inl:55] Connecting to [9.134.55.213:9559] for 5 times
I0113 02:00:58.296674    15 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.296742    15 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.297785    16 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.297861    16 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.298424    17 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.298507    17 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.299692    18 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.299751    18 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.300230    19 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.300303    19 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.300773     1 MetaClient.cpp:211] Load space 6, parts num:10
I0113 02:00:58.300844    20 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.300906    20 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.301409    13 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.301506    13 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.302021    14 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.302081    14 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.303139    15 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.303200    15 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.304255    16 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.304327    16 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.304827    17 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.304893    17 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.305977    18 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.306033    18 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.306529     1 MetaClient.cpp:211] Load space 29, parts num:15
I0113 02:00:58.306607    19 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.306665    19 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.307173    20 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.307227    20 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.307768    13 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.307826    13 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.308312    14 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.308367    14 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.309434    15 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.309481    15 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.310518    16 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.310585    16 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.311079    17 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.311151    17 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.312399     1 MetaClient.cpp:211] Load space 45, parts num:100
I0113 02:00:58.312485    18 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.312542    18 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.312999    19 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.313060    19 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.313550    20 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.313606    20 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.314033    13 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.314095    13 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.314549    14 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.314604    14 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.315623    15 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.315675    15 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.316718    16 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.316772    16 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.317253     1 MetaClient.cpp:2777] Register gflags ok 5
I0113 02:00:58.317340    17 ThriftClientManager.inl:21] Getting a client to [9.134.55.213:9559]
I0113 02:00:58.317400    17 MetaClient.cpp:542] Send request to meta [9.134.55.213:9559]
I0113 02:00:58.318512     1 MetaClient.cpp:2940] Update config v from 2 to 0

meta日志

 tail -n100 nebula-metad.INFO 
Log file created at: 2021/01/13 02:00:52
Running on machine: VM-3-25-centos
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
I0113 02:00:52.533753     1 MetaDaemon.cpp:214] identify myself as [9.134.3.25:9559]
I0113 02:00:52.538251     1 NebulaStore.cpp:47] Start the raft service...
I0113 02:00:52.538626     1 RaftexService.cpp:65] Init thrift server for raft service, port: 9560
I0113 02:00:52.538709    50 RaftexService.cpp:99] Starting the Raftex Service
I0113 02:00:52.542376    50 RaftexService.cpp:87] Starting the Raftex Service on 9560
I0113 02:00:52.542394    50 RaftexService.cpp:111] Start the Raftex Service successfully
I0113 02:00:52.542485     1 NebulaStore.cpp:75] Scan the local path, and init the spaces_
I0113 02:00:52.542557     1 NebulaStore.cpp:81] Scan path "/data/meta/0"
I0113 02:00:52.567085     1 RocksEngine.cpp:105] open rocksdb on /data/meta/nebula/0/data
I0113 02:00:52.567102     1 NebulaStore.cpp:111] Load space 0 from disk
I0113 02:00:52.567175     1 NebulaStore.cpp:146] Need to open 1 parts of space 0
I0113 02:00:52.838467    46 FileBasedWal.cpp:65] [Port: 9560, Space: 0, Part: 0] lastLogId in wal is 3493779, lastLogTerm is 39, path is /data/meta/nebula/0/wal/0/0000000000003341908.wal
I0113 02:00:52.838567    46 RaftPart.cpp:295] [Port: 9560, Space: 0, Part: 0] There are 2 peer hosts, and total 3 copies. The quorum is 2, as learner 0, lastLogId 3493779, lastLogTerm 39, committedLogId 3493778, term 39
I0113 02:00:52.838575    46 RaftPart.cpp:308] [Port: 9560, Space: 0, Part: 0] Add peer [9.134.1.150:9560]
I0113 02:00:52.838595    46 RaftPart.cpp:308] [Port: 9560, Space: 0, Part: 0] Add peer [9.134.55.213:9560]
I0113 02:00:52.838932    46 NebulaStore.cpp:177] Load part 0, 0 from disk
I0113 02:00:52.838966     1 NebulaStore.cpp:192] Load space 0 complete
I0113 02:00:52.838989     1 NebulaStore.cpp:201] Init data from partManager for [9.134.3.25:9559]
I0113 02:00:52.839008     1 NebulaStore.cpp:293] Data space 0 has existed!
I0113 02:00:52.839015     1 NebulaStore.cpp:322] [Space: 0, Part: 0] has existed!
I0113 02:00:52.839063     1 NebulaStore.cpp:68] Register handler...
I0113 02:00:52.839082     1 MetaDaemon.cpp:99] Waiting for the leader elected...
I0113 02:00:52.839087     1 MetaDaemon.cpp:112] Leader has not been elected, sleep 1s
I0113 02:00:53.682977    43 RaftPart.cpp:1360] [Port: 9560, Space: 0, Part: 0] Recieved a VOTING request: space = 0, partition = 0, candidateAddr = 9.134.55.213:9560, term = 54, lastLogId = 3493779, lastLogTerm = 39
I0113 02:00:53.683010    43 RaftPart.cpp:1393] [Port: 9560, Space: 0, Part: 0] The partition currently is a Follower, lastLogId 3493779, lastLogTerm 39, committedLogId 3493778, term 39
I0113 02:00:53.683020    43 RaftPart.cpp:1458] [Port: 9560, Space: 0, Part: 0] The partition will vote for the candidate
I0113 02:00:53.684757    43 RaftPart.cpp:1747] [Port: 9560, Space: 0, Part: 0] The current role is Follower. Will follow the new leader 9.134.55.213:9560 [Term: 54]
I0113 02:00:53.684813    46 Part.cpp:191] [Port: 9560, Space: 0, Part: 0] Find the new leader [9.134.55.213:9560]
I0113 02:00:53.881263     1 MetaDaemon.cpp:144] Nebula store init succeeded, clusterId 6252170666233338811
I0113 02:00:53.881291     1 MetaDaemon.cpp:227] Start http service
I0113 02:00:53.881531     1 MetaDaemon.cpp:152] Starting Meta HTTP Service
I0113 02:00:53.883127    92 WebService.cpp:143] Web service started on HTTP[19559], HTTP2[19560]
I0113 02:00:53.883776     1 JobManager.cpp:58] JobManager initialized
I0113 02:00:53.883802   108 JobManager.cpp:81] JobManager::runJobBackground() enter
I0113 02:00:53.886381     1 StatsManager.cpp:94] registerHisto, bucketSize: 1000, min: 1, max: 1000000
I0113 02:00:53.886423     1 MetaDaemon.cpp:284] The meta deamon start on [9.134.3.25:9559]
I0113 02:00:54.176229   121 HBProcessor.cpp:29] Receive heartbeat from [0.0.0.0:9699], role = GRAPH
E0113 02:00:54.176301   121 RaftPart.cpp:367] [Port: 9560, Space: 0, Part: 0] The partition is not a leader
E0113 02:00:54.176486   121 RaftPart.cpp:687] [Port: 9560, Space: 0, Part: 0] Cannot append logs, clean the buffer
I0113 02:00:57.275537   121 HBProcessor.cpp:29] Receive heartbeat from [0.0.0.0:9699], role = GRAPH
I0113 02:01:09.390996   121 HBProcessor.cpp:29] Receive heartbeat from [9.134.1.150:9779], role = STORAGE
I0113 02:01:09.391109   121 HBProcessor.cpp:36] Set clusterId for new host [9.134.1.150:9779]!

storage日志

Log file created at: 2021/01/13 02:00:34
Running on machine: VM-3-25-centos
Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
I0113 02:00:34.770138     1 StorageDaemon.cpp:82] host = [9.134.3.25:9779]
I0113 02:00:34.775146     1 MetaClient.cpp:47] Create meta client to [9.134.55.213:9559]
I0113 02:00:34.775642     1 GflagsManager.cpp:138] Prepare to register 12 gflags to meta
W0113 02:00:34.775681     1 FileBasedClusterIdMan.cpp:46] Open file failed, error No such file or directory
E0113 02:00:37.789971    46 MetaClient.cpp:581] Send request to [9.134.1.150:9559], exceed retry limit
E0113 02:00:37.790172     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0113 02:00:37.790185     1 MetaClient.cpp:85] Waiting for the metad to be ready!
W0113 02:00:39.790287     1 FileBasedClusterIdMan.cpp:46] Open file failed, error No such file or directory
E0113 02:00:42.804893    47 MetaClient.cpp:581] Send request to [9.134.3.25:9559], exceed retry limit
E0113 02:00:42.804976     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0113 02:00:42.804991     1 MetaClient.cpp:85] Waiting for the metad to be ready!
W0113 02:00:44.805083     1 FileBasedClusterIdMan.cpp:46] Open file failed, error No such file or directory
E0113 02:00:47.822683    48 MetaClient.cpp:581] Send request to [9.134.55.213:9559], exceed retry limit
E0113 02:00:47.822759     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0113 02:00:47.822772     1 MetaClient.cpp:85] Waiting for the metad to be ready!
W0113 02:00:49.822867     1 FileBasedClusterIdMan.cpp:46] Open file failed, error No such file or directory
E0113 02:00:52.836247    49 MetaClient.cpp:581] Send request to [9.134.55.213:9559], exceed retry limit
E0113 02:00:52.836344     1 MetaClient.cpp:60] Heartbeat failed, status:RPC failure in MetaClient: N6apache6thrift9transport19TTransportExceptionE: AsyncSocketException: connect failed, type = Socket not open, errno = 111 (Connection refused): Connection refused
I0113 02:00:52.836382     1 MetaClient.cpp:85] Waiting for the metad to be ready!
W0113 02:00:54.836477     1 FileBasedClusterIdMan.cpp:46] Open file failed, error No such file or directory
I0113 02:00:54.840898    50 MetaClient.cpp:2283] Persisit the cluster Id from metad 6252170666233338811
I0113 02:00:54.840937    50 FileBasedClusterIdMan.cpp:23] Remove the existed file cluster.id
I0113 02:00:54.840981    50 FileBasedClusterIdMan.cpp:36] Persiste clusterId 6252170666233338811 succeeded!
I0113 02:00:54.879956     1 MetaClient.cpp:2777] Register gflags ok 12
I0113 02:00:54.881131     1 MetaClient.cpp:2940] Update config rocksdb_column_family_options from {"write_buffer_size":"67108864","max_write_buffer_number":"4","max_bytes_for_level_base":"268435456"} to {"disable_auto_compactions":"true"}
I0113 02:00:54.881158     1 MetaClient.cpp:2940] Update config wal_ttl from 14400 to 3600

都是tail -n100 *.INFO截取的日志,还有日志时间为什么和机器时间不一致?

这是因为你的 docker 容器使用的时区是 UTC

你可以 docker logs graph_container_id 看下graph的容器有没有异常,日志没有任何异常,是不是容器有啥异常所以退出了。

docker 对应的容器早就退出了

CONTAINER ID        IMAGE                                                 COMMAND                  CREATED             STATUS                       PORTS               NAMES
5d22fd2c8092        vesoft/nebula-metad:v2.0.0-rc1                        "./bin/nebula-metad …"   59 minutes ago      Up 59 minutes (healthy)                          nebula_metad1.1.s5w0p6iz0ls8jbv0w7iuw6u5c
c158daa5b4ab        vesoft/nebula-graphd:v2.0.0-rc1                       "./bin/nebula-graphd…"   About an hour ago   Exited (0) 58 minutes ago                        nebula_graphd1.1.o4kjt3748tjwf4v9opimfhyc0
be2b42846bb6        vesoft/nebula-storaged:v2.0.0-rc1                     "./bin/nebula-storag…"   About an hour ago   Up About an hour (healthy)                       nebula_storaged1.1.vry4gljoslj0mnzu2uq2nfcvp

我通过 docker inspect 得到 LogPath,查看 LogPath 对应的文件,里面是没有信息的

# docker inspect c158daa5b4ab |grep log
            "--log_dir=/logs",
            "--minloglevel=0"
        "LogPath": "/var/lib/docker/containers/c158daa5b4ab15faf0d9582b9a6de541abd47f64be477e6838bacfd3c6ee6cb8/c158daa5b4ab15faf0d9582b9a6de541abd47f64be477e6838bacfd3c6ee6cb8-json.log",
                    "Source": "/data/nebula/logs/graphd",
                    "Target": "/logs"
                "Source": "/data/nebula/logs/graphd",
                "Destination": "/logs",
                "--log_dir=/logs",
                "--minloglevel=0"

你可以先把你的服务停止,把你的data目录的数据先拷贝备份,然后删除掉data目录,重新启动试下?

浙ICP备20010487号