Spark Connector connection error

Question template for reference:

  • NebulaGraph version: 3.2.0
  • Deployment: distributed / standalone
  • Installation: RPM
  • Production environment: Y
  • Hardware info
    • Disk (SSD recommended)
    • CPU and memory

Error log from the YARN ApplicationMaster:

2022-09-08 19:12:19,935 ERROR org.apache.spark.deploy.yarn.ApplicationMaster               - User class threw exception: com.facebook.thrift.transport.TTransportException: java.net.ConnectException: Connection timed out (Connection timed out)
com.facebook.thrift.transport.TTransportException: java.net.ConnectException: Connection timed out (Connection timed out)
        at com.facebook.thrift.transport.TSocket.open(TSocket.java:206) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.vesoft.nebula.client.meta.MetaClient.getClient(MetaClient.java:148) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.vesoft.nebula.client.meta.MetaClient.doConnect(MetaClient.java:127) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.vesoft.nebula.client.meta.MetaClient.connect(MetaClient.java:116) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.vesoft.nebula.connector.nebula.MetaProvider.<init>(MetaProvider.scala:53) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.vesoft.nebula.connector.reader.NebulaSourceReader.getSchema(NebulaSourceReader.scala:44) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.vesoft.nebula.connector.reader.NebulaSourceReader.getSchema(NebulaSourceReader.scala:35) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.vesoft.nebula.connector.reader.NebulaDataSourceVertexReader$$anonfun$1.apply(NebulaSourceReader.scala:114) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.vesoft.nebula.connector.reader.NebulaDataSourceVertexReader$$anonfun$1.apply(NebulaSourceReader.scala:112) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at scala.collection.immutable.Range.foreach(Range.scala:160) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at scala.collection.AbstractTraversable.map(Traversable.scala:104) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.vesoft.nebula.connector.reader.NebulaDataSourceVertexReader.planInputPartitions(NebulaSourceReader.scala:112) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanExec.partitions$lzycompute(DataSourceV2ScanExec.scala:76) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanExec.partitions(DataSourceV2ScanExec.scala:75) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanExec.outputPartitioning(DataSourceV2ScanExec.scala:65) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.exchange.EnsureRequirements$$anonfun$org$apache$spark$sql$execution$exchange$EnsureRequirements$$ensureDistributionAndOrdering$1.apply(EnsureRequirements.scala:55) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.exchange.EnsureRequirements$$anonfun$org$apache$spark$sql$execution$exchange$EnsureRequirements$$ensureDistributionAndOrdering$1.apply(EnsureRequirements.scala:54) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at scala.collection.immutable.List.foreach(List.scala:392) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at scala.collection.immutable.List.map(List.scala:296) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at org.apache.spark.sql.execution.exchange.EnsureRequirements.org$apache$spark$sql$execution$exchange$EnsureRequirements$$ensureDistributionAndOrdering(EnsureRequirements.scala:54) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.exchange.EnsureRequirements$$anonfun$apply$1.applyOrElse(EnsureRequirements.scala:209) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.exchange.EnsureRequirements$$anonfun$apply$1.applyOrElse(EnsureRequirements.scala:201) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:280) ~[spark-catalyst_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:280) ~[spark-catalyst_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:69) ~[spark-catalyst_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:279) ~[spark-catalyst_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) ~[spark-catalyst_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:277) ~[spark-catalyst_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:328) ~[spark-catalyst_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:186) ~[spark-catalyst_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:326) ~[spark-catalyst_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:277) ~[spark-catalyst_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.exchange.EnsureRequirements.apply(EnsureRequirements.scala:201) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.exchange.EnsureRequirements.apply(EnsureRequirements.scala:37) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.QueryExecution$$anonfun$prepareForExecution$2.apply(QueryExecution.scala:103) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.QueryExecution$$anonfun$prepareForExecution$2.apply(QueryExecution.scala:103) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at scala.collection.immutable.List.foldLeft(List.scala:84) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at org.apache.spark.sql.execution.QueryExecution.prepareForExecution(QueryExecution.scala:103) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:84) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:84) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.QueryExecution$$anonfun$toString$3.apply(QueryExecution.scala:235) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.QueryExecution$$anonfun$toString$3.apply(QueryExecution.scala:235) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.QueryExecution.stringOrError(QueryExecution.scala:127) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.QueryExecution.toString(QueryExecution.scala:235) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:128) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:76) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:676) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:260) ~[spark-sql_2.11-2.4.5.jar:2.4.5]
        at com.vesoft.nebula.connector.connector.package$NebulaDataFrameWriter.writeVertices(package.scala:287) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.shizhuang.duapp.risk.NebulaSpaceSparkExchange$.writeVertex(NebulaSpaceSparkExchange.scala:101) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.shizhuang.duapp.risk.NebulaSpaceSparkExchange$$anonfun$main$1.apply(NebulaSpaceSparkExchange.scala:47) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.shizhuang.duapp.risk.NebulaSpaceSparkExchange$$anonfun$main$1.apply(NebulaSpaceSparkExchange.scala:39) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at scala.collection.immutable.List.foreach(List.scala:392) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.shizhuang.duapp.risk.NebulaSpaceSparkExchange$.main(NebulaSpaceSparkExchange.scala:39) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at com.shizhuang.duapp.risk.NebulaSpaceSparkExchange.main(NebulaSpaceSparkExchange.scala) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_111-AliJVM]
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_111-AliJVM]
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_111-AliJVM]
        at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_111-AliJVM]
        at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:685) [spark-yarn_2.11-2.4.5.jar:2.4.5]
Caused by: java.net.ConnectException: Connection timed out (Connection timed out)
        at java.net.PlainSocketImpl.socketConnect(Native Method) ~[?:1.8.0_111-AliJVM]
        at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_111-AliJVM]
        at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_111-AliJVM]
        at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_111-AliJVM]
        at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_111-AliJVM]
        at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_111-AliJVM]
        at com.facebook.thrift.transport.TSocket.open(TSocket.java:201) ~[631988F94E0738220007408A_db7c992bd1d0469182b5845cde9fc4f2.jar:?]
        ... 67 more
2022-09-08 19:12:19,949 INFO  org.apache.spark.deploy.yarn.ApplicationMaster               - Final app status: FAILED, exitCode: 15, (reason: User class threw exception: com.facebook.thrift.transport.TTransportException: java.net.ConnectException: Connection timed out (Connection timed out)
        [... stack trace identical to the one above, elided ...]

This is a connection timeout. Check that every machine in your YARN cluster can reach the NebulaGraph services, and that all of the NebulaGraph services themselves are running normally.
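To verify reachability, a quick port probe from each YARN NodeManager host is usually enough. Here is a minimal Scala sketch using only `java.net`; the hostnames are placeholders, and the ports assume the NebulaGraph 3.x defaults (metad 9559, graphd 9669, storaged 9779) — substitute your actual addresses:

```scala
import java.net.{InetSocketAddress, Socket}

// Minimal reachability probe: run from each YARN NodeManager host.
// Hostnames are placeholders; ports are the NebulaGraph 3.x defaults.
object NebulaPortProbe {
  def probe(host: String, port: Int, timeoutMs: Int = 3000): Unit = {
    val socket = new Socket()
    try {
      socket.connect(new InetSocketAddress(host, port), timeoutMs)
      println(s"OK      $host:$port")
    } catch {
      case e: Exception => println(s"FAILED  $host:$port -> ${e.getMessage}")
    } finally {
      socket.close()
    }
  }

  def main(args: Array[String]): Unit = {
    Seq(
      ("metad-host", 9559),   // meta service -- the one failing in the trace above
      ("graphd-host", 9669),  // graph service
      ("storaged-host", 9779) // storage service
    ).foreach { case (host, port) => probe(host, port) }
  }
}
```

If any of these probes fails from a NodeManager host, the problem is firewall/routing between YARN and NebulaGraph rather than the connector itself.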

I don't know whether your code reads or writes, so check graphd, metad, and storaged on the NebulaGraph side either way.
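For context: the trace above fails inside `MetaClient.connect`, and a connector read first contacts metad for the schema, then storaged to scan data, so the meta addresses in the connection config must be reachable from every executor. Below is a rough sketch of a read configuration; the builder method names follow the nebula-spark-connector 3.x docs as best I recall (verify against your connector version), and the addresses, space, and tag names are placeholders:

```scala
import com.vesoft.nebula.connector.connector.NebulaDataFrameReader
import com.vesoft.nebula.connector.{NebulaConnectionConfig, ReadNebulaConfig}
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().appName("nebula-read").getOrCreate()

// Connection config: meta addresses must be reachable from every YARN node.
val connConfig = NebulaConnectionConfig
  .builder()
  .withMetaAddress("metad0:9559,metad1:9559") // placeholder addresses
  .withTimeout(60000)                         // connection timeout in ms
  .withConenctionRetry(3)                     // sic -- spelled this way in the connector source
  .build()

// Read config: which space/tag to scan and how many Spark partitions to use.
val readConfig = ReadNebulaConfig
  .builder()
  .withSpace("your_space")
  .withLabel("your_tag")
  .withNoColumn(false)
  .withReturnCols(List())
  .withLimit(1000)
  .withPartitionNum(10)
  .build()

val df = spark.read.nebula(connConfig, readConfig).loadVerticesToDF()
df.show(5)
```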

It's reading data. It succeeded a few times and then started failing; judging from the underlying code, it looks like a network-level problem.
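If the network is only intermittently flaky, one stopgap (not a fix for the underlying connectivity) is to wrap the read in a simple retry with backoff. A generic Scala sketch, assuming the read can be safely re-run:

```scala
// Generic retry-with-backoff helper: a stopgap for transient connect failures,
// not a substitute for fixing YARN-to-NebulaGraph connectivity.
def retry[T](attempts: Int, backoffMs: Long = 5000L)(op: => T): T =
  try op
  catch {
    case e: Exception if attempts > 1 =>
      println(s"attempt failed (${e.getMessage}); retrying in $backoffMs ms")
      Thread.sleep(backoffMs)
      retry(attempts - 1, backoffMs * 2)(op)
  }

// Usage sketch, reusing the configs from the snippet above:
// val df = retry(3) { spark.read.nebula(connConfig, readConfig).loadVerticesToDF() }
```

If even retries fail only on some runs, compare which YARN nodes the failing attempts were scheduled on — an unreachable subset of nodes would produce exactly this intermittent pattern.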
