"The compute node's application failed to start. For details, see System Monitoring - Experiment Monitoring - View Logs!"
Caused by: org.apache.spark.SparkException: Invalid Spark URL: spark://HeartbeatReceiver@乐乐的本子:7777
at org.apache.spark.rpc.RpcEndpointAddress$.apply(RpcEndpointAddress.scala:66) ~[spark-core_2.12-3.1.2.jar:3.1.2]
at org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:140) ~[spark-core_2.12-3.1.2.jar:3.1.2]
at org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:101) ~[spark-core_2.12-3.1.2.jar:3.1.2]
at org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:109) ~[spark-core_2.12-3.1.2.jar:3.1.2]
at org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:36) ~[spark-core_2.12-3.1.2.jar:3.1.2]
at org.apache.spark.executor.Executor.<init>(Executor.scala:218) ~[spark-core_2.12-3.1.2.jar:3.1.2]
at org.apache.spark.scheduler.local.LocalEndpoint.<init>(LocalSchedulerBackend.scala:64) ~[spark-core_2.12-3.1.2.jar:3.1.2]
at org.apache.spark.scheduler.local.LocalSchedulerBackend.start(LocalSchedulerBackend.scala:132) ~[spark-core_2.12-3.1.2.jar:3.1.2]
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:220) ~[spark-core_2.12-3.1.2.jar:3.1.2]
at org.apache.spark.SparkContext.<init>(SparkContext.scala:579) ~[spark-core_2.12-3.1.2.jar:3.1.2]
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2672) ~[spark-core_2.12-3.1.2.jar:3.1.2]
at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:945) ~[spark-sql_2.12-3.1.2.jar:3.1.2]
at scala.Option.getOrElse(Option.scala:189) ~[scala-library-2.12.10.jar:?]
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:939) ~[spark-sql_2.12-3.1.2.jar:3.1.2]
at smartbix.datamining.engine.agent.spark.DefaultSparkSessionFactory.getOrCreate(DefaultSparkSessionFactory.java:96) ~[EngineAgent-1.0.jar:?]
... 5 more
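The RPC URL the driver advertises, spark://HeartbeatReceiver@乐乐的本子:7777, embeds the machine's host name. Because that name contains non-ASCII (Chinese) characters, RpcEndpointAddress rejects it and the SparkSession never starts. Below is a minimal workaround sketch, assuming you can adjust the configuration before the session is created; the class name and appName are illustrative, while spark.driver.host and spark.driver.bindAddress are standard Spark configuration keys that control the host name used in the RPC URL.

import org.apache.spark.sql.SparkSession;

public class LocalSparkSessionExample {
    public static void main(String[] args) {
        // Force the driver to advertise an ASCII host name so the RPC URL
        // (spark://HeartbeatReceiver@<host>:<port>) stays parseable even when
        // the machine name contains Chinese characters.
        SparkSession spark = SparkSession.builder()
                .appName("invalid-spark-url-workaround")          // illustrative app name
                .master("local[*]")
                .config("spark.driver.host", "localhost")         // advertise a valid host name
                .config("spark.driver.bindAddress", "127.0.0.1")  // bind locally as well
                .getOrCreate();

        spark.range(10).show();   // simple smoke test that the session started
        spark.stop();
    }
}

Alternatively, setting the environment variable SPARK_LOCAL_HOSTNAME=localhost before launching the compute node, or renaming the machine to an ASCII-only host name, achieves the same effect without touching code.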