Hive on Spark: Failed to create spark client (java.lang.NoSuchFieldError: SPARK_RPC_SERVER_ADDRESS)
org.apache.hive.service.cli.HiveSQLException: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.spark.SparkTask. Failed to create spark client.
    at org.apache.hive.service.cli.operation.Operation.toSQLException(Operation.java:380)
    at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:257)
    at org.apache.hive.service.cli.operation.SQLOperation.access$800(SQLOperation.java:91)
    at org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:348)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
    at org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:362)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Failed to create spark client.
    at org.apache.hadoop.hive.ql.exec.spark.session.SparkSessionImpl.open(SparkSessionImpl.java:64)
    at org.apache.hadoop.hive.ql.exec.spark.session.SparkSessionManagerImpl.getSession(SparkSessionManagerImpl.java:115)
    at org.apache.hadoop.hive.ql.exec.spark.SparkUtilities.getSparkSession(SparkUtilities.java:126)
    at org.apache.hadoop.hive.ql.exec.spark.SparkTask.execute(SparkTask.java:103)
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:199)
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2183)
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1839)
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1526)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1237)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1232)
    at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:255)
    ... 11 more
Caused by: java.lang.RuntimeException: java.util.concurrent.ExecutionException: java.lang.RuntimeException: Cancel client '5e600ca4-e724-434c-a48b-c29097c39c7d'. Error: Child process exited before connecting back with error log
Warning: Ignoring non-spark config property: hive.spark.client.server.connect.timeout=90000
Warning: Ignoring non-spark config property: hive.spark.client.rpc.threads=8
Warning: Ignoring non-spark config property: hive.spark.client.connect.timeout=1000
Warning: Ignoring non-spark config property: hive.spark.client.secret.bits=256
Warning: Ignoring non-spark config property: hive.spark.client.rpc.max.size=52428800
18/12/21 10:36:08 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
18/12/21 10:36:08 INFO RemoteDriver: Connecting to: dda:11184
Exception in thread "main" java.lang.NoSuchFieldError: SPARK_RPC_SERVER_ADDRESS
    at org.apache.hive.spark.client.rpc.RpcConfiguration.<clinit>(RpcConfiguration.java:47)
    at org.apache.hive.spark.client.RemoteDriver.<init>(RemoteDriver.java:134)
    at org.apache.hive.spark.client.RemoteDriver.main(RemoteDriver.java:516)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:894)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
18/12/21 10:36:08 INFO ShutdownHookManager: Shutdown hook called
18/12/21 10:36:08 INFO ShutdownHookManager: Deleting directory /tmp/spark-6a9d937a-5e5a-4314-846e-a2feb5d7fde9
    at com.google.common.base.Throwables.propagate(Throwables.java:160)
    at org.apache.hive.spark.client.SparkClientImpl.<init>(SparkClientImpl.java:125)
    at org.apache.hive.spark.client.SparkClientFactory.createClient(SparkClientFactory.java:80)
    at org.apache.hadoop.hive.ql.exec.spark.RemoteHiveSparkClient.createRemoteClient(RemoteHiveSparkClient.java:101)
    at org.apache.hadoop.hive.ql.exec.spark.RemoteHiveSparkClient.<init>(RemoteHiveSparkClient.java:97)
    at org.apache.hadoop.hive.ql.exec.spark.HiveSparkClientFactory.createHiveSparkClient(HiveSparkClientFactory.java:73)
    at org.apache.hadoop.hive.ql.exec.spark.session.SparkSessionImpl.open(SparkSessionImpl.java:62)
    ... 22 more
Caused by: java.util.concurrent.ExecutionException: java.lang.RuntimeException: Cancel client '5e600ca4-e724-434c-a48b-c29097c39c7d'. Error: Child process exited before connecting back with error log
Warning: Ignoring non-spark config property: hive.spark.client.server.connect.timeout=90000
Warning: Ignoring non-spark config property: hive.spark.client.rpc.threads=8
Warning: Ignoring non-spark config property: hive.spark.client.connect.timeout=1000
Warning: Ignoring non-spark config property: hive.spark.client.secret.bits=256
Warning: Ignoring non-spark config property: hive.spark.client.rpc.max.size=52428800
18/12/21 10:36:08 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
18/12/21 10:36:08 INFO RemoteDriver: Connecting to: dda:11184
Exception in thread "main" java.lang.NoSuchFieldError: SPARK_RPC_SERVER_ADDRESS
    at org.apache.hive.spark.client.rpc.RpcConfiguration.<clinit>(RpcConfiguration.java:47)
    at org.apache.hive.spark.client.RemoteDriver.<init>(RemoteDriver.java:134)
    at org.apache.hive.spark.client.RemoteDriver.main(RemoteDriver.java:516)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:894)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
18/12/21 10:36:08 INFO ShutdownHookManager: Shutdown hook called
18/12/21 10:36:08 INFO ShutdownHookManager: Deleting directory /tmp/spark-6a9d937a-5e5a-4314-846e-a2feb5d7fde9
    at io.netty.util.concurrent.AbstractFuture.get(AbstractFuture.java:41)
    at org.apache.hive.spark.client.SparkClientImpl.<init>(SparkClientImpl.java:109)
    ... 27 more
Caused by: java.lang.RuntimeException: Cancel client '5e600ca4-e724-434c-a48b-c29097c39c7d'. Error: Child process exited before connecting back with error log
Warning: Ignoring non-spark config property: hive.spark.client.server.connect.timeout=90000
Warning: Ignoring non-spark config property: hive.spark.client.rpc.threads=8
Warning: Ignoring non-spark config property: hive.spark.client.connect.timeout=1000
Warning: Ignoring non-spark config property: hive.spark.client.secret.bits=256
Warning: Ignoring non-spark config property: hive.spark.client.rpc.max.size=52428800
18/12/21 10:36:08 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
18/12/21 10:36:08 INFO RemoteDriver: Connecting to: dda:11184
Exception in thread "main" java.lang.NoSuchFieldError: SPARK_RPC_SERVER_ADDRESS
    at org.apache.hive.spark.client.rpc.RpcConfiguration.<clinit>(RpcConfiguration.java:47)
    at org.apache.hive.spark.client.RemoteDriver.<init>(RemoteDriver.java:134)
    at org.apache.hive.spark.client.RemoteDriver.main(RemoteDriver.java:516)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:894)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
18/12/21 10:36:08 INFO ShutdownHookManager: Shutdown hook called
18/12/21 10:36:08 INFO ShutdownHookManager: Deleting directory /tmp/spark-6a9d937a-5e5a-4314-846e-a2feb5d7fde9
    at org.apache.hive.spark.client.rpc.RpcServer.cancelClient(RpcServer.java:212)
    at org.apache.hive.spark.client.SparkClientImpl$3.run(SparkClientImpl.java:503)
    ... 1 more
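The root cause buried at the bottom of the nested trace is the driver child process that HiveServer2 launches through spark-submit: it dies with java.lang.NoSuchFieldError: SPARK_RPC_SERVER_ADDRESS in RpcConfiguration before it can connect back, which is why the outer message is only the generic "Failed to create spark client" (the "Ignoring non-spark config property" warnings are harmless noise). This particular NoSuchFieldError is commonly reported when the Hive classes that end up on the Spark driver's classpath come from a different Hive release than the HiveServer2 that submitted the job, i.e. a Hive/Spark version mismatch, typically a Spark distribution that bundles its own (older) Hive jars. A quick check, sketched below under the assumption of a conventional layout where SPARK_HOME and HIVE_HOME point at the two installations (these paths are not taken from the log), is to compare the Hive jars visible on each side:

    # Hedged sketch: SPARK_HOME / HIVE_HOME are assumed environment variables;
    # adjust to the actual install paths on the cluster.
    # Hive on Spark expects a Spark build WITHOUT bundled Hive jars; any
    # hive-exec*.jar here can shadow Hive's spark-client classes and trigger
    # the NoSuchFieldError seen above.
    ls "$SPARK_HOME"/jars | grep -i hive

    # Hive side: RemoteDriver and RpcConfiguration come from Hive's own
    # hive-exec / spark-client jar, which must match the HiveServer2 release.
    ls "$HIVE_HOME"/lib | grep -iE 'hive-exec|spark-client'

If the versions do not line up, the fix usually reported for this error is to run the Spark version the Hive release was built against (or a Spark build without the Hive profile) and point Hive at it via spark.home / the Spark jars configured in hive-site.xml, rather than tuning the hive.spark.client.* timeouts shown in the warnings.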