A table created in Hive errors out when dropped through Spark SQL, but the table is deleted anyway

Posted 2021-12-03 12:57:50 · 358 views · 1 comment

The error output is below. This is on the Huawei big-data platform. Tables created through Spark SQL drop without any problem; tables created in Hive can also be deleted, but the drop reports the error shown below. Could any expert tell me why?

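(Whether a table will go through the HBase code path on drop can be checked up front by inspecting its storage handler; a minimal check, using the database and table name that appear in the log:)

-- Hypothetical check: for an HBase-backed table, the table parameters in this
-- output include storage_handler=org.apache.hadoop.hive.hbase.HBaseStorageHandler.
DESCRIBE FORMATTED adata.wang_solr;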
17/05/10 17:19:14 ERROR DDLTask: org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:org.apache.hadoop.hbase.TableNotFoundException: wang_solr
	at org.apache.hadoop.hbase.client.HBaseAdmin.checkTableExistence(HBaseAdmin.java:1302)
	at org.apache.hadoop.hbase.client.HBaseAdmin.isTableEnabled(HBaseAdmin.java:1313)
	at org.apache.hadoop.hbase.client.HBaseAdmin.isTableEnabled(HBaseAdmin.java:1322)
	at org.apache.hadoop.hive.hbase.HBaseStorageHandler.commitDropTable(HBaseStorageHandler.java:166)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1092)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1026)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156)
	at com.sun.proxy.$Proxy24.dropTable(Unknown Source)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2107)
	at com.sun.proxy.$Proxy24.dropTable(Unknown Source)
	at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1039)
	at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:976)
	at org.apache.hadoop.hive.ql.exec.DDLTask.dropTable(DDLTask.java:3732)
	at org.apache.hadoop.hive.ql.exec.DDLTask.dropTableOrPartitions(DDLTask.java:3588)
	at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:338)
	at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:158)
	at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:89)
	at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1900)
	at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1659)
	at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1412)
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1241)
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1231)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:946)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:934)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:337)
	at org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:283)
	at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:282)
	at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:325)
	at org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:934)
	at org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:871)
	at org.apache.spark.sql.hive.InnerHiveContext.runSqlHive(InnerHiveContext.scala:815)
	at org.apache.spark.sql.hive.execution.DropTable.run(commands.scala:78)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:57)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:57)
	at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:69)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:151)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:149)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:149)
	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:1253)
	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:1253)
	at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145)
	at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130)
	at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52)
	at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:1060)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:68)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:329)
	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:377)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:232)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:764)
	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:183)
	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:208)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:123)
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
)
	at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1045)
	at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:976)
	at org.apache.hadoop.hive.ql.exec.DDLTask.dropTable(DDLTask.java:3732)
	at org.apache.hadoop.hive.ql.exec.DDLTask.dropTableOrPartitions(DDLTask.java:3588)
	at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:338)
	at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:158)
	at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:89)
	at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1900)
	at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1659)
	at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1412)
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1241)
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1231)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:946)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:934)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:337)
	at org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:283)
	at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:282)
	at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:325)
	at org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:934)
	at org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:871)
	at org.apache.spark.sql.hive.InnerHiveContext.runSqlHive(InnerHiveContext.scala:815)
	at org.apache.spark.sql.hive.execution.DropTable.run(commands.scala:78)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:57)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:57)
	at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:69)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:151)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:149)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:149)
	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:1253)
	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:1253)
	at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145)
	at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130)
	at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52)
	at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:1060)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:68)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:329)
	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:377)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:232)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:764)
	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:183)
	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:208)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:123)
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: MetaException(message:org.apache.hadoop.hbase.TableNotFoundException: wang_solr
	[... stack trace identical to the one above; elided ...]
)
	at org.apache.hadoop.hive.hbase.HBaseStorageHandler.commitDropTable(HBaseStorageHandler.java:172)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1092)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(HiveMetaStoreClient.java:1026)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156)
	at com.sun.proxy.$Proxy24.dropTable(Unknown Source)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2107)
	at com.sun.proxy.$Proxy24.dropTable(Unknown Source)
	at org.apache.hadoop.hive.ql.metadata.Hive.dropTable(Hive.java:1039)
	... 48 more

FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:org.apache.hadoop.hbase.TableNotFoundException: wang_solr
	[... stack trace identical to the one above; elided ...]
)
17/05/10 17:19:14 ERROR Driver: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:org.apache.hadoop.hbase.TableNotFoundException: wang_solr
	[... stack trace identical to the one above; elided ...]
)
17/05/10 17:19:14 ERROR ClientWrapper: 
======================
HIVE FAILURE OUTPUT
======================
SET spark.sql.bigdata.register.preExecutionRule=org.apache.spark.sql.hbase.execution.AddCoprocessor$,org.apache.spark.sql.execution.EnsureRowFormats$
SET spark.sql.ui.retainedExecutions=1000
SET spark.sql.sources.maxConcurrentWrites=5
SET spark.sql.autoBroadcastJoinThreshold=10485760
SET spark.sql.parquet.int96AsTimestamp=true
SET spark.sql.parquet.cacheMetadata=true
SET spark.sql.authorization.enabled=true
SET spark.sql.bigdata.register.dialect=org.apache.spark.sql.hbase.HBaseSQLParser,org.apache.spark.sql.CarbonInternalSqlParser
SET spark.sql.bigdata.register.strategyRule=org.apache.spark.sql.hbase.DummySparkPlanner,org.apache.spark.sql.hive.CarbonStrategy,org.apache.spark.sql.hive.CarbonDDLStrategy
SET spark.sql.sources.parallelPartitionDiscovery.threshold=32
SET spark.sql.inMemoryColumnarStorage.partitionPruning=true
SET spark.sql.bigdata.register.extendedResolutionRule=org.apache.spark.sql.hbase.catalyst.analysis.ReplaceOutput$
SET spark.sql.bigdata.register.analyseRule=org.apache.spark.sql.hive.acl.CarbonAccessControlRules
SET spark.sql.tungsten.enabled=true
SET spark.sql.parquet.binaryAsString=false
SET spark.sql.inMemoryColumnarStorage.batchSize=10000
SET spark.sql.bigdata.initFunction=org.apache.spark.sql.hbase.HBaseEnv,org.apache.spark.sql.CarbonEnv
SET spark.sql.parquet.compression.codec=gzip
SET spark.sql.parquet.filterPushdown=true
SET spark.sql.shuffle.partitions=200
SET spark.sql.caseSensitive=false
SET spark.sql.inMemoryColumnarStorage.compressed=true
SET spark.sql.dialect=org.apache.spark.sql.hive.huawei.BigSQLDialect
SET spark.sql.broadcastTimeout=300
SET hive.support.sql11.reserved.keywords=false

======================
END HIVE FAILURE OUTPUT
======================
          
17/05/10 17:19:14 ERROR SparkSQLDriver: Failed in [drop table adata.wang_solr]
org.apache.spark.sql.execution.QueryExecutionException: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:org.apache.hadoop.hbase.TableNotFoundException: wang_solr
	[... stack trace identical to the one above; elided ...]
)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:951)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:934)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:337)
	at org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:283)
	at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:282)
	at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:325)
	at org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:934)
	at org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:871)
	at org.apache.spark.sql.hive.InnerHiveContext.runSqlHive(InnerHiveContext.scala:815)
	at org.apache.spark.sql.hive.execution.DropTable.run(commands.scala:78)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:57)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:57)
	at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:69)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:151)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:149)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:149)
	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:1253)
	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:1253)
	at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145)
	at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130)
	at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52)
	at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:1060)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:68)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:329)
	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:377)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:232)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:764)
	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:183)
	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:208)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:123)
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)


Comments (1)

勿忘初心 2021-12-04 21:47:55
17/05/10 17:19:14 ERROR DDLTask: org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:org.apache.hadoop.hbase.TableNotFoundException: wang_solr

It says it right there: the table does not exist in HBase. Was the table you dropped a Hive table that had been mapped to HBase?
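That matches the trace: a Hive table created with the HBase storage handler makes DROP TABLE call HBaseStorageHandler.commitDropTable, which asks HBase about the backing table (isTableEnabled) and throws TableNotFoundException because wang_solr no longer exists on the HBase side. The metastore entry has already been removed by that point, which is exactly the reported symptom: the drop errors out, yet the table is gone. A hypothetical sketch of the kind of DDL involved (the column list and mapping are assumptions; only the table name comes from the log):

-- Hive table mapped onto an HBase table of the same name. Dropping it
-- triggers the commitDropTable -> isTableEnabled calls seen in the trace.
CREATE TABLE adata.wang_solr (
  rowkey STRING,
  value  STRING
)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ('hbase.columns.mapping' = ':key,cf:value')
TBLPROPERTIES ('hbase.table.name' = 'wang_solr');

Running exists 'wang_solr' in the hbase shell should confirm that the backing table is gone; if so, the error is noisy but harmless here, since the Hive metadata was dropped anyway.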
