Hi, it turns out this isn't an issue with the tables created later; some of the previously created external tables are failing as well now. The behavior is intermittent: tables that errored before are working again, while new tables are now hitting the same error.
The errors in fe.log are as follows:
2022-08-29 10:09:00,069 WARN (starrocks-mysql-nio-pool-9709|24739) [ConnectProcessor.dispatch():449] Unsupported command(COM_STMT_PREPARE)
2022-08-29 10:09:00,330 WARN (starrocks-mysql-nio-pool-9696|24702) [HiveMetaClient.getPartitionKeys():191] Fail to access meta store of Hive
org.apache.thrift.transport.TTransportException: java.net.SocketTimeoutException: Read timed out
at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:127) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:455) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:354) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:243) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_partition_name_to_vals(ThriftHiveMetastore.java:3528) ~[hive-apache-3.0.0-7.jar:3.0.0-7]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.partition_name_to_vals(ThriftHiveMetastore.java:3515) ~[hive-apache-3.0.0-7.jar:3.0.0-7]
at com.starrocks.external.hive.HiveMetaStoreThriftClient.partitionNameToVals(HiveMetaStoreThriftClient.java:641) ~[starrocks-fe.jar:?]
at sun.reflect.GeneratedMethodAccessor12.invoke(Unknown Source) ~[?:?]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_202]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_202]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:208) ~[hive-apache-3.0.0-7.jar:3.0.0-7]
at com.sun.proxy.$Proxy37.partitionNameToVals(Unknown Source) ~[?:?]
at com.starrocks.external.hive.HiveMetaClient.getPartitionKeys(HiveMetaClient.java:180) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache.loadPartitionKeys(HiveMetaCache.java:118) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache.access$000(HiveMetaCache.java:34) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache$1.load(HiveMetaCache.java:66) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache$1.load(HiveMetaCache.java:63) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:3962) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3985) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.external.hive.HiveMetaCache.getPartitionKeys(HiveMetaCache.java:172) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveRepository.getPartitionKeys(HiveRepository.java:123) ~[starrocks-fe.jar:?]
at com.starrocks.external.HiveMetaStoreTableUtils.getPartitionKeys(HiveMetaStoreTableUtils.java:65) ~[starrocks-fe.jar:?]
at com.starrocks.catalog.HiveTable.getPartitionKeys(HiveTable.java:161) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.rule.transformation.RemoteScanPartitionPruneRule.initPartitionInfo(RemoteScanPartitionPruneRule.java:92) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.rule.transformation.RemoteScanPartitionPruneRule.transform(RemoteScanPartitionPruneRule.java:63) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.TopDownRewriteTask.doExecute(TopDownRewriteTask.java:49) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.TopDownRewriteOnceTask.execute(TopDownRewriteOnceTask.java:31) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.SeriallyTaskScheduler.executeTasks(SeriallyTaskScheduler.java:42) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.ruleRewriteOnlyOnce(Optimizer.java:303) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.logicalRuleRewrite(Optimizer.java:221) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.optimize(Optimizer.java:78) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.createQueryPlan(StatementPlanner.java:84) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.plan(StatementPlanner.java:54) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:315) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:285) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:432) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:668) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_202]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_202]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_202]
Caused by: java.net.SocketTimeoutException: Read timed out
at java.net.SocketInputStream.socketRead0(Native Method) ~[?:1.8.0_202]
at java.net.SocketInputStream.socketRead(SocketInputStream.java:116) ~[?:1.8.0_202]
at java.net.SocketInputStream.read(SocketInputStream.java:171) ~[?:1.8.0_202]
at java.net.SocketInputStream.read(SocketInputStream.java:141) ~[?:1.8.0_202]
at java.io.BufferedInputStream.fill(BufferedInputStream.java:246) ~[?:1.8.0_202]
at java.io.BufferedInputStream.read1(BufferedInputStream.java:286) ~[?:1.8.0_202]
at java.io.BufferedInputStream.read(BufferedInputStream.java:345) ~[?:1.8.0_202]
at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:125) ~[libthrift-0.13.0.jar:0.13.0]
... 48 more
2022-08-29 10:09:00,338 WARN (starrocks-mysql-nio-pool-9696|24702) [HiveMetaCache.getPartitionKeys():175] get partition keys failed
java.util.concurrent.ExecutionException: com.starrocks.common.DdlException: Fail to access meta store of Hive. error: java.net.SocketTimeoutException: Read timed out
at com.google.common.util.concurrent.AbstractFuture.getDoneValue(AbstractFuture.java:566) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:527) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.util.concurrent.AbstractFuture$TrustedFuture.get(AbstractFuture.java:104) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:240) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2313) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2279) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:3962) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3985) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.external.hive.HiveMetaCache.getPartitionKeys(HiveMetaCache.java:172) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveRepository.getPartitionKeys(HiveRepository.java:123) ~[starrocks-fe.jar:?]
at com.starrocks.external.HiveMetaStoreTableUtils.getPartitionKeys(HiveMetaStoreTableUtils.java:65) ~[starrocks-fe.jar:?]
at com.starrocks.catalog.HiveTable.getPartitionKeys(HiveTable.java:161) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.rule.transformation.RemoteScanPartitionPruneRule.initPartitionInfo(RemoteScanPartitionPruneRule.java:92) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.rule.transformation.RemoteScanPartitionPruneRule.transform(RemoteScanPartitionPruneRule.java:63) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.TopDownRewriteTask.doExecute(TopDownRewriteTask.java:49) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.TopDownRewriteOnceTask.execute(TopDownRewriteOnceTask.java:31) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.SeriallyTaskScheduler.executeTasks(SeriallyTaskScheduler.java:42) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.ruleRewriteOnlyOnce(Optimizer.java:303) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.logicalRuleRewrite(Optimizer.java:221) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.optimize(Optimizer.java:78) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.createQueryPlan(StatementPlanner.java:84) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.plan(StatementPlanner.java:54) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:315) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:285) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:432) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:668) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_202]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_202]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_202]
Caused by: com.starrocks.common.DdlException: Fail to access meta store of Hive. error: java.net.SocketTimeoutException: Read timed out
at com.starrocks.external.hive.HiveMetaClient.getPartitionKeys(HiveMetaClient.java:192) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache.loadPartitionKeys(HiveMetaCache.java:118) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache.access$000(HiveMetaCache.java:34) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache$1.load(HiveMetaCache.java:66) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache$1.load(HiveMetaCache.java:63) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278) ~[spark-dpp-1.0.0.jar:?]
... 27 more
2022-08-29 10:09:00,338 WARN (starrocks-mysql-nio-pool-9696|24702) [RemoteScanPartitionPruneRule.transform():67] HMS table partition prune failed : com.starrocks.common.DdlException: get partition keys failed: com.starrocks.common.DdlException: Fail to access meta store of Hive. error: java.net.SocketTimeoutException: Read timed out
2022-08-29 10:09:00,350 WARN (starrocks-mysql-nio-pool-9696|24702) [StmtExecutor.execute():330] New planner error: select * from table_name limit 1
com.starrocks.sql.common.StarRocksPlannerException: get partition keys failed: com.starrocks.common.DdlException: Fail to access meta store of Hive. error: java.net.SocketTimeoutException: Read timed out
at com.starrocks.sql.optimizer.rule.transformation.RemoteScanPartitionPruneRule.transform(RemoteScanPartitionPruneRule.java:68) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.TopDownRewriteTask.doExecute(TopDownRewriteTask.java:49) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.TopDownRewriteOnceTask.execute(TopDownRewriteOnceTask.java:31) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.SeriallyTaskScheduler.executeTasks(SeriallyTaskScheduler.java:42) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.ruleRewriteOnlyOnce(Optimizer.java:303) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.logicalRuleRewrite(Optimizer.java:221) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.optimize(Optimizer.java:78) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.createQueryPlan(StatementPlanner.java:84) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.plan(StatementPlanner.java:54) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:315) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:285) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:432) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:668) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_202]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_202]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_202]
2022-08-29 10:09:00,350 WARN (starrocks-mysql-nio-pool-9696|24702) [StmtExecutor.execute():476] execute Exception, sql select * from table_name limit 1
com.starrocks.sql.common.StarRocksPlannerException: get partition keys failed: com.starrocks.common.DdlException: Fail to access meta store of Hive. error: java.net.SocketTimeoutException: Read timed out
at com.starrocks.sql.optimizer.rule.transformation.RemoteScanPartitionPruneRule.transform(RemoteScanPartitionPruneRule.java:68) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.TopDownRewriteTask.doExecute(TopDownRewriteTask.java:49) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.TopDownRewriteOnceTask.execute(TopDownRewriteOnceTask.java:31) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.SeriallyTaskScheduler.executeTasks(SeriallyTaskScheduler.java:42) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.ruleRewriteOnlyOnce(Optimizer.java:303) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.logicalRuleRewrite(Optimizer.java:221) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.optimize(Optimizer.java:78) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.createQueryPlan(StatementPlanner.java:84) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.plan(StatementPlanner.java:54) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:315) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:285) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:432) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:668) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_202]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_202]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_202]
2022-08-29 10:09:00,372 WARN (starrocks-mysql-nio-pool-9709|24739) [ConnectProcessor.dispatch():449] Unsupported command(COM_STMT_PREPARE)
2022-08-29 10:09:00,387 WARN (starrocks-mysql-nio-pool-9709|24739) [ConnectProcessor.dispatch():449] Unsupported command(COM_STMT_PREPARE)
2022-08-29 10:09:01,044 WARN (starrocks-mysql-nio-pool-9709|24739) [ConnectProcessor.dispatch():449] Unsupported command(COM_STMT_PREPARE)
2022-08-29 10:09:01,691 WARN (starrocks-mysql-nio-pool-9709|24739) [ConnectProcessor.dispatch():449] Unsupported command(COM_STMT_PREPARE)
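
For reference, the stack traces show the FE's Thrift client timing out while reading partition keys from the Hive metastore (HiveMetaClient.getPartitionKeys), so the FE-side metastore client timeout may be worth checking. A minimal sketch, assuming the FE config name hive_meta_store_timeout_s exists and is mutable in this version (please verify against your fe.conf / FE config list before applying):

-- Check the current FE-side Hive metastore client timeout (config name assumed; verify first)
ADMIN SHOW FRONTEND CONFIG LIKE 'hive_meta_store_timeout_s';
-- Temporarily raise it, e.g. to 60 seconds, to see whether the intermittent failures stop
ADMIN SET FRONTEND CONFIG ("hive_meta_store_timeout_s" = "60");

Raising the timeout would only mask the symptom if the metastore itself is intermittently slow; the pattern here (previously failing tables recovering while new ones start failing) looks more like HMS-side load or network latency than a problem with any particular external table definition.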