To help us locate your issue faster, please provide the following information. Thank you.
【Details】While the FE is planning an INSERT that reads the external table mysql_prod_enlightent_daily.animation_total_heat, the statement fails with "Failed to get partitionKeys on [mysql_prod_enlightent_daily.animation_total_heat], msg: null"; the root cause recorded in fe.log is a java.lang.StackOverflowError thrown while CachingHiveMetastore loads the partition-key cache (full trace attached below).
【Background】What operations were performed?
【Business impact】Yes
【Shared-data (storage-compute separated)】Yes
【StarRocks version】3.2.8
【Cluster size】1 FE (1 follower) + 2 BE (FE and BE co-located)
【Machine specs】16 cores / 128 GB RAM / 10 Gb network
【Attachment】fe.log excerpt:
2024-08-01 09:00:00.148+08:00 ERROR (starrocks-mysql-nio-pool-59757|3432774) [CachingHiveMetastore.get():625] Error occurred when loading cache
com.google.common.util.concurrent.UncheckedExecutionException: com.starrocks.connector.exception.StarRocksConnectorException: Failed to get partitionKeys on [mysql_prod_enlightent_daily.animation_total_heat], msg: null
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2085) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:4011) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4034) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:5010) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:5017) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.get(CachingHiveMetastore.java:623) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.getPartitionKeysByValue(CachingHiveMetastore.java:270) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetastoreOperations.getPartitionKeys(HiveMetastoreOperations.java:246) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetadata.listPartitionNames(HiveMetadata.java:203) ~[starrocks-fe.jar:?]
at com.starrocks.connector.CatalogConnectorMetadata.listPartitionNames(CatalogConnectorMetadata.java:105) ~[starrocks-fe.jar:?]
at com.starrocks.server.MetadataMgr.listPartitionNames(MetadataMgr.java:374) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.rewrite.OptExternalPartitionPruner.initPartitionInfo(OptExternalPartitionPruner.java:236) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.rewrite.OptExternalPartitionPruner.prunePartitionsImpl(OptExternalPartitionPruner.java:124) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.rewrite.OptExternalPartitionPruner.prunePartitions(OptExternalPartitionPruner.java:81) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.rule.transformation.ExternalScanPartitionPruneRule.transform(ExternalScanPartitionPruneRule.java:59) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.RewriteTreeTask.rewrite(RewriteTreeTask.java:81) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.RewriteTreeTask.rewrite(RewriteTreeTask.java:99) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.RewriteTreeTask.rewrite(RewriteTreeTask.java:99) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.RewriteTreeTask.rewrite(RewriteTreeTask.java:99) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.RewriteTreeTask.rewrite(RewriteTreeTask.java:99) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.RewriteTreeTask.rewrite(RewriteTreeTask.java:99) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.RewriteTreeTask.rewrite(RewriteTreeTask.java:99) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.RewriteTreeTask.rewrite(RewriteTreeTask.java:99) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.RewriteTreeTask.rewrite(RewriteTreeTask.java:99) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.RewriteTreeTask.rewrite(RewriteTreeTask.java:99) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.RewriteTreeTask.execute(RewriteTreeTask.java:59) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.SeriallyTaskScheduler.executeTasks(SeriallyTaskScheduler.java:65) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.ruleRewriteOnlyOnce(Optimizer.java:857) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.logicalRuleRewrite(Optimizer.java:504) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.rewriteAndValidatePlan(Optimizer.java:609) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.optimizeByCost(Optimizer.java:225) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.optimize(Optimizer.java:174) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.optimize(Optimizer.java:151) ~[starrocks-fe.jar:?]
at com.starrocks.sql.InsertPlanner.buildExecPlan(InsertPlanner.java:391) ~[starrocks-fe.jar:?]
at com.starrocks.sql.InsertPlanner.plan(InsertPlanner.java:203) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.planInsertStmt(StatementPlanner.java:161) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.plan(StatementPlanner.java:140) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.plan(StatementPlanner.java:92) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:532) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:415) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:610) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:917) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:69) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) ~[?:?]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) ~[?:?]
at java.lang.Thread.run(Thread.java:834) ~[?:?]
Caused by: com.starrocks.connector.exception.StarRocksConnectorException: Failed to get partitionKeys on [mysql_prod_enlightent_daily.animation_total_heat], msg: null
at com.starrocks.connector.hive.HiveMetaClient.callRPC(HiveMetaClient.java:164) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetaClient.callRPC(HiveMetaClient.java:150) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetaClient.getPartitionKeys(HiveMetaClient.java:238) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetastore.getPartitionKeysByValue(HiveMetastore.java:141) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.loadPartitionKeys(CachingHiveMetastore.java:279) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:169) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3570) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2312) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2189) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2079) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:4011) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4034) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:5010) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:5017) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.get(CachingHiveMetastore.java:623) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.getPartitionKeysByValue(CachingHiveMetastore.java:270) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.loadPartitionKeys(CachingHiveMetastore.java:279) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:169) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3570) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2312) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2189) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2079) ~[spark-dpp-1.0.0.jar:?]
… 45 more
Caused by: java.lang.reflect.InvocationTargetException
at jdk.internal.reflect.GeneratedMethodAccessor26.invoke(Unknown Source) ~[?:?]
at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
at java.lang.reflect.Method.invoke(Method.java:566) ~[?:?]
at com.starrocks.connector.hive.HiveMetaClient.callRPC(HiveMetaClient.java:161) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetaClient.callRPC(HiveMetaClient.java:150) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetaClient.getPartitionKeys(HiveMetaClient.java:238) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetastore.getPartitionKeysByValue(HiveMetastore.java:141) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.loadPartitionKeys(CachingHiveMetastore.java:279) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:169) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3570) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2312) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2189) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2079) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:4011) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4034) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:5010) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:5017) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.get(CachingHiveMetastore.java:623) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.getPartitionKeysByValue(CachingHiveMetastore.java:270) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.loadPartitionKeys(CachingHiveMetastore.java:279) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:169) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3570) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2312) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2189) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2079) ~[spark-dpp-1.0.0.jar:?]
… 45 more
Caused by: java.lang.StackOverflowError
at java.util.Collections$UnmodifiableCollection.isEmpty(Collections.java:1033) ~[?:?]
at java.util.Collections$UnmodifiableCollection.isEmpty(Collections.java:1033) ~[?:?]
at java.util.Collections$UnmodifiableCollection.isEmpty(Collections.java:1033) ~[?:?]
at java.util.Collections$UnmodifiableCollection.isEmpty(Collections.java:1033) ~[?:?]
at java.util.Collections$UnmodifiableCollection.isEmpty(Collections.java:1033) ~[?:?]
at java.util.Collections$UnmodifiableCollection.isEmpty(Collections.java:1033) ~[?:?]
at java.util.Collections$UnmodifiableCollection.isEmpty(Collections.java:1033) ~[?:?]
at java.util.Collections$UnmodifiableCollection.isEmpty(Collections.java:1033) ~[?:?]
at java.util.Collections$UnmodifiableCollection.isEmpty(Collections.java:1033) ~[?:?]
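For reference, the repeating frame at the bottom of the trace (Collections$UnmodifiableCollection.isEmpty delegating to itself) is the signature of a collection that has been re-wrapped with Collections.unmodifiableCollection() many times, so a single isEmpty() call needs one stack frame per wrapper layer. The sketch below is a hypothetical, standalone reproduction of that generic pattern only; it is not StarRocks or Hive code and it does not identify where the re-wrapping actually happens in the FE or the metastore client.

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

// Hypothetical demo of the failure mode suggested by the repeating isEmpty frame:
// a collection that gets wrapped again on every "refresh" accumulates nested
// unmodifiable wrappers, and one isEmpty()/size() call then recurses through all
// of them until java.lang.StackOverflowError.
public class NestedUnmodifiableDemo {
    public static void main(String[] args) {
        Collection<String> values = new ArrayList<>();
        // Simulate many refresh cycles, each of which wraps the previous wrapper again.
        for (int i = 0; i < 1_000_000; i++) {
            values = Collections.unmodifiableCollection(values);
        }
        // Each wrapper's isEmpty() delegates to the one below it -> StackOverflowError
        // at java.util.Collections$UnmodifiableCollection.isEmpty, repeated.
        System.out.println(values.isEmpty());
    }
}

Running this throws a StackOverflowError whose trace repeats the same UnmodifiableCollection.isEmpty frame as the attached fe.log, which is why I suspect some partition/metadata collection in the FE is being wrapped repeatedly over time.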