【详述】我把fe和be的conf下面加上 core-site.xml、hdfs-site.xml、hive-site.xml;fe.conf 配置了 -Djava.security.krb5.conf=/etc/krb5.conf -Dsun.security.krb5.debug=true(注意:-D 系统属性的键值之间必须用 "=" 连接,原先写成 ":" 会导致 krb5.conf 未被加载,从而出现下文的 GSS initiate failed)
【背景】登录客户端,创建 catalog hms(如已存在则先 DROP 再重建):
CREATE EXTERNAL CATALOG hms
PROPERTIES
(
"type" = "hive",
"hive.metastore.type" = "hive",
"hive.metastore.uris" = "thrift://xxxx:9083"
);
show databases from hms;
【业务影响】
【是否存算分离】
【StarRocks版本】例如:2.3.2
【集群规模】例如:3fe(1 follower+2observer)+3be(fe与be混部)
【联系方式】我的邮件是821193713@qq.com
【附件】
- fe.log / be.INFO 日志:
2024-12-04 21:49:08,122 WARN (starrocks-mysql-nio-pool-10|127) [HiveMetaStoreThriftClient.open():522] set_ugi() not successful, Likely cause: new client talking to old server. Continuing without it.
org.apache.thrift.transport.TTransportException: Socket is closed by peer.
at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:130) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readStringBody(TBinaryProtocol.java:411) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:254) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_set_ugi(ThriftHiveMetastore.java:4931) ~[hive-apache-3.1.2-13.jar:?]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.set_ugi(ThriftHiveMetastore.java:4917) ~[hive-apache-3.1.2-13.jar:?]
at com.starrocks.external.hive.HiveMetaStoreThriftClient.open(HiveMetaStoreThriftClient.java:514) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaStoreThriftClient.reconnect(HiveMetaStoreThriftClient.java:400) ~[starrocks-fe.jar:?]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient$1.run(RetryingMetaStoreClient.java:187) ~[hive-apache-3.1.2-13.jar:?]
at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_112]
at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_112]
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1845) ~[hadoop-common-3.3.0.jar:?]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:183) ~[hive-apache-3.1.2-13.jar:?]
at com.sun.proxy.$Proxy37.getAllDatabases(Unknown Source) ~[?:?]
at com.starrocks.external.hive.HiveMetaClient.getAllDatabaseNames(HiveMetaClient.java:181) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache.loadAllDatabaseNames(HiveMetaCache.java:292) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache.access$500(HiveMetaCache.java:39) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache$6.load(HiveMetaCache.java:128) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache$6.load(HiveMetaCache.java:125) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:3962) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3985) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.external.hive.HiveMetaCache.getAllDatabaseNames(HiveMetaCache.java:285) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetadata.listDbNames(HiveMetadata.java:37) ~[starrocks-fe.jar:?]
at com.starrocks.server.MetadataMgr.listDbNames(MetadataMgr.java:83) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ShowExecutor.handleShowDb(ShowExecutor.java:505) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ShowExecutor.execute(ShowExecutor.java:182) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.handleShow(StmtExecutor.java:905) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:443) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:321) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:439) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:675) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_112]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_112]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
2024-12-04 21:49:08,123 INFO (starrocks-mysql-nio-pool-10|127) [HiveMetaStoreThriftClient.open():551] Connected to metastore.
2024-12-04 21:49:08,123 WARN (starrocks-mysql-nio-pool-10|127) [HiveMetaClient.getAllDatabaseNames():183] Failed to get all database names
org.apache.thrift.transport.TTransportException: Socket is closed by peer.
at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:130) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:455) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:354) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:243) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_all_databases(ThriftHiveMetastore.java:1236) ~[hive-apache-3.1.2-13.jar:?]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_all_databases(ThriftHiveMetastore.java:1224) ~[hive-apache-3.1.2-13.jar:?]
at com.starrocks.external.hive.HiveMetaStoreThriftClient.getAllDatabases(HiveMetaStoreThriftClient.java:761) ~[starrocks-fe.jar:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:208) ~[hive-apache-3.1.2-13.jar:?]
at com.sun.proxy.$Proxy37.getAllDatabases(Unknown Source) ~[?:?]
at com.starrocks.external.hive.HiveMetaClient.getAllDatabaseNames(HiveMetaClient.java:181) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache.loadAllDatabaseNames(HiveMetaCache.java:292) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache.access$500(HiveMetaCache.java:39) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache$6.load(HiveMetaCache.java:128) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache$6.load(HiveMetaCache.java:125) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:3962) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3985) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.external.hive.HiveMetaCache.getAllDatabaseNames(HiveMetaCache.java:285) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetadata.listDbNames(HiveMetadata.java:37) ~[starrocks-fe.jar:?]
at com.starrocks.server.MetadataMgr.listDbNames(MetadataMgr.java:83) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ShowExecutor.handleShowDb(ShowExecutor.java:505) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ShowExecutor.execute(ShowExecutor.java:182) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.handleShow(StmtExecutor.java:905) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:443) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:321) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:439) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:675) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_112]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_112]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
2024-12-04 21:49:08,123 ERROR (starrocks-mysql-nio-pool-10|127) [MetadataMgr.listDbNames():85] Failed to listDbNames on catalog hms
com.starrocks.common.DdlException: Failed to get all databases name on thrift://hadoop04.bigdata.dj:9083
at com.starrocks.external.hive.HiveMetaCache.getAllDatabaseNames(HiveMetaCache.java:287) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetadata.listDbNames(HiveMetadata.java:37) ~[starrocks-fe.jar:?]
at com.starrocks.server.MetadataMgr.listDbNames(MetadataMgr.java:83) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ShowExecutor.handleShowDb(ShowExecutor.java:505) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ShowExecutor.execute(ShowExecutor.java:182) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.handleShow(StmtExecutor.java:905) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:443) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:321) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:439) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:675) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_112]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_112]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
2024-12-04 21:49:08,123 INFO (starrocks-mysql-nio-pool-10|127) [StmtExecutor.execute():469] execute Exception. Failed to get all databases name on thrift://hadoop04.bigdata.dj:9083
- fe.warn.log 日志:
2024-12-04 23:31:39,739 WARN (starrocks-mysql-nio-pool-44|383) [HiveMetaStoreThriftClient.open():506] Failed to connect to the MetaStore Server…
2024-12-04 23:31:40,745 WARN (starrocks-mysql-nio-pool-44|383) [HiveMetaStoreThriftClient.open():506] Failed to connect to the MetaStore Server…
2024-12-04 23:31:41,751 WARN (starrocks-mysql-nio-pool-44|383) [HiveMetaStoreThriftClient.open():506] Failed to connect to the MetaStore Server…
2024-12-04 23:31:42,752 WARN (starrocks-mysql-nio-pool-44|383) [HiveMetaClient.getAllDatabaseNames():183] Failed to get all database names
java.lang.RuntimeException: Unable to instantiate com.starrocks.external.hive.HiveMetaStoreThriftClient
at org.apache.hadoop.hive.metastore.utils.JavaUtils.newInstance(JavaUtils.java:86) ~[hive-apache-3.1.2-13.jar:?]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:95) ~[hive-apache-3.1.2-13.jar:?]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:148) ~[hive-apache-3.1.2-13.jar:?]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:119) ~[hive-apache-3.1.2-13.jar:?]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:112) ~[hive-apache-3.1.2-13.jar:?]
at com.starrocks.external.hive.HiveMetaClient$AutoCloseClient.(HiveMetaClient.java:127) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaClient$AutoCloseClient.(HiveMetaClient.java:122) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaClient.getClient(HiveMetaClient.java:163) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaClient.getAllDatabaseNames(HiveMetaClient.java:180) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache.loadAllDatabaseNames(HiveMetaCache.java:292) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache.access$500(HiveMetaCache.java:39) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache$6.load(HiveMetaCache.java:128) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveMetaCache$6.load(HiveMetaCache.java:125) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:3962) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3985) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.external.hive.HiveMetaCache.getAllDatabaseNames(HiveMetaCache.java:285) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetadata.listDbNames(HiveMetadata.java:37) ~[starrocks-fe.jar:?]
at com.starrocks.server.MetadataMgr.listDbNames(MetadataMgr.java:83) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ShowExecutor.handleShowDb(ShowExecutor.java:505) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ShowExecutor.execute(ShowExecutor.java:182) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.handleShow(StmtExecutor.java:905) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:443) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:321) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:439) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:675) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_112]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_112]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:1.8.0_112]
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[?:1.8.0_112]
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:1.8.0_112]
at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[?:1.8.0_112]
at org.apache.hadoop.hive.metastore.utils.JavaUtils.newInstance(JavaUtils.java:84) ~[hive-apache-3.1.2-13.jar:?]
… 34 more
Caused by: org.apache.hadoop.hive.metastore.api.MetaException: Could not connect to meta store using any of the URIs provided. Most recent failure: org.apache.thrift.transport.TTransportException: Peer indicated failure: GSS initiate failed
at org.apache.thrift.transport.TSaslTransport.receiveSaslMessage(TSaslTransport.java:198)
at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:38)
at org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:51)
at org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:48)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1845)
at org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport.open(TUGIAssumingTransport.java:48)
at com.starrocks.external.hive.HiveMetaStoreThriftClient.open(HiveMetaStoreThriftClient.java:495)
at com.starrocks.external.hive.HiveMetaStoreThriftClient.(HiveMetaStoreThriftClient.java:267)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.metastore.utils.JavaUtils.newInstance(JavaUtils.java:84)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:95)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:148)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:119)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:112)
at com.starrocks.external.hive.HiveMetaClient$AutoCloseClient.(HiveMetaClient.java:127)
at com.starrocks.external.hive.HiveMetaClient$AutoCloseClient.(HiveMetaClient.java:122)
at com.starrocks.external.hive.HiveMetaClient.getClient(HiveMetaClient.java:163)
at com.starrocks.external.hive.HiveMetaClient.getAllDatabaseNames(HiveMetaClient.java:180)
at com.starrocks.external.hive.HiveMetaCache.loadAllDatabaseNames(HiveMetaCache.java:292)
at com.starrocks.external.hive.HiveMetaCache.access$500(HiveMetaCache.java:39)
at com.starrocks.external.hive.HiveMetaCache$6.load(HiveMetaCache.java:128)
at com.starrocks.external.hive.HiveMetaCache$6.load(HiveMetaCache.java:125)
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192)
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529)
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278)
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155)
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045)
at com.google.common.cache.LocalCache.get(LocalCache.java:3962)
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3985)
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946)
at com.starrocks.external.hive.HiveMetaCache.getAllDatabaseNames(HiveMetaCache.java:285)
at com.starrocks.connector.hive.HiveMetadata.listDbNames(HiveMetadata.java:37)
at com.starrocks.server.MetadataMgr.listDbNames(MetadataMgr.java:83)
at com.starrocks.qe.ShowExecutor.handleShowDb(ShowExecutor.java:505)
at com.starrocks.qe.ShowExecutor.execute(ShowExecutor.java:182)
at com.starrocks.qe.StmtExecutor.handleShow(StmtExecutor.java:905)
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:443)
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:321)
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:439)
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:675)
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)at com.starrocks.external.hive.HiveMetaStoreThriftClient.open(HiveMetaStoreThriftClient.java:546) ~[starrocks-fe.jar:?] at com.starrocks.external.hive.HiveMetaStoreThriftClient.<init>(HiveMetaStoreThriftClient.java:267) ~[starrocks-fe.jar:?] at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:1.8.0_112] at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[?:1.8.0_112] at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:1.8.0_112] at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[?:1.8.0_112] at org.apache.hadoop.hive.metastore.utils.JavaUtils.newInstance(JavaUtils.java:84) ~[hive-apache-3.1.2-13.jar:?] ... 34 more
2024-12-04 23:31:42,753 ERROR (starrocks-mysql-nio-pool-44|383) [MetadataMgr.listDbNames():85] Failed to listDbNames on catalog hms
com.starrocks.common.DdlException: Failed to get all databases name on thrift://hadoop03.bigdata.dj:9083
at com.starrocks.external.hive.HiveMetaCache.getAllDatabaseNames(HiveMetaCache.java:287) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetadata.listDbNames(HiveMetadata.java:37) ~[starrocks-fe.jar:?]
at com.starrocks.server.MetadataMgr.listDbNames(MetadataMgr.java:83) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ShowExecutor.handleShowDb(ShowExecutor.java:505) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ShowExecutor.execute(ShowExecutor.java:182) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.handleShow(StmtExecutor.java:905) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:443) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:321) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:439) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:675) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_112]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_112]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]