starrocks创建hive外部表报错

【详述】starrocks创建hive外部表报错
【背景】安装Kerberos、创建Hive 资源、创建外部表失败
【业务影响】
【StarRocks版本】例如:2.5.4
【集群规模】例如:3fe(1 follower+2observer)+3be(fe与be混部)
【机器信息】CPU虚拟核/内存/网卡,例如:80C/128G/万兆
【联系方式】为了在解决问题过程中能及时联系到您获取一些日志信息,请补充下您的联系方式,例如:社区群4-小李或者邮箱,谢谢
MySQL [db_1]> CREATE EXTERNAL RESOURCE "hive_161"
-> PROPERTIES (
-> "type" = "hive",
-> "hive.metastore.uris" = "thrift://127.0.0.0:9083",
-> "dfs.nameservices" = "nameservice1",
-> "dfs.data.transfer.protection" = "authentication",
-> "hadoop.rpc.protection" = "authentication",
-> "dfs.ha.namenodes.nameservice1" = "hostname1,hostname2",
-> "dfs.namenode.rpc-address.nameservice1.hostname1" = "127.0.0.0:8020",
-> "dfs.namenode.rpc-address.nameservice1.hostname2" = "127.0.0.0:8020",
-> "dfs.client.failover.proxy.provider.nameservice1" = "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider",
-> "hadoop.security.authentication" = "kerberos",
-> "hadoop.kerberos.principal" = "hive@GENIUSAFC.COM",
-> "hadoop.kerberos.keytab" = "/etc/security/keytab/hive.keytab"
-> );

create EXTERNAL table student(
id int, name string
)
ENGINE=HIVE
PROPERTIES (
"resource" = "hive_161",
"database" = "db_7",
"table" = "student"
);

2023-05-12 16:37:21,776 WARN (starrocks-mysql-nio-pool-49|11708) [HiveMetaStoreThriftClient.open():521] set_ugi() not successful, Likely cause: new client talking to old server. Continuing without it.
org.apache.thrift.transport.TTransportException: Socket is closed by peer.
at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:130) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readStringBody(TBinaryProtocol.java:411) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:254) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_set_ugi(ThriftHiveMetastore.java:4931) ~[hive-apache-3.1.2-13.jar:?]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.set_ugi(ThriftHiveMetastore.java:4917) ~[hive-apache-3.1.2-13.jar:?]
at com.starrocks.connector.hive.HiveMetaStoreThriftClient.open(HiveMetaStoreThriftClient.java:513) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetaStoreThriftClient.reconnect(HiveMetaStoreThriftClient.java:399) ~[starrocks-fe.jar:?]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient$1.run(RetryingMetaStoreClient.java:187) ~[hive-apache-3.1.2-13.jar:?]
at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_281]
at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_281]
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878) ~[hadoop-common-3.3.3.jar:?]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:183) ~[hive-apache-3.1.2-13.jar:?]
at com.sun.proxy.$Proxy43.getTable(Unknown Source) ~[?:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_281]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_281]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_281]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_281]
at com.starrocks.connector.hive.HiveMetaClient.callRPC(HiveMetaClient.java:142) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetaClient.getTable(HiveMetaClient.java:187) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetastore.getTable(HiveMetastore.java:57) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.loadTable(CachingHiveMetastore.java:193) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:168) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:3962) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3985) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4952) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.get(CachingHiveMetastore.java:368) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.getTable(CachingHiveMetastore.java:189) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.loadTable(CachingHiveMetastore.java:193) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:168) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:3962) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3985) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4952) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.get(CachingHiveMetastore.java:368) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.getTable(CachingHiveMetastore.java:189) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetastoreOperations.getTable(HiveMetastoreOperations.java:45) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetadata.getTable(HiveMetadata.java:88) ~[starrocks-fe.jar:?]
at com.starrocks.server.MetadataMgr.lambda$getTable$2(MetadataMgr.java:134) ~[starrocks-fe.jar:?]
at java.util.Optional.map(Optional.java:215) ~[?:1.8.0_281]
at com.starrocks.server.MetadataMgr.getTable(MetadataMgr.java:134) ~[starrocks-fe.jar:?]
at com.starrocks.server.TableFactory.getTableFromResourceMappingCatalog(TableFactory.java:283) ~[starrocks-fe.jar:?]
at com.starrocks.server.TableFactory.createHiveTable(TableFactory.java:67) ~[starrocks-fe.jar:?]
at com.starrocks.server.TableFactory.createTable(TableFactory.java:47) ~[starrocks-fe.jar:?]
at com.starrocks.server.LocalMetastore.createHiveTable(LocalMetastore.java:2511) ~[starrocks-fe.jar:?]
at com.starrocks.server.LocalMetastore.createTable(LocalMetastore.java:783) ~[starrocks-fe.jar:?]
at com.starrocks.server.GlobalStateMgr.createTable(GlobalStateMgr.java:2044) ~[starrocks-fe.jar:?]
at com.starrocks.qe.DDLStmtExecutor$StmtExecutorVisitor.lambda$visitCreateTableStatement$4(DDLStmtExecutor.java:208) ~[starrocks-fe.jar:?]
at com.starrocks.common.ErrorReport.wrapWithRuntimeException(ErrorReport.java:99) ~[starrocks-fe.jar:?]
at com.starrocks.qe.DDLStmtExecutor$StmtExecutorVisitor.visitCreateTableStatement(DDLStmtExecutor.java:207) ~[starrocks-fe.jar:?]
at com.starrocks.qe.DDLStmtExecutor$StmtExecutorVisitor.visitCreateTableStatement(DDLStmtExecutor.java:127) ~[starrocks-fe.jar:?]
at com.starrocks.sql.ast.CreateTableStmt.accept(CreateTableStmt.java:307) ~[starrocks-fe.jar:?]
at com.starrocks.qe.DDLStmtExecutor.execute(DDLStmtExecutor.java:113) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.handleDdlStmt(StmtExecutor.java:1138) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:492) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:323) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:440) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:698) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_281]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_281]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_281]

2023-05-12 16:37:21,776 INFO (starrocks-mysql-nio-pool-49|11708) [HiveMetaStoreThriftClient.open():550] Connected to metastore.
2023-05-12 16:37:21,777 ERROR (starrocks-mysql-nio-pool-49|11708) [HiveMetaClient.callRPC():144] Failed to get table [db_7.student]
java.lang.reflect.InvocationTargetException: null
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_281]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_281]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_281]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_281]
at com.starrocks.connector.hive.HiveMetaClient.callRPC(HiveMetaClient.java:142) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetaClient.getTable(HiveMetaClient.java:187) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetastore.getTable(HiveMetastore.java:57) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.loadTable(CachingHiveMetastore.java:193) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:168) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:3962) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3985) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4952) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.get(CachingHiveMetastore.java:368) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.getTable(CachingHiveMetastore.java:189) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.loadTable(CachingHiveMetastore.java:193) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:168) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:3962) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3985) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4952) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.get(CachingHiveMetastore.java:368) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.getTable(CachingHiveMetastore.java:189) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetastoreOperations.getTable(HiveMetastoreOperations.java:45) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetadata.getTable(HiveMetadata.java:88) ~[starrocks-fe.jar:?]
at com.starrocks.server.MetadataMgr.lambda$getTable$2(MetadataMgr.java:134) ~[starrocks-fe.jar:?]
at java.util.Optional.map(Optional.java:215) ~[?:1.8.0_281]
at com.starrocks.server.MetadataMgr.getTable(MetadataMgr.java:134) ~[starrocks-fe.jar:?]
at com.starrocks.server.TableFactory.getTableFromResourceMappingCatalog(TableFactory.java:283) ~[starrocks-fe.jar:?]
at com.starrocks.server.TableFactory.createHiveTable(TableFactory.java:67) ~[starrocks-fe.jar:?]
at com.starrocks.server.TableFactory.createTable(TableFactory.java:47) ~[starrocks-fe.jar:?]
at com.starrocks.server.LocalMetastore.createHiveTable(LocalMetastore.java:2511) ~[starrocks-fe.jar:?]
at com.starrocks.server.LocalMetastore.createTable(LocalMetastore.java:783) ~[starrocks-fe.jar:?]
at com.starrocks.server.GlobalStateMgr.createTable(GlobalStateMgr.java:2044) ~[starrocks-fe.jar:?]
at com.starrocks.qe.DDLStmtExecutor$StmtExecutorVisitor.lambda$visitCreateTableStatement$4(DDLStmtExecutor.java:208) ~[starrocks-fe.jar:?]
at com.starrocks.common.ErrorReport.wrapWithRuntimeException(ErrorReport.java:99) ~[starrocks-fe.jar:?]
at com.starrocks.qe.DDLStmtExecutor$StmtExecutorVisitor.visitCreateTableStatement(DDLStmtExecutor.java:207) ~[starrocks-fe.jar:?]
at com.starrocks.qe.DDLStmtExecutor$StmtExecutorVisitor.visitCreateTableStatement(DDLStmtExecutor.java:127) ~[starrocks-fe.jar:?]
at com.starrocks.sql.ast.CreateTableStmt.accept(CreateTableStmt.java:307) ~[starrocks-fe.jar:?]
at com.starrocks.qe.DDLStmtExecutor.execute(DDLStmtExecutor.java:113) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.handleDdlStmt(StmtExecutor.java:1138) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:492) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:323) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:440) ~[starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:698) ~[starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:55) ~[starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_281]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_281]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_281]
Caused by: org.apache.thrift.transport.TTransportException: Socket is closed by peer.
at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:130) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:455) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:354) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:243) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77) ~[libthrift-0.13.0.jar:0.13.0]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_table(ThriftHiveMetastore.java:2042) ~[hive-apache-3.1.2-13.jar:?]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_table(ThriftHiveMetastore.java:2028) ~[hive-apache-3.1.2-13.jar:?]
at com.starrocks.connector.hive.HiveMetaStoreThriftClient.getTable(HiveMetaStoreThriftClient.java:586) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetaStoreThriftClient.getTable(HiveMetaStoreThriftClient.java:581) ~[starrocks-fe.jar:?]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_table(ThriftHiveMetastore.java:2042) ~[hive-apache-3.1.2-13.jar:?]
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_table(ThriftHiveMetastore.java:2028) ~[hive-apache-3.1.2-13.jar:?]
at com.starrocks.connector.hive.HiveMetaStoreThriftClient.getTable(HiveMetaStoreThriftClient.java:586) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.HiveMetaStoreThriftClient.getTable(HiveMetaStoreThriftClient.java:581) ~[starrocks-fe.jar:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_281]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_281]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_281]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_281]
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:208) ~[hive-apache-3.1.2-13.jar:?]
at com.sun.proxy.$Proxy43.getTable(Unknown Source) ~[?:?]
… 59 more
2023-05-12 16:37:21,777 ERROR (starrocks-mysql-nio-pool-49|11708) [HiveMetaClient.callRPC():151] An exception occurred when using the current long link to access metastore. msg: Failed to get table [db_7.student]
2023-05-12 16:37:21,777 INFO (starrocks-mysql-nio-pool-49|11708) [HiveMetaStoreThriftClient.close():575] Closed a connection to metastore, current connections: 0
2023-05-12 16:37:21,777 ERROR (starrocks-mysql-nio-pool-49|11708) [CachingHiveMetastore.get():370] Error occurred when loading cache
com.google.common.util.concurrent.UncheckedExecutionException: com.starrocks.connector.exception.StarRocksConnectorException: Failed to get table [db_7.student], msg: null
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2051) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:3962) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3985) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4946) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4952) ~[spark-dpp-1.0.0.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.get(CachingHiveMetastore.java:368) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.getTable(CachingHiveMetastore.java:189) ~[starrocks-fe.jar:?]
at com.starrocks.connector.hive.CachingHiveMetastore.loadTable(CachingHiveMetastore.java:193) ~[starrocks-fe.jar:?]
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:168) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.CacheLoader$1.load(CacheLoader.java:192) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045) ~[spark-dpp-1.0.0.jar:?]
at com.google.common.cache.LocalCache.get(LocalCache.java:3962) ~[spark-dpp-1.0.0.jar:?]

建议使用hive catalog的方式创建hive外表。可参考如下文档 https://docs.starrocks.io/zh-cn/latest/data_source/catalog/hive_catalog