From 7f310eef5b672d7dad1dd2c1c723e5ccbf9a816f Mon Sep 17 00:00:00 2001
From: qinfeng <212841557@qq.com>
Date: Thu, 21 Sep 2023 16:27:39 +0800
Subject: [PATCH 1/6] =?UTF-8?q?scriptis=20=E5=B7=A6=E4=BE=A7=E6=95=B0?=
=?UTF-8?q?=E6=8D=AE=E5=BA=93=E8=8F=9C=E5=8D=95=E6=A0=8F=E6=94=AF=E6=8C=81?=
=?UTF-8?q?hiveserver2=E8=8E=B7=E5=8F=96=E5=BA=93=E8=A1=A8=E5=AD=97?=
=?UTF-8?q?=E6=AE=B5?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../linkis-datasource/linkis-metadata/pom.xml | 6 +
.../restful/api/DataSourceRestfulApi.java | 24 +-
.../linkis/metadata/util/DWSConfig.java | 12 +
.../metadata/util/HiveService2Utils.java | 237 ++++++++++++++++++
4 files changed, 275 insertions(+), 4 deletions(-)
create mode 100644 linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
index 3f683a33f5..3e42b8f138 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
@@ -95,6 +95,12 @@
provided
+
+ org.apache.hive
+ hive-jdbc
+ ${hive.version}
+
+
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
index d1d0fec0f5..24dbf2fb4f 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
@@ -21,6 +21,7 @@
import org.apache.linkis.metadata.restful.remote.DataSourceRestfulRemote;
import org.apache.linkis.metadata.service.DataSourceService;
import org.apache.linkis.metadata.service.HiveMetaWithPermissionService;
+import org.apache.linkis.metadata.util.HiveService2Utils;
import org.apache.linkis.server.Message;
import org.apache.linkis.server.utils.ModuleUserUtils;
@@ -62,7 +63,12 @@ public class DataSourceRestfulApi implements DataSourceRestfulRemote {
public Message queryDatabaseInfo(HttpServletRequest req) {
String userName = ModuleUserUtils.getOperationUser(req, "get dbs");
try {
- JsonNode dbs = dataSourceService.getDbs(userName);
+ JsonNode dbs;
+ if (HiveService2Utils.checkHiveServer2Enable()) {
+ dbs = HiveService2Utils.getDbs(userName);
+ } else {
+ dbs = dataSourceService.getDbs(userName);
+ }
return Message.ok("").data("dbs", dbs);
} catch (Exception e) {
logger.error("Failed to get database(获取数据库失败)", e);
@@ -137,7 +143,12 @@ public Message queryTables(
String userName = ModuleUserUtils.getOperationUser(req, "get tables");
MetadataQueryParam queryParam = MetadataQueryParam.of(userName).withDbName(database);
try {
- JsonNode tables = dataSourceService.queryTables(queryParam);
+ JsonNode tables;
+ if (HiveService2Utils.checkHiveServer2Enable()) {
+ tables = HiveService2Utils.getTables(userName, database);
+ } else {
+ tables = dataSourceService.queryTables(queryParam);
+ }
return Message.ok("").data("tables", tables);
} catch (Exception e) {
logger.error("Failed to queryTables", e);
@@ -160,8 +171,13 @@ public Message queryTableMeta(
MetadataQueryParam queryParam =
MetadataQueryParam.of(userName).withDbName(database).withTableName(table);
try {
- JsonNode columns =
- hiveMetaWithPermissionService.getColumnsByDbTableNameAndOptionalUserName(queryParam);
+ JsonNode columns;
+ if (HiveService2Utils.checkHiveServer2Enable()) {
+ columns = HiveService2Utils.getColumns(userName, database, table);
+ } else {
+ columns =
+ hiveMetaWithPermissionService.getColumnsByDbTableNameAndOptionalUserName(queryParam);
+ }
return Message.ok("").data("columns", columns);
} catch (Exception e) {
logger.error("Failed to get data table structure(获取数据表结构失败)", e);
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
index d44cb0a830..52d6bbbc4e 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
@@ -49,4 +49,16 @@ public class DWSConfig {
"wds.linkis.hdfs.rest.errs",
".*Filesystem closed.*|.*Failed to find any Kerberos tgt.*")
.getValue();
+
+  public static CommonVars<String> HIVE_SERVER2_URL =
+      CommonVars$.MODULE$.apply("linkis.hive.server2.address", "jdbc:hive2://127.0.0.1:10000/");
+
+  public static CommonVars<String> HIVE_SERVER2_USERNAME =
+      CommonVars$.MODULE$.apply("linkis.hive.server2.username", "");
+
+  public static CommonVars<String> HIVE_SERVER2_PASSWORD =
+      CommonVars$.MODULE$.apply("linkis.hive.server2.password", "");
+
+  public static CommonVars<Boolean> HIVE_SERVER2_ENABLE =
+      CommonVars$.MODULE$.apply("linkis.hive.server2.enable", false);
}
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
new file mode 100644
index 0000000000..5dff3f19bc
--- /dev/null
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
@@ -0,0 +1,237 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.metadata.util;
+
+import org.apache.commons.lang.StringUtils;
+
+import java.sql.*;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.stream.Collectors;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/** Utility class that fetches Hive databases, tables and columns via a HiveServer2 JDBC connection. */
+public class HiveService2Utils {
+
+ private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
+
+ private static final String defaultDb = "default";
+ private static Connection conn = null;
+ private static Statement stat = null;
+ private static ResultSet rs = null;
+
+ static ObjectMapper jsonMapper = new ObjectMapper();
+ static SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.US);
+
+  /** Returns whether HiveServer2 should be used to serve the left-hand metadata menu queries. */
+ public static Boolean checkHiveServer2Enable() {
+ return DWSConfig.HIVE_SERVER2_ENABLE.getValue();
+ }
+
+ static String hiveServer2Address = DWSConfig.HIVE_SERVER2_URL.getValue();
+ static String hiveServer2Username = DWSConfig.HIVE_SERVER2_USERNAME.getValue();
+ static String hiveServer2Password = DWSConfig.HIVE_SERVER2_PASSWORD.getValue();
+
+  /**
+   * Opens a HiveServer2 JDBC connection and statement for the given database.
+   *
+   * @param username login user; overridden by the configured hiveServer2Username when that is set
+   */
+ private static void getConn(String username, String db) throws Exception {
+ Class.forName(driverName);
+ String url =
+ hiveServer2Address.endsWith("/") ? hiveServer2Address + db : hiveServer2Address + "/" + db;
+ if (StringUtils.isNotBlank(hiveServer2Username)) {
+ username = hiveServer2Username;
+ }
+ conn = DriverManager.getConnection(url, username, hiveServer2Password);
+ stat = conn.createStatement();
+ }
+
+  /** Lists all databases visible to the given user via a "show databases" query. */
+ public static JsonNode getDbs(String username) throws Exception {
+ ArrayNode dbsNode = jsonMapper.createArrayNode();
+    List<String> dbs = new CopyOnWriteArrayList<>();
+ try {
+ getConn(username, defaultDb);
+ rs = stat.executeQuery("show databases");
+ while (rs.next()) {
+ dbs.add(rs.getString(1));
+ }
+ } finally {
+ destroy();
+ }
+ for (String db : dbs) {
+ ObjectNode dbNode = jsonMapper.createObjectNode();
+ dbNode.put("dbName", db);
+ dbsNode.add(dbNode);
+ }
+ return dbsNode;
+ }
+
+  /**
+   * Lists all tables in the given database, with owner and creation time for each.
+   *
+   * @param dbName database name
+   */
+ public static JsonNode getTables(String username, String dbName) throws Exception {
+ ArrayNode tablesNode = jsonMapper.createArrayNode();
+ try {
+      List<String> tableNames = new ArrayList<>();
+ getConn(username, dbName);
+ rs = stat.executeQuery("show tables");
+ while (rs.next()) {
+ String tableName = rs.getString(1);
+ tableNames.add(tableName);
+ }
+
+      // Fetch detailed metadata (owner, create time) for each table
+ for (String tableName : tableNames) {
+ ObjectNode tableNode = jsonMapper.createObjectNode();
+        // DESCRIBE FORMATTED yields key/value rows; scan them for Owner: and CreateTime:
+ ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tableName);
+ while (describeRs.next()) {
+ String columnName = describeRs.getString(1);
+ String dataType = describeRs.getString(2);
+ if (null != columnName) {
+ columnName = columnName.trim();
+ }
+ if (null != dataType) {
+ dataType = dataType.trim();
+ }
+          if (null != columnName && columnName.contains("Owner:")) {
+ tableNode.put("createdBy", dataType);
+ }
+          if (null != columnName && columnName.contains("CreateTime:") && null != dataType) {
+ long createdAt = sdf.parse(dataType).getTime() / 1000;
+ tableNode.put("createdAt", createdAt);
+ break;
+ }
+ }
+ describeRs.close();
+ tableNode.put("databaseName", dbName);
+ tableNode.put("tableName", tableName);
+ tableNode.put("lastAccessAt", 0);
+ tableNode.put("isView", false);
+ tablesNode.add(tableNode);
+ }
+ } finally {
+ destroy();
+ }
+
+ return tablesNode;
+ }
+
+  /**
+   * Lists all column metadata of the given table.
+   *
+   * @param dbName database name
+   * @param tbName table name
+   */
+ public static JsonNode getColumns(String username, String dbName, String tbName)
+ throws Exception {
+ ArrayNode columnsNode = jsonMapper.createArrayNode();
+ List