From 7f310eef5b672d7dad1dd2c1c723e5ccbf9a816f Mon Sep 17 00:00:00 2001
From: qinfeng <212841557@qq.com>
Date: Thu, 21 Sep 2023 16:27:39 +0800
Subject: [PATCH 1/6] scriptis left database menu bar supports fetching
 databases, tables and columns via hiveserver2
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../linkis-datasource/linkis-metadata/pom.xml |   6 +
 .../restful/api/DataSourceRestfulApi.java     |  24 +-
 .../linkis/metadata/util/DWSConfig.java       |  12 +
 .../metadata/util/HiveService2Utils.java      | 237 ++++++++++++++++++
 4 files changed, 275 insertions(+), 4 deletions(-)
 create mode 100644 linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java

diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
index 3f683a33f5..3e42b8f138 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
@@ -95,6 +95,12 @@
       <scope>provided</scope>
     </dependency>
 
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-jdbc</artifactId>
+      <version>${hive.version}</version>
+    </dependency>
+
   </dependencies>
 
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
index d1d0fec0f5..24dbf2fb4f 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
@@ -21,6 +21,7 @@
 import org.apache.linkis.metadata.restful.remote.DataSourceRestfulRemote;
 import org.apache.linkis.metadata.service.DataSourceService;
 import org.apache.linkis.metadata.service.HiveMetaWithPermissionService;
+import org.apache.linkis.metadata.util.HiveService2Utils;
 import org.apache.linkis.server.Message;
 import org.apache.linkis.server.utils.ModuleUserUtils;
 
@@ -62,7 +63,12 @@ public class DataSourceRestfulApi implements DataSourceRestfulRemote {
   public Message queryDatabaseInfo(HttpServletRequest req) {
     String userName = ModuleUserUtils.getOperationUser(req, "get dbs");
     try {
-      JsonNode dbs = dataSourceService.getDbs(userName);
+      JsonNode dbs;
+      if (HiveService2Utils.checkHiveServer2Enable()) {
+        dbs = HiveService2Utils.getDbs(userName);
+      } else {
+        dbs = dataSourceService.getDbs(userName);
+      }
       return Message.ok("").data("dbs", dbs);
     } catch (Exception e) {
       logger.error("Failed to get database(获取数据库失败)", e);
@@ -137,7 +143,12 @@ public Message queryTables(
     String userName = ModuleUserUtils.getOperationUser(req, "get tables");
     MetadataQueryParam queryParam = MetadataQueryParam.of(userName).withDbName(database);
     try {
-      JsonNode tables = dataSourceService.queryTables(queryParam);
+      JsonNode tables;
+      if (HiveService2Utils.checkHiveServer2Enable()) {
+        tables = HiveService2Utils.getTables(userName, database);
+      } else {
+        tables = dataSourceService.queryTables(queryParam);
+      }
       return Message.ok("").data("tables", tables);
     } catch (Exception e) {
       logger.error("Failed to queryTables", e);
@@ -160,8 +171,13 @@ public Message queryTableMeta(
     MetadataQueryParam queryParam =
         MetadataQueryParam.of(userName).withDbName(database).withTableName(table);
     try {
-      JsonNode columns =
-          hiveMetaWithPermissionService.getColumnsByDbTableNameAndOptionalUserName(queryParam);
+      JsonNode columns;
+      if (HiveService2Utils.checkHiveServer2Enable()) {
+        columns = HiveService2Utils.getColumns(userName, database, table);
+      } else {
+        columns =
+            hiveMetaWithPermissionService.getColumnsByDbTableNameAndOptionalUserName(queryParam);
+      }
       return Message.ok("").data("columns", columns);
     } catch (Exception e) {
       logger.error("Failed to get data table structure(获取数据表结构失败)", e);
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
index d44cb0a830..52d6bbbc4e 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
@@ -49,4 +49,16 @@ public class DWSConfig {
           "wds.linkis.hdfs.rest.errs", ".*Filesystem closed.*|.*Failed to find any Kerberos tgt.*")
       .getValue();
+
+  public static CommonVars<String> HIVE_SERVER2_URL =
+      CommonVars$.MODULE$.apply("linkis.hive.server2.address", "jdbc:hive2://127.0.0.1:10000/");
+
+  public static CommonVars<String> HIVE_SERVER2_USERNAME =
+      CommonVars$.MODULE$.apply("linkis.hive.server2.username", "");
+
+  public static CommonVars<String> HIVE_SERVER2_PASSWORD =
+      CommonVars$.MODULE$.apply("linkis.hive.server2.password", "");
+
+  public static CommonVars<Boolean> HIVE_SERVER2_ENABLE =
+      CommonVars$.MODULE$.apply("linkis.hive.server2.enable", false);
 }
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
new file mode 100644
index 0000000000..5dff3f19bc
--- /dev/null
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
@@ -0,0 +1,237 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.metadata.util;
+
+import org.apache.commons.lang.StringUtils;
+
+import java.sql.*;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.stream.Collectors;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/** @author Qin* */
+public class HiveService2Utils {
+
+  private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
+
+  private static final String defaultDb = "default";
+  private static Connection conn = null;
+  private static Statement stat = null;
+  private static ResultSet rs = null;
+
+  static ObjectMapper jsonMapper = new ObjectMapper();
+  static SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.US);
+
+  /** 判断是否启动hiveServer2查询左侧菜单栏 */
+  public static Boolean checkHiveServer2Enable() {
+    return DWSConfig.HIVE_SERVER2_ENABLE.getValue();
+  }
+
+  static String hiveServer2Address = DWSConfig.HIVE_SERVER2_URL.getValue();
+  static String hiveServer2Username = DWSConfig.HIVE_SERVER2_USERNAME.getValue();
+  static String hiveServer2Password = DWSConfig.HIVE_SERVER2_PASSWORD.getValue();
+
+  /**
+   * 获取链接
+   *
+   * @param username 用户名
+   */
+  private static void getConn(String username, String db) throws Exception {
+    Class.forName(driverName);
+    String url =
+        hiveServer2Address.endsWith("/") ? hiveServer2Address + db : hiveServer2Address + "/" + db;
+    if (StringUtils.isNotBlank(hiveServer2Username)) {
+      username = hiveServer2Username;
+    }
+    conn = DriverManager.getConnection(url, username, hiveServer2Password);
+    stat = conn.createStatement();
+  }
+
+  /** 获取数据库 */
+  public static JsonNode getDbs(String username) throws Exception {
+    ArrayNode dbsNode = jsonMapper.createArrayNode();
+    List<String> dbs = new CopyOnWriteArrayList<>();
+    try {
+      getConn(username, defaultDb);
+      rs = stat.executeQuery("show databases");
+      while (rs.next()) {
+        dbs.add(rs.getString(1));
+      }
+    } finally {
+      destroy();
+    }
+    for (String db : dbs) {
+      ObjectNode dbNode = jsonMapper.createObjectNode();
+      dbNode.put("dbName", db);
+      dbsNode.add(dbNode);
+    }
+    return dbsNode;
+  }
+
+  /**
+   * 获取指定数据库的所有表
+   *
+   * @param dbName 数据库
+   */
+  public static JsonNode getTables(String username, String dbName) throws Exception {
+    ArrayNode tablesNode = jsonMapper.createArrayNode();
+    try {
+      List<String> tableNames = new ArrayList<>();
+      getConn(username, dbName);
+      rs = stat.executeQuery("show tables");
+      while (rs.next()) {
+        String tableName = rs.getString(1);
+        tableNames.add(tableName);
+      }
+
+      // 获取每个表的详细信息
+      for (String tableName : tableNames) {
+        ObjectNode tableNode = jsonMapper.createObjectNode();
+        // 获取表详细信息
+        ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tableName);
+        while (describeRs.next()) {
+          String columnName = describeRs.getString(1);
+          String dataType = describeRs.getString(2);
+          if (null != columnName) {
+            columnName = columnName.trim();
+          }
+          if (null != dataType) {
+            dataType = dataType.trim();
+          }
+          if (columnName.contains("Owner:")) {
+            tableNode.put("createdBy", dataType);
+          }
+          if (columnName.contains("CreateTime:")) {
+            long createdAt = sdf.parse(dataType).getTime() / 1000;
+            tableNode.put("createdAt", createdAt);
+            break;
+          }
+        }
+        describeRs.close();
+        tableNode.put("databaseName", dbName);
+        tableNode.put("tableName", tableName);
+        tableNode.put("lastAccessAt", 0);
+        tableNode.put("isView", false);
+        tablesNode.add(tableNode);
+      }
+    } finally {
+      destroy();
+    }
+
+    return tablesNode;
+  }
+
+  /**
+   * 获取指定表所有字段信息
+   *
+   * @param dbName 数据库
+   * @param tbName 数据表
+   */
+  public static JsonNode getColumns(String username, String dbName, String tbName)
+      throws Exception {
+    ArrayNode columnsNode = jsonMapper.createArrayNode();
+    List<Map<String, Object>> columnMapList = new ArrayList<>();
+    List<String> partitionColumnList = new ArrayList<>();
+    try {
+      getConn(username, dbName);
+      rs = stat.executeQuery("desc " + tbName);
+      while (rs.next()) {
+        Map<String, Object> colum = new HashMap<>();
+        String colName = rs.getString("col_name");
+        String dataType = rs.getString("data_type");
+        if (StringUtils.isNotBlank(colName)
+            && StringUtils.isNotBlank(dataType)
+            && !colName.contains("# Partition Information")
+            && !colName.contains("# col_name")) {
+          colum.put("columnName", rs.getString("col_name"));
+          colum.put("columnType", rs.getString("data_type"));
+          colum.put("columnComment", rs.getString("comment"));
+          columnMapList.add(colum);
+        }
+      }
+
+      boolean partition = false;
+      boolean parColName = false;
+      ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tbName);
+      while (describeRs.next()) {
+        String columnName = describeRs.getString(1);
+        String dataType = describeRs.getString(2);
+        if (null != columnName) {
+          columnName = columnName.trim();
+        }
+        if (null != dataType) {
+          dataType = dataType.trim();
+        }
+
+        // 判断获取分区字段
+        if (columnName.contains("# Partition Information")) {
+          partition = true;
+          parColName = false;
+          continue;
+        }
+        if (columnName.contains("# col_name")) {
+          parColName = true;
+          continue;
+        }
+
+        if (partition && parColName) {
+          if ("".equals(columnName) && null == dataType) {
+            partition = false;
+            parColName = false;
+          } else {
+            partitionColumnList.add(columnName);
+          }
+        }
+      }
+      describeRs.close();
+    } finally {
+      destroy();
+    }
+
+    for (Map<String, Object> map : columnMapList.stream().distinct().collect(Collectors.toList())) {
+      ObjectNode fieldNode = jsonMapper.createObjectNode();
+      String columnName = map.get("columnName").toString();
+      fieldNode.put("columnName", columnName);
+      fieldNode.put("columnType", map.get("columnType").toString());
+      fieldNode.put("columnComment", map.get("columnComment").toString());
+      fieldNode.put("partitioned", partitionColumnList.contains(columnName));
+      columnsNode.add(fieldNode);
+    }
+
+    return columnsNode;
+  }
+
+  // 释放资源
+  private static void destroy() throws SQLException {
+    if (rs != null) {
+      rs.close();
+    }
+    if (stat != null) {
+      stat.close();
+    }
+    if (conn != null) {
+      conn.close();
+    }
+  }
+}

From e99b1c08f64cfb62433e69dbe4ad8bcc39bfd1bd Mon Sep 17 00:00:00 2001
From: qinfeng <212841557@qq.com>
Date: Fri, 22 Sep 2023 15:21:30 +0800
Subject: [PATCH 2/6] scriptis left database menu bar supports fetching
 databases, tables and columns via hiveserver2

---
 .../linkis/metadata/util/DWSConfig.java       |   6 -
 .../metadata/util/HiveService2Utils.java      | 390 +++++++++---------
 2 files changed, 199 insertions(+), 197 deletions(-)

diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
index 52d6bbbc4e..379c09449a 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
@@ -53,12 +53,6 @@ public class DWSConfig {
   public static CommonVars<String> HIVE_SERVER2_URL =
       CommonVars$.MODULE$.apply("linkis.hive.server2.address", "jdbc:hive2://127.0.0.1:10000/");
 
-  public static CommonVars<String> HIVE_SERVER2_USERNAME =
-      CommonVars$.MODULE$.apply("linkis.hive.server2.username", "");
-
-  public static CommonVars<String> HIVE_SERVER2_PASSWORD =
-      CommonVars$.MODULE$.apply("linkis.hive.server2.password", "");
-
   public static CommonVars<Boolean> HIVE_SERVER2_ENABLE =
       CommonVars$.MODULE$.apply("linkis.hive.server2.enable", false);
 }
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
index 5dff3f19bc..8069e37ba1 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
@@ -19,10 +19,17 @@
 
 import org.apache.commons.lang.StringUtils;
 
-import java.sql.*;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.sql.DriverManager;
+import java.util.Locale;
+import java.util.List;
+import java.util.Map;
+import java.util.HashMap;
+import java.sql.SQLException;
+import java.util.ArrayList;
 import java.text.SimpleDateFormat;
-import java.util.*;
-import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.stream.Collectors;
 
 import com.fasterxml.jackson.databind.JsonNode;
@@ -30,208 +37,209 @@
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 
-/** @author Qin* */
 public class HiveService2Utils {
 
-  private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
-
-  private static final String defaultDb = "default";
-  private static Connection conn = null;
-  private static Statement stat = null;
-  private static ResultSet rs = null;
-
-  static ObjectMapper jsonMapper = new ObjectMapper();
-  static SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.US);
-
-  /** 判断是否启动hiveServer2查询左侧菜单栏 */
-  public static Boolean checkHiveServer2Enable() {
-    return DWSConfig.HIVE_SERVER2_ENABLE.getValue();
-  }
-
-  static String hiveServer2Address = DWSConfig.HIVE_SERVER2_URL.getValue();
-  static String hiveServer2Username = DWSConfig.HIVE_SERVER2_USERNAME.getValue();
-  static String hiveServer2Password = DWSConfig.HIVE_SERVER2_PASSWORD.getValue();
-
-  /**
-   * 获取链接
-   *
-   * @param username 用户名
-   */
-  private static void getConn(String username, String db) throws Exception {
-    Class.forName(driverName);
-    String url =
-        hiveServer2Address.endsWith("/") ? hiveServer2Address + db : hiveServer2Address + "/" + db;
-    if (StringUtils.isNotBlank(hiveServer2Username)) {
-      username = hiveServer2Username;
-    }
-    conn = DriverManager.getConnection(url, username, hiveServer2Password);
-    stat = conn.createStatement();
-  }
-
-  /** 获取数据库 */
-  public static JsonNode getDbs(String username) throws Exception {
-    ArrayNode dbsNode = jsonMapper.createArrayNode();
-    List<String> dbs = new CopyOnWriteArrayList<>();
-    try {
-      getConn(username, defaultDb);
-      rs = stat.executeQuery("show databases");
-      while (rs.next()) {
-        dbs.add(rs.getString(1));
-      }
-    } finally {
-      destroy();
-    }
-    for (String db : dbs) {
-      ObjectNode dbNode = jsonMapper.createObjectNode();
-      dbNode.put("dbName", db);
-      dbsNode.add(dbNode);
-    }
-    return dbsNode;
-  }
-
-  /**
-   * 获取指定数据库的所有表
-   *
-   * @param dbName 数据库
-   */
-  public static JsonNode getTables(String username, String dbName) throws Exception {
-    ArrayNode tablesNode = jsonMapper.createArrayNode();
-    try {
-      List<String> tableNames = new ArrayList<>();
-      getConn(username, dbName);
-      rs = stat.executeQuery("show tables");
-      while (rs.next()) {
-        String tableName = rs.getString(1);
-        tableNames.add(tableName);
-      }
-
-      // 获取每个表的详细信息
-      for (String tableName : tableNames) {
-        ObjectNode tableNode = jsonMapper.createObjectNode();
-        // 获取表详细信息
-        ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tableName);
-        while (describeRs.next()) {
-          String columnName = describeRs.getString(1);
-          String dataType = describeRs.getString(2);
-          if (null != columnName) {
-            columnName = columnName.trim();
-          }
-          if (null != dataType) {
-            dataType = dataType.trim();
-          }
-          if (columnName.contains("Owner:")) {
-            tableNode.put("createdBy", dataType);
-          }
-          if (columnName.contains("CreateTime:")) {
-            long createdAt = sdf.parse(dataType).getTime() / 1000;
-            tableNode.put("createdAt", createdAt);
-            break;
-          }
-        }
-        describeRs.close();
-        tableNode.put("databaseName", dbName);
-        tableNode.put("tableName", tableName);
-        tableNode.put("lastAccessAt", 0);
-        tableNode.put("isView", false);
-        tablesNode.add(tableNode);
-      }
-    } finally {
-      destroy();
-    }
-
-    return tablesNode;
-  }
-
-  /**
-   * 获取指定表所有字段信息
-   *
-   * @param dbName 数据库
-   * @param tbName 数据表
-   */
-  public static JsonNode getColumns(String username, String dbName, String tbName)
-      throws Exception {
-    ArrayNode columnsNode = jsonMapper.createArrayNode();
-    List<Map<String, Object>> columnMapList = new ArrayList<>();
-    List<String> partitionColumnList = new ArrayList<>();
-    try {
-      getConn(username, dbName);
-      rs = stat.executeQuery("desc " + tbName);
-      while (rs.next()) {
-        Map<String, Object> colum = new HashMap<>();
-        String colName = rs.getString("col_name");
-        String dataType = rs.getString("data_type");
-        if (StringUtils.isNotBlank(colName)
-            && StringUtils.isNotBlank(dataType)
-            && !colName.contains("# Partition Information")
-            && !colName.contains("# col_name")) {
-          colum.put("columnName", rs.getString("col_name"));
-          colum.put("columnType", rs.getString("data_type"));
-          colum.put("columnComment", rs.getString("comment"));
-          columnMapList.add(colum);
-        }
-      }
-
-      boolean partition = false;
-      boolean parColName = false;
-      ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tbName);
-      while (describeRs.next()) {
-        String columnName = describeRs.getString(1);
-        String dataType = describeRs.getString(2);
-        if (null != columnName) {
-          columnName = columnName.trim();
-        }
-        if (null != dataType) {
-          dataType = dataType.trim();
-        }
-
-        // 判断获取分区字段
-        if (columnName.contains("# Partition Information")) {
-          partition = true;
-          parColName = false;
-          continue;
-        }
-        if (columnName.contains("# col_name")) {
-          parColName = true;
-          continue;
-        }
-
-        if (partition && parColName) {
-          if ("".equals(columnName) && null == dataType) {
-            partition = false;
-            parColName = false;
-          } else {
-            partitionColumnList.add(columnName);
-          }
-        }
-      }
-      describeRs.close();
-    } finally {
-      destroy();
-    }
-
-    for (Map<String, Object> map : columnMapList.stream().distinct().collect(Collectors.toList())) {
-      ObjectNode fieldNode = jsonMapper.createObjectNode();
-      String columnName = map.get("columnName").toString();
-      fieldNode.put("columnName", columnName);
-      fieldNode.put("columnType", map.get("columnType").toString());
-      fieldNode.put("columnComment", map.get("columnComment").toString());
-      fieldNode.put("partitioned", partitionColumnList.contains(columnName));
-      columnsNode.add(fieldNode);
-    }
-
-    return columnsNode;
-  }
-
-  // 释放资源
-  private static void destroy() throws SQLException {
-    if (rs != null) {
-      rs.close();
-    }
-    if (stat != null) {
-      stat.close();
-    }
-    if (conn != null) {
-      conn.close();
-    }
-  }
+    private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
+
+    private static final String defaultDb = "default";
+    private static final String defaultPassword = "123456";
+
+    static ObjectMapper jsonMapper = new ObjectMapper();
+    static SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.US);
+
+    /**
+     * Determine whether to start hiveServer2 Query the left menu bar
+     */
+    public static Boolean checkHiveServer2Enable() {
+        return DWSConfig.HIVE_SERVER2_ENABLE.getValue();
+    }
+
+    /**
+     * Get connection
+     */
+    private static Connection getConn(String username, String db) throws Exception {
+        Class.forName(driverName);
+        String hiveServer2Address = DWSConfig.HIVE_SERVER2_URL.getValue();
+        String url =
+                hiveServer2Address.endsWith("/") ? hiveServer2Address + db : hiveServer2Address + "/" + db;
+        return DriverManager.getConnection(url, username, defaultPassword);
+    }
+
+    /**
+     * Get database
+     */
+    public static JsonNode getDbs(String username) throws Exception {
+        ArrayNode dbsNode = jsonMapper.createArrayNode();
+        List<String> dbs = new ArrayList<>();
+        Connection conn = null;
+        Statement stat = null;
+        ResultSet rs = null;
+        try {
+            conn = getConn(username, defaultDb);
+            stat = conn.createStatement();
+            rs = stat.executeQuery("show databases");
+            while (rs.next()) {
+                dbs.add(rs.getString(1));
+            }
+        } finally {
+            close(conn, stat, rs);
+        }
+        for (String db : dbs) {
+            ObjectNode dbNode = jsonMapper.createObjectNode();
+            dbNode.put("dbName", db);
+            dbsNode.add(dbNode);
+        }
+        return dbsNode;
+    }
+
+    /**
+     * Gets all tables for the specified database
+     */
+    public static JsonNode getTables(String username, String dbName) throws Exception {
+        ArrayNode tablesNode = jsonMapper.createArrayNode();
+        Connection conn = null;
+        Statement stat = null;
+        ResultSet rs = null;
+        try {
+            List<String> tableNames = new ArrayList<>();
+            conn = getConn(username, dbName);
+            stat = conn.createStatement();
+            rs = stat.executeQuery("show tables");
+            while (rs.next()) {
+                String tableName = rs.getString(1);
+                tableNames.add(tableName);
+            }
+
+            // 获取每个表的详细信息
+            for (String tableName : tableNames) {
+                ObjectNode tableNode = jsonMapper.createObjectNode();
+                // 获取表详细信息
+                ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tableName);
+                while (describeRs.next()) {
+                    String columnName = describeRs.getString(1);
+                    String dataType = describeRs.getString(2);
+                    if (null != columnName) {
+                        columnName = columnName.trim();
+                    }
+                    if (null != dataType) {
+                        dataType = dataType.trim();
+                    }
+                    if (columnName.contains("Owner:")) {
+                        tableNode.put("createdBy", dataType);
+                    }
+                    if (columnName.contains("CreateTime:")) {
+                        long createdAt = sdf.parse(dataType).getTime() / 1000;
+                        tableNode.put("createdAt", createdAt);
+                        break;
+                    }
+                }
+                describeRs.close();
+                tableNode.put("databaseName", dbName);
+                tableNode.put("tableName", tableName);
+                tableNode.put("lastAccessAt", 0);
+                tableNode.put("isView", false);
+                tablesNode.add(tableNode);
+            }
+        } finally {
+            close(conn, stat, rs);
+        }
+
+        return tablesNode;
+    }
+
+    /**
+     * Gets information about all fields of a specified table
+     */
+    public static JsonNode getColumns(String username, String dbName, String tbName)
+            throws Exception {
+        ArrayNode columnsNode = jsonMapper.createArrayNode();
+        List<Map<String, Object>> columnMapList = new ArrayList<>();
+        List<String> partitionColumnList = new ArrayList<>();
+        Connection conn = null;
+        Statement stat = null;
+        ResultSet rs = null;
+        try {
+            conn = getConn(username, dbName);
+            stat = conn.createStatement();
+            rs = stat.executeQuery("desc " + tbName);
+            while (rs.next()) {
+                Map<String, Object> colum = new HashMap<>();
+                String colName = rs.getString("col_name");
+                String dataType = rs.getString("data_type");
+                if (StringUtils.isNotBlank(colName)
+                        && StringUtils.isNotBlank(dataType)
+                        && !colName.contains("# Partition Information")
+                        && !colName.contains("# col_name")) {
+                    colum.put("columnName", rs.getString("col_name"));
+                    colum.put("columnType", rs.getString("data_type"));
+                    colum.put("columnComment", rs.getString("comment"));
+                    columnMapList.add(colum);
+                }
+            }
+
+            boolean partition = false;
+            boolean parColName = false;
+            ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tbName);
+            while (describeRs.next()) {
+                String columnName = describeRs.getString(1);
+                String dataType = describeRs.getString(2);
+                if (null != columnName) {
+                    columnName = columnName.trim();
+                }
+                if (null != dataType) {
+                    dataType = dataType.trim();
+                }
+
+                // 判断获取分区字段
+                if (columnName.contains("# Partition Information")) {
+                    partition = true;
+                    parColName = false;
+                    continue;
+                }
+                if (columnName.contains("# col_name")) {
+                    parColName = true;
+                    continue;
+                }
+
+                if (partition && parColName) {
+                    if ("".equals(columnName) && null == dataType) {
+                        partition = false;
+                        parColName = false;
+                    } else {
+                        partitionColumnList.add(columnName);
+                    }
+                }
+            }
+            describeRs.close();
+        } finally {
+            close(conn, stat, rs);
+        }
+
+        for (Map<String, Object> map : columnMapList.stream().distinct().collect(Collectors.toList())) {
+            ObjectNode fieldNode = jsonMapper.createObjectNode();
+            String columnName = map.get("columnName").toString();
+            fieldNode.put("columnName", columnName);
+            fieldNode.put("columnType", map.get("columnType").toString());
+            fieldNode.put("columnComment", map.get("columnComment").toString());
+            fieldNode.put("partitioned", partitionColumnList.contains(columnName));
+            columnsNode.add(fieldNode);
+        }
+
+        return columnsNode;
+    }
+
+    /**
+     * Close resource
+     */
+    private static void close(Connection conn, Statement stat, ResultSet rs) throws SQLException {
+        if (rs != null) {
+            rs.close();
+        }
+        if (stat != null) {
+            stat.close();
+        }
+        if (conn != null) {
+            conn.close();
+        }
+    }
 }

From 922de12c41a94dc9b9455c42cddc20bebdbd4fe6 Mon Sep 17 00:00:00 2001
From: qinfeng <212841557@qq.com>
Date: Fri, 22 Sep 2023 15:29:06 +0800
Subject: [PATCH 3/6] scriptis left database menu bar supports fetching
 databases, tables and columns via hiveserver2

---
 .../metadata/util/HiveService2Utils.java      | 378 +++++++++---------
 1 file changed, 183 insertions(+), 195 deletions(-)

diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
index 8069e37ba1..4e8026ae82 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
@@ -20,16 +20,16 @@
 import org.apache.commons.lang.StringUtils;
 
 import java.sql.Connection;
+import java.sql.DriverManager;
 import java.sql.ResultSet;
+import java.sql.SQLException;
 import java.sql.Statement;
-import java.sql.DriverManager;
-import java.util.Locale;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
-import java.util.HashMap;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.text.SimpleDateFormat;
 import java.util.stream.Collectors;
 
 import com.fasterxml.jackson.databind.JsonNode;
@@ -39,207 +39,195 @@ public class HiveService2Utils {
 
-    private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
-
-    private static final String defaultDb = "default";
-    private static final String defaultPassword = "123456";
-
-    static ObjectMapper jsonMapper = new ObjectMapper();
-    static SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.US);
-
-    /**
-     * Determine whether to start hiveServer2 Query the left menu bar
-     */
-    public static Boolean checkHiveServer2Enable() {
-        return DWSConfig.HIVE_SERVER2_ENABLE.getValue();
-    }
-
-    /**
-     * Get connection
-     */
-    private static Connection getConn(String username, String db) throws Exception {
-        Class.forName(driverName);
-        String hiveServer2Address = DWSConfig.HIVE_SERVER2_URL.getValue();
-        String url =
-                hiveServer2Address.endsWith("/") ? hiveServer2Address + db : hiveServer2Address + "/" + db;
-        return DriverManager.getConnection(url, username, defaultPassword);
-    }
-
-    /**
-     * Get database
-     */
-    public static JsonNode getDbs(String username) throws Exception {
-        ArrayNode dbsNode = jsonMapper.createArrayNode();
-        List<String> dbs = new ArrayList<>();
-        Connection conn = null;
-        Statement stat = null;
-        ResultSet rs = null;
-        try {
-            conn = getConn(username, defaultDb);
-            stat = conn.createStatement();
-            rs = stat.executeQuery("show databases");
-            while (rs.next()) {
-                dbs.add(rs.getString(1));
-            }
-        } finally {
-            close(conn, stat, rs);
-        }
-        for (String db : dbs) {
-            ObjectNode dbNode = jsonMapper.createObjectNode();
-            dbNode.put("dbName", db);
-            dbsNode.add(dbNode);
-        }
-        return dbsNode;
-    }
-
-    /**
-     * Gets all tables for the specified database
-     */
-    public static JsonNode getTables(String username, String dbName) throws Exception {
-        ArrayNode tablesNode = jsonMapper.createArrayNode();
-        Connection conn = null;
-        Statement stat = null;
-        ResultSet rs = null;
-        try {
-            List<String> tableNames = new ArrayList<>();
-            conn = getConn(username, dbName);
-            stat = conn.createStatement();
-            rs = stat.executeQuery("show tables");
-            while (rs.next()) {
-                String tableName = rs.getString(1);
-                tableNames.add(tableName);
-            }
-
-            // 获取每个表的详细信息
-            for (String tableName : tableNames) {
-                ObjectNode tableNode = jsonMapper.createObjectNode();
-                // 获取表详细信息
-                ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tableName);
-                while (describeRs.next()) {
-                    String columnName = describeRs.getString(1);
-                    String dataType = describeRs.getString(2);
-                    if (null != columnName) {
-                        columnName = columnName.trim();
-                    }
-                    if (null != dataType) {
-                        dataType = dataType.trim();
-                    }
-                    if (columnName.contains("Owner:")) {
-                        tableNode.put("createdBy", dataType);
-                    }
-                    if (columnName.contains("CreateTime:")) {
-                        long createdAt = sdf.parse(dataType).getTime() / 1000;
-                        tableNode.put("createdAt", createdAt);
-                        break;
-                    }
-                }
-                describeRs.close();
-                tableNode.put("databaseName", dbName);
-                tableNode.put("tableName", tableName);
-                tableNode.put("lastAccessAt", 0);
-                tableNode.put("isView", false);
-                tablesNode.add(tableNode);
-            }
-        } finally {
-            close(conn, stat, rs);
-        }
-
-        return tablesNode;
-    }
-
-    /**
-     * Gets information about all fields of a specified table
-     */
-    public static JsonNode getColumns(String username, String dbName, String tbName)
-            throws Exception {
-        ArrayNode columnsNode = jsonMapper.createArrayNode();
-        List<Map<String, Object>> columnMapList = new ArrayList<>();
-        List<String> partitionColumnList = new ArrayList<>();
-        Connection conn = null;
-        Statement stat = null;
-        ResultSet rs = null;
-        try {
-            conn = getConn(username, dbName);
-            stat = conn.createStatement();
-            rs = stat.executeQuery("desc " + tbName);
-            while (rs.next()) {
-                Map<String, Object> colum = new HashMap<>();
-                String colName = rs.getString("col_name");
-                String dataType = rs.getString("data_type");
-                if (StringUtils.isNotBlank(colName)
-                        && StringUtils.isNotBlank(dataType)
-                        && !colName.contains("# Partition Information")
-                        && !colName.contains("# col_name")) {
-                    colum.put("columnName", rs.getString("col_name"));
-                    colum.put("columnType", rs.getString("data_type"));
-                    colum.put("columnComment", rs.getString("comment"));
-                    columnMapList.add(colum);
-                }
-            }
-
-            boolean partition = false;
-            boolean parColName = false;
-            ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tbName);
-            while (describeRs.next()) {
-                String columnName = describeRs.getString(1);
-                String dataType = describeRs.getString(2);
-                if (null != columnName) {
-                    columnName = columnName.trim();
-                }
-                if (null != dataType) {
-                    dataType = dataType.trim();
-                }
-
-                // 判断获取分区字段
-                if (columnName.contains("# Partition Information")) {
-                    partition = true;
-                    parColName = false;
-                    continue;
-                }
-                if (columnName.contains("# col_name")) {
-                    parColName = true;
-                    continue;
-                }
-
-                if (partition && parColName) {
-                    if ("".equals(columnName) && null == dataType) {
-                        partition = false;
-                        parColName = false;
-                    } else {
-                        partitionColumnList.add(columnName);
-                    }
-                }
-            }
-            describeRs.close();
-        } finally {
-            close(conn, stat, rs);
-        }
-
-        for (Map<String, Object> map : columnMapList.stream().distinct().collect(Collectors.toList())) {
-            ObjectNode fieldNode = jsonMapper.createObjectNode();
-            String columnName = map.get("columnName").toString();
-            fieldNode.put("columnName", columnName);
-            fieldNode.put("columnType", map.get("columnType").toString());
-            fieldNode.put("columnComment", map.get("columnComment").toString());
-            fieldNode.put("partitioned", partitionColumnList.contains(columnName));
-            columnsNode.add(fieldNode);
-        }
-
-        return columnsNode;
-    }
-
-    /**
-     * Close resource
-     */
-    private static void close(Connection conn, Statement stat, ResultSet rs) throws SQLException {
-        if (rs != null) {
-            rs.close();
-        }
-        if (stat != null) {
-            stat.close();
-        }
-        if (conn != null) {
-            conn.close();
-        }
-    }
+  private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
+
+  private static final String defaultDb = "default";
+  private static final String defaultPassword = "123456";
+
+  static ObjectMapper jsonMapper = new ObjectMapper();
+  static SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.US);
+
+  /** Determine whether to start hiveServer2 Query the left menu bar */
+  public static Boolean checkHiveServer2Enable() {
+    return DWSConfig.HIVE_SERVER2_ENABLE.getValue();
+  }
+
+  /** Get connection */
+  private static Connection getConn(String username, String db) throws Exception {
+    Class.forName(driverName);
+    String hiveServer2Address = DWSConfig.HIVE_SERVER2_URL.getValue();
+    String url =
+        hiveServer2Address.endsWith("/") ? hiveServer2Address + db : hiveServer2Address + "/" + db;
+    return DriverManager.getConnection(url, username, defaultPassword);
+  }
+
+  /** Get database */
+  public static JsonNode getDbs(String username) throws Exception {
+    ArrayNode dbsNode = jsonMapper.createArrayNode();
+    List<String> dbs = new ArrayList<>();
+    Connection conn = null;
+    Statement stat = null;
+    ResultSet rs = null;
+    try {
+      conn = getConn(username, defaultDb);
+      stat = conn.createStatement();
+      rs = stat.executeQuery("show databases");
+      while (rs.next()) {
+        dbs.add(rs.getString(1));
+      }
+    } finally {
+      close(conn, stat, rs);
+    }
+    for (String db : dbs) {
+      ObjectNode dbNode = jsonMapper.createObjectNode();
+      dbNode.put("dbName", db);
+      dbsNode.add(dbNode);
+    }
+    return dbsNode;
+  }
+
+  /** Gets all tables for the specified database */
+  public static JsonNode getTables(String username, String dbName) throws Exception {
+    ArrayNode tablesNode = jsonMapper.createArrayNode();
+    Connection conn = null;
+    Statement stat = null;
+    ResultSet rs = null;
+    try {
+      List<String> tableNames = new ArrayList<>();
+      conn = getConn(username, dbName);
+      stat = conn.createStatement();
+      rs = stat.executeQuery("show tables");
+      while (rs.next()) {
+        String tableName = rs.getString(1);
+        tableNames.add(tableName);
+      }
+
+      // 获取每个表的详细信息
+      for (String tableName : tableNames) {
+        ObjectNode tableNode = jsonMapper.createObjectNode();
+        // 获取表详细信息
+        ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tableName);
+        while (describeRs.next()) {
+          String columnName = describeRs.getString(1);
+          String dataType = describeRs.getString(2);
+          if (null != columnName) {
+            columnName = columnName.trim();
+          }
+          if (null != dataType) {
+            dataType = dataType.trim();
+          }
+          if (columnName.contains("Owner:")) {
+            tableNode.put("createdBy", dataType);
+          }
+          if (columnName.contains("CreateTime:")) {
+            long createdAt = sdf.parse(dataType).getTime() / 1000;
+            tableNode.put("createdAt", createdAt);
+            break;
+          }
+        }
+        describeRs.close();
+        tableNode.put("databaseName", dbName);
+        tableNode.put("tableName", tableName);
+        tableNode.put("lastAccessAt", 0);
+        tableNode.put("isView", false);
+        tablesNode.add(tableNode);
+      }
+    } finally {
+      close(conn, stat, rs);
+    }
+
+    return tablesNode;
+  }
+
+  /** Gets information about all fields of a specified table */
+  public static JsonNode getColumns(String username, String dbName, String tbName)
+      throws Exception {
+    ArrayNode columnsNode = jsonMapper.createArrayNode();
+    List<Map<String, Object>> columnMapList = new ArrayList<>();
+    List<String> partitionColumnList = new ArrayList<>();
+    Connection conn = null;
+    Statement stat = null;
+    ResultSet rs = null;
+    try {
+      conn = getConn(username, dbName);
+      stat = conn.createStatement();
+      rs = stat.executeQuery("desc " + tbName);
+      while (rs.next()) {
+        Map<String, Object> colum = new HashMap<>();
+        String colName = rs.getString("col_name");
+        String dataType = rs.getString("data_type");
+        if (StringUtils.isNotBlank(colName)
+            && StringUtils.isNotBlank(dataType)
+            && !colName.contains("# Partition Information")
+            && !colName.contains("# col_name")) {
+          colum.put("columnName", rs.getString("col_name"));
+          colum.put("columnType", rs.getString("data_type"));
+          colum.put("columnComment", rs.getString("comment"));
+          columnMapList.add(colum);
+        }
+      }
+
+      boolean partition = false;
+      boolean parColName = false;
+      ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tbName);
+      while (describeRs.next()) {
+        String columnName = describeRs.getString(1);
+        String dataType = describeRs.getString(2);
+        if (null != columnName) {
+          columnName = columnName.trim();
+        }
+        if (null != dataType) {
+          dataType = dataType.trim();
+        }
+
+        // 判断获取分区字段
+        if (columnName.contains("# Partition Information")) {
+          partition = true;
+          parColName = false;
+          continue;
+        }
+        if (columnName.contains("# col_name")) {
+          parColName = true;
+          continue;
+        }
+
+        if (partition && parColName) {
+          if ("".equals(columnName) && null == dataType) {
+            partition = false;
+            parColName = false;
+          } else {
+            partitionColumnList.add(columnName);
+          }
+        }
+      }
+      describeRs.close();
+    } finally {
+      close(conn, stat, rs);
+    }
+
+    for (Map<String, Object> map : columnMapList.stream().distinct().collect(Collectors.toList())) {
+      ObjectNode fieldNode = jsonMapper.createObjectNode();
+      String columnName = map.get("columnName").toString();
+      fieldNode.put("columnName", columnName);
+      fieldNode.put("columnType", map.get("columnType").toString());
+      fieldNode.put("columnComment", map.get("columnComment").toString());
+      fieldNode.put("partitioned", partitionColumnList.contains(columnName));
+      columnsNode.add(fieldNode);
+    }
+
+    return columnsNode;
+  }
+
+  /** Close resource */
+  private static void close(Connection conn, Statement stat, ResultSet rs) throws SQLException {
+    if (rs != null) {
+      rs.close();
+    }
+    if (stat != null) {
+      stat.close();
+    }
+    if (conn != null) {
+      conn.close();
+    }
+  }
 }

From 9452ba50d1cf2536e0d7db96db62e9d47a85003b Mon Sep 17 00:00:00 2001
From: qinfeng <212841557@qq.com>
Date: Fri, 22 Sep 2023 15:41:28 +0800
Subject: [PATCH 4/6] Change the Chinese comments to English

---
 .../org/apache/linkis/metadata/util/HiveService2Utils.java | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
index 4e8026ae82..09406cfb42 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
@@ -102,10 +102,9 @@ public static JsonNode getTables(String username, String dbName) throws Exceptio
         tableNames.add(tableName);
       }
 
-      // 获取每个表的详细信息
+      // Get detailed information about each table
       for (String tableName : tableNames) {
         ObjectNode tableNode = jsonMapper.createObjectNode();
-        // 获取表详细信息
         ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tableName);
         while (describeRs.next()) {
           String columnName = describeRs.getString(1);
@@ -180,7 +179,7 @@ public static JsonNode getColumns(String username, String dbName, String tbName)
           dataType = dataType.trim();
         }
 
-        // 判断获取分区字段
+        // Judgment Get partition field
         if (columnName.contains("# Partition Information")) {
           partition = true;
           parColName = false;

From f1a1c0efc70885d9b76ed5e6ced72b71e0b93ca1 Mon Sep 17 00:00:00 2001
From: qinfeng <212841557@qq.com>
Date: Fri, 22 Sep 2023 15:52:29 +0800
Subject: [PATCH 5/6] scriptis left database menu bar supports fetching
 databases, tables and columns via hiveserver2

---
 .../java/org/apache/linkis/metadata/util/HiveService2Utils.java | 1 -
 1 file changed, 1 deletion(-)

diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
index 09406cfb42..b8aaef08c6 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
@@ -179,7 +179,6 @@ public static JsonNode getColumns(String username, String dbName, String tbName)
           dataType = dataType.trim();
         }
 
-        // Judgment Get partition field
         if (columnName.contains("# Partition Information")) {
           partition = true;
           parColName = false;

From 42e7b04950dd1dda8cf690dfbac8941978fb0936 Mon Sep 17 00:00:00 2001
From: qinfeng <212841557@qq.com>
Date: Fri, 22 Sep 2023 15:59:23 +0800
Subject: [PATCH 6/6] Translation of comments to English

---
 .../java/org/apache/linkis/metadata/util/HiveService2Utils.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
index b8aaef08c6..0d8e43b99c 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
@@ -179,6 +179,7 @@ public static JsonNode getColumns(String username, String dbName, String tbName)
           dataType = dataType.trim();
         }
 
+        // Partition field judgment
        if (columnName.contains("# Partition Information")) {
          partition = true;
          parColName = false;
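Usage sketch: the series ships no example of the new entry points, so the following minimal Java sketch mirrors the branch the patches add to DataSourceRestfulApi. It assumes the patched DWSConfig and HiveService2Utils classes are on the classpath, that linkis.hive.server2.enable=true and linkis.hive.server2.address point at a reachable HiveServer2 in the service's properties (the toggle defaults to false), and that the user name "hadoop", database "default" and table "demo_table" are placeholders; note that after PATCH 2/6 the JDBC password is the hardcoded defaultPassword, so the target HiveServer2 must accept it.

import org.apache.linkis.metadata.util.HiveService2Utils;

import com.fasterxml.jackson.databind.JsonNode;

public class HiveService2UtilsSketch {

  public static void main(String[] args) throws Exception {
    // Same guard as the patched REST endpoints: when the toggle is off,
    // callers fall back to the metastore-backed DataSourceService.
    if (HiveService2Utils.checkHiveServer2Enable()) {
      // Runs "show databases" over the HiveServer2 JDBC connection
      JsonNode dbs = HiveService2Utils.getDbs("hadoop");
      // Runs "show tables" plus DESCRIBE FORMATTED per table (owner, create time)
      JsonNode tables = HiveService2Utils.getTables("hadoop", "default");
      // Runs "desc" and DESCRIBE FORMATTED, flagging partition columns
      JsonNode columns = HiveService2Utils.getColumns("hadoop", "default", "demo_table");
      System.out.println(dbs);
      System.out.println(tables);
      System.out.println(columns);
    }
  }
}

Each call opens and closes its own JDBC connection (after patch 2), so the sketch needs no cleanup of its own; the hive-jdbc driver added to the module's pom.xml must be on the runtime classpath.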