diff --git a/linkis-dist/bin/checkAdd.sh b/linkis-dist/bin/checkAdd.sh
new file mode 100644
index 0000000000..99c8a58bf4
--- /dev/null
+++ b/linkis-dist/bin/checkAdd.sh
@@ -0,0 +1,293 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#    http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+shellDir=`dirname $0`
+workDir=`cd ${shellDir}/..;pwd`
+DBParDir=${workDir}/deploy-config/db.sh
+LinkisParDir=${workDir}/deploy-config/linkis-env.sh
+
+source ${workDir}/bin/common.sh
+source ${workDir}/deploy-config/linkis-env.sh
+source ${workDir}/deploy-config/db.sh
+
+function print_usage(){
+  echo "Usage: checkAdd.sh [EngineName]"
+  echo "  EngineName : the engine name that you want to check"
+  echo "  Supported engines: JDBC Flink openLooKeng Presto Sqoop Elasticsearch Impala Trino Seatunnel"
+}
+
+if [ $# -gt 1 ]; then
+  print_usage
+  exit 2
+fi
+
+# Define verification functions for additional engines: 1. check command; 2. check parameters; 3. check server status.
+
+function checkJDBC(){
+
+# 1. check command
+  java -version > /dev/null 2>&1
+  isSuccess "execute cmd: java -version"
+
+# 2. check parameters
+  if [ -z "${MYSQL_HOST}" ] || [ -z "${MYSQL_PORT}" ] || [ -z "${MYSQL_DB}" ] || [ -z "${MYSQL_USER}" ] || [ -z "${MYSQL_PASSWORD}" ];then
+    echo "[MYSQL_HOST/MYSQL_PORT/MYSQL_DB/MYSQL_USER/MYSQL_PASSWORD] are invalid, please check the parameters in ${DBParDir}"
+    exit 2
+  fi
+
+  if [ -z "${MYSQL_CONNECT_JAVA_PATH}" ] || [ ! -f "${MYSQL_CONNECT_JAVA_PATH}" ];then
+    echo "MySQL connector ${MYSQL_CONNECT_JAVA_PATH} does not exist, please check the parameters in ${LinkisParDir}"
+    exit 2
+  fi
+
+# 3. check server status
+# add the MySQL connector to the classpath
+CLASSPATH=$CLASSPATH:${MYSQL_CONNECT_JAVA_PATH}
+
+# prepare java code that connects through the JDBC driver
+echo "import java.sql.*;
+
+public class JdbcTest {
+    public static void main(String[] args) {
+        // define connection variables
+        String url = \"jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}\";
+        String username = \"${MYSQL_USER}\";
+        String password = \"${MYSQL_PASSWORD}\";
+
+        // try to connect using the JDBC driver
+        try (Connection connection = DriverManager.getConnection(url, username, password)) {
+            System.out.println(\"jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT} connection successful!\");
+        } catch (SQLException e) {
+            System.out.println(\"connection failed:\");
+            e.printStackTrace();
+        }
+    }
+}" > JdbcTest.java
+
+# compile the java source
+  javac -cp "$CLASSPATH" JdbcTest.java
+
+# execute the java program
+  java -cp "$CLASSPATH":. JdbcTest
+  isSuccess "execute cmd: java -cp CLASSPATH:. JdbcTest"
+
+# clean up temporary files
+  rm -f JdbcTest.*
+
+}
+
+function checkFlink(){
+
+# 1. check command
+  flink --version > /dev/null 2>&1
+  isSuccess "execute cmd: flink --version"
+
+# 2. check parameters
+  if [ -z "${FLINK_HOME}" ];then
+    echo "Parameter [FLINK_HOME] is invalid, please check the parameters in ${LinkisParDir}"
+    exit 2
+  fi
+
+# 3. check server status
+  cd ${FLINK_HOME}
+  ./bin/flink run -m yarn-cluster ./examples/batch/WordCount.jar > /dev/null 2>&1
+  isSuccess "execute cmd: flink run -m yarn-cluster"
+
+}
+
+function checkopenLooKeng(){
+
+# 2. check parameters
+  if [ -z "${OLK_HOST}" ] || [ -z "${OLK_PORT}" ] || [ -z "${OLK_CATALOG}" ] || [ -z "${OLK_SCHEMA}" ] || [ -z "${OLK_USER}" ] || [ -z "${OLK_PASSWORD}" ];then
+    echo "[OLK_HOST/OLK_PORT/OLK_CATALOG/OLK_SCHEMA/OLK_USER/OLK_PASSWORD] are invalid, please check the parameters in ${DBParDir}"
+    exit 2
+  fi
+
+  if [ -z "${OLK_JDBC_PATH}" ] || [ ! -f "${OLK_JDBC_PATH}" ];then
+    echo "openLooKeng connector ${OLK_JDBC_PATH} does not exist, please check the parameters in ${LinkisParDir}"
+    exit 2
+  fi
+
+# 3. check server status
+# add the openLooKeng JDBC connector to the classpath
+CLASSPATH=$CLASSPATH:${OLK_JDBC_PATH}
+
+# prepare java code that connects through the JDBC driver
+echo "import java.sql.*;
+
+public class openLooKengTest{
+    public static void main(String[] args) {
+        // define connection variables
+        String url = \"jdbc:lk://${OLK_HOST}:${OLK_PORT}/${OLK_CATALOG}/${OLK_SCHEMA}\";
+        String username = \"${OLK_USER}\";
+        String password = \"${OLK_PASSWORD}\";
+
+        // try to connect (without authentication openLooKeng expects a null password)
+        //try (Connection connection = DriverManager.getConnection(url, username, password)) {
+        try (Connection connection = DriverManager.getConnection(url, username, null)) {
+            System.out.println(\"connection successful!\");
+        } catch (SQLException e) {
+            System.out.println(\"connection failed:\");
+            e.printStackTrace();
+        }
+    }
+}" > openLooKengTest.java
+
+# compile the java source
+  javac -cp "$CLASSPATH" openLooKengTest.java
+
+# execute the java program
+  java -cp "$CLASSPATH":. openLooKengTest
+  isSuccess "execute cmd: java -cp CLASSPATH:. openLooKengTest"
+
+# clean up temporary files
+  rm -f openLooKengTest.*
+
+}
+
+function checkPresto(){
+
+# 1. check command
+  presto --version > /dev/null 2>&1
+  isSuccess "execute cmd: presto --version"
+
+# 2. check parameters
+  if [ -z "${PRESTO_HOST}" ] || [ -z "${PRESTO_PORT}" ] || [ -z "${PRESTO_CATALOG}" ] || [ -z "${PRESTO_SCHEMA}" ];then
+    echo "[PRESTO_HOST/PRESTO_PORT/PRESTO_CATALOG/PRESTO_SCHEMA] are invalid, please check the parameters in ${DBParDir}"
+    exit 2
+  fi
+
+# 3. check server status
+  presto --server ${PRESTO_HOST}:${PRESTO_PORT} --catalog ${PRESTO_CATALOG} --schema ${PRESTO_SCHEMA} --execute "show catalogs" > /dev/null 2>&1
+  isSuccess "execute cmd: presto --server ${PRESTO_HOST}:${PRESTO_PORT}"
+}
+
+function checkSqoop(){
+
+# 1. check command
+  sqoop version > /dev/null 2>&1
+  isSuccess "execute cmd: sqoop version"
+
+# 2. check parameters
+  if [ -z "${HIVE_META_URL}" ] || [ -z "${HIVE_META_USER}" ] || [ -z "${HIVE_META_PASSWORD}" ];then
+    echo "[HIVE_META_URL/HIVE_META_USER/HIVE_META_PASSWORD] are invalid, please check the parameters in ${DBParDir}"
+    exit 2
+  fi
+
+# 3. check server status
+  sqoop list-databases --connect ${HIVE_META_URL} --username ${HIVE_META_USER} --password ${HIVE_META_PASSWORD} > /dev/null 2>&1
+  isSuccess "execute cmd: sqoop list-databases --connect ${HIVE_META_URL}"
+}
+
+function checkElasticsearch(){
+
+# 2. check parameters
+  if [ -z "${ES_RESTFUL_URL}" ]; then
+    echo "Parameter [ES_RESTFUL_URL] is invalid, please check the parameters in ${LinkisParDir}"
+    exit 2
+  fi
+
+# 3. check server status
+  curl ${ES_RESTFUL_URL} > /dev/null 2>&1
+  isSuccess "execute cmd: curl Elasticsearch address ${ES_RESTFUL_URL}"
+}
+
+function checkImpala(){
+
+# 1. check command
+  impala-shell --version > /dev/null 2>&1
+  isSuccess "execute cmd: impala-shell --version"
+
+# 2. check parameters
+  if [ -z "${IMPALA_HOST}" ] || [ -z "${IMPALA_PORT}" ];then
+    echo "Parameters [IMPALA_HOST/IMPALA_PORT] are invalid, please check the parameters in ${DBParDir}"
+    exit 2
+  fi
+
+# 3. check server status
+  impala-shell -i ${IMPALA_HOST}:${IMPALA_PORT} > /dev/null 2>&1
+  isSuccess "execute cmd: impala-shell -i ${IMPALA_HOST}:${IMPALA_PORT}"
+
+}
+
+function checkTrino(){
+
+# 1. check command
+  trino-cli --version > /dev/null 2>&1
+  isSuccess "execute cmd: trino-cli --version"
+
+# 2. check parameters
+  if [ -z "${TRINO_COORDINATOR_HOST}" ] || [ -z "${TRINO_COORDINATOR_PORT}" ];then
+    echo "Parameters [TRINO_COORDINATOR_HOST/TRINO_COORDINATOR_PORT] are invalid, please check the parameters in ${DBParDir}"
+    exit 2
+  fi
+
+# 3. check server status
+  trino-cli --server ${TRINO_COORDINATOR_HOST}:${TRINO_COORDINATOR_PORT} --catalog ${TRINO_COORDINATOR_CATALOG} --schema ${TRINO_COORDINATOR_SCHEMA} --execute "show catalogs" > /dev/null 2>&1
+  isSuccess "execute cmd: trino-cli --server ${TRINO_COORDINATOR_HOST}:${TRINO_COORDINATOR_PORT}"
+}
+
+function checkSeatunnel(){
+
+# 2. check parameters
+  if [ -z "${SEATUNNEL_HOST}" ] || [ -z "${SEATUNNEL_PORT}" ];then
+    echo "Parameters [SEATUNNEL_HOST/SEATUNNEL_PORT] are invalid, please check the parameters in ${DBParDir}"
+    exit 2
+  fi
+
+# 3. check server status
+  curl http://${SEATUNNEL_HOST}:${SEATUNNEL_PORT} > /dev/null 2>&1
+  isSuccess "execute cmd: curl http://${SEATUNNEL_HOST}:${SEATUNNEL_PORT}"
+}
+
+
+# Begin to check the additional engine
+echo "======== Begin to check engine: ${1} ======== "
+
+EngineName=$1
+case $EngineName in
+  "JDBC")
+    checkJDBC
+    ;;
+  "Flink")
+    checkFlink
+    ;;
+  "openLooKeng")
+    checkopenLooKeng
+    ;;
+  "Presto")
+    checkPresto
+    ;;
+  "Sqoop")
+    checkSqoop
+    ;;
+  "Elasticsearch")
+    checkElasticsearch
+    ;;
+  "Impala")
+    checkImpala
+    ;;
+  "Trino")
+    checkTrino
+    ;;
+  "Seatunnel")
+    checkSeatunnel
+    ;;
+  *)
+    print_usage
+    exit 2
+    ;;
+esac
+
+echo "======== End checking engine: ${1} ======== "
\ No newline at end of file
diff --git a/linkis-dist/bin/checkEnv.sh b/linkis-dist/bin/checkEnv.sh
index 36ba64bbe7..68682a2338 100644
--- a/linkis-dist/bin/checkEnv.sh
+++ b/linkis-dist/bin/checkEnv.sh
@@ -18,6 +18,7 @@ shellDir=`dirname $0`
 workDir=`cd ${shellDir}/..;pwd`
 source ${workDir}/bin/common.sh
 source ${workDir}/deploy-config/linkis-env.sh
+source ${workDir}/deploy-config/db.sh
 
 say() {
     printf 'check command fail \n %s\n' "$1"
@@ -29,20 +30,50 @@ err() {
 }
 
 function checkPythonAndJava(){
+
     python --version > /dev/null 2>&1
     isSuccess "execute cmd: python --version"
+
     java -version > /dev/null 2>&1
     isSuccess "execute cmd: java --version"
 }
 
 function checkHdfs(){
-    hadoopVersion="`hdfs version`"
+
+# --- 1. check command
+    hdfs version > /dev/null 2>&1
+    isSuccess "execute cmd: hdfs version"
+
+# --- 2. check version
+    hadoopVersion=`hdfs version`
     defaultHadoopVersion="3.3"
+
     checkversion "$hadoopVersion" $defaultHadoopVersion hadoop
+
+# --- 3. check service status
+    hdfs dfsadmin -report > /dev/null 2>&1
+    isSuccess "execute cmd: hdfs dfsadmin -report"
+
 }
 
 function checkHive(){
+
+# --- 1. check command
+    hive --version > /dev/null 2>&1
+    isSuccess "execute cmd: hive --version"
+
+# --- 2. check version & parameters
     checkversion "$(whereis hive)" "3.1" hive
+
+    if [ -z "${HIVE_META_URL}" ] || [ -z "${HIVE_META_USER}" ] || [ -z "${HIVE_META_PASSWORD}" ];then
+        echo "Parameters [HIVE_META_URL/HIVE_META_USER/HIVE_META_PASSWORD] are invalid, please check"
+        exit 2
+    fi
+
+# --- 3. check server status
+    beeline -u${HIVE_META_URL} -n${HIVE_META_USER} -p${HIVE_META_PASSWORD} > /dev/null 2>&1
+    isSuccess "execute cmd: beeline -u${HIVE_META_URL}"
+
 }
 
 function checkversion(){
@@ -69,8 +100,32 @@ fi
 }
 
 function checkSpark(){
+
+# --- 1. check command
     spark-submit --version > /dev/null 2>&1
     isSuccess "execute cmd: spark-submit --version "
+
+# --- 2. check parameters
+    if [ -z "${SPARK_HOME}" ];then
+        echo "Parameter [SPARK_HOME] is invalid, please check"
+        exit 2
+    fi
+
+# --- 3. check server status
+    spark-submit --class org.apache.spark.examples.SparkPi --master local ${SPARK_HOME}/examples/jars/spark-examples_2.12-3.2.1.jar 10 > /dev/null 2>&1
+    isSuccess "execute cmd: spark-submit --class org.apache.spark.examples.SparkPi "
+
+}
+
+function checkMysql(){
+
+    if [ -z "${MYSQL_HOST}" ] || [ -z "${MYSQL_PORT}" ] || [ -z "${MYSQL_DB}" ] || [ -z "${MYSQL_USER}" ] || [ -z "${MYSQL_PASSWORD}" ];then
+        echo "[MYSQL_HOST/MYSQL_PORT/MYSQL_DB/MYSQL_USER/MYSQL_PASSWORD] are invalid, please check the parameter definitions"
+        exit 2
+    fi
+
+    mysql -h${MYSQL_HOST} -P${MYSQL_PORT} -u${MYSQL_USER} -p${MYSQL_PASSWORD} -e "select version();" > /dev/null 2>&1
+    isSuccess "execute cmd: mysql -h${MYSQL_HOST} -P${MYSQL_PORT}"
 }
 
 portIsOccupy=false
@@ -93,7 +148,8 @@ need_cmd() {
 }
 
-echo "<-----start to check used cmd---->"
+echo -e "1. <-----start to check used cmd---->\n"
+
 echo "check yum"
 need_cmd yum
 echo "check java"
@@ -108,9 +164,44 @@ echo "check sed"
 need_cmd sed
 echo "check lsof"
 need_cmd lsof
-echo "<-----end to check used cmd---->"
+echo "check hdfs"
+need_cmd hdfs
+echo "check shell"
+need_cmd $SHELL
+echo "check spark-submit"
+need_cmd spark-submit
+echo "check spark-shell"
+need_cmd spark-shell
+echo "check spark-sql"
+need_cmd spark-sql
+echo "check hadoop"
+need_cmd hadoop
+
+echo -e "\n<-----end to check used cmd---->"
+
+# --- Begin to check MySQL/Spark/HDFS/Hive service status
+
+echo -e "\n2. <-----start to check service status---->\n"
 
 checkPythonAndJava
+checkMysql
+
+if [ "$ENABLE_SPARK" == "true" ]; then
+    checkSpark
+fi
+
+if [ "$ENABLE_HDFS" == "true" ]; then
+    checkHdfs
+fi
+
+if [ "$ENABLE_HIVE" == "true" ]; then
+    checkHive
+fi
+
+echo -e "\n<-----End to check service status---->"
+
+# --- check service port
+echo -e "\n3. <-----Start to check service port---->"
 
 SERVER_PORT=$EUREKA_PORT
 check_service_port
@@ -130,21 +221,10 @@ check_service_port
 
 SERVER_PORT=$PUBLICSERVICE_PORT
 check_service_port
+
 if [ "$portIsOccupy" = true ];then
     echo "The port is already in use, please check before installing"
     exit 1
 fi
 
-if [ "$ENABLE_SPARK" == "true" ]; then
-    checkSpark
-fi
-
-
-if [ "$ENABLE_HDFS" == "true" ]; then
-    checkHdfs
-fi
-
-
-if [ "$ENABLE_HIVE" == "true" ]; then
-    checkHive
-fi
+echo -e "\n<-----End to check service port---->"
\ No newline at end of file
diff --git a/linkis-dist/bin/install.sh b/linkis-dist/bin/install.sh
index 9702e1e005..d7e52128ba 100644
--- a/linkis-dist/bin/install.sh
+++ b/linkis-dist/bin/install.sh
@@ -78,7 +78,6 @@ else
     done
 fi
 
-
 ######################## init LINKIS related env ################################
 if [ "$LINKIS_HOME" = "" ]
 then
@@ -575,4 +574,4 @@ sudo chmod -R 777 $LINKIS_HOME/sbin/*
 
 echo -e "\n"
 echo -e "${GREEN}Congratulations!${NC} You have installed Linkis $LINKIS_VERSION successfully, please use sh $LINKIS_HOME/sbin/linkis-start-all.sh to start it!"
-echo -e "Your default account/password is ${GREEN}[$deployUser/$deployPwd]${NC}, you can find in $LINKIS_HOME/conf/linkis-mg-gateway.properties"
\ No newline at end of file
+echo -e "Your default account/password is ${GREEN}[$deployUser/$deployPwd]${NC}, you can find it in $LINKIS_HOME/conf/linkis-mg-gateway.properties"
diff --git a/linkis-dist/deploy-config/db.sh b/linkis-dist/deploy-config/db.sh
index 24d3ae217e..7302ad035d 100644
--- a/linkis-dist/deploy-config/db.sh
+++ b/linkis-dist/deploy-config/db.sh
@@ -28,9 +28,36 @@ PG_SCHEMA=
 PG_USER=
 PG_PASSWORD=
 
-
 ### Provide the DB information of Hive metadata database.
 ### Attention! If there are special characters like "&", they need to be enclosed in quotation marks.
 HIVE_META_URL=""
 HIVE_META_USER=""
-HIVE_META_PASSWORD=""
\ No newline at end of file
+HIVE_META_PASSWORD=""
+
+### define openLooKeng parameters for connection.
+OLK_HOST=
+OLK_PORT=
+OLK_CATALOG=
+OLK_SCHEMA=
+OLK_USER=
+OLK_PASSWORD=
+
+### define Presto parameters for connection.
+PRESTO_HOST=
+PRESTO_PORT=
+PRESTO_CATALOG=
+PRESTO_SCHEMA=
+
+### define Impala parameters for connection.
+IMPALA_HOST=
+IMPALA_PORT=
+
+### define Trino parameters for connection.
+TRINO_COORDINATOR_HOST=
+TRINO_COORDINATOR_PORT=
+TRINO_COORDINATOR_CATALOG=
+TRINO_COORDINATOR_SCHEMA=
+
+### define Seatunnel parameters for connection.
+SEATUNNEL_HOST=
+SEATUNNEL_PORT=
\ No newline at end of file
diff --git a/linkis-dist/deploy-config/linkis-env.sh b/linkis-dist/deploy-config/linkis-env.sh
index 2f04d5de18..17cd8ca44a 100644
--- a/linkis-dist/deploy-config/linkis-env.sh
+++ b/linkis-dist/deploy-config/linkis-env.sh
@@ -177,3 +177,7 @@ export PROMETHEUS_ENABLE=false
 export ENABLE_HDFS=true
 export ENABLE_HIVE=true
 export ENABLE_SPARK=true
+
+## define MYSQL_CONNECT_JAVA_PATH and OLK_JDBC_PATH so that Linkis can check the JDBC drivers
+MYSQL_CONNECT_JAVA_PATH=
+OLK_JDBC_PATH=
\ No newline at end of file
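
For reference, a minimal smoke test of the new script (a sketch, assuming the linkis-dist package is deployed and deploy-config/db.sh plus deploy-config/linkis-env.sh have been filled in; run from the package root):

    # check MySQL JDBC prerequisites (uses MYSQL_* and MYSQL_CONNECT_JAVA_PATH)
    sh bin/checkAdd.sh JDBC
    # check Trino connectivity (uses TRINO_COORDINATOR_*)
    sh bin/checkAdd.sh Trino
    # an unsupported engine name prints the usage text and exits with code 2
    sh bin/checkAdd.sh Unknown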